1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
/* NOTE(review): this file appears to be a line-numbered listing of GCC's
   i386.c with many original lines missing (the embedded numbers jump).
   Annotations below flag the visible gaps; code text is left untouched.  */
40 #ifdef EXTRA_CONSTRAINT
41 /* If EXTRA_CONSTRAINT is defined, then the 'S'
42 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
43 asm statements that need 'S' for class SIREG will break. */
/* Deliberately a bare "error" token rather than #error, so only a build
   that actually defines EXTRA_CONSTRAINT fails -- see the next comment.  */
44 error EXTRA_CONSTRAINT conflicts with S constraint letter
45 /* The previous line used to be #error, but some compilers barf
46 even if the conditional was untrue. */
/* NOTE(review): the matching #endif (original line ~47) is missing here.  */
48 #ifndef CHECK_STACK_LIMIT
/* -1 disables stack-limit checking by default.  */
49 #define CHECK_STACK_LIMIT -1
/* NOTE(review): the matching #endif (original line ~50) is missing here.  */
52 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
/* NOTE(review): the enum body (original lines ~53-58) is missing from this
   listing.  */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost = {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
/* Cost table actually consulted by the back end; repointed in
   override_options according to -mcpu=.  Defaults to Pentium costs.  */
100 struct processor_costs *ix86_cost = &pentium_cost;
/* Memory reference through the frame pointer, in the given mode.  */
102 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
104 extern FILE *asm_out_file;
105 extern char *strcat ();
/* Forward declarations, pre-prototype K&R style.  */
107 char *singlemove_string ();
108 char *output_move_const_single ();
109 char *output_fp_cc0_set ();
/* Assembler register-name tables (word, byte-low, byte-high), indexed by
   hard register number; the macros come from i386.h.  */
111 char *hi_reg_name[] = HI_REGISTER_NAMES;
112 char *qi_reg_name[] = QI_REGISTER_NAMES;
113 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
115 /* Array of the smallest class containing reg number REGNO, indexed by
116 REGNO. Used by REGNO_REG_CLASS in i386.h. */
118 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
/* NOTE(review): the opening brace, some initializer rows, and the closing
   "};" are missing from this listing (embedded line numbers jump).  */
121 AREG, DREG, CREG, BREG,
123 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
125 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
126 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
131 /* Test and compare insns in i386.md store the information needed to
132 generate branch and scc insns here. */
/* Operands of the pending compare, recorded for later branch/scc use.  */
134 struct rtx_def *i386_compare_op0 = NULL_RTX;
135 struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator functions that build the comparison rtx (plain and EQ forms).  */
136 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
138 /* which cpu are we scheduling for */
139 enum processor_type ix86_cpu;
141 /* which instruction set architecture to use. */
/* NOTE(review): the declaration of ix86_arch (original line ~142) is
   missing from this listing; it is assigned below in override_options.  */
144 /* Strings to hold which cpu and instruction set architecture to use. */
145 char *ix86_cpu_string; /* for -mcpu=<xxx> */
146 char *ix86_arch_string; /* for -march=<xxx> */
148 /* Register allocation order */
149 char *i386_reg_alloc_order;
/* Marks which hard regs the user's -mreg-alloc= string claimed, so
   duplicates can be rejected and leftovers appended in natural order.  */
150 static char regs_allocated[FIRST_PSEUDO_REGISTER];
152 /* # of registers to use to pass arguments. */
153 char *i386_regparm_string; /* # registers to use to pass args */
154 int i386_regparm; /* i386_regparm_string as a number */
156 /* Alignment to use for loops and jumps */
157 char *i386_align_loops_string; /* power of two alignment for loops */
158 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
159 char *i386_align_funcs_string; /* power of two alignment for functions */
160 char *i386_branch_cost_string; /* values 1-5: see jump.c */
/* Parsed numeric forms of the four option strings above, filled in by
   override_options.  */
162 int i386_align_loops; /* power of two alignment for loops */
163 int i386_align_jumps; /* power of two alignment for non-loop jumps */
164 int i386_align_funcs; /* power of two alignment for functions */
165 int i386_branch_cost; /* values 1-5: see jump.c */
167 /* Sometimes certain combinations of command options do not make
168 sense on a particular target machine. You can define a macro
169 `OVERRIDE_OPTIONS' to take account of this. This macro, if
170 defined, is executed once just after all the command options have
173 Don't use this macro to turn on various extra optimizations for
174 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* Table mapping -mcpu=/-march= names to a processor enum value, a cost
   table, and target_flags bits to turn on/off.
   NOTE(review): the opening of this aggregate (presumably "struct ptt {",
   original lines ~176-184) and the start of override_options are missing
   from this listing.  */
185 char *name; /* Canonical processor name. */
186 enum processor_type processor; /* Processor type enum value. */
187 struct processor_costs *cost; /* Processor costs */
188 int target_enable; /* Target flags to enable. */
189 int target_disable; /* Target flags to disable. */
190 } processor_target_table[]
191 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
192 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
193 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
194 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
195 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0},
196 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0}};
/* Number of entries in processor_target_table.  */
198 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
/* Interior of override_options: validates -mreg-alloc=, -march=/-mcpu=,
   -mregparm=, the three -malign-* options and -mbranch-cost=, then applies
   a few flag interactions.
   NOTE(review): the function header, local declarations, most braces, the
   "switch (ch)" opener, the closing #endif for SUBTARGET_OVERRIDE_OPTIONS,
   and several statements are missing from this listing.  */
200 #ifdef SUBTARGET_OVERRIDE_OPTIONS
201 SUBTARGET_OVERRIDE_OPTIONS;
204 /* Validate registers in register allocation order */
205 if (i386_reg_alloc_order)
207 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
/* NOTE(review): the "switch (ch)" header (original lines ~208-210) is
   missing; the case labels below map option letters to hard reg numbers.  */
211 case 'a': regno = 0; break;
212 case 'd': regno = 1; break;
213 case 'c': regno = 2; break;
214 case 'b': regno = 3; break;
215 case 'S': regno = 4; break;
216 case 'D': regno = 5; break;
217 case 'B': regno = 6; break;
219 default: fatal ("Register '%c' is unknown", ch);
/* Reject the same register appearing twice in the order string.  */
222 if (regs_allocated[regno])
223 fatal ("Register '%c' was already specified in the allocation order", ch);
225 regs_allocated[regno] = 1;
/* Default -march= to Pentium and -mcpu= to the configured default when
   the user gave neither.  */
229 if (ix86_arch_string == (char *)0)
231 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
232 if (ix86_cpu_string == (char *)0)
233 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -march= in the processor table; on success remember the arch
   and let it imply -mcpu= when that wasn't given explicitly.  */
236 for (i = 0; i < ptt_size; i++)
237 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
239 ix86_arch = processor_target_table[i].processor;
240 if (ix86_cpu_string == (char *)0)
241 ix86_cpu_string = processor_target_table[i].name;
/* Fall-through path: -march= name not found in the table.  */
247 error ("bad value (%s) for -march= switch", ix86_arch_string);
248 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
249 ix86_arch = PROCESSOR_DEFAULT;
252 if (ix86_cpu_string == (char *)0)
253 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -mcpu=; it selects the scheduling model, the cost table, and
   per-processor target_flags adjustments.  */
255 for (j = 0; j < ptt_size; j++)
256 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
258 ix86_cpu = processor_target_table[j].processor;
259 ix86_cost = processor_target_table[j].cost;
/* A cpu older (earlier in the table) than a PentiumPro-or-newer arch
   cannot execute that arch's code -- diagnose the combination.  */
260 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
261 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
263 target_flags |= processor_target_table[j].target_enable;
264 target_flags &= ~processor_target_table[j].target_disable;
/* Fall-through path: -mcpu= name not found in the table.  */
270 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
271 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
272 ix86_cpu = PROCESSOR_DEFAULT;
274 /* Validate -mregparm= value */
275 if (i386_regparm_string)
277 i386_regparm = atoi (i386_regparm_string);
278 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
279 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
282 /* The 486 suffers more from non-aligned cache line fills, and the larger code
283 size results in a larger cache foot-print and more misses. The 486 has a
284 16 byte cache line, pentium and pentiumpro have a 32 byte cache line */
285 def_align = (TARGET_486) ? 4 : 2;
287 /* Validate -malign-loops= value, or provide default */
288 if (i386_align_loops_string)
290 i386_align_loops = atoi (i386_align_loops_string)
291 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
292 fatal ("-malign-loops=%d is not between 0 and %d",
293 i386_align_loops, MAX_CODE_ALIGN);
296 i386_align_loops = 2;
298 /* Validate -malign-jumps= value, or provide default */
299 if (i386_align_jumps_string)
301 i386_align_jumps = atoi (i386_align_jumps_string);
302 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
303 fatal ("-malign-jumps=%d is not between 0 and %d",
304 i386_align_jumps, MAX_CODE_ALIGN);
307 i386_align_jumps = def_align;
309 /* Validate -malign-functions= value, or provide default */
310 if (i386_align_funcs_string)
312 i386_align_funcs = atoi (i386_align_funcs_string);
313 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
314 fatal ("-malign-functions=%d is not between 0 and %d",
315 i386_align_funcs, MAX_CODE_ALIGN);
318 i386_align_funcs = def_align;
320 /* Validate -mbranch-cost= value, or provide default */
321 if (i386_branch_cost_string)
323 i386_branch_cost = atoi (i386_branch_cost_string);
324 if (i386_branch_cost < 0 || i386_branch_cost > 5)
325 fatal ("-mbranch-cost=%d is not between 0 and 5",
329 i386_branch_cost = 1;
331 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
332 flag_omit_frame_pointer = 1;
334 /* pic references don't explicitly mention pic_offset_table_rtx */
335 /* code threaded into the prologue may conflict with profiling */
336 if (flag_pic || profile_flag || profile_block_flag)
337 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
340 /* A C statement (sans semicolon) to choose the order in which to
341 allocate hard registers for pseudo-registers local to a basic
344 Store the desired register order in the array `reg_alloc_order'.
345 Element 0 should be the register to allocate first; element 1, the
346 next register; and so on.
348 The macro body should not assume anything about the contents of
349 `reg_alloc_order' before execution of the macro.
351 On most machines, it is not necessary to define this macro. */
/* Fills reg_alloc_order, honoring -mreg-alloc= first and appending the
   remaining hard regs; otherwise uses natural 0..N-1 order.
   NOTE(review): the return type, braces, and the "switch (ch)" opener are
   missing from this listing.  */
354 order_regs_for_local_alloc ()
356 int i, ch, order, regno;
358 /* User specified the register allocation order */
359 if (i386_reg_alloc_order)
361 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
/* Same letter-to-regno mapping as validated in override_options.  */
365 case 'a': regno = 0; break;
366 case 'd': regno = 1; break;
367 case 'c': regno = 2; break;
368 case 'b': regno = 3; break;
369 case 'S': regno = 4; break;
370 case 'D': regno = 5; break;
371 case 'B': regno = 6; break;
374 reg_alloc_order[order++] = regno;
/* Append every register not mentioned by the user, in natural order.  */
377 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
379 if (!regs_allocated[i])
380 reg_alloc_order[order++] = i;
384 /* If users did not specify a register allocation order, use natural order */
387 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
388 reg_alloc_order[i] = i;
/* Per-level optimization defaults (OPTIMIZATION_OPTIONS hook).
   NOTE(review): return type, parameter declaration, braces, the level
   test guarding the assignment, and the closing #endif are missing from
   this listing.  */
394 optimization_options (level)
397 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
398 make the problem with not enough registers even worse */
399 #ifdef INSN_SCHEDULING
401 flag_schedule_insns = 0;
405 /* Sign-extend a 16-bit constant */
/* If OP is a CONST_INT, replace it with its value sign-extended from 16
   bits; other rtx codes are presumably returned unchanged.
   NOTE(review): the sign-bit test selecting between the two sext_val
   assignments, the braces, and the return statement are missing from this
   listing.  */
408 i386_sext16_if_const (op)
411 if (GET_CODE (op) == CONST_INT)
413 HOST_WIDE_INT val = INTVAL (op);
414 HOST_WIDE_INT sext_val;
/* Negative case: set all bits above bit 15.  */
416 sext_val = val | ~0xffff;
/* Non-negative case: keep only the low 16 bits.  */
418 sext_val = val & 0xffff;
420 op = GEN_INT (sext_val);
425 /* Return nonzero if the rtx is aligned */
/* A register is considered 32-bit aligned as an address base only when it
   is the stack pointer, or the frame pointer while frames are kept.
   NOTE(review): the parameter declaration and braces are missing from
   this listing.  */
428 i386_aligned_reg_p (regno)
431 return (regno == STACK_POINTER_REGNUM
432 || (!flag_omit_frame_pointer
433 && regno == FRAME_POINTER_REGNUM));
/* Interior of an operand-alignment predicate (presumably i386_aligned_p):
   nonzero when OP can be assumed 32-bit aligned.
   NOTE(review): the function header, several returns, case labels of the
   switch, and braces are missing from this listing.  */
440 /* registers and immediate operands are always "aligned" */
441 if (GET_CODE (op) != MEM)
444 /* Don't even try to do any aligned optimizations with volatiles */
445 if (MEM_VOLATILE_P (op))
448 /* Get address of memory operand */
451 switch (GET_CODE (op))
458 /* match "reg + offset" */
/* Only a constant displacement that is a multiple of 4 keeps a reg+offset
   address aligned.  */
460 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
462 if (INTVAL (XEXP (op, 1)) & 3)
465 if (GET_CODE (op) != REG)
/* Delegate the base-register question to i386_aligned_reg_p above.  */
469 return i386_aligned_reg_p (REGNO (op));
474 /* Return nonzero if INSN looks like it won't compute useful cc bits
475 as a side effect. This information is only a hint. */
/* True when no later insn consumes cc0 set here.
   NOTE(review): return type, parameter declaration and braces are missing
   from this listing.  */
478 i386_cc_probably_useless_p (insn)
481 return !next_cc0_user (insn);
484 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
485 attribute for DECL. The attributes in ATTRIBUTES have previously been
/* NOTE(review): the body of this predicate (original lines ~490-496) is
   missing from this listing; only the K&R header line survives.  */
489 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
498 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
499 attribute for TYPE. The attributes in ATTRIBUTES have previously been
/* Accepts stdcall/cdecl (argument-less) and regparm(N) with 0<=N<=REGPARM_MAX
   on function-ish types.
   NOTE(review): parameter declarations, braces, several return statements
   and the trailing default return are missing from this listing.  */
503 i386_valid_type_attribute_p (type, attributes, identifier, args)
/* Machine attributes only apply to function-like entities.  */
509 if (TREE_CODE (type) != FUNCTION_TYPE
510 && TREE_CODE (type) != FIELD_DECL
511 && TREE_CODE (type) != TYPE_DECL)
514 /* Stdcall attribute says callee is responsible for popping arguments
515 if they are not variable. */
516 if (is_attribute_p ("stdcall", identifier))
517 return (args == NULL_TREE);
519 /* Cdecl attribute says the callee is a normal C declaration */
520 if (is_attribute_p ("cdecl", identifier))
521 return (args == NULL_TREE);
523 /* Regparm attribute specifies how many integer arguments are to be
524 passed in registers */
525 if (is_attribute_p ("regparm", identifier))
/* regparm takes exactly one argument...  */
529 if (!args || TREE_CODE (args) != TREE_LIST
530 || TREE_CHAIN (args) != NULL_TREE
531 || TREE_VALUE (args) == NULL_TREE)
534 cst = TREE_VALUE (args);
/* ...which must be an integer constant...  */
535 if (TREE_CODE (cst) != INTEGER_CST)
/* ...in the range [0, REGPARM_MAX].  */
538 if (TREE_INT_CST_HIGH (cst) != 0
539 || TREE_INT_CST_LOW (cst) < 0
540 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
549 /* Return 0 if the attributes for two types are incompatible, 1 if they
550 are compatible, and 2 if they are nearly compatible (which causes a
551 warning to be generated). */
/* NOTE(review): the body of this hook (original lines ~555-560) is missing
   from this listing; only the K&R header line survives.  */
554 i386_comp_type_attributes (type1, type2)
562 /* Value is the number of bytes of arguments automatically
563 popped when returning from a subroutine call.
564 FUNDECL is the declaration node of the function (as a tree),
565 FUNTYPE is the data type of the function (as a tree),
566 or for a library call it is an identifier node for the subroutine name.
567 SIZE is the number of bytes of arguments passed on the stack.
569 On the 80386, the RTD insn may be used to pop them if the number
570 of args is fixed, but if the number is variable then the caller
571 must pop them all. RTD can't be used for library calls now
572 because the library is compiled with the Unix compiler.
573 Use of RTD is a selectable option, since it is incompatible with
574 standard Unix calling sequences. If the option is not selected,
575 the caller must always pop the args.
577 The attribute stdcall is equivalent to RTD on a per module basis. */
/* NOTE(review): the function header details, several return statements
   (including the "return size"/"return 0" paths) and braces are missing
   from this listing.  */
580 i386_return_pops_args (fundecl, funtype, size)
/* -mrtd applies only to real functions, not library-call identifiers.  */
585 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
587 /* Cdecl functions override -mrtd, and never pop the stack */
588 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
590 /* Stdcall functions will pop the stack if not variable args */
591 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* Pop only when the argument list is fixed (ends in void).  */
595 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
596 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
600 /* Lose any fake structure return argument */
601 if (aggregate_value_p (TREE_TYPE (funtype)))
602 return GET_MODE_SIZE (Pmode);
608 /* Argument support functions. */
610 /* Initialize a variable CUM of type CUMULATIVE_ARGS
611 for a call to a function whose data type is FNTYPE.
612 For a library call, FNTYPE is 0. */
/* NOTE(review): the return type, braces, the "*cum = zero_cum" reset, the
   regparm-attribute guard, and parts of the varargs scan are missing from
   this listing.  */
615 init_cumulative_args (cum, fntype, libname)
616 CUMULATIVE_ARGS *cum; /* argument info to initialize */
617 tree fntype; /* tree ptr for function decl */
618 rtx libname; /* SYMBOL_REF of library name or 0 */
620 static CUMULATIVE_ARGS zero_cum;
621 tree param, next_param;
/* Optional tracing under -mdebug-arg.  */
623 if (TARGET_DEBUG_ARG)
625 fprintf (stderr, "\ninit_cumulative_args (");
628 tree ret_type = TREE_TYPE (fntype);
629 fprintf (stderr, "fntype code = %s, ret code = %s",
630 tree_code_name[ (int)TREE_CODE (fntype) ],
631 tree_code_name[ (int)TREE_CODE (ret_type) ]);
634 fprintf (stderr, "no fntype");
637 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
642 /* Set up the number of registers to use for passing arguments. */
643 cum->nregs = i386_regparm;
/* A regparm(N) attribute on the function type overrides -mregparm.  */
646 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
648 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
651 /* Determine if this function has variable arguments. This is
652 indicated by the last argument being 'void_type_mode' if there
653 are no variable arguments. If there are variable arguments, then
654 we won't pass anything in registers */
658 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
662 next_param = TREE_CHAIN (param);
/* Last parameter not void => varargs; the (missing) body presumably
   clears cum->nregs here -- confirm against a full copy of the file.  */
663 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
668 if (TARGET_DEBUG_ARG)
669 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
674 /* Update the data in CUM to advance over an argument
675 of mode MODE and data type TYPE.
676 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): the braces and the statements that actually advance
   cum->words/cum->nregs/cum->regno (original lines ~692-704) are missing
   from this listing.  */
679 function_arg_advance (cum, mode, type, named)
680 CUMULATIVE_ARGS *cum; /* current arg information */
681 enum machine_mode mode; /* current arg mode */
682 tree type; /* type of the argument or 0 if lib support */
683 int named; /* whether or not the argument was named */
/* BLKmode sizes come from the tree type; scalars from the mode.  */
685 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
686 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
688 if (TARGET_DEBUG_ARG)
690 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
691 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
706 /* Define where to put the arguments to a function.
707 Value is zero to push the argument on the stack,
708 or a hard register in which to store the argument.
710 MODE is the argument's machine mode.
711 TYPE is the data type of the argument (as a tree).
712 This is null for libcalls where that information may
714 CUM is a variable of type CUMULATIVE_ARGS which gives info about
715 the preceding args and about the function being called.
716 NAMED is nonzero if this argument is a named parameter
717 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): the return type, the "rtx ret = NULL_RTX" style setup,
   the switch header and other case labels, braces, and the final return
   are missing from this listing.  */
720 function_arg (cum, mode, type, named)
721 CUMULATIVE_ARGS *cum; /* current arg information */
722 enum machine_mode mode; /* current arg mode */
723 tree type; /* type of the argument or 0 if lib support */
724 int named; /* != 0 for normal args, == 0 for ... args */
727 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
728 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
732 default: /* for now, pass fp/complex values on the stack */
/* Integer args go in a register only if they fit in the remaining
   regparm registers entirely.  */
740 if (words <= cum->nregs)
741 ret = gen_rtx (REG, mode, cum->regno);
/* Optional tracing under -mdebug-arg.  */
745 if (TARGET_DEBUG_ARG)
748 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
749 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
752 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
754 fprintf (stderr, ", stack");
756 fprintf (stderr, " )\n");
762 /* For an arg passed partly in registers and partly in memory,
763 this is the number of registers used.
764 For args passed entirely in registers or entirely in memory, zero. */
/* NOTE(review): the body of this hook (original lines ~772-775, presumably
   "return 0;") is missing from this listing.  */
767 function_arg_partial_nregs (cum, mode, type, named)
768 CUMULATIVE_ARGS *cum; /* current arg information */
769 enum machine_mode mode; /* current arg mode */
770 tree type; /* type of the argument or 0 if lib support */
771 int named; /* != 0 for normal args, == 0 for ... args */
777 /* Output an insn whose source is a 386 integer register. SRC is the
778 rtx for the register, and TEMPLATE is the op-code template. SRC may
779 be either SImode or DImode.
781 The template will be output with operands[0] as SRC, and operands[1]
782 as a pointer to the top of the 386 stack. So a call from floatsidf2
783 would look like this:
785 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
787 where %z0 corresponds to the caller's operands[1], and is used to
788 emit the proper size suffix.
790 ??? Extend this to handle HImode - a 387 can load and store HImode
/* Pushes SRC (word by word, high word first), runs TEMPLATE against the
   resulting stack slot, then pops the slot by adding its size back to %esp.
   NOTE(review): the parameter declarations, xops[0] setup, and braces are
   missing from this listing.  */
794 output_op_from_reg (src, template)
799 int size = GET_MODE_SIZE (GET_MODE (src));
/* xops[1] = top-of-stack memory, xops[2] = byte size, xops[3] = %esp.  */
802 xops[1] = AT_SP (Pmode);
803 xops[2] = GEN_INT (size);
804 xops[3] = stack_pointer_rtx;
/* Multi-word sources are pushed highest word first so memory order ends
   up little-endian correct.  */
806 if (size > UNITS_PER_WORD)
809 if (size > 2 * UNITS_PER_WORD)
811 high = gen_rtx (REG, SImode, REGNO (src) + 2);
812 output_asm_insn (AS1 (push%L0,%0), &high);
814 high = gen_rtx (REG, SImode, REGNO (src) + 1);
815 output_asm_insn (AS1 (push%L0,%0), &high);
817 output_asm_insn (AS1 (push%L0,%0), &src);
819 output_asm_insn (template, xops);
/* Deallocate the temporary stack slot.  */
821 output_asm_insn (AS2 (add%L3,%2,%3), xops);
824 /* Output an insn to pop an value from the 387 top-of-stack to 386
825 register DEST. The 387 register stack is popped if DIES is true. If
826 the mode of DEST is an integer mode, a `fist' integer store is done,
827 otherwise a `fst' float store is done. */
/* Stores st(0) into a freshly reserved stack slot, then pops that slot
   into DEST word by word.
   NOTE(review): parameter declarations, the xops[3] mode operand setup,
   the "dies" tests around fstp/fistp selection, an abort branch, and the
   braces are missing from this listing.  */
830 output_to_reg (dest, dies)
835 int size = GET_MODE_SIZE (GET_MODE (dest));
837 xops[0] = AT_SP (Pmode);
838 xops[1] = stack_pointer_rtx;
839 xops[2] = GEN_INT (size);
/* Reserve the temporary slot on the stack.  */
842 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
844 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
/* Integer destination: fistp pops the 387 stack, fist does not.  */
847 output_asm_insn (AS1 (fistp%z3,%y0), xops);
849 output_asm_insn (AS1 (fist%z3,%y0), xops);
851 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
854 output_asm_insn (AS1 (fstp%z3,%y0), xops);
/* XFmode has no non-popping store; emulate "fst" with fstp + fld.  */
857 if (GET_MODE (dest) == XFmode)
859 output_asm_insn (AS1 (fstp%z3,%y0), xops);
860 output_asm_insn (AS1 (fld%z3,%y0), xops);
863 output_asm_insn (AS1 (fst%z3,%y0), xops);
/* Pop the slot into DEST, low word first, walking up the register pair.  */
869 output_asm_insn (AS1 (pop%L0,%0), &dest);
871 if (size > UNITS_PER_WORD)
873 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
874 output_asm_insn (AS1 (pop%L0,%0), &dest);
875 if (size > 2 * UNITS_PER_WORD)
877 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
878 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* Return the assembler template for one single-word move described by
   OPERANDS (dest, src); pushes use push%L1, everything else mov%L0.
   NOTE(review): the function's comment header, parameter declarations,
   an abort branch, and braces are missing from this listing.  */
884 singlemove_string (operands)
/* A store through (pre_dec sp) is really a push.  */
888 if (GET_CODE (operands[0]) == MEM
889 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
891 if (XEXP (x, 0) != stack_pointer_rtx)
895 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
897 return output_move_const_single (operands);
899 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
900 return AS2 (mov%L0,%1,%0);
901 else if (CONSTANT_P (operands[1]))
902 return AS2 (mov%L0,%1,%0);
/* Push path: emit the push here; the (missing) tail presumably returns
   an empty template -- confirm against a full copy of the file.  */
905 output_asm_insn ("push%L1 %1", operands);
910 /* Return a REG that occurs in ADDR with coefficient 1.
911 ADDR can be effectively incremented by incrementing REG. */
/* NOTE(review): the function header (presumably "find_addr_reg (addr)"),
   an abort for addresses with no usable register, and braces are missing
   from this listing.  */
/* Walk down nested PLUS nodes toward the register term.  */
917 while (GET_CODE (addr) == PLUS)
919 if (GET_CODE (XEXP (addr, 0)) == REG)
920 addr = XEXP (addr, 0);
921 else if (GET_CODE (XEXP (addr, 1)) == REG)
922 addr = XEXP (addr, 1);
/* If one side is constant, the register must be on the other side.  */
923 else if (CONSTANT_P (XEXP (addr, 0)))
924 addr = XEXP (addr, 1);
925 else if (CONSTANT_P (XEXP (addr, 1)))
926 addr = XEXP (addr, 0);
930 if (GET_CODE (addr) == REG)
936 /* Output an insn to add the constant N to the register X. */
/* Emits inc/dec for +/-1, sub for negative N (and the non-negatable 128),
   add otherwise.
   NOTE(review): the function header (presumably "asm_add (n, x)"), the
   xops[0]=x setup, the n==-1/n==1 tests guarding inc/dec, and braces are
   missing from this listing.  */
947 output_asm_insn (AS1 (dec%L0,%0), xops);
949 output_asm_insn (AS1 (inc%L0,%0), xops);
/* 128 cannot be negated in a signed byte immediate, so subtract -n.  */
950 else if (n < 0 || n == 128)
952 xops[1] = GEN_INT (-n);
953 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
957 xops[1] = GEN_INT (n);
958 output_asm_insn (AS2 (add%L0,%1,%0), xops);
963 /* Output assembler code to perform a doubleword move insn
964 with operands OPERANDS. */
/* Classic multi-word move splitter (2 words for DImode/DFmode, 3 for
   XFmode): classifies each operand, fixes up push/pop conflicts, builds
   middlehalf[]/latehalf[] operand pairs, then emits singlemove_string
   once per word in an order that avoids clobbering.
   NOTE(review): the return type, local declarations of xops/middlehalf/
   latehalf, many braces, abort calls, and assignments to optype0/optype1
   are missing from this listing.  */
967 output_move_double (operands)
970 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
974 rtx addreg0 = 0, addreg1 = 0;
975 int dest_overlapped_low = 0;
976 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
981 /* First classify both operands. */
983 if (REG_P (operands[0]))
985 else if (offsettable_memref_p (operands[0]))
987 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
989 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
991 else if (GET_CODE (operands[0]) == MEM)
996 if (REG_P (operands[1]))
998 else if (CONSTANT_P (operands[1]))
1000 else if (offsettable_memref_p (operands[1]))
1002 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1004 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1006 else if (GET_CODE (operands[1]) == MEM)
1011 /* Check for the cases that the operand constraints are not
1012 supposed to allow to happen. Abort if we get one,
1013 because generating code for these cases is painful. */
1015 if (optype0 == RNDOP || optype1 == RNDOP)
1018 /* If one operand is decrementing and one is incrementing
1019 decrement the former register explicitly
1020 and change that operand into ordinary indexing. */
1022 if (optype0 == PUSHOP && optype1 == POPOP)
1024 /* ??? Can this ever happen on i386? */
1025 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1026 asm_add (-size, operands[0]);
1027 if (GET_MODE (operands[1]) == XFmode)
1028 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
1029 else if (GET_MODE (operands[0]) == DFmode)
1030 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
1032 operands[0] = gen_rtx (MEM, DImode, operands[0]);
1036 if (optype0 == POPOP && optype1 == PUSHOP)
1038 /* ??? Can this ever happen on i386? */
1039 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1040 asm_add (-size, operands[1]);
1041 if (GET_MODE (operands[1]) == XFmode)
1042 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
1043 else if (GET_MODE (operands[1]) == DFmode)
1044 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
1046 operands[1] = gen_rtx (MEM, DImode, operands[1]);
1050 /* If an operand is an unoffsettable memory ref, find a register
1051 we can increment temporarily to make it refer to the second word. */
1053 if (optype0 == MEMOP)
1054 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1056 if (optype1 == MEMOP)
1057 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1059 /* Ok, we can do one word at a time.
1060 Normally we do the low-numbered word first,
1061 but if either operand is autodecrementing then we
1062 do the high-numbered word first.
1064 In either case, set up in LATEHALF the operands to use
1065 for the high-numbered word and in some cases alter the
1066 operands in OPERANDS to be suitable for the low-numbered word. */
/* Three-word (XFmode, size == 12) branch: build middle and late halves.
   NOTE(review): the "if (size == 12)" test itself is missing here.  */
1070 if (optype0 == REGOP)
1072 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1073 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
1075 else if (optype0 == OFFSOP)
1077 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1078 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1082 middlehalf[0] = operands[0];
1083 latehalf[0] = operands[0];
1086 if (optype1 == REGOP)
1088 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1089 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
1091 else if (optype1 == OFFSOP)
1093 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1094 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1096 else if (optype1 == CNSTOP)
/* Split a long-double constant into three 32-bit immediates.  */
1098 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1100 REAL_VALUE_TYPE r; long l[3];
1102 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1103 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1104 operands[1] = GEN_INT (l[0]);
1105 middlehalf[1] = GEN_INT (l[1]);
1106 latehalf[1] = GEN_INT (l[2]);
1108 else if (CONSTANT_P (operands[1]))
1109 /* No non-CONST_DOUBLE constant should ever appear here. */
1114 middlehalf[1] = operands[1];
1115 latehalf[1] = operands[1];
1118 else /* size is not 12: */
/* Two-word branch: only latehalf is needed.  */
1120 if (optype0 == REGOP)
1121 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1122 else if (optype0 == OFFSOP)
1123 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1125 latehalf[0] = operands[0];
1127 if (optype1 == REGOP)
1128 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1129 else if (optype1 == OFFSOP)
1130 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1131 else if (optype1 == CNSTOP)
1132 split_double (operands[1], &operands[1], &latehalf[1]);
1134 latehalf[1] = operands[1];
1137 /* If insn is effectively movd N (sp),-(sp) then we will do the
1138 high word first. We should use the adjusted operand 1
1139 (which is N+4 (sp) or N+8 (sp))
1140 for the low word and middle word as well,
1141 to compensate for the first decrement of sp. */
1142 if (optype0 == PUSHOP
1143 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1144 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1145 middlehalf[1] = operands[1] = latehalf[1];
1147 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1148 if the upper part of reg N does not appear in the MEM, arrange to
1149 emit the move late-half first. Otherwise, compute the MEM address
1150 into the upper part of N and use that as a pointer to the memory
1152 if (optype0 == REGOP
1153 && (optype1 == OFFSOP || optype1 == MEMOP))
1155 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1156 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1158 /* If both halves of dest are used in the src memory address,
1159 compute the address into latehalf of dest. */
1161 xops[0] = latehalf[0];
1162 xops[1] = XEXP (operands[1], 0);
1163 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1164 if( GET_MODE (operands[1]) == XFmode )
1167 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1168 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1169 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1173 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1174 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
/* NOTE(review): the "else if (size == 12" half of this condition
   (original line ~1177) is missing from this listing.  */
1178 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1180 /* Check for two regs used by both source and dest. */
1181 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1182 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1185 /* JRV says this can't happen: */
1186 if (addreg0 || addreg1)
1189 /* Only the middle reg conflicts; simply put it last. */
1190 output_asm_insn (singlemove_string (operands), operands);
1191 output_asm_insn (singlemove_string (latehalf), latehalf);
1192 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1195 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1196 /* If the low half of dest is mentioned in the source memory
1197 address, the arrange to emit the move late half first. */
1198 dest_overlapped_low = 1;
1201 /* If one or both operands autodecrementing,
1202 do the two words, high-numbered first. */
1204 /* Likewise, the first move would clobber the source of the second one,
1205 do them in the other order. This happens only for registers;
1206 such overlap can't happen in memory unless the user explicitly
1207 sets it up, and that is an undefined circumstance. */
/* Two near-identical guards: the first is the 2-word form, the second the
   3-word form (also checks middlehalf).  NOTE(review): the "if (size == 12)"
   selector between them is missing from this listing.  */
1210 if (optype0 == PUSHOP || optype1 == PUSHOP
1211 || (optype0 == REGOP && optype1 == REGOP
1212 && REGNO (operands[0]) == REGNO (latehalf[1]))
1213 || dest_overlapped_low)
1215 if (optype0 == PUSHOP || optype1 == PUSHOP
1216 || (optype0 == REGOP && optype1 == REGOP
1217 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1218 || REGNO (operands[0]) == REGNO (latehalf[1])))
1219 || dest_overlapped_low)
1221 /* Make any unoffsettable addresses point at high-numbered word. */
1223 asm_add (size-4, addreg0);
1225 asm_add (size-4, addreg1);
/* High-first order: late half, (middle half), then low half.  */
1228 output_asm_insn (singlemove_string (latehalf), latehalf);
1230 /* Undo the adds we just did. */
1232 asm_add (-4, addreg0);
1234 asm_add (-4, addreg1);
1238 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1240 asm_add (-4, addreg0);
1242 asm_add (-4, addreg1);
1245 /* Do low-numbered word. */
1246 return singlemove_string (operands);
1249 /* Normal case: do the two words, low-numbered first. */
1251 output_asm_insn (singlemove_string (operands), operands);
1253 /* Do the middle one of the three words for long double */
1257 asm_add (4, addreg0);
1259 asm_add (4, addreg1);
1261 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1264 /* Make any unoffsettable addresses point at high-numbered word. */
1266 asm_add (4, addreg0);
1268 asm_add (4, addreg1);
1271 output_asm_insn (singlemove_string (latehalf), latehalf);
1273 /* Undo the adds we just did. */
1275 asm_add (4-size, addreg0);
1277 asm_add (4-size, addreg1);
1283 #define MAX_TMPS 2 /* max temporary registers used */
1285 /* Output the appropriate code to move push memory on the stack */
/* Emit assembly that pushes LENGTH bytes of memory operand SRC onto the
   stack, one 32-bit word at a time.  Scratch registers, if any, are
   found in OPERANDS[TMP_START .. N_OPERANDS-1].  Because each push
   moves the stack pointer, stack_offset compensates the source address
   when SRC is sp-relative (see stack_p).
   NOTE(review): this excerpt elides some original source lines.  */
1288 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1300 } tmp_info[MAX_TMPS];
1302 rtx src = operands[1];
1305 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1306 int stack_offset = 0;
/* The source must be addressable with a constant offset, and pushes
   only handle whole words.  */
1310 if (!offsettable_memref_p (src))
1311 fatal_insn ("Source is not offsettable", insn);
1313 if ((length & 3) != 0)
1314 fatal_insn ("Pushing non-word aligned size", insn);
1316 /* Figure out which temporary registers we have available */
1317 for (i = tmp_start; i < n_operands; i++)
1319 if (GET_CODE (operands[i]) == REG)
/* A scratch register that overlaps the source address cannot be used
   as a temporary.  */
1321 if (reg_overlap_mentioned_p (operands[i], src))
1324 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1325 if (max_tmps == MAX_TMPS)
/* No usable scratch registers: push directly from memory,
   highest-addressed word first.  */
1331 for (offset = length - 4; offset >= 0; offset -= 4)
1333 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1334 output_asm_insn (AS1(push%L0,%0), xops);
/* With scratch registers: batch up to MAX_TMPS loads, then the
   corresponding pushes -- presumably for better pairing/scheduling
   on the 586; TODO confirm.  */
1340 for (offset = length - 4; offset >= 0; )
1342 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1344 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1345 tmp_info[num_tmps].push = AS1(push%L0,%1);
1346 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1350 for (i = 0; i < num_tmps; i++)
1351 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1353 for (i = 0; i < num_tmps; i++)
1354 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
/* Account for the stack-pointer movement the pushes just caused
   (a guard on stack_p appears to be elided in this excerpt).  */
1357 stack_offset += 4*num_tmps;
1365 /* Output the appropriate code to move data between two memory locations */
/* Emit assembly for a memory-to-memory move of LENGTH bytes, using the
   scratch registers supplied in OPERANDS[TMP_START .. N_OPERANDS-1].
   Data is moved in 4-byte words, then a 2-byte piece, then a final odd
   byte, which needs a register usable in QImode.
   NOTE(review): this excerpt elides some original source lines.  */
1368 output_move_memory (operands, insn, length, tmp_start, n_operands)
1379 } tmp_info[MAX_TMPS];
1381 rtx dest = operands[0];
1382 rtx src = operands[1];
1383 rtx qi_tmp = NULL_RTX;
/* A destination of (mem (pre_inc sp)) is really a push; hand it off.
   (NOTE(review): PRE_INC on a push looks suspicious -- PRE_DEC is the
   usual push form; this may be elided context or a quirk.  Confirm
   against the full file.)  */
1389 if (GET_CODE (dest) == MEM
1390 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1391 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1392 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1394 if (!offsettable_memref_p (src))
1395 fatal_insn ("Source is not offsettable", insn);
1397 if (!offsettable_memref_p (dest))
1398 fatal_insn ("Destination is not offsettable", insn);
1400 /* Figure out which temporary registers we have available */
1401 for (i = tmp_start; i < n_operands; i++)
1403 if (GET_CODE (operands[i]) == REG)
/* Remember one byte-capable register in case LENGTH is odd.  */
1405 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1406 qi_tmp = operands[i];
/* Temporaries must not alias either address.  */
1408 if (reg_overlap_mentioned_p (operands[i], dest))
1409 fatal_insn ("Temporary register overlaps the destination", insn);
1411 if (reg_overlap_mentioned_p (operands[i], src))
1412 fatal_insn ("Temporary register overlaps the source", insn);
1414 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1415 if (max_tmps == MAX_TMPS)
1421 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1423 if ((length & 1) != 0)
1426 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Batch loads then stores through the scratch registers, word-sized
   while 4 or more bytes remain, then one halfword.  */
1431 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1435 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1436 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1437 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1438 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1442 else if (length >= 2)
1444 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1445 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1446 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1447 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1455 for (i = 0; i < num_tmps; i++)
1456 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1458 for (i = 0; i < num_tmps; i++)
1459 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Move the final odd byte through the QImode-capable scratch reg.  */
1464 xops[0] = adj_offsettable_operand (dest, offset);
1465 xops[1] = adj_offsettable_operand (src, offset);
1467 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1468 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Return nonzero when X is a CONST_DOUBLE the 80387 can materialize
   with a single instruction, i.e. +0.0 or 1.0 (the return statements
   for the is0/is1 cases are elided in this excerpt -- TODO confirm the
   exact values against the full file).  A float-trap handler guards
   the host-side conversion of the value.  */
1476 standard_80387_constant_p (x)
1479 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* If examining the constant traps on the host, treat it as
   non-standard.  */
1484 if (setjmp (handler))
1487 set_float_handler (handler);
1488 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* Only +0.0 counts; -0.0 is deliberately excluded.  */
1489 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1490 is1 = REAL_VALUES_EQUAL (d, dconst1);
1491 set_float_handler (NULL_PTR);
1499 /* Note that on the 80387, other constants, such as pi,
1500 are much slower to load as standard constants
1501 than to load from doubles in memory! */
/* Return the assembler template moving a floating constant,
   OPERANDS[1], into OPERANDS[0].  For an 80387 register destination,
   single-instruction constants are detected via
   standard_80387_constant_p (the code consuming CONVAL is elided
   here).  Otherwise a non-XFmode CONST_DOUBLE is rewritten as its
   32-bit target bit image (a CONST_INT) and moved as an integer.  */
1508 output_move_const_single (operands)
1511 if (FP_REG_P (operands[0]))
1513 int conval = standard_80387_constant_p (operands[1]);
1521 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1523 REAL_VALUE_TYPE r; long l;
/* XFmode handling is elided from this excerpt.  */
1525 if (GET_MODE (operands[1]) == XFmode)
1528 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1529 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1530 operands[1] = GEN_INT (l);
1532 return singlemove_string (operands);
1535 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1536 reference and a constant. */
/* Predicate: nonzero when OP is a symbol/label reference, or a sum of
   one plus a CONST_INT.  The switch's case labels (SYMBOL_REF,
   LABEL_REF, CONST, ...) are elided in this excerpt; the visible
   return handles the (plus sym const_int) shape.  MODE is unused.  */
1539 symbolic_operand (op, mode)
1541 enum machine_mode mode;
1543 switch (GET_CODE (op))
1550 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1551 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1552 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1558 /* Test for a valid operand for a call instruction.
1559 Don't allow the arg pointer register or virtual regs
1560 since they may change into reg + const, which the patterns
1561 can't handle yet. */
/* Predicate for a valid call target: a MEM whose address is either a
   constant (additionally vetted by general_operand, which matters for
   PIC), or a register that is neither the arg pointer nor a virtual
   register -- those may later turn into reg+const, which the call
   patterns cannot handle.  */
1564 call_insn_operand (op, mode)
1566 enum machine_mode mode;
1568 if (GET_CODE (op) == MEM
1569 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1570 /* This makes a difference for PIC. */
1571 && general_operand (XEXP (op, 0), Pmode))
1572 || (GET_CODE (XEXP (op, 0)) == REG
1573 && XEXP (op, 0) != arg_pointer_rtx
1574 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1575 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1580 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Like call_insn_operand, but accepts any constant address without the
   general_operand check, so (mem (symbol_ref ...)) passes even under
   PIC -- used by the call expanders before addresses are
   legitimized.  */
1584 expander_call_insn_operand (op, mode)
1586 enum machine_mode mode;
1588 if (GET_CODE (op) == MEM
1589 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1590 || (GET_CODE (XEXP (op, 0)) == REG
1591 && XEXP (op, 0) != arg_pointer_rtx
1592 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1593 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1598 /* Return 1 if OP is a comparison operator that can use the condition code
1599 generated by an arithmetic operation. */
/* Return 1 when OP is a comparison operator whose test can be done on
   the condition codes an arithmetic insn leaves behind.  MODE, if not
   VOIDmode, must match OP's mode.  GT and LE are rejected --
   presumably because the flags set by plain arithmetic do not support
   those signed tests directly; TODO confirm the rationale.  */
1602 arithmetic_comparison_operator (op, mode)
1604 enum machine_mode mode;
1608 if (mode != VOIDmode && mode != GET_MODE (op))
1610 code = GET_CODE (op);
1611 if (GET_RTX_CLASS (code) != '<')
1614 return (code != GT && code != LE);
1617 /* Returns 1 if OP contains a symbol reference */
/* Recursively walk OP's rtx structure and return 1 if any SYMBOL_REF
   or LABEL_REF appears anywhere inside it ('E' = rtx vector operand,
   'e' = single rtx operand).  */
1620 symbolic_reference_mentioned_p (op)
1626 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1629 fmt = GET_RTX_FORMAT (GET_CODE (op));
1630 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
/* Vector operand: scan each element.  */
1636 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1637 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1640 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1647 /* Attempt to expand a binary operator. Make the expansion closer to the
1648 actual machine, then just general_operand, which will allow 3 separate
1649 memory references (one output, two input) in a single insn. Return
1650 whether the insn fails, or succeeds. */
/* Expand CODE applied to OPERANDS[1] and OPERANDS[2] in MODE, storing
   in OPERANDS[0], massaging the operands so at most one memory
   reference and no leading CONST_INT for non-commutative ops remain.
   Returns whether the expansion succeeds (failure path elided here).
   NOTE(review): this excerpt elides some original source lines.  */
1653 ix86_expand_binary_operator (code, mode, operands)
1655 enum machine_mode mode;
1662 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1663 if (GET_RTX_CLASS (code) == 'c'
1664 && (rtx_equal_p (operands[0], operands[2])
1665 || immediate_operand (operands[1], mode)))
/* Swap the sources so the destination-matching or immediate operand
   lands in the canonical slot.  */
1667 rtx temp = operands[1];
1668 operands[1] = operands[2];
1672 /* If optimizing, copy to regs to improve CSE */
1673 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1675 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1676 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1678 if (GET_CODE (operands[2]) == MEM)
1679 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* A CONST_INT first operand of a subtraction must live in a reg.  */
1681 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1683 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1684 emit_move_insn (temp, operands[1]);
1690 if (!ix86_binary_operator_ok (code, mode, operands))
1692 /* If not optimizing, try to make a valid insn (optimize code previously did
1693 this above to improve chances of CSE) */
1695 if ((!TARGET_PSEUDO || !optimize)
1696 && ((reload_in_progress | reload_completed) == 0)
1697 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1700 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1702 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1706 if (GET_CODE (operands[2]) == MEM)
1708 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1712 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1714 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1715 emit_move_insn (temp, operands[1]);
/* Fixups applied but operands still invalid: give up.  */
1720 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1730 /* Return TRUE or FALSE depending on whether the binary operator meets the
1731 appropriate constraints. */
/* TRUE when the binary operands satisfy the i386 constraints: not both
   sources in memory, and a CONST_INT first source only for commutative
   codes (where it can be swapped into the second slot).  */
1734 ix86_binary_operator_ok (code, mode, operands)
1736 enum machine_mode mode;
1739 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1740 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1743 /* Attempt to expand a unary operator. Make the expansion closer to the
1744 actual machine, then just general_operand, which will allow 2 separate
1745 memory references (one output, one input) in a single insn. Return
1746 whether the insn fails, or succeeds. */
/* Expand unary operator CODE on OPERANDS[1] into OPERANDS[0] in MODE,
   forcing a memory source into a register when optimizing (for CSE) or
   when needed to make the insn valid.  Returns whether the expansion
   succeeds (some lines elided in this excerpt).  */
1749 ix86_expand_unary_operator (code, mode, operands)
1751 enum machine_mode mode;
1756 /* If optimizing, copy to regs to improve CSE */
1759 && ((reload_in_progress | reload_completed) == 0)
1760 && GET_CODE (operands[1]) == MEM)
1762 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1765 if (!ix86_unary_operator_ok (code, mode, operands))
/* Not optimizing: only now force the memory operand to a register,
   mirroring the binary-operator expander.  */
1767 if ((!TARGET_PSEUDO || !optimize)
1768 && ((reload_in_progress | reload_completed) == 0)
1769 && GET_CODE (operands[1]) == MEM)
1771 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1772 if (!ix86_unary_operator_ok (code, mode, operands))
1782 /* Return TRUE or FALSE depending on whether the unary operator meets the
1783 appropriate constraints. */
/* Constraint check for unary operators.  The body is elided in this
   excerpt -- presumably it simply returns TRUE in this GCC version;
   TODO confirm against the full file.  */
1786 ix86_unary_operator_ok (code, mode, operands)
1788 enum machine_mode mode;
/* Shared state for the -fpic "get return address" thunk emitted by
   asm_output_function_prefix and referenced by the prologue code.  */
/* Label marking the thunk; created lazily.  */
1796 static rtx pic_label_rtx;
/* Assembler name of that label ("LPR<n>").  */
1797 static char pic_label_name [256];
/* Counter making each thunk label unique.  */
1798 static int pic_label_no = 0;
1800 /* This function generates code for -fpic that loads %ebx with
1801 with the return address of the caller and then returns. */
/* Before a function body, emit (when PIC and deep branch prediction
   are in effect) a tiny thunk that loads the PIC register from the
   word at the stack pointer -- i.e. the caller's return address -- and
   returns.  Calling this thunk keeps the CPU's call/return predictor
   balanced, unlike the call-next-insn/pop idiom.  */
1803 asm_output_function_prefix (file, name)
1808 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1809 || current_function_uses_const_pool);
1810 xops[0] = pic_offset_table_rtx;
1811 xops[1] = stack_pointer_rtx;
1813 /* deep branch prediction favors having a return for every call */
1814 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Create the thunk label on first use; it is shared per file.  */
1818 if (pic_label_rtx == 0)
1820 pic_label_rtx = (rtx) gen_label_rtx ();
1821 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1822 LABEL_NAME (pic_label_rtx) = pic_label_name;
/* Declare the thunk as a function so the assembler/linker treat it
   properly.  */
1824 prologue_node = make_node (FUNCTION_DECL);
1825 DECL_RESULT (prologue_node) = 0;
1826 #ifdef ASM_DECLARE_FUNCTION_NAME
1827 ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
/* Load the return address (at (%esp)) into the PIC reg and return.  */
1829 output_asm_insn ("movl (%1),%0", xops);
1830 output_asm_insn ("ret", xops);
1834 /* Set up the stack and frame (if desired) for the function. */
/* Emit the function prologue as assembler text (the non-scheduled
   path; when TARGET_SCHEDULE_PROLOGUE the RTL path in
   ix86_expand_prologue is used instead).  Sets up the frame pointer
   and/or stack frame, saves call-used registers, and loads the PIC
   register when needed.
   NOTE(review): this excerpt elides some original source lines.  */
1837 function_prologue (file, size)
1844 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1845 || current_function_uses_const_pool);
1846 long tsize = get_frame_size ();
1848 /* pic references don't explicitly mention pic_offset_table_rtx */
1849 if (TARGET_SCHEDULE_PROLOGUE)
1855 xops[0] = stack_pointer_rtx;
1856 xops[1] = frame_pointer_rtx;
1857 xops[2] = GEN_INT (tsize);
/* Standard frame: push old %ebp, point %ebp at the new frame.  */
1859 if (frame_pointer_needed)
1861 output_asm_insn ("push%L1 %1", xops);
1862 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* Small frames: plain subtract from %esp ...  */
1867 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1868 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
/* ... large frames with stack probing: size in %eax, call _alloca.  */
1871 xops[3] = gen_rtx (REG, SImode, 0);
1872 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
1874 xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
1875 output_asm_insn (AS1 (call,%P3), xops);
1878 /* Note If use enter it is NOT reversed args.
1879 This one is not reversed from intel!!
1880 I think enter is slower. Also sdb doesn't like it.
1881 But if you want it the code is:
1883 xops[3] = const0_rtx;
1884 output_asm_insn ("enter %2,%3", xops);
/* Push each live call-saved register (and the PIC reg if used).  */
1887 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1888 for (regno = limit - 1; regno >= 0; regno--)
1889 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1890 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1892 xops[0] = gen_rtx (REG, SImode, regno);
1893 output_asm_insn ("push%L0 %0", xops);
/* PIC setup, deep-branch-prediction flavor: call the shared thunk
   (which returns its own return address in the PIC reg), then add the
   GOT displacement.  */
1896 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1898 xops[0] = pic_offset_table_rtx;
1899 xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx));
1901 output_asm_insn (AS1 (call,%P1), xops);
1902 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
/* PIC setup, classic flavor: call next insn, pop the address, add the
   label-relative GOT displacement.  */
1905 else if (pic_reg_used)
1907 xops[0] = pic_offset_table_rtx;
1908 xops[1] = (rtx) gen_label_rtx ();
1910 output_asm_insn (AS1 (call,%P1), xops);
1911 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1912 output_asm_insn (AS1 (pop%L0,%0), xops);
1913 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1917 /* This function generates the assembly code for function entry.
1918 FILE is an stdio stream to output the code to.
1919 SIZE is an int: how many units of temporary storage to allocate. */
/* Emit the function prologue as RTL insns (the schedulable path; a
   no-op unless TARGET_SCHEDULE_PROLOGUE).  Mirrors function_prologue:
   frame setup, register saves, PIC-register load.  Frame-building
   insns are flagged RTX_FRAME_RELATED_P for unwind/debug info.
   NOTE(review): this excerpt elides some original source lines.  */
1922 ix86_expand_prologue ()
1927 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1928 || current_function_uses_const_pool);
1929 long tsize = get_frame_size ();
1932 if (!TARGET_SCHEDULE_PROLOGUE)
1935 xops[0] = stack_pointer_rtx;
1936 xops[1] = frame_pointer_rtx;
1937 xops[2] = GEN_INT (tsize);
/* push %ebp; mov %esp,%ebp -- as RTL.  */
1938 if (frame_pointer_needed)
1942 gen_rtx (MEM, SImode,
1943 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1944 frame_pointer_rtx));
1945 RTX_FRAME_RELATED_P (insn) = 1;
1946 insn = emit_move_insn (xops[1], xops[0]);
1947 RTX_FRAME_RELATED_P (insn) = 1;
/* Small frames: subtract from the stack pointer directly.  */
1952 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1954 insn = emit_insn (gen_subsi3 (xops[0], xops[0], xops[2]));
1955 RTX_FRAME_RELATED_P (insn) = 1;
/* Large frames with probing: size in %eax, call _alloca.  */
1959 xops[3] = gen_rtx (REG, SImode, 0);
1960 emit_move_insn (xops[3], xops[2]);
1961 xops[3] = gen_rtx (MEM, FUNCTION_MODE,
1962 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
1963 emit_call_insn (gen_rtx (CALL, VOIDmode,
1964 xops[3], const0_rtx));
1967 /* Note If use enter it is NOT reversed args.
1968 This one is not reversed from intel!!
1969 I think enter is slower. Also sdb doesn't like it.
1970 But if you want it the code is:
1972 xops[3] = const0_rtx;
1973 output_asm_insn ("enter %2,%3", xops);
/* Save live call-saved registers (and the PIC reg) with pushes.  */
1976 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1977 for (regno = limit - 1; regno >= 0; regno--)
1978 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1979 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1981 xops[0] = gen_rtx (REG, SImode, regno);
1984 gen_rtx (MEM, SImode,
1985 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1988 RTX_FRAME_RELATED_P (insn) = 1;
/* PIC setup via the shared return-address thunk (deep branch
   prediction), creating the thunk label lazily as
   asm_output_function_prefix does.  */
1991 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1993 xops[0] = pic_offset_table_rtx;
1994 if (pic_label_rtx == 0)
1996 pic_label_rtx = (rtx) gen_label_rtx ();
1997 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1998 LABEL_NAME (pic_label_rtx) = pic_label_name;
2000 xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));
2002 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
2003 emit_insn (gen_prologue_set_got (xops[0],
2004 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2005 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
/* Classic PIC setup: call/pop, then add the GOT displacement.  */
2007 else if (pic_reg_used)
2009 xops[0] = pic_offset_table_rtx;
2010 xops[1] = (rtx) gen_label_rtx ();
2012 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))))
2013 emit_insn (gen_pop (xops[0]));
2014 emit_insn (gen_prologue_set_got (xops[0],
2015 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2016 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
2020 /* Restore function stack, frame, and registers. */
/* Textual epilogue hook.  Its body is elided in this excerpt --
   presumably empty, with the real work done in RTL by
   ix86_expand_epilogue; TODO confirm against the full file.  */
2023 function_epilogue (file, size)
2029 /* Return 1 if it is appropriate to emit `ret' instructions in the
2030 body of a function. Do this only if the epilogue is simple, needing a
2031 couple of insns. Prior to reloading, we can't tell how many registers
2032 must be saved, so return 0 then. Return 0 if there is no frame
2033 marker to de-allocate.
2035 If NON_SAVING_SETJMP is defined and true, then it is not possible
2036 for the epilogue to be simple, so return 0. This is a special case
2037 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2038 until final, but jump_optimize may need to know sooner if a
/* Return 1 when a bare `ret` pattern may be used: after reload, when
   either no call-saved registers (nor the PIC reg) need restoring, or
   no frame pointer must be torn down.  NON_SAVING_SETJMP forces 0
   because register liveness is not final early enough.  */
2042 ix86_can_use_return_insn_p ()
2046 int reglimit = (frame_pointer_needed
2047 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2048 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2049 || current_function_uses_const_pool);
2051 #ifdef NON_SAVING_SETJMP
2052 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
/* Before reload we cannot know which registers must be saved.  */
2056 if (! reload_completed)
/* Count registers that the epilogue would have to restore.  */
2059 for (regno = reglimit - 1; regno >= 0; regno--)
2060 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2061 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2064 return nregs == 0 || ! frame_pointer_needed;
2068 /* This function generates the assembly code for function exit.
2069 FILE is an stdio stream to output the code to.
2070 SIZE is an int: how many units of temporary storage to deallocate. */
/* Emit the function epilogue as RTL: restore call-saved registers,
   tear down the frame (via `leave` or mov/pop), release any
   frame-pointer-less stack frame, and emit the return -- popping the
   callee-popped argument bytes when required.
   NOTE(review): this excerpt elides some original source lines.  */
2073 ix86_expand_epilogue ()
2076 register int nregs, limit;
2079 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2080 || current_function_uses_const_pool);
2081 long tsize = get_frame_size ();
2083 /* Compute the number of registers to pop */
2085 limit = (frame_pointer_needed
2086 ? FRAME_POINTER_REGNUM
2087 : STACK_POINTER_REGNUM);
2091 for (regno = limit - 1; regno >= 0; regno--)
2092 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2093 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2096 /* sp is often unreliable so we must go off the frame pointer,
2099 /* In reality, we may not care if sp is unreliable, because we can
2100 restore the register relative to the frame pointer. In theory,
2101 since each move is the same speed as a pop, and we don't need the
2102 leal, this is faster. For now restore multiple registers the old
2105 offset = -tsize - (nregs * UNITS_PER_WORD);
2107 xops[2] = stack_pointer_rtx;
/* Multiple registers (or no frame pointer): point sp at the save area
   with an lea off the frame pointer, then pop them in order.  */
2109 if (nregs > 1 || ! frame_pointer_needed)
2111 if (frame_pointer_needed)
2113 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2114 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2115 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2118 for (regno = 0; regno < limit; regno++)
2119 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2120 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2122 xops[0] = gen_rtx (REG, SImode, regno);
2123 emit_insn (gen_pop (xops[0]));
2124 /* output_asm_insn ("pop%L0 %0", xops);*/
/* Otherwise restore each register with a frame-pointer-relative
   move; offset stays fixed since sp is not involved.  */
2128 for (regno = 0; regno < limit; regno++)
2129 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2130 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2132 xops[0] = gen_rtx (REG, SImode, regno);
2133 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2134 emit_move_insn (xops[0], xops[1]);
2135 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2139 if (frame_pointer_needed)
2141 /* If not an i386, mov & pop is faster than "leave". */
2143 if (TARGET_USE_LEAVE)
2144 emit_insn (gen_leave());
2145 /* output_asm_insn ("leave", xops);*/
2148 xops[0] = frame_pointer_rtx;
2149 xops[1] = stack_pointer_rtx;
2150 emit_insn (gen_epilogue_set_stack_ptr());
2151 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2152 emit_insn (gen_pop (xops[0]));
2153 /* output_asm_insn ("pop%L0 %0", xops);*/
2158 /* If there is no frame pointer, we must still release the frame. */
2160 xops[0] = GEN_INT (tsize);
2161 emit_insn (gen_rtx (SET, SImode,
2163 gen_rtx (PLUS, SImode,
2166 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2169 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2170 if (profile_block_flag == 2)
2172 FUNCTION_BLOCK_PROFILER_EXIT(file);
2176 if (current_function_pops_args && current_function_args_size)
2178 xops[1] = GEN_INT (current_function_pops_args);
2180 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2181 asked to pop more, pop return address, do explicit add, and jump
2182 indirectly to the caller. */
2184 if (current_function_pops_args >= 32768)
2186 /* ??? Which register to use here? */
2187 xops[0] = gen_rtx (REG, SImode, 2)
2188 emit_insn (gen_pop (xops[0]));
2189 /* output_asm_insn ("pop%L0 %0", xops);*/
2190 emit_insn (gen_rtx (SET, SImode,
2192 gen_rtx (PLUS, SImode,
2195 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2196 emit_jump_insn (xops[0]);
2197 /* output_asm_insn ("jmp %*%0", xops);*/
/* Small arg pop: `ret $n` does it in one instruction.  */
2200 emit_jump_insn (gen_return_pop_internal (xops[1]));
2201 /* output_asm_insn ("ret %1", xops);*/
2204 /* output_asm_insn ("ret", xops);*/
2205 emit_jump_insn (gen_return_internal ());
2209 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2210 that is a valid memory address for an instruction.
2211 The MODE argument is the machine mode for the MEM expression
2212 that wants to use this address.
2214 On x86, legitimate addresses are:
2215 base movl (base),reg
2216 displacement movl disp,reg
2217 base + displacement movl disp(base),reg
2218 index + base movl (base,index),reg
2219 (index + base) + displacement movl disp(base,index),reg
2220 index*scale movl (,index,scale),reg
2221 index*scale + disp movl disp(,index,scale),reg
2222 index*scale + base movl (base,index,scale),reg
2223 (index*scale + base) + disp movl disp(base,index,scale),reg
2225 In each case, scale can be 1, 2, 4, 8. */
2227 /* This is exactly the same as print_operand_addr, except that
2228 it recognizes addresses instead of printing them.
2230 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2231 convert common non-canonical forms to canonical form so that they will
/* Debug helper used throughout legitimate_address_p: under
   -mdebug-addr, report why an address was rejected (the rest of the
   macro body, which presumably dumps INSN, is elided here).  */
2234 #define ADDR_INVALID(msg,insn) \
2236 if (TARGET_DEBUG_ADDR) \
2238 fprintf (stderr, msg); \
/* Recognizer behind GO_IF_LEGITIMATE_ADDRESS: decompose ADDR into
   base, index, scale and displacement, then validate each part.
   STRICT selects hard-register checks (after reload) versus the
   relaxed pseudo-allowing checks.  Only canonical-form addresses are
   accepted; LEGITIMIZE_ADDRESS is expected to canonicalize first.
   NOTE(review): this excerpt elides some original source lines.  */
2244 legitimate_address_p (mode, addr, strict)
2245 enum machine_mode mode;
2249 rtx base = NULL_RTX;
2250 rtx indx = NULL_RTX;
2251 rtx scale = NULL_RTX;
2252 rtx disp = NULL_RTX;
2254 if (TARGET_DEBUG_ADDR)
2257 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2258 GET_MODE_NAME (mode), strict);
/* Decompose ADDR by its top-level rtx code.  */
2263 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2264 base = addr; /* base reg */
2266 else if (GET_CODE (addr) == PLUS)
2268 rtx op0 = XEXP (addr, 0);
2269 rtx op1 = XEXP (addr, 1);
2270 enum rtx_code code0 = GET_CODE (op0);
2271 enum rtx_code code1 = GET_CODE (op1);
2273 if (code0 == REG || code0 == SUBREG)
2275 if (code1 == REG || code1 == SUBREG)
2277 indx = op0; /* index + base */
2283 base = op0; /* base + displacement */
2288 else if (code0 == MULT)
2290 indx = XEXP (op0, 0);
2291 scale = XEXP (op0, 1);
2293 if (code1 == REG || code1 == SUBREG)
2294 base = op1; /* index*scale + base */
2297 disp = op1; /* index*scale + disp */
2300 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2302 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2303 scale = XEXP (XEXP (op0, 0), 1);
2304 base = XEXP (op0, 1);
2308 else if (code0 == PLUS)
2310 indx = XEXP (op0, 0); /* index + base + disp */
2311 base = XEXP (op0, 1);
2317 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2322 else if (GET_CODE (addr) == MULT)
2324 indx = XEXP (addr, 0); /* index*scale */
2325 scale = XEXP (addr, 1);
2329 disp = addr; /* displacement */
2331 /* Allow arg pointer and stack pointer as index if there is not scaling */
2332 if (base && indx && !scale
2333 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2340 /* Validate base register */
2341 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2342 is one word out of a two word structure, which is represented internally
2346 if (GET_CODE (base) != REG)
2348 ADDR_INVALID ("Base is not a register.\n", base);
2352 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2353 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2355 ADDR_INVALID ("Base is not valid.\n", base);
2360 /* Validate index register */
2361 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2362 is one word out of a two word structure, which is represented internally
2366 if (GET_CODE (indx) != REG)
2368 ADDR_INVALID ("Index is not a register.\n", indx);
2372 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2373 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2375 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale with no index can never be produced by the decomposition
   above, so this is an internal consistency check.  */
2380 abort (); /* scale w/o index invalid */
2382 /* Validate scale factor */
2385 HOST_WIDE_INT value;
2387 if (GET_CODE (scale) != CONST_INT)
2389 ADDR_INVALID ("Scale is not valid.\n", scale);
/* The i386 addressing hardware only supports these four factors.  */
2393 value = INTVAL (scale);
2394 if (value != 1 && value != 2 && value != 4 && value != 8)
2396 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2401 /* Validate displacement
2402 Constant pool addresses must be handled special. They are
2403 considered legitimate addresses, but only if not used with regs.
2404 When printed, the output routines know to print the reference with the
2405 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2408 if (GET_CODE (disp) == SYMBOL_REF
2409 && CONSTANT_POOL_ADDRESS_P (disp)
2414 else if (!CONSTANT_ADDRESS_P (disp))
2416 ADDR_INVALID ("Displacement is not valid.\n", disp);
2420 else if (GET_CODE (disp) == CONST_DOUBLE)
2422 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under PIC, symbolic displacements are only valid relative to the
   PIC register (as unscaled index or as base).  */
2426 else if (flag_pic && SYMBOLIC_CONST (disp)
2427 && base != pic_offset_table_rtx
2428 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2430 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2434 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2435 && (base != NULL_RTX || indx != NULL_RTX))
2437 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2442 if (TARGET_DEBUG_ADDR)
2443 fprintf (stderr, "Address is valid.\n");
2445 /* Everything looks valid, return true */
2450 /* Return a legitimate reference for ORIG (an address) using the
2451 register REG. If REG is 0, a new pseudo is generated.
2453 There are three types of references that must be handled:
2455 1. Global data references must load the address from the GOT, via
2456 the PIC reg. An insn is emitted to do this load, and the reg is
2459 2. Static data references must compute the address as an offset
2460 from the GOT, whose base is in the PIC reg. An insn is emitted to
2461 compute the address into a reg, and the reg is returned. Static
2462 data objects have SYMBOL_REF_FLAG set to differentiate them from
2463 global data objects.
2465 3. Constant pool addresses must be handled special. They are
2466 considered legitimate addresses, but only if not used with regs.
2467 When printed, the output routines know to print the reference with the
2468 PIC reg, even though the PIC reg doesn't appear in the RTL.
2470 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2471 reg also appears in the address (except for constant pool references,
2474 "switch" statements also require special handling when generating
2475 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* Rewrite address ORIG for PIC, loading/computing it into REG (a fresh
   pseudo is made when REG is 0).  Static data and labels become
   PIC-reg-relative sums; global data goes through a GOT load; CONST
   and PLUS forms are handled recursively.  See the block comment above
   for the full contract.
   NOTE(review): this excerpt elides some original source lines.  */
2478 legitimize_pic_address (orig, reg)
2485 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant-pool references are already legitimate as-is.  */
2487 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr)
2492 reg = gen_reg_rtx (Pmode);
/* SYMBOL_REF_FLAG marks static data: address = PIC reg + offset.
   Global data instead loads the address from the GOT entry.  */
2494 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2495 || GET_CODE (addr) == LABEL_REF)
2496 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2498 new = gen_rtx (MEM, Pmode,
2499 gen_rtx (PLUS, Pmode,
2500 pic_offset_table_rtx, orig));
2502 emit_move_insn (reg, new);
2504 current_function_uses_pic_offset_table = 1;
2507 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2511 if (GET_CODE (addr) == CONST)
2513 addr = XEXP (addr, 0);
2514 if (GET_CODE (addr) != PLUS)
/* Already PIC-reg relative: nothing to do.  */
2518 if (XEXP (addr, 0) == pic_offset_table_rtx)
2522 reg = gen_reg_rtx (Pmode);
/* Legitimize both halves of the sum, reusing REG for the first and
   letting the second allocate its own register when REG is taken.  */
2524 base = legitimize_pic_address (XEXP (addr, 0), reg);
2525 addr = legitimize_pic_address (XEXP (addr, 1),
2526 base == reg ? NULL_RTX : reg);
2528 if (GET_CODE (addr) == CONST_INT)
2529 return plus_constant (base, INTVAL (addr));
/* Re-associate so the constant stays outermost.  */
2531 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2533 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2534 addr = XEXP (addr, 1);
2536 return gen_rtx (PLUS, Pmode, base, addr);
2542 /* Emit insns to move operands[1] into operands[0]. */
/* Emit insns moving OPERANDS[1] into OPERANDS[0] under PIC,
   legitimizing a symbolic source first.  During reload no new pseudo
   may be created, so the destination itself serves as the scratch.  */
2545 emit_pic_move (operands, mode)
2547 enum machine_mode mode;
2549 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic value stored to memory must be in a register first.  */
2551 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2552 operands[1] = (rtx) force_reg (SImode, operands[1]);
2554 operands[1] = legitimize_pic_address (operands[1], temp);
2558 /* Try machine-dependent ways of modifying an illegitimate address
2559 to be legitimate. If we find one, return the new, valid address.
2560 This macro is used in only one place: `memory_address' in explow.c.
2562 OLDX is the address as it was before break_out_memory_refs was called.
2563 In some cases it is useful to look at this to decide what needs to be done.
2565 MODE and WIN are passed so that this macro can use
2566 GO_IF_LEGITIMATE_ADDRESS.
2568 It is always safe for this macro to do nothing. It exists to recognize
2569 opportunities to optimize the output.
2571 For the 80386, we handle X+REG by loading X into a register R and
2572 using R+REG. R will go in a general reg and indexing will be used.
2573 However, if REG is a broken-out memory address or multiplication,
2574 nothing needs to be done because REG can certainly go in a general reg.
2576 When -fpic is used, special handling is needed for symbolic references.
2577 See comments by legitimize_pic_address in i386.c for details. */
/* Rewrite address X into a (more) legitimate i386 address.
   See the block comment above for the full contract.
   NOTE(review): extraction has dropped lines here (declarations of
   x/oldx, braces, and the `changed` bookkeeping) — compare against a
   pristine i386.c before editing further.  */
2580 legitimize_address (x, oldx, mode)
2583 enum machine_mode mode;
2588 if (TARGET_DEBUG_ADDR)
2590 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
/* PIC symbolic references get their own legitimization path.  */
2594 if (flag_pic && SYMBOLIC_CONST (x))
2595 return legitimize_pic_address (x, 0);
2597 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2598 if (GET_CODE (x) == ASHIFT
2599 && GET_CODE (XEXP (x, 1)) == CONST_INT
2600 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2603 x = gen_rtx (MULT, Pmode,
2604 force_reg (Pmode, XEXP (x, 0)),
2605 GEN_INT (1 << log))
2608 if (GET_CODE (x) == PLUS)
2610 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2611 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2612 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2613 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2616 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2617 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2618 GEN_INT (1 << log));
2621 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2622 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2623 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2626 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2627 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2628 GEN_INT (1 << log));
2631 /* Put multiply first if it isn't already */
2632 if (GET_CODE (XEXP (x, 1)) == MULT)
2634 rtx tmp = XEXP (x, 0);
2635 XEXP (x, 0) = XEXP (x, 1);
2640 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2641 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2642 created by virtual register instantiation, register elimination, and
2643 similar optimizations. */
2644 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2647 x = gen_rtx (PLUS, Pmode,
2648 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2649 XEXP (XEXP (x, 1), 1));
2652 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2653 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2654 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2655 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2656 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2657 && CONSTANT_P (XEXP (x, 1)))
2659 rtx constant, other;
/* One of the two constants must be a CONST_INT so its value can be
   folded with plus_constant below.  */
2661 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2663 constant = XEXP (x, 1);
2664 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2666 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2668 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2669 other = XEXP (x, 1);
2677 x = gen_rtx (PLUS, Pmode,
2678 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2679 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2680 plus_constant (other, INTVAL (constant)));
2684 if (changed && legitimate_address_p (mode, x, FALSE))
/* Force any remaining MULT sub-terms into registers.  */
2687 if (GET_CODE (XEXP (x, 0)) == MULT)
2690 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2693 if (GET_CODE (XEXP (x, 1)) == MULT)
2696 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2700 && GET_CODE (XEXP (x, 1)) == REG
2701 && GET_CODE (XEXP (x, 0)) == REG)
2704 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2707 x = legitimize_pic_address (x, 0);
2710 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: compute one side into a fresh register.  */
2713 if (GET_CODE (XEXP (x, 0)) == REG)
2715 register rtx temp = gen_reg_rtx (Pmode);
2716 register rtx val = force_operand (XEXP (x, 1), temp);
2718 emit_move_insn (temp, val);
2724 else if (GET_CODE (XEXP (x, 1)) == REG)
2726 register rtx temp = gen_reg_rtx (Pmode);
2727 register rtx val = force_operand (XEXP (x, 0), temp);
2729 emit_move_insn (temp, val);
2740 /* Print an integer constant expression in assembler syntax. Addition
2741 and subtraction are the only arithmetic that may appear in these
2742 expressions. FILE is the stdio stream to write to, X is the rtx, and
2743 CODE is the operand print code from the output string. */
/* Print constant expression X (SYMBOL_REF / LABEL_REF / CONST_INT /
   CONST_DOUBLE / PLUS / MINUS / sign-extension wrappers) in PIC
   assembler syntax to FILE.  CODE is the operand print code
   ('P' forces an @PLT reference).
   NOTE(review): extraction dropped case labels and braces throughout
   this switch; code bytes left untouched.  */
2746 output_pic_addr_const (file, x, code)
2753 switch (GET_CODE (x))
2764 if (GET_CODE (x) == SYMBOL_REF)
2765 assemble_name (file, XSTR (x, 0));
2768 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2769 CODE_LABEL_NUMBER (XEXP (x, 0)));
2770 assemble_name (asm_out_file, buf);
/* Select the GOT/GOTOFF/PLT relocation suffix for the reference.  */
2773 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2774 fprintf (file, "@GOTOFF(%%ebx)");
2775 else if (code == 'P')
2776 fprintf (file, "@PLT");
2777 else if (GET_CODE (x) == LABEL_REF)
2778 fprintf (file, "@GOTOFF");
2779 else if (! SYMBOL_REF_FLAG (x))
2780 fprintf (file, "@GOT");
2782 fprintf (file, "@GOTOFF");
2787 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2788 assemble_name (asm_out_file, buf);
2792 fprintf (file, "%d", INTVAL (x));
2796 /* This used to output parentheses around the expression,
2797 but that does not work on the 386 (either ATT or BSD assembler). */
2798 output_pic_addr_const (file, XEXP (x, 0), code);
2802 if (GET_MODE (x) == VOIDmode)
2804 /* We can use %d if the number is <32 bits and positive. */
2805 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2806 fprintf (file, "0x%x%08x",
2807 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2809 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2812 /* We can't handle floating point constants;
2813 PRINT_OPERAND must handle them. */
2814 output_operand_lossage ("floating constant misused");
2818 /* Some assemblers need integer constants to appear last (eg masm). */
2819 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2821 output_pic_addr_const (file, XEXP (x, 1), code);
/* Emit an explicit '+' only for non-negative constants; negative
   CONST_INTs already print their own '-'.  */
2822 if (INTVAL (XEXP (x, 0)) >= 0)
2823 fprintf (file, "+");
2824 output_pic_addr_const (file, XEXP (x, 0), code);
2828 output_pic_addr_const (file, XEXP (x, 0), code);
2829 if (INTVAL (XEXP (x, 1)) >= 0)
2830 fprintf (file, "+");
2831 output_pic_addr_const (file, XEXP (x, 1), code);
2836 output_pic_addr_const (file, XEXP (x, 0), code);
2837 fprintf (file, "-");
2838 output_pic_addr_const (file, XEXP (x, 1), code);
2842 output_operand_lossage ("invalid expression as operand");
2846 /* Append the correct conditional move suffix which corresponds to CODE */
/* Emit to FILE the set/cmov/fcmov condition suffix for comparison
   CODE.  MODE selects the integer (MODE_INT) or FP-compare-result
   (MODE_FLOAT) suffix table.
   NOTE(review): the case labels for each comparison code were dropped
   by extraction; the fputs lines below are the case bodies.  */
2849 put_condition_code (code, mode, file)
2851 enum mode_class mode;
2854 if (mode == MODE_INT)
/* EQ/NE suffixes depend on whether Z really encodes ~C from a
   prior instruction (CC_Z_IN_NOT_C).  */
2858 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2864 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2870 fputs ("ge", file); return;
2872 fputs ("g", file); return;
2874 fputs ("le", file); return;
2876 fputs ("l", file); return;
2878 fputs ("ae", file); return;
2880 fputs ("a", file); return;
2882 fputs ("be", file); return;
2884 fputs ("b", file); return;
2885 default: output_operand_lossage ("Invalid %%C operand");
2887 else if (mode == MODE_FLOAT)
/* FP compares put their result in the unsigned flags, so signed and
   unsigned comparison codes map to the same unsigned suffixes.  */
2891 fputs ("ne", file); return;
2893 fputs ("e", file); return;
2895 fputs ("nb", file); return;
2897 fputs ("nbe", file); return;
2899 fputs ("be", file); return;
2901 fputs ("b", file); return;
2903 fputs ("nb", file); return;
2905 fputs ("nbe", file); return;
2907 fputs ("be", file); return;
2909 fputs ("b", file); return;
2910 default: output_operand_lossage ("Invalid %%C operand");
2915 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2916 C -- print opcode suffix for set/cmov insn.
2917 c -- like C, but print reversed condition
2918 F -- print opcode suffix for fcmov insn.
2919 f -- like C, but print reversed condition
2920 R -- print the prefix for register names.
2921 z -- print the opcode suffix for the size of the current operand.
2922 * -- print a star (in certain assembler syntax)
2923 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2924 c -- don't print special prefixes before constant operands.
2925 J -- print the appropriate jump operand.
2926 s -- print a shift double count, followed by the assemblers argument
2928 b -- print the QImode name of the register for the indicated operand.
2929 %b0 would print %al if operands[0] is reg 0.
2930 w -- likewise, print the HImode name of the register.
2931 k -- likewise, print the SImode name of the register.
2932 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2933 y -- print "st(0)" instead of "st" as a register.
2934 P -- print as a PIC constant
/* Print operand X to FILE under print code CODE; the meaning of each
   code letter is documented in the block comment above.
   NOTE(review): the switch (code) dispatch lines and several case
   labels were dropped by extraction; code bytes left untouched.  */
2938 print_operand (file, x, code)
2953 PUT_OP_SIZE (code, 'l', file);
2957 PUT_OP_SIZE (code, 'w', file);
2961 PUT_OP_SIZE (code, 'b', file);
2965 PUT_OP_SIZE (code, 'l', file);
2969 PUT_OP_SIZE (code, 's', file);
2973 PUT_OP_SIZE (code, 't', file);
2977 /* 387 opcodes don't get size suffixes if the operands are
2980 if (STACK_REG_P (x))
2983 /* this is the size of op from size of operand */
2984 switch (GET_MODE_SIZE (GET_MODE (x)))
2987 PUT_OP_SIZE ('B', 'b', file);
2991 PUT_OP_SIZE ('W', 'w', file);
/* Size 4: distinguish single float from 32-bit integer.  */
2995 if (GET_MODE (x) == SFmode)
2997 PUT_OP_SIZE ('S', 's', file);
3001 PUT_OP_SIZE ('L', 'l', file);
3005 PUT_OP_SIZE ('T', 't', file);
3009 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
3011 #ifdef GAS_MNEMONICS
3012 PUT_OP_SIZE ('Q', 'q', file);
3015 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
3019 PUT_OP_SIZE ('Q', 'l', file);
3032 switch (GET_CODE (x))
3034 /* These conditions are appropriate for testing the result
3035 of an arithmetic operation, not for a compare operation.
3036 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3037 CC_Z_IN_NOT_C false and not floating point. */
3038 case NE: fputs ("jne", file); return;
3039 case EQ: fputs ("je", file); return;
3040 case GE: fputs ("jns", file); return;
3041 case LT: fputs ("js", file); return;
/* GEU against the result of arithmetic is always true, LTU never;
   hence the unconditional jump / never-taken placeholder.  */
3042 case GEU: fputs ("jmp", file); return;
3043 case GTU: fputs ("jne", file); return;
3044 case LEU: fputs ("je", file); return;
3045 case LTU: fputs ("#branch never", file); return;
3047 /* no matching branches for GT nor LE */
3052 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
3054 PRINT_OPERAND (file, x, 0);
3055 fputs (AS2C (,) + 1, file);
3059 /* This is used by the conditional move instructions. */
3061 put_condition_code (GET_CODE (x), MODE_INT, file);
3064 /* like above, but reverse condition */
3066 put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
3070 put_condition_code (GET_CODE (x), MODE_FLOAT, file);
3073 /* like above, but reverse condition */
3075 put_condition_code (reverse_condition (GET_CODE (x)),
3083 sprintf (str, "invalid operand code `%c'", code);
3084 output_operand_lossage (str);
/* No (or handled) print code: print the operand itself.  */
3088 if (GET_CODE (x) == REG)
3090 PRINT_REG (x, code, file);
3092 else if (GET_CODE (x) == MEM)
3094 PRINT_PTR (x, file);
3095 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3098 output_pic_addr_const (file, XEXP (x, 0), code);
3100 output_addr_const (file, XEXP (x, 0));
3103 output_address (XEXP (x, 0));
3105 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
/* SFmode immediate: print the IEEE single bit pattern in hex.  */
3107 REAL_VALUE_TYPE r; long l;
3108 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3109 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3110 PRINT_IMMED_PREFIX (file);
3111 fprintf (file, "0x%x", l);
3113 /* These float cases don't actually occur as immediate operands. */
3114 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3116 REAL_VALUE_TYPE r; char dstr[30];
3117 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3118 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3119 fprintf (file, "%s", dstr);
3121 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3123 REAL_VALUE_TYPE r; char dstr[30];
3124 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3125 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3126 fprintf (file, "%s", dstr);
3132 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3133 PRINT_IMMED_PREFIX (file);
3134 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3135 || GET_CODE (x) == LABEL_REF)
3136 PRINT_OFFSET_PREFIX (file);
3139 output_pic_addr_const (file, x, code);
3141 output_addr_const (file, x);
3145 /* Print a memory operand whose address is ADDR. */
/* Print the memory address ADDR to FILE in assembler syntax,
   decomposing PLUS/MULT forms into base + index*scale + offset.
   NOTE(review): case labels (REG/PLUS/MULT/default), the reg2/breg/
   ireg assignments, and several braces were dropped by extraction.  */
3148 print_operand_address (file, addr)
3152 register rtx reg1, reg2, breg, ireg;
3155 switch (GET_CODE (addr))
3159 fprintf (file, "%se", RP);
3160 fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: peel off a constant displacement first, if present.  */
3170 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3172 offset = XEXP (addr, 0);
3173 addr = XEXP (addr, 1);
3175 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3177 offset = XEXP (addr, 1);
3178 addr = XEXP (addr, 0);
/* Then split the remainder into (up to) two register terms.  */
3180 if (GET_CODE (addr) != PLUS) ;
3181 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3183 reg1 = XEXP (addr, 0);
3184 addr = XEXP (addr, 1);
3186 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3188 reg1 = XEXP (addr, 1);
3189 addr = XEXP (addr, 0);
3191 else if (GET_CODE (XEXP (addr, 0)) == REG)
3193 reg1 = XEXP (addr, 0);
3194 addr = XEXP (addr, 1);
3196 else if (GET_CODE (XEXP (addr, 1)) == REG)
3198 reg1 = XEXP (addr, 1);
3199 addr = XEXP (addr, 0);
3201 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3203 if (reg1 == 0) reg1 = addr;
3209 if (addr != 0) abort ();
/* Decide which register term is the base and which the index.  */
3212 if ((reg1 && GET_CODE (reg1) == MULT)
3213 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3218 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3224 if (ireg != 0 || breg != 0)
3231 output_pic_addr_const (file, addr, 0);
3233 else if (GET_CODE (addr) == LABEL_REF)
3234 output_asm_label (addr);
3237 output_addr_const (file, addr);
3240 if (ireg != 0 && GET_CODE (ireg) == MULT)
3242 scale = INTVAL (XEXP (ireg, 1));
3243 ireg = XEXP (ireg, 0);
3246 /* The stack pointer can only appear as a base register,
3247 never an index register, so exchange the regs if it is wrong. */
3249 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3258 /* output breg+ireg*scale */
3259 PRINT_B_I_S (breg, ireg, scale, file);
/* Bare MULT address: index*scale with no base register.  */
3266 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3268 scale = INTVAL (XEXP (addr, 0));
3269 ireg = XEXP (addr, 1);
3273 scale = INTVAL (XEXP (addr, 1));
3274 ireg = XEXP (addr, 0);
3276 output_addr_const (file, const0_rtx);
3277 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
/* Anything else: small literal, PIC constant, or plain constant.  */
3282 if (GET_CODE (addr) == CONST_INT
3283 && INTVAL (addr) < 0x8000
3284 && INTVAL (addr) >= -0x8000)
3285 fprintf (file, "%d", INTVAL (addr));
3289 output_pic_addr_const (file, addr, 0);
3291 output_addr_const (file, addr);
3296 /* Set the cc_status for the results of an insn whose pattern is EXP.
3297 On the 80386, we assume that only test and compare insns, as well
3298 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3299 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3300 Also, we assume that jumps, moves and sCOND don't affect the condition
3301 codes. All else clobbers the condition codes, by assumption.
3303 We assume that ALL integer add, minus, etc. instructions effect the
3304 condition codes. This MUST be consistent with i386.md.
3306 We don't record any float test or compare - the redundant test &
3307 compare check in final.c does not handle stack-like regs correctly. */
/* Update cc_status to reflect the effect of insn pattern EXP;
   see the block comment above for the assumptions made.
   FIX(review): the two MEM-destination invalidation tests below used
   `p && GET_CODE (p) == MEM || reg_mentioned_p (dest, p)', which C
   parses as `(p && ...) || reg_mentioned_p (dest, p)' and so calls
   reg_mentioned_p with a null rtx when cc_status.value1/value2 is 0.
   Parenthesized to `p && (... || ...)', matching the null-guard
   pattern used for the register-destination case above.
   NOTE(review): extraction dropped lines (braces, CC_STATUS_INIT
   calls, switch cases) throughout; only the two conditions changed.  */
3310 notice_update_cc (exp)
3313 if (GET_CODE (exp) == SET)
3315 /* Jumps do not alter the cc's. */
3316 if (SET_DEST (exp) == pc_rtx)
3318 #ifdef IS_STACK_MODE
3319 /* Moving into a memory of stack_mode may have been moved
3320 in between the use and set of cc0 by loop_spl(). So
3321 old value of cc.status must be retained */
3322 if(GET_CODE(SET_DEST(exp))==MEM
3323 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3328 /* Moving register or memory into a register:
3329 it doesn't alter the cc's, but it might invalidate
3330 the RTX's which we remember the cc's came from.
3331 (Note that moving a constant 0 or 1 MAY set the cc's). */
3332 if (REG_P (SET_DEST (exp))
3333 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3334 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3336 if (cc_status.value1
3337 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3338 cc_status.value1 = 0;
3339 if (cc_status.value2
3340 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3341 cc_status.value2 = 0;
3344 /* Moving register into memory doesn't alter the cc's.
3345 It may invalidate the RTX's which we remember the cc's came from. */
3346 if (GET_CODE (SET_DEST (exp)) == MEM
3347 && (REG_P (SET_SRC (exp))
3348 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3350 if (cc_status.value1 && (GET_CODE (cc_status.value1) == MEM
3351 || reg_mentioned_p (SET_DEST (exp), cc_status.value1)))
3352 cc_status.value1 = 0;
3353 if (cc_status.value2 && (GET_CODE (cc_status.value2) == MEM
3354 || reg_mentioned_p (SET_DEST (exp), cc_status.value2)))
3355 cc_status.value2 = 0;
3358 /* Function calls clobber the cc's. */
3359 else if (GET_CODE (SET_SRC (exp)) == CALL)
3364 /* Tests and compares set the cc's in predictable ways. */
3365 else if (SET_DEST (exp) == cc0_rtx)
3368 cc_status.value1 = SET_SRC (exp);
3371 /* Certain instructions effect the condition codes. */
3372 else if (GET_MODE (SET_SRC (exp)) == SImode
3373 || GET_MODE (SET_SRC (exp)) == HImode
3374 || GET_MODE (SET_SRC (exp)) == QImode)
3375 switch (GET_CODE (SET_SRC (exp)))
3377 case ASHIFTRT: case LSHIFTRT:
3379 /* Shifts on the 386 don't set the condition codes if the
3380 shift count is zero. */
3381 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3386 /* We assume that the CONST_INT is non-zero (this rtx would
3387 have been deleted if it were zero. */
3389 case PLUS: case MINUS: case NEG:
3390 case AND: case IOR: case XOR:
3391 cc_status.flags = CC_NO_OVERFLOW;
3392 cc_status.value1 = SET_SRC (exp);
3393 cc_status.value2 = SET_DEST (exp);
3404 else if (GET_CODE (exp) == PARALLEL
3405 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3407 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3409 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3412 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3413 cc_status.flags |= CC_IN_80387;
3415 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3426 /* Split one or more DImode RTL references into pairs of SImode
3427 references. The RTL can be REG, offsettable MEM, integer constant, or
3428 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3429 split and "num" is its length. lo_half and hi_half are output arrays
3430 that parallel "operands". */
/* Split DImode operands into SImode lo/hi halves; see the comment
   above for the parameter contract.
   NOTE(review): the loop header over `num' and the abort() default
   were dropped by extraction.  */
3433 split_di (operands, num, lo_half, hi_half)
3436 rtx lo_half[], hi_half[];
3440 if (GET_CODE (operands[num]) == REG)
/* A DImode hard register pair: halves are consecutive regnos.  */
3442 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3443 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3445 else if (CONSTANT_P (operands[num]))
3447 split_double (operands[num], &lo_half[num], &hi_half[num]);
3449 else if (offsettable_memref_p (operands[num]))
/* Memory: low word at the original address, high word 4 bytes up.  */
3451 lo_half[num] = operands[num];
3452 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3459 /* Return 1 if this is a valid binary operation on a 387.
3460 OP is the expression matched, and MODE is its mode. */
/* Predicate: OP is a floating-point PLUS/MINUS/MULT/DIV usable on
   the 387 (arithmetic case labels elided by extraction).  */
3463 binary_387_op (op, mode)
3465 enum machine_mode mode;
3467 if (mode != VOIDmode && mode != GET_MODE (op))
3470 switch (GET_CODE (op))
3476 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3484 /* Return 1 if this is a valid shift or rotate operation on a 386.
3485 OP is the expression matched, and MODE is its mode. */
/* Predicate (header line elided by extraction — presumably
   `shift_op (op, mode)' per the comment above): OP is an integer
   shift or rotate whose operand mode matches OP's mode.  */
3490 enum machine_mode mode;
3492 rtx operand = XEXP (op, 0);
3494 if (mode != VOIDmode && mode != GET_MODE (op))
3497 if (GET_MODE (operand) != GET_MODE (op)
3498 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3501 return (GET_CODE (op) == ASHIFT
3502 || GET_CODE (op) == ASHIFTRT
3503 || GET_CODE (op) == LSHIFTRT
3504 || GET_CODE (op) == ROTATE
3505 || GET_CODE (op) == ROTATERT);
3508 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3509 MODE is not used. */
/* Predicate: OP is a COMPARE rtx carrying VOIDmode.  MODE is ignored.  */
3512 VOIDmode_compare_op (op, mode)
3514 enum machine_mode mode;
3516 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3519 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3520 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3521 is the expression of the binary operation. The output may either be
3522 emitted here, or returned to the caller, like all output_* functions.
3524 There is no guarantee that the operands are the same mode, as they
3525 might be within FLOAT or FLOAT_EXTEND expressions. */
/* Build (in static BUF) the 387 assembler template for the binary FP
   operation in operands[3]; see the block comment above.
   NOTE(review): the base_op selection strings and several case labels
   were dropped by extraction; code bytes left untouched.  */
3528 output_387_binary_op (insn, operands)
3534 static char buf[100];
3536 switch (GET_CODE (operands[3]))
/* An integer operand selects the fiadd/fisub/... integer forms.  */
3539 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3540 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3547 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3548 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3555 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3556 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3563 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3564 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3574 strcpy (buf, base_op);
3576 switch (GET_CODE (operands[3]))
/* Commutative ops (PLUS/MULT): canonicalize so operand 2 differs
   from the destination.  */
3580 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3583 operands[2] = operands[1];
3587 if (GET_CODE (operands[2]) == MEM)
3588 return strcat (buf, AS1 (%z2,%2));
3590 if (NON_STACK_REG_P (operands[1]))
3592 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3595 else if (NON_STACK_REG_P (operands[2]))
3597 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* Pop the stack if operand 2 dies here.  */
3601 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3602 return strcat (buf, AS2 (p,%2,%0));
3604 if (STACK_TOP_P (operands[0]))
3605 return strcat (buf, AS2C (%y2,%0));
3607 return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (MINUS/DIV): may need the reversed form.  */
3611 if (GET_CODE (operands[1]) == MEM)
3612 return strcat (buf, AS1 (r%z1,%1));
3614 if (GET_CODE (operands[2]) == MEM)
3615 return strcat (buf, AS1 (%z2,%2));
3617 if (NON_STACK_REG_P (operands[1]))
3619 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3622 else if (NON_STACK_REG_P (operands[2]))
3624 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3628 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3631 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3632 return strcat (buf, AS2 (rp,%2,%0));
3634 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3635 return strcat (buf, AS2 (p,%1,%0));
3637 if (STACK_TOP_P (operands[0]))
3639 if (STACK_TOP_P (operands[1]))
3640 return strcat (buf, AS2C (%y2,%0));
3642 return strcat (buf, AS2 (r,%y1,%0));
3644 else if (STACK_TOP_P (operands[1]))
3645 return strcat (buf, AS2C (%1,%0));
3647 return strcat (buf, AS2 (r,%2,%0));
3654 /* Output code for INSN to convert a float to a signed int. OPERANDS
3655 are the insn operands. The output may be SFmode or DFmode and the
3656 input operand may be SImode or DImode. As a special case, make sure
3657 that the 387 stack top dies if the output mode is DImode, because the
3658 hardware requires this. */
/* Emit the float->signed-int truncation sequence; see the comment
   above.  Saves/sets the 387 control word to get truncating rounding,
   stores with fist(p), then restores the control word.
   NOTE(review): extraction dropped the abort() guard body and some
   xops setup lines.  */
3661 output_fix_trunc (insn, operands)
3665 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3668 if (! STACK_TOP_P (operands[1]) ||
3669 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3672 xops[0] = GEN_INT (12);
3673 xops[1] = operands[4];
/* Save the FP control word, force round-toward-zero, reload it.  */
3675 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3676 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3677 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3678 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3679 output_asm_insn (AS1 (fldc%W3,%3), operands);
3681 if (NON_STACK_REG_P (operands[0]))
3682 output_to_reg (operands[0], stack_top_dies);
3683 else if (GET_CODE (operands[0]) == MEM)
/* fistp pops the 387 stack; plain fist leaves the value there.  */
3686 output_asm_insn (AS1 (fistp%z0,%0), operands);
3688 output_asm_insn (AS1 (fist%z0,%0), operands);
/* Restore the caller's control word.  */
3693 return AS1 (fldc%W2,%2);
3696 /* Output code for INSN to compare OPERANDS. The two operands might
3697 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3698 expression. If the compare is in mode CCFPEQmode, use an opcode that
3699 will not fault if a qNaN is present. */
/* Emit the 387 compare for OPERANDS; see the comment above.  Uses
   fcomi/fucomi when TARGET_CMOVE allows it, otherwise fcom-family
   plus output_fp_cc0_set to move the result into the CPU flags.
   NOTE(review): several lines (operand swap body, strcat for the pop
   suffix) were dropped by extraction.  */
3702 output_float_compare (insn, operands)
3707 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3708 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3711 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3713 cc_status.flags |= CC_FCOMI;
3714 cc_prev_status.flags &= ~CC_TEST_AX;
/* The 387 compares against st(0); swap and remember the reversal.  */
3717 if (! STACK_TOP_P (operands[0]))
3720 operands[0] = operands[1];
3722 cc_status.flags |= CC_REVERSED;
3725 if (! STACK_TOP_P (operands[0]))
3728 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3730 if (STACK_REG_P (operands[1])
3732 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3733 && REGNO (operands[1]) != FIRST_STACK_REG)
3735 /* If both the top of the 387 stack dies, and the other operand
3736 is also a stack register that dies, then this must be a
3737 `fcompp' float compare */
3739 if (unordered_compare)
3740 if (cc_status.flags & CC_FCOMI)
3742 output_asm_insn (AS2 (fucomip,%y1,%0), operands);
3743 output_asm_insn (AS1 (fstp, %y0), operands);
3747 output_asm_insn ("fucompp", operands);
3750 if (cc_status.flags & CC_FCOMI)
3752 output_asm_insn (AS2 (fcomip, %y1,%0), operands);
3753 output_asm_insn (AS1 (fstp, %y0), operands);
3757 output_asm_insn ("fcompp", operands);
3762 static char buf[100];
3764 /* Decide if this is the integer or float compare opcode, or the
3765 unordered float compare. */
3767 if (unordered_compare)
3768 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
3769 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3770 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
3772 strcpy (buf, "ficom");
3774 /* Modify the opcode if the 387 stack is to be popped. */
3779 if (NON_STACK_REG_P (operands[1]))
3780 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3781 else if (cc_status.flags & CC_FCOMI)
/* fcomi puts the result directly in EFLAGS; no fnstsw needed.  */
3783 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
3787 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3790 /* Now retrieve the condition code. */
3792 return output_fp_cc0_set (insn);
3795 /* Output opcodes to transfer the results of FP compare or test INSN
3796 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3797 result of the compare or test is unordered, no comparison operator
3798 succeeds except NE. Return an output template, if any. */
/* Transfer the 387 status word into the CPU flags for the cc0 user
   following INSN; see the comment above.  The masks below pick out
   the C0/C2/C3 condition bits of the FP status word in AH.
   NOTE(review): extraction dropped case labels, return-template lines
   and braces; code bytes left untouched.  */
3801 output_fp_cc0_set (insn)
3805 rtx unordered_label;
/* fnstsw into AX: FP status bits land in AH.  */
3809 xops[0] = gen_rtx (REG, HImode, 0);
3810 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3812 if (! TARGET_IEEE_FP)
3814 if (!(cc_status.flags & CC_REVERSED))
3816 next = next_cc0_user (insn);
3818 if (GET_CODE (next) == JUMP_INSN
3819 && GET_CODE (PATTERN (next)) == SET
3820 && SET_DEST (PATTERN (next)) == pc_rtx
3821 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3823 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3825 else if (GET_CODE (PATTERN (next)) == SET)
3827 code = GET_CODE (SET_SRC (PATTERN (next)));
3833 if (code == GT || code == LT || code == EQ || code == NE
3834 || code == LE || code == GE)
3835 { /* We will test eax directly */
3836 cc_status.flags |= CC_TEST_AX;
/* IEEE path: inspect the cc0 user to pick a mask/compare sequence
   that makes unordered results fail every test except NE.  */
3843 next = next_cc0_user (insn);
3844 if (next == NULL_RTX)
3847 if (GET_CODE (next) == JUMP_INSN
3848 && GET_CODE (PATTERN (next)) == SET
3849 && SET_DEST (PATTERN (next)) == pc_rtx
3850 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3852 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3854 else if (GET_CODE (PATTERN (next)) == SET)
3856 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3857 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3858 else code = GET_CODE (SET_SRC (PATTERN (next)));
3860 else if (GET_CODE (PATTERN (next)) == PARALLEL
3861 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
3863 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
3864 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
3865 else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
3870 xops[0] = gen_rtx (REG, QImode, 0);
/* Per-comparison mask/compare sequences over AH (0x45 = C0|C2|C3).  */
3875 xops[1] = GEN_INT (0x45);
3876 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3881 xops[1] = GEN_INT (0x45);
3882 xops[2] = GEN_INT (0x01);
3883 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3884 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3889 xops[1] = GEN_INT (0x05);
3890 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3895 xops[1] = GEN_INT (0x45);
3896 xops[2] = GEN_INT (0x40);
3897 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3898 output_asm_insn (AS1 (dec%B0,%h0), xops);
3899 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3904 xops[1] = GEN_INT (0x45);
3905 xops[2] = GEN_INT (0x40);
3906 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3907 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3912 xops[1] = GEN_INT (0x44);
3913 xops[2] = GEN_INT (0x40);
3914 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3915 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function scratch stack slots, indexed by machine mode and slot
   number; mirrored into struct machine_function so nested functions
   can save/restore them (see the save/restore routines below).  */
3929 #define MAX_386_STACK_LOCALS 2
3931 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3933 /* Define the structure for the machine field in struct function. */
3934 struct machine_function
3936 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3939 /* Functions to save and restore i386_stack_locals.
3940 These will be called, via pointer variables,
3941 from push_function_context and pop_function_context. */
/* Snapshot i386_stack_locals into P->machine (freshly malloc'd) when
   pushing a function context.  */
3944 save_386_machine_status (p)
3947 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3948 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3949 sizeof i386_stack_locals);
/* Restore i386_stack_locals from P->machine when popping a function
   context.  */
3953 restore_386_machine_status (p)
3956 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3957 sizeof i386_stack_locals);
3961 /* Clear stack slot assignments remembered from previous functions.
3962 This is called from INIT_EXPANDERS once before RTL is emitted for each
/* Reset all remembered stack-slot rtxs and install the save/restore
   hooks; called from INIT_EXPANDERS before emitting each function.  */
3966 clear_386_stack_locals ()
3968 enum machine_mode mode;
3971 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3972 mode = (enum machine_mode) ((int) mode + 1))
3973 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3974 i386_stack_locals[(int) mode][n] = NULL_RTX;
3976 /* Arrange to save and restore i386_stack_locals around nested functions. */
3977 save_machine_status = save_386_machine_status;
3978 restore_machine_status = restore_386_machine_status;
3981 /* Return a MEM corresponding to a stack slot with mode MODE.
3982 Allocate a new slot if necessary.
3984 The RTL for a function can have several slots available: N is
3985 which slot to use. */
/* Return (allocating lazily and caching) stack slot N of mode MODE;
   N must be in [0, MAX_386_STACK_LOCALS).  */
3988 assign_386_stack_local (mode, n)
3989 enum machine_mode mode;
3992 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3995 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3996 i386_stack_locals[(int) mode][n]
3997 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3999 return i386_stack_locals[(int) mode][n];
/* Two predicate bodies whose function-header lines were dropped by
   extraction: the first recognizes a MULT rtx, the second a DIV rtx
   (presumably `is_mul' / `is_div' — confirm against a pristine
   i386.c).  MODE is unused in both.  */
4005 enum machine_mode mode;
4007 return (GET_CODE (op) == MULT);
4012 enum machine_mode mode;
4014 return (GET_CODE (op) == DIV);
4019 /* Create a new copy of an rtx.
4020 Recursively copies the operands of the rtx,
4021 except for those few rtx codes that are sharable.
4022 Doesn't share CONST */
/* Body of the deep rtx copier described in the comment above (its
   header line was dropped by extraction).  Recursively duplicates
   ORIG except for sharable codes; copies flag bits and all operand
   formats.  */
4030 register RTX_CODE code;
4031 register char *format_ptr;
4033 code = GET_CODE (orig);
4046 /* SCRATCH must be shared because they represent distinct values. */
4051 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4052 a LABEL_REF, it isn't sharable. */
4053 if (GET_CODE (XEXP (orig, 0)) == PLUS
4054 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4055 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4059 /* A MEM with a constant address is not sharable. The problem is that
4060 the constant address may need to be reloaded. If the mem is shared,
4061 then reloading one copy of this mem will cause all copies to appear
4062 to have been reloaded. */
/* Allocate the copy and transfer the rtx flag bits.  */
4065 copy = rtx_alloc (code);
4066 PUT_MODE (copy, GET_MODE (orig));
4067 copy->in_struct = orig->in_struct;
4068 copy->volatil = orig->volatil;
4069 copy->unchanging = orig->unchanging;
4070 copy->integrated = orig->integrated;
4072 copy->is_spill_rtx = orig->is_spill_rtx;
/* Walk the operand format string, copying per operand kind.  */
4074 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4076 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4078 switch (*format_ptr++)
4081 XEXP (copy, i) = XEXP (orig, i);
4082 if (XEXP (orig, i) != NULL)
4083 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4088 XEXP (copy, i) = XEXP (orig, i);
4093 XVEC (copy, i) = XVEC (orig, i);
4094 if (XVEC (orig, i) != NULL)
4096 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4097 for (j = 0; j < XVECLEN (copy, i); j++)
4098 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4103 XWINT (copy, i) = XWINT (orig, i);
4107 XINT (copy, i) = XINT (orig, i);
4112 XSTR (copy, i) = XSTR (orig, i);
4123 /* try to rewrite a memory address to make it valid */
/* rewrite_address (MEM_RTX): rewrite the address inside the MEM rtx
   MEM_RTX, in place, so that it matches a legitimate i386 addressing
   form (base + index*scale + constant displacement).  The address is
   decomposed into base/index/scale/offset parts, all constant terms are
   folded into one displacement (offset_adjust), and the address is then
   rebuilt and stored back into XEXP (mem_rtx, 0).
   NOTE(review): this chunk is a sampled excerpt -- lines are missing
   between the visible ones, so flow comments below are partly inferred
   and should be confirmed against the full file.  */
4125 rewrite_address (mem_rtx)
4128 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4130 int offset_adjust = 0;
4131 int was_only_offset = 0;
4132 rtx mem_addr = XEXP (mem_rtx, 0);
/* oballoc (0) presumably records an obstack high-water mark so
   temporary rtx can be discarded on failure -- TODO confirm.  */
4133 char *storage = (char *) oballoc (0);
4135 int is_spill_rtx = 0;
/* Remember the MEM's flag bits so they can be re-applied to any
   rewritten address.  */
4137 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4138 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
/* First special case: (plus X (plus (reg) Y)).  Re-associate and keep
   the result if it is already a legitimate address.  */
4140 if (GET_CODE (mem_addr) == PLUS &&
4141 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
4142 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4143 { /* this part is utilized by the combiner */
4145 gen_rtx (PLUS, GET_MODE (mem_addr),
4146 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4148 XEXP (XEXP (mem_addr, 1), 0)),
4149 XEXP (XEXP (mem_addr, 1), 1));
4150 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4152 XEXP (mem_rtx, 0) = ret_rtx;
4153 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4159 /* this part is utilized by loop.c */
4160 /* If the address contains PLUS (reg,const) and this pattern is invalid
4161 in this case - try to rewrite the address to make it valid intel1
4163 storage = (char *) oballoc (0);
4164 index_rtx = base_rtx = offset_rtx = NULL;
4165 /* find the base index and offset elements of the memory address */
4166 if (GET_CODE (mem_addr) == PLUS)
4168 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4170 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4172 base_rtx = XEXP (mem_addr, 1);
4173 index_rtx = XEXP (mem_addr, 0);
4177 base_rtx = XEXP (mem_addr, 0);
4178 offset_rtx = XEXP (mem_addr, 1);
4181 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4183 index_rtx = XEXP (mem_addr, 0);
4184 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4186 base_rtx = XEXP (mem_addr, 1);
4190 offset_rtx = XEXP (mem_addr, 1);
4193 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
/* Fully-formed (((reg*scale)+const)+reg)+symbol address: pick the
   pieces apart and remember the inner constant in offset_adjust.  */
4196 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4197 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4198 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4199 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4200 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4201 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4202 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4204 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4205 offset_rtx = XEXP (mem_addr, 1);
4206 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4207 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4211 offset_rtx = XEXP (mem_addr, 1);
4212 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4213 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4216 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4218 was_only_offset = 1;
4221 offset_rtx = XEXP (mem_addr, 1);
4222 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4223 if (offset_adjust == 0)
/* Constant term is zero: the remaining offset alone is the address.  */
4225 XEXP (mem_rtx, 0) = offset_rtx;
4226 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4236 else if (GET_CODE (mem_addr) == MULT)
4238 index_rtx = mem_addr;
/* Extract the scale factor from a (mult reg const_int) index.  */
4245 if (index_rtx && GET_CODE (index_rtx) == MULT)
4247 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4252 scale_rtx = XEXP (index_rtx, 1);
4253 scale = INTVAL (scale_rtx);
4254 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4256 /* now find which of the elements are invalid and try to fix them */
/* A constant index with no base degenerates into a pure displacement
   of index*scale.  */
4257 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4259 offset_adjust = INTVAL (index_rtx) * scale;
/* Fold offset_adjust into a (const (plus symbol const_int)) offset.  */
4260 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4261 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4263 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4264 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4266 offset_rtx = copy_all_rtx (offset_rtx);
4267 XEXP (XEXP (offset_rtx, 0), 1) =
4268 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4269 if (!CONSTANT_P (offset_rtx))
/* Or wrap a bare SYMBOL_REF together with the adjustment.  */
4276 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4279 gen_rtx (CONST, GET_MODE (offset_rtx),
4280 gen_rtx (PLUS, GET_MODE (offset_rtx),
4282 gen_rtx (CONST_INT, 0, offset_adjust)));
4283 if (!CONSTANT_P (offset_rtx))
4289 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4291 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4293 else if (!offset_rtx)
4295 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4297 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4298 XEXP (mem_rtx, 0) = offset_rtx;
/* A (plus reg const_int) base: move the constant into offset_adjust
   and keep only the register as the base.  */
4301 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4302 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4303 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4305 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4306 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4308 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4310 offset_adjust += INTVAL (base_rtx);
/* Likewise for a (plus reg const_int) index -- here the constant is
   pre-multiplied by the scale factor.  */
4313 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4314 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4315 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4317 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4318 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* Bail out if the index is still not legitimate; %esp may only appear
   as scale-1 "index" when there is no other base (it cannot be a true
   index register on the 386).  */
4322 if (!LEGITIMATE_INDEX_P (index_rtx)
4323 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4331 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
/* Merge any accumulated constant adjustment into the offset term,
   mirroring the CONST / SYMBOL_REF / CONST_INT cases above.  */
4337 if (offset_adjust != 0)
4341 if (GET_CODE (offset_rtx) == CONST &&
4342 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4344 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4345 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4347 offset_rtx = copy_all_rtx (offset_rtx);
4348 XEXP (XEXP (offset_rtx, 0), 1) =
4349 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4350 if (!CONSTANT_P (offset_rtx))
4357 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4360 gen_rtx (CONST, GET_MODE (offset_rtx),
4361 gen_rtx (PLUS, GET_MODE (offset_rtx),
4363 gen_rtx (CONST_INT, 0, offset_adjust)));
4364 if (!CONSTANT_P (offset_rtx))
4370 else if (GET_CODE (offset_rtx) == CONST_INT)
4372 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4382 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
/* Rebuild the address from the legitimized parts, omitting a zero
   displacement.  Case: base + index*scale [+ offset].  */
4390 if (GET_CODE (offset_rtx) == CONST_INT &&
4391 INTVAL (offset_rtx) == 0)
4393 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4394 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4400 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4401 gen_rtx (PLUS, GET_MODE (base_rtx),
4402 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
/* Case: base + index (scale 1) [+ offset].  */
4410 if (GET_CODE (offset_rtx) == CONST_INT &&
4411 INTVAL (offset_rtx) == 0)
4413 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4417 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4418 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
/* Case: index*scale [+ offset], no base.  */
4428 if (GET_CODE (offset_rtx) == CONST_INT &&
4429 INTVAL (offset_rtx) == 0)
4431 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4436 gen_rtx (PLUS, GET_MODE (offset_rtx),
4437 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
/* Case: index only [+ offset].  */
4444 if (GET_CODE (offset_rtx) == CONST_INT &&
4445 INTVAL (offset_rtx) == 0)
4447 ret_rtx = index_rtx;
4451 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
/* Case: base only [+ offset].  */
4460 if (GET_CODE (offset_rtx) == CONST_INT &&
4461 INTVAL (offset_rtx) == 0)
4467 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4470 else if (was_only_offset)
/* The address was nothing but a constant offset.  */
4472 ret_rtx = offset_rtx;
/* Install the rewritten address and restore the spill flag.  */
4480 XEXP (mem_rtx, 0) = ret_rtx;
4481 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4493 /* return 1 if the first insn to set cc before insn also sets the register
4494 reg_rtx - otherwise return 0 */
/* Walks backwards from INSN, skipping NOTEs and analyzing only plain
   single-SET insns; stops at the most recent insn relevant to the
   condition code and reports whether that same insn wrote REG_RTX.
   (Sampled excerpt: the return statements are not visible here.)  */
4496 last_to_set_cc (reg_rtx, insn)
4499 rtx prev_insn = PREV_INSN (insn);
4503 if (GET_CODE (prev_insn) == NOTE)
4506 else if (GET_CODE (prev_insn) == INSN)
/* Anything other than a single SET ends the analysis.  */
4508 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* Found the most recent write to REG_RTX: the answer depends on
   whether this SET's source also sets the condition code.  */
4511 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4513 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* An intervening insn that may clobber cc ends the search.  */
4519 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4526 prev_insn = PREV_INSN (prev_insn);
/* Return nonzero if evaluating rtx PAT is known to leave the condition
   code unchanged.  Dispatches on the rtx code; the switch body is not
   visible in this excerpt.  */
4534 doesnt_set_condition_code (pat)
4537 switch (GET_CODE (pat))
/* Return nonzero if evaluating rtx PAT sets the condition code as a
   side effect.  Dispatches on the rtx code; the switch body is not
   visible in this excerpt.  Companion of doesnt_set_condition_code.  */
4551 sets_condition_code (pat)
4554 switch (GET_CODE (pat))
/* Predicate: nonzero when OP is a CONST_INT in the range 0..32.
   MODE is unused in the visible test.  (Excerpt: the return statements
   are not visible here.)  */
4578 str_immediate_operand (op, mode)
4580 enum machine_mode mode;
4582 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* (Excerpt: the function header is missing from this view.)  Body of a
   predicate testing whether INSN is a single SET whose destination has
   a floating-point mode (DFmode, SFmode or XFmode).  */
4594 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4595 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4596 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4597 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4606 Return 1 if the mode of the SET_DEST of insn is floating point
4607 and it is not an fld or a move from memory to memory.
4608 Otherwise return 0 */
/* I.e. a single SET writing a hard FP stack register from something
   that is not a MEM.  (Excerpt: the function header and the returns
   are not visible here.)  */
4613 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4614 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4615 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4616 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4617 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4618 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* NOTE(review): SET_SRC is applied to INSN itself here, whereas every
   sibling predicate (e.g. the is_fp_store test below, and
   last_to_set_cc) applies it to PATTERN (insn).  This looks like it
   should be GET_CODE (SET_SRC (PATTERN (insn))) != MEM -- verify.  */
4619 && GET_CODE (SET_SRC (insn)) != MEM)
4628 Return 1 if the mode of the SET_DEST is floating point and is memory
4629 and the source is a register.
/* I.e. a single SET whose destination is a floating-point-mode MEM
   written from a REG -- an FP store.  (Excerpt: the function header
   and the returns are not visible here.)  */
4635 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4636 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4637 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4638 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4639 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4640 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4650 Return 1 if dep_insn sets a register which insn uses as a base
4651 or index to reference memory.
4652 otherwise return 0 */
/* Detects address-generation-interlock (AGI) dependencies between two
   insns: INSN stalls if it addresses memory through a register that
   DEP_INSN just wrote.  */
4655 agi_dependent (insn, dep_insn)
/* Case 1: DEP_INSN is a single SET of a register -- check whether
   INSN uses that register inside a memory address.  */
4658 if (GET_CODE (dep_insn) == INSN
4659 && GET_CODE (PATTERN (dep_insn)) == SET
4660 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4662 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* Case 2: DEP_INSN is a push -- a push modifies the stack pointer
   implicitly, so any use of %esp in INSN's address is AGI-dependent.  */
4665 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4666 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4667 && push_operand (SET_DEST (PATTERN (dep_insn)),
4668 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4670 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4678 Return 1 if reg is used in rtl as a base or index for a memory ref
4679 otherwise return 0. */
/* Recursive walk over RTL: succeeds as soon as a MEM is found whose
   address mentions REG; otherwise recurses into every operand and
   vector element.  (Sampled excerpt: some lines, including the early
   leaf-code cases and the final return, are not visible here.)  */
4682 reg_mentioned_in_mem (reg, rtl)
4687 register enum rtx_code code;
4692 code = GET_CODE (rtl);
/* A MEM mentioning REG anywhere means REG feeds an address
   calculation.  */
4710 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over sub-expressions ('e') and vectors ('E'),
   driven by the rtx format string.  */
4713 fmt = GET_RTX_FORMAT (code);
4714 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4719 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4721 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4726 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4733 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4735 operands[0] = result, initialized with the startaddress
4736 operands[1] = alignment of the address.
4737 operands[2] = scratch register, initialized with the startaddress when
4738 not aligned, otherwise undefined
4740 This is just the body. It needs the initialisations mentioned above and
4741 some address computing at the end. These things are done in i386.md. */
/* Structure of the emitted code: a byte-at-a-time prologue runs until
   the pointer is 4-byte aligned, then the main loop tests one 32-bit
   word per iteration, and fixup labels adjust the pointer when the
   terminating zero byte lies inside a word.
   (Sampled excerpt: some lines, including the final return, are not
   visible in this view.)  */
4744 output_strlen_unroll (operands)
4749 xops[0] = operands[0]; /* Result */
4750 /* operands[1]; * Alignment */
4751 xops[1] = operands[2]; /* Scratch */
4752 xops[2] = GEN_INT (0);
4753 xops[3] = GEN_INT (2);
4754 xops[4] = GEN_INT (3);
4755 xops[5] = GEN_INT (4);
4756 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4757 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4758 xops[8] = gen_label_rtx (); /* label of main loop */
4759 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4760 xops[9] = gen_label_rtx (); /* pentium optimisation */
4761 xops[10] = gen_label_rtx (); /* end label 2 */
4762 xops[11] = gen_label_rtx (); /* end label 1 */
4763 xops[12] = gen_label_rtx (); /* end label */
4764 /* xops[13] * Temporary used */
/* Per-byte masks used by the main loop to test each byte of a word.  */
4765 xops[14] = GEN_INT (0xff);
4766 xops[15] = GEN_INT (0xff00);
4767 xops[16] = GEN_INT (0xff0000);
4768 xops[17] = GEN_INT (0xff000000);
4770 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4772 /* is there a known alignment and is it less than 4 */
4773 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4775 /* is there a known alignment and is it not 2 */
4776 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4778 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4779 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4781 /* leave just the 3 lower bits */
4782 /* if this is a q-register, then the high part is used later */
4783 /* therefore use andl rather than andb */
4784 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4785 /* is aligned to 4-byte address when zero */
4786 output_asm_insn (AS1 (je,%l8), xops);
4787 /* side-effect even Parity when %eax == 3 */
4788 output_asm_insn (AS1 (jp,%6), xops);
4790 /* is it aligned to 2 bytes ? */
4791 if (QI_REG_P (xops[1]))
4792 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4794 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4795 output_asm_insn (AS1 (je,%7), xops);
4799 /* since the alignment is 2, we have to check 2 or 0 bytes */
4801 /* check if is aligned to 4 - byte */
4802 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4803 /* is aligned to 4-byte address when zero */
4804 output_asm_insn (AS1 (je,%l8), xops);
/* Prologue proper: xops[13] is the byte at the current pointer.  */
4807 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4808 /* now, compare the bytes */
4809 /* compare with the high part of a q-reg gives shorter code */
4810 if (QI_REG_P (xops[1]))
4812 /* compare the first n unaligned byte on a byte per byte basis */
4813 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4814 /* when zero we reached the end */
4815 output_asm_insn (AS1 (je,%l12), xops);
4816 /* increment the address */
4817 output_asm_insn (AS1 (inc%L0,%0), xops);
4819 /* not needed with an alignment of 2 */
4820 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4822 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4823 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4824 output_asm_insn (AS1 (je,%l12), xops);
4825 output_asm_insn (AS1 (inc%L0,%0), xops);
4827 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4829 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Non-q-register variant: compare the byte against literal zero.  */
4833 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4834 output_asm_insn (AS1 (je,%l12), xops);
4835 output_asm_insn (AS1 (inc%L0,%0), xops);
4837 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4838 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4839 output_asm_insn (AS1 (je,%l12), xops);
4840 output_asm_insn (AS1 (inc%L0,%0), xops);
4842 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4843 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4845 output_asm_insn (AS1 (je,%l12), xops);
4846 output_asm_insn (AS1 (inc%L0,%0), xops);
4849 /* Generate loop to check 4 bytes at a time */
4850 /* IMHO it is not a good idea to align this loop. It gives only */
4851 /* huge programs, but does not help to speed up */
4852 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4853 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Main loop: load the next 32-bit word into the scratch register.  */
4855 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4856 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4858 if (QI_REG_P (xops[1]))
4860 /* On i586 it is faster to combine the hi- and lo- part as
4861 a kind of lookahead. If anding both yields zero, then one
4862 of both *could* be zero, otherwise none of both is zero;
4863 this saves one instruction, on i486 this is slower
4864 tested with P-90, i486DX2-66, AMD486DX2-66 */
4867 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4868 output_asm_insn (AS1 (jne,%l9), xops);
4871 /* check first byte */
4872 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4873 output_asm_insn (AS1 (je,%l12), xops);
4875 /* check second byte */
4876 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4877 output_asm_insn (AS1 (je,%l11), xops);
4880 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
/* Mask-based byte tests using xops[14..17] (0xff << 0/8/16/24).  */
4884 /* check first byte */
4885 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4886 output_asm_insn (AS1 (je,%l12), xops);
4888 /* check second byte */
4889 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4890 output_asm_insn (AS1 (je,%l11), xops);
4893 /* check third byte */
4894 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4895 output_asm_insn (AS1 (je,%l10), xops);
4897 /* check fourth byte and increment address */
4898 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4899 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4900 output_asm_insn (AS1 (jne,%l8), xops);
4902 /* now generate fixups when the compare stops within a 4-byte word */
4903 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4905 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4906 output_asm_insn (AS1 (inc%L0,%0), xops);
4908 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4909 output_asm_insn (AS1 (inc%L0,%0), xops);
4911 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));