1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
/* NOTE(review): this chunk is truncated — the embedded original line
   numbers skip (45 -> 48 -> 52), so the matching #endif lines and the
   enum body are missing from this view.  Verify against the full file. */
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
/* Default stack-probe limit: -1 disables the check unless the target
   configuration overrides CHECK_STACK_LIMIT. */
48 #ifndef CHECK_STACK_LIMIT
49 #define CHECK_STACK_LIMIT -1
/* Operand classification used by ix86_{binary,unary}_operator_ok;
   the enumerator list itself is not visible in this view. */
52 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
/* Per-CPU instruction cost tables consulted through the ix86_cost
   pointer below (selected by -mcpu= in override_options).
   NOTE(review): the closing "};" of each initializer is missing from
   this view (original line numbers skip 68-69, 78-79, 88-89, 98-99). */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 4, /* variable shift costs */
84 1, /* constant shift costs */
85 11, /* cost of starting a multiply */
86 0, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
90 struct processor_costs pentiumpro_cost = {
91 1, /* cost of an add instruction */
92 1, /* cost of a lea instruction */
93 3, /* variable shift costs */
94 1, /* constant shift costs */
95 4, /* cost of starting a multiply */
96 0, /* cost of multiply per each bit set */
97 17 /* cost of a divide/mod */
/* Active cost table; defaults to Pentium and is reassigned from
   processor_target_table when -mcpu= is processed. */
100 struct processor_costs *ix86_cost = &pentium_cost;
/* Build a MEM rtx addressing memory at the frame pointer. */
102 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
104 extern FILE *asm_out_file;
105 extern char *strcat ();
/* Forward declarations for assembler-template helpers defined below. */
107 char *singlemove_string ();
108 char *output_move_const_single ();
109 char *output_fp_cc0_set ();
/* Register name tables, indexed by hard register number, used when
   printing operands of different widths. */
111 char *hi_reg_name[] = HI_REGISTER_NAMES;
112 char *qi_reg_name[] = QI_REGISTER_NAMES;
113 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
115 /* Array of the smallest class containing reg number REGNO, indexed by
116 REGNO. Used by REGNO_REG_CLASS in i386.h. */
/* NOTE(review): the initializer appears truncated — original line
   numbers skip (121 -> 123 -> 125), so some rows and the closing
   brace are missing from this view. */
118 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
121 AREG, DREG, CREG, BREG,
123 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
125 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
126 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
131 /* Test and compare insns in i386.md store the information needed to
132 generate branch and scc insns here. */
134 struct rtx_def *i386_compare_op0 = NULL_RTX;
135 struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator functions for the pending compare: one for ordered
   comparisons, one for (in)equality. */
136 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
138 /* which cpu are we scheduling for */
139 enum processor_type ix86_cpu;
141 /* which instruction set architecture to use. */
144 /* Strings to hold which cpu and instruction set architecture to use. */
145 char *ix86_cpu_string; /* for -mcpu=<xxx> */
146 char *ix86_arch_string; /* for -march=<xxx> */
148 /* Register allocation order */
149 char *i386_reg_alloc_order;
/* Flags registers already claimed by -mreg-alloc=, so duplicates can
   be diagnosed and the remaining registers appended in natural order. */
150 static char regs_allocated[FIRST_PSEUDO_REGISTER];
152 /* # of registers to use to pass arguments. */
153 char *i386_regparm_string; /* # registers to use to pass args */
154 int i386_regparm; /* i386_regparm_string as a number */
156 /* Alignment to use for loops and jumps */
/* Each -malign-* and -mbranch-cost option keeps the raw string from
   the command line plus the validated integer parsed from it. */
157 char *i386_align_loops_string; /* power of two alignment for loops */
158 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
159 char *i386_align_funcs_string; /* power of two alignment for functions */
160 char *i386_branch_cost_string; /* values 1-5: see jump.c */
162 int i386_align_loops; /* power of two alignment for loops */
163 int i386_align_jumps; /* power of two alignment for non-loop jumps */
164 int i386_align_funcs; /* power of two alignment for functions */
165 int i386_branch_cost; /* values 1-5: see jump.c */
167 /* Sometimes certain combinations of command options do not make
168 sense on a particular target machine. You can define a macro
169 `OVERRIDE_OPTIONS' to take account of this. This macro, if
170 defined, is executed once just after all the command options have
173 Don't use this macro to turn on various extra optimizations for
174 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* NOTE(review): the function header for override_options, the struct
   ptt tag line, and assorted braces are missing from this view (the
   embedded original line numbers skip repeatedly).  The visible code
   is the OVERRIDE_OPTIONS implementation: it validates -mreg-alloc=,
   -march=, -mcpu=, -mregparm=, the -malign-* family, and
   -mbranch-cost=, then adjusts target_flags. */
/* One table row per recognized processor name. */
185 char *name; /* Canonical processor name. */
186 enum processor_type processor; /* Processor type enum value. */
187 struct processor_costs *cost; /* Processor costs */
188 int target_enable; /* Target flags to enable. */
189 int target_disable; /* Target flags to disable. */
190 } processor_target_table[]
191 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
192 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
193 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
194 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
195 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0},
196 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost, 0, 0}};
198 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
200 #ifdef SUBTARGET_OVERRIDE_OPTIONS
201 SUBTARGET_OVERRIDE_OPTIONS;
204 /* Validate registers in register allocation order */
/* Each letter in -mreg-alloc= names one hard register; reject unknown
   letters and duplicates. */
205 if (i386_reg_alloc_order)
207 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
211 case 'a': regno = 0; break;
212 case 'd': regno = 1; break;
213 case 'c': regno = 2; break;
214 case 'b': regno = 3; break;
215 case 'S': regno = 4; break;
216 case 'D': regno = 5; break;
217 case 'B': regno = 6; break;
219 default: fatal ("Register '%c' is unknown", ch);
222 if (regs_allocated[regno])
223 fatal ("Register '%c' was already specified in the allocation order", ch);
225 regs_allocated[regno] = 1;
/* Fall back to Pentium for -march= and to the configured default for
   -mcpu= when the user gave neither. */
229 if (ix86_arch_string == (char *)0)
231 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
232 if (ix86_cpu_string == (char *)0)
233 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -march= in the table; on a miss, report and use defaults. */
236 for (i = 0; i < ptt_size; i++)
237 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
239 ix86_arch = processor_target_table[i].processor;
240 if (ix86_cpu_string == (char *)0)
241 ix86_cpu_string = processor_target_table[i].name;
247 error ("bad value (%s) for -march= switch", ix86_arch_string);
248 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
249 ix86_arch = PROCESSOR_DEFAULT;
252 if (ix86_cpu_string == (char *)0)
253 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -mcpu=; pick its cost table, and reject a cpu older than
   the arch when the arch is PentiumPro or newer. */
255 for (j = 0; j < ptt_size; j++)
256 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
258 ix86_cpu = processor_target_table[j].processor;
259 ix86_cost = processor_target_table[j].cost;
260 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
261 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
263 target_flags |= processor_target_table[j].target_enable;
264 target_flags &= ~processor_target_table[j].target_disable;
270 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
271 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
272 ix86_cpu = PROCESSOR_DEFAULT;
274 /* Validate -mregparm= value */
275 if (i386_regparm_string)
277 i386_regparm = atoi (i386_regparm_string);
278 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
279 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
282 /* The 486 suffers more from non-aligned cache line fills, and the larger code
283 size results in a larger cache foot-print and more misses. The 486 has a
284 16 byte cache line, pentium and pentiumpro have a 32 byte cache line */
285 def_align = (TARGET_486) ? 4 : 2;
287 /* Validate -malign-loops= value, or provide default */
288 if (i386_align_loops_string)
290 i386_align_loops = atoi (i386_align_loops_string);
291 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
292 fatal ("-malign-loops=%d is not between 0 and %d",
293 i386_align_loops, MAX_CODE_ALIGN);
296 i386_align_loops = 2;
298 /* Validate -malign-jumps= value, or provide default */
299 if (i386_align_jumps_string)
301 i386_align_jumps = atoi (i386_align_jumps_string);
302 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
303 fatal ("-malign-jumps=%d is not between 0 and %d",
304 i386_align_jumps, MAX_CODE_ALIGN);
307 i386_align_jumps = def_align;
309 /* Validate -malign-functions= value, or provide default */
310 if (i386_align_funcs_string)
312 i386_align_funcs = atoi (i386_align_funcs_string);
313 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
314 fatal ("-malign-functions=%d is not between 0 and %d",
315 i386_align_funcs, MAX_CODE_ALIGN);
318 i386_align_funcs = def_align;
320 /* Validate -mbranch-cost= value, or provide default */
321 if (i386_branch_cost_string)
323 i386_branch_cost = atoi (i386_branch_cost_string);
324 if (i386_branch_cost < 0 || i386_branch_cost > 5)
325 fatal ("-mbranch-cost=%d is not between 0 and 5",
329 i386_branch_cost = 1;
331 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
332 flag_omit_frame_pointer = 1;
334 /* pic references don't explicitly mention pic_offset_table_rtx */
335 /* code threaded into the prologue may conflict with profiling */
336 if (flag_pic || profile_flag || profile_block_flag)
337 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
340 /* A C statement (sans semicolon) to choose the order in which to
341 allocate hard registers for pseudo-registers local to a basic
344 Store the desired register order in the array `reg_alloc_order'.
345 Element 0 should be the register to allocate first; element 1, the
346 next register; and so on.
348 The macro body should not assume anything about the contents of
349 `reg_alloc_order' before execution of the macro.
351 On most machines, it is not necessary to define this macro. */
/* NOTE(review): braces and the switch header are missing from this
   view (original line numbers skip).  Fills reg_alloc_order either
   from the user's -mreg-alloc= string (then appends the unclaimed
   registers) or with the natural 0..N-1 order. */
354 order_regs_for_local_alloc ()
356 int i, ch, order, regno;
358 /* User specified the register allocation order */
359 if (i386_reg_alloc_order)
361 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
365 case 'a': regno = 0; break;
366 case 'd': regno = 1; break;
367 case 'c': regno = 2; break;
368 case 'b': regno = 3; break;
369 case 'S': regno = 4; break;
370 case 'D': regno = 5; break;
371 case 'B': regno = 6; break;
374 reg_alloc_order[order++] = regno;
/* Append every register the user did not name, in natural order. */
377 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
379 if (!regs_allocated[i])
380 reg_alloc_order[order++] = i;
384 /* If users did not specify a register allocation order, use natural order */
387 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
388 reg_alloc_order[i] = i;
/* Per-optimization-level tweaks (OPTIMIZATION_OPTIONS).  Visible part
   disables instruction scheduling, which aggravates x86 register
   pressure; the surrounding lines (level test, braces) are missing
   from this view. */
394 optimization_options (level)
397 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
398 make the problem with not enough registers even worse */
399 #ifdef INSN_SCHEDULING
401 flag_schedule_insns = 0;
405 /* Sign-extend a 16-bit constant */
/* If OP is a CONST_INT, replace it with its value sign-extended from
   16 bits; the branch selecting between the OR and AND forms (testing
   bit 15, presumably) is missing from this view. */
408 i386_sext16_if_const (op)
411 if (GET_CODE (op) == CONST_INT)
413 HOST_WIDE_INT val = INTVAL (op);
414 HOST_WIDE_INT sext_val;
416 sext_val = val | ~0xffff;
418 sext_val = val & 0xffff;
420 op = GEN_INT (sext_val);
425 /* Return nonzero if the rtx is aligned */
/* A register is "aligned" as an address base if it is the stack
   pointer, or the frame pointer when the frame pointer is in use. */
428 i386_aligned_reg_p (regno)
431 return (regno == STACK_POINTER_REGNUM
432 || (!flag_omit_frame_pointer
433 && regno == FRAME_POINTER_REGNUM));
/* NOTE(review): the header of the following predicate (i386_aligned_p,
   presumably) is missing from this view; it classifies an operand as
   4-byte aligned. */
440 /* registers and immediate operands are always "aligned" */
441 if (GET_CODE (op) != MEM)
444 /* Don't even try to do any aligned optimizations with volatiles */
445 if (MEM_VOLATILE_P (op))
448 /* Get address of memory operand */
451 switch (GET_CODE (op))
458 /* match "reg + offset" */
460 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
/* Offset must be a multiple of 4 for the access to stay aligned. */
462 if (INTVAL (XEXP (op, 1)) & 3)
465 if (GET_CODE (op) != REG)
469 return i386_aligned_reg_p (REGNO (op));
474 /* Return nonzero if INSN looks like it won't compute useful cc bits
475 as a side effect. This information is only a hint. */
/* True when no later insn consumes cc0 set by INSN. */
478 i386_cc_probably_useless_p (insn)
481 return !next_cc0_user (insn);
484 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
485 attribute for DECL. The attributes in ATTRIBUTES have previously been
/* Body not visible in this view; presumably rejects all decl
   attributes (type attributes are handled below) — TODO confirm. */
489 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
498 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
499 attribute for TYPE. The attributes in ATTRIBUTES have previously been
/* Accepts "stdcall" and "cdecl" (both argument-less) and "regparm"
   with a single integer argument in [0, REGPARM_MAX]. */
503 i386_valid_type_attribute_p (type, attributes, identifier, args)
509 if (TREE_CODE (type) != FUNCTION_TYPE
510 && TREE_CODE (type) != FIELD_DECL
511 && TREE_CODE (type) != TYPE_DECL)
514 /* Stdcall attribute says callee is responsible for popping arguments
515 if they are not variable. */
516 if (is_attribute_p ("stdcall", identifier))
517 return (args == NULL_TREE);
519 /* Cdecl attribute says the callee is a normal C declaration */
520 if (is_attribute_p ("cdecl", identifier))
521 return (args == NULL_TREE);
523 /* Regparm attribute specifies how many integer arguments are to be
524 passed in registers */
525 if (is_attribute_p ("regparm", identifier))
/* Exactly one argument, which must be an INTEGER_CST in range. */
529 if (!args || TREE_CODE (args) != TREE_LIST
530 || TREE_CHAIN (args) != NULL_TREE
531 || TREE_VALUE (args) == NULL_TREE)
534 cst = TREE_VALUE (args);
535 if (TREE_CODE (cst) != INTEGER_CST)
538 if (TREE_INT_CST_HIGH (cst) != 0
539 || TREE_INT_CST_LOW (cst) < 0
540 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
549 /* Return 0 if the attributes for two types are incompatible, 1 if they
550 are compatible, and 2 if they are nearly compatible (which causes a
551 warning to be generated). */
/* Body not visible in this view — TODO verify against the full file. */
554 i386_comp_type_attributes (type1, type2)
562 /* Value is the number of bytes of arguments automatically
563 popped when returning from a subroutine call.
564 FUNDECL is the declaration node of the function (as a tree),
565 FUNTYPE is the data type of the function (as a tree),
566 or for a library call it is an identifier node for the subroutine name.
567 SIZE is the number of bytes of arguments passed on the stack.
569 On the 80386, the RTD insn may be used to pop them if the number
570 of args is fixed, but if the number is variable then the caller
571 must pop them all. RTD can't be used for library calls now
572 because the library is compiled with the Unix compiler.
573 Use of RTD is a selectable option, since it is incompatible with
574 standard Unix calling sequences. If the option is not selected,
575 the caller must always pop the args.
577 The attribute stdcall is equivalent to RTD on a per module basis. */
580 i386_return_pops_args (fundecl, funtype, size)
/* -mrtd applies only to real function decls, never to library calls
   (identified by an IDENTIFIER_NODE fundecl). */
585 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
587 /* Cdecl functions override -mrtd, and never pop the stack */
588 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
590 /* Stdcall functions will pop the stack if not variable args */
591 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* A fixed argument list ends with void_type_node; only then may the
   callee pop. */
595 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
596 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
600 /* Lose any fake structure return argument */
601 if (aggregate_value_p (TREE_TYPE (funtype)))
602 return GET_MODE_SIZE (Pmode);
608 /* Argument support functions. */
610 /* Initialize a variable CUM of type CUMULATIVE_ARGS
611 for a call to a function whose data type is FNTYPE.
612 For a library call, FNTYPE is 0. */
615 init_cumulative_args (cum, fntype, libname)
616 CUMULATIVE_ARGS *cum; /* argument info to initialize */
617 tree fntype; /* tree ptr for function decl */
618 rtx libname; /* SYMBOL_REF of library name or 0 */
/* zero_cum is static so *cum can be reset by structure assignment
   (the assignment itself is not visible in this view). */
620 static CUMULATIVE_ARGS zero_cum;
621 tree param, next_param;
623 if (TARGET_DEBUG_ARG)
625 fprintf (stderr, "\ninit_cumulative_args (");
628 tree ret_type = TREE_TYPE (fntype);
629 fprintf (stderr, "fntype code = %s, ret code = %s",
630 tree_code_name[ (int)TREE_CODE (fntype) ],
631 tree_code_name[ (int)TREE_CODE (ret_type) ]);
634 fprintf (stderr, "no fntype");
637 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
642 /* Set up the number of registers to use for passing arguments. */
/* Global -mregparm default, overridden per-function by a regparm
   attribute on the type. */
643 cum->nregs = i386_regparm;
646 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
648 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
651 /* Determine if this function has variable arguments. This is
652 indicated by the last argument being 'void_type_mode' if there
653 are no variable arguments. If there are variable arguments, then
654 we won't pass anything in registers */
658 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
662 next_param = TREE_CHAIN (param);
663 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
668 if (TARGET_DEBUG_ARG)
669 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
674 /* Update the data in CUM to advance over an argument
675 of mode MODE and data type TYPE.
676 (TYPE is null for libcalls where that information may not be available.) */
679 function_arg_advance (cum, mode, type, named)
680 CUMULATIVE_ARGS *cum; /* current arg information */
681 enum machine_mode mode; /* current arg mode */
682 tree type; /* type of the argument or 0 if lib support */
683 int named; /* whether or not the argument was named */
/* Round the argument size up to a whole number of words. */
685 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
686 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
688 if (TARGET_DEBUG_ARG)
690 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
691 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
/* NOTE(review): the code that actually advances cum->words/nregs is
   missing from this view (original lines 692-705). */
706 /* Define where to put the arguments to a function.
707 Value is zero to push the argument on the stack,
708 or a hard register in which to store the argument.
710 MODE is the argument's machine mode.
711 TYPE is the data type of the argument (as a tree).
712 This is null for libcalls where that information may
714 CUM is a variable of type CUMULATIVE_ARGS which gives info about
715 the preceding args and about the function being called.
716 NAMED is nonzero if this argument is a named parameter
717 (otherwise it is an extra parameter matching an ellipsis). */
720 function_arg (cum, mode, type, named)
721 CUMULATIVE_ARGS *cum; /* current arg information */
722 enum machine_mode mode; /* current arg mode */
723 tree type; /* type of the argument or 0 if lib support */
724 int named; /* != 0 for normal args, == 0 for ... args */
727 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
728 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* Mode switch is partially missing from this view; integer modes that
   fit in the remaining regparm registers go in a register. */
732 default: /* for now, pass fp/complex values on the stack */
740 if (words <= cum->nregs)
741 ret = gen_rtx (REG, mode, cum->regno)
745 if (TARGET_DEBUG_ARG)
748 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
749 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
752 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
754 fprintf (stderr, ", stack");
756 fprintf (stderr, " )\n");
762 /* For an arg passed partly in registers and partly in memory,
763 this is the number of registers used.
764 For args passed entirely in registers or entirely in memory, zero. */
/* Body not visible in this view; presumably returns 0 on the i386 —
   TODO confirm against the full file. */
767 function_arg_partial_nregs (cum, mode, type, named)
768 CUMULATIVE_ARGS *cum; /* current arg information */
769 enum machine_mode mode; /* current arg mode */
770 tree type; /* type of the argument or 0 if lib support */
771 int named; /* != 0 for normal args, == 0 for ... args */
777 /* Output an insn whose source is a 386 integer register. SRC is the
778 rtx for the register, and TEMPLATE is the op-code template. SRC may
779 be either SImode or DImode.
781 The template will be output with operands[0] as SRC, and operands[1]
782 as a pointer to the top of the 386 stack. So a call from floatsidf2
783 would look like this:
785 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
787 where %z0 corresponds to the caller's operands[1], and is used to
788 emit the proper size suffix.
790 ??? Extend this to handle HImode - a 387 can load and store HImode
794 output_op_from_reg (src, template)
799 int size = GET_MODE_SIZE (GET_MODE (src));
/* Stage the value on the 386 stack: push SRC word by word (highest
   word first), run TEMPLATE against the stack top, then pop the
   temporary space back off with an add to %esp. */
802 xops[1] = AT_SP (Pmode);
803 xops[2] = GEN_INT (size);
804 xops[3] = stack_pointer_rtx;
806 if (size > UNITS_PER_WORD)
809 if (size > 2 * UNITS_PER_WORD)
/* Highest word of a three-word value (REGNO + 2) is pushed first. */
811 high = gen_rtx (REG, SImode, REGNO (src) + 2);
812 output_asm_insn (AS1 (push%L0,%0), &high);
814 high = gen_rtx (REG, SImode, REGNO (src) + 1);
815 output_asm_insn (AS1 (push%L0,%0), &high);
817 output_asm_insn (AS1 (push%L0,%0), &src);
819 output_asm_insn (template, xops);
821 output_asm_insn (AS2 (add%L3,%2,%3), xops);
824 /* Output an insn to pop an value from the 387 top-of-stack to 386
825 register DEST. The 387 register stack is popped if DIES is true. If
826 the mode of DEST is an integer mode, a `fist' integer store is done,
827 otherwise a `fst' float store is done. */
/* NOTE(review): several conditionals and braces are missing from this
   view — the choice between scratch memory and the %esp-relative slot,
   and the dies/!dies branches, are only partially visible. */
830 output_to_reg (dest, dies, scratch_mem)
836 int size = GET_MODE_SIZE (GET_MODE (dest));
839 xops[0] = AT_SP (Pmode);
841 xops[0] = scratch_mem;
842 xops[1] = stack_pointer_rtx;
843 xops[2] = GEN_INT (size);
/* Without scratch memory, make room on the 386 stack first. */
847 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
849 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
/* fistp pops the 387 stack; fist leaves it intact. */
852 output_asm_insn (AS1 (fistp%z3,%y0), xops);
854 output_asm_insn (AS1 (fist%z3,%y0), xops);
856 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
859 output_asm_insn (AS1 (fstp%z3,%y0), xops);
/* There is no non-popping XFmode store, so store-with-pop and
   reload to keep the value on the 387 stack. */
862 if (GET_MODE (dest) == XFmode)
864 output_asm_insn (AS1 (fstp%z3,%y0), xops);
865 output_asm_insn (AS1 (fld%z3,%y0), xops);
868 output_asm_insn (AS1 (fst%z3,%y0), xops);
/* Move the stored words into DEST, one SImode register at a time. */
875 output_asm_insn (AS1 (pop%L0,%0), &dest);
877 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
880 if (size > UNITS_PER_WORD)
882 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
884 output_asm_insn (AS1 (pop%L0,%0), &dest);
887 xops[0] = adj_offsettable_operand (xops[0], 4);
889 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
891 if (size > 2 * UNITS_PER_WORD)
893 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
895 output_asm_insn (AS1 (pop%L0,%0), &dest);
898 xops[0] = adj_offsettable_operand (xops[0], 4);
899 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
/* Return the assembler template for a one-word move between
   OPERANDS[1] (source) and OPERANDS[0] (destination); a store through
   a PRE_DEC of the stack pointer becomes a push. */
906 singlemove_string (operands)
910 if (GET_CODE (operands[0]) == MEM
911 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
/* Only predecrement of %esp is a valid push on the 386. */
913 if (XEXP (x, 0) != stack_pointer_rtx)
917 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
919 return output_move_const_single (operands);
921 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
922 return AS2 (mov%L0,%1,%0);
923 else if (CONSTANT_P (operands[1]))
924 return AS2 (mov%L0,%1,%0);
/* Memory-to-memory: go through the stack with a push/pop pair (the
   pop half is not visible in this view). */
927 output_asm_insn ("push%L1 %1", operands);
932 /* Return a REG that occurs in ADDR with coefficient 1.
933 ADDR can be effectively incremented by incrementing REG. */
/* Walk down nested PLUS nodes, preferring a bare REG over a constant
   side, until a single REG remains. */
939 while (GET_CODE (addr) == PLUS)
941 if (GET_CODE (XEXP (addr, 0)) == REG)
942 addr = XEXP (addr, 0);
943 else if (GET_CODE (XEXP (addr, 1)) == REG)
944 addr = XEXP (addr, 1);
945 else if (CONSTANT_P (XEXP (addr, 0)))
946 addr = XEXP (addr, 1);
947 else if (CONSTANT_P (XEXP (addr, 1)))
948 addr = XEXP (addr, 0);
952 if (GET_CODE (addr) == REG)
958 /* Output an insn to add the constant N to the register X. */
/* Visible body emits dec/inc for -1/+1, sub for negative N (and 128,
   whose sub-immediate encoding is shorter than add of -128), and add
   otherwise; the function header and the n == 0 case are missing from
   this view. */
969 output_asm_insn (AS1 (dec%L0,%0), xops);
971 output_asm_insn (AS1 (inc%L0,%0), xops);
972 else if (n < 0 || n == 128)
974 xops[1] = GEN_INT (-n);
975 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
979 xops[1] = GEN_INT (n);
980 output_asm_insn (AS2 (add%L0,%1,%0), xops);
985 /* Output assembler code to perform a doubleword move insn
986 with operands OPERANDS. */
/* NOTE(review): this function is large and this view is missing many
   interior lines (braces, a few conditions, the size==12 test).  The
   visible structure: classify both operands, normalize push/pop
   combinations, split each operand into low/middle/late SImode
   halves (middle only for 12-byte XFmode moves), reorder the three
   single-word moves to avoid clobbering an operand that overlaps,
   then emit them via singlemove_string. */
989 output_move_double (operands)
992 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
996 rtx addreg0 = 0, addreg1 = 0;
997 int dest_overlapped_low = 0;
998 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1003 /* First classify both operands. */
1005 if (REG_P (operands[0]))
1007 else if (offsettable_memref_p (operands[0]))
1009 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1011 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1013 else if (GET_CODE (operands[0]) == MEM)
1018 if (REG_P (operands[1]))
1020 else if (CONSTANT_P (operands[1]))
1022 else if (offsettable_memref_p (operands[1]))
1024 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1026 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1028 else if (GET_CODE (operands[1]) == MEM)
1033 /* Check for the cases that the operand constraints are not
1034 supposed to allow to happen. Abort if we get one,
1035 because generating code for these cases is painful. */
1037 if (optype0 == RNDOP || optype1 == RNDOP)
1040 /* If one operand is decrementing and one is incrementing
1041 decrement the former register explicitly
1042 and change that operand into ordinary indexing. */
1044 if (optype0 == PUSHOP && optype1 == POPOP)
1046 /* ??? Can this ever happen on i386? */
1047 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1048 asm_add (-size, operands[0]);
1049 if (GET_MODE (operands[1]) == XFmode)
1050 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
1051 else if (GET_MODE (operands[0]) == DFmode)
1052 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
1054 operands[0] = gen_rtx (MEM, DImode, operands[0]);
1058 if (optype0 == POPOP && optype1 == PUSHOP)
1060 /* ??? Can this ever happen on i386? */
1061 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1062 asm_add (-size, operands[1]);
1063 if (GET_MODE (operands[1]) == XFmode)
1064 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
1065 else if (GET_MODE (operands[1]) == DFmode)
1066 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
1068 operands[1] = gen_rtx (MEM, DImode, operands[1]);
1072 /* If an operand is an unoffsettable memory ref, find a register
1073 we can increment temporarily to make it refer to the second word. */
1075 if (optype0 == MEMOP)
1076 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1078 if (optype1 == MEMOP)
1079 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1081 /* Ok, we can do one word at a time.
1082 Normally we do the low-numbered word first,
1083 but if either operand is autodecrementing then we
1084 do the high-numbered word first.
1086 In either case, set up in LATEHALF the operands to use
1087 for the high-numbered word and in some cases alter the
1088 operands in OPERANDS to be suitable for the low-numbered word. */
/* Three-word (XFmode, size 12) case: build middle and late halves. */
1092 if (optype0 == REGOP)
1094 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1095 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
1097 else if (optype0 == OFFSOP)
1099 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1100 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1104 middlehalf[0] = operands[0];
1105 latehalf[0] = operands[0];
1108 if (optype1 == REGOP)
1110 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1111 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
1113 else if (optype1 == OFFSOP)
1115 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1116 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1118 else if (optype1 == CNSTOP)
1120 if (GET_CODE (operands[1]) == CONST_DOUBLE)
/* Split a long-double constant into its three target words. */
1122 REAL_VALUE_TYPE r; long l[3];
1124 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1125 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1126 operands[1] = GEN_INT (l[0]);
1127 middlehalf[1] = GEN_INT (l[1]);
1128 latehalf[1] = GEN_INT (l[2]);
1130 else if (CONSTANT_P (operands[1]))
1131 /* No non-CONST_DOUBLE constant should ever appear here. */
1136 middlehalf[1] = operands[1];
1137 latehalf[1] = operands[1];
1140 else /* size is not 12: */
1142 if (optype0 == REGOP)
1143 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1144 else if (optype0 == OFFSOP)
1145 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1147 latehalf[0] = operands[0];
1149 if (optype1 == REGOP)
1150 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1151 else if (optype1 == OFFSOP)
1152 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1153 else if (optype1 == CNSTOP)
1154 split_double (operands[1], &operands[1], &latehalf[1]);
1156 latehalf[1] = operands[1];
1159 /* If insn is effectively movd N (sp),-(sp) then we will do the
1160 high word first. We should use the adjusted operand 1
1161 (which is N+4 (sp) or N+8 (sp))
1162 for the low word and middle word as well,
1163 to compensate for the first decrement of sp. */
1164 if (optype0 == PUSHOP
1165 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1166 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1167 middlehalf[1] = operands[1] = latehalf[1];
1169 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1170 if the upper part of reg N does not appear in the MEM, arrange to
1171 emit the move late-half first. Otherwise, compute the MEM address
1172 into the upper part of N and use that as a pointer to the memory
1174 if (optype0 == REGOP
1175 && (optype1 == OFFSOP || optype1 == MEMOP))
1177 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1178 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1180 /* If both halves of dest are used in the src memory address,
1181 compute the address into latehalf of dest. */
1183 xops[0] = latehalf[0];
1184 xops[1] = XEXP (operands[1], 0);
1185 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1186 if( GET_MODE (operands[1]) == XFmode )
1189 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1190 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1191 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1195 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1196 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1200 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1202 /* Check for two regs used by both source and dest. */
1203 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1204 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1207 /* JRV says this can't happen: */
1208 if (addreg0 || addreg1)
1211 /* Only the middle reg conflicts; simply put it last. */
1212 output_asm_insn (singlemove_string (operands), operands);
1213 output_asm_insn (singlemove_string (latehalf), latehalf);
1214 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1217 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1218 /* If the low half of dest is mentioned in the source memory
1219 address, the arrange to emit the move late half first. */
1220 dest_overlapped_low = 1;
1223 /* If one or both operands autodecrementing,
1224 do the two words, high-numbered first. */
1226 /* Likewise, the first move would clobber the source of the second one,
1227 do them in the other order. This happens only for registers;
1228 such overlap can't happen in memory unless the user explicitly
1229 sets it up, and that is an undefined circumstance. */
1232 if (optype0 == PUSHOP || optype1 == PUSHOP
1233 || (optype0 == REGOP && optype1 == REGOP
1234 && REGNO (operands[0]) == REGNO (latehalf[1]))
1235 || dest_overlapped_low)
1237 if (optype0 == PUSHOP || optype1 == PUSHOP
1238 || (optype0 == REGOP && optype1 == REGOP
1239 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1240 || REGNO (operands[0]) == REGNO (latehalf[1])))
1241 || dest_overlapped_low)
1243 /* Make any unoffsettable addresses point at high-numbered word. */
1245 asm_add (size-4, addreg0);
1247 asm_add (size-4, addreg1);
1250 output_asm_insn (singlemove_string (latehalf), latehalf);
1252 /* Undo the adds we just did. */
1254 asm_add (-4, addreg0);
1256 asm_add (-4, addreg1);
1260 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1262 asm_add (-4, addreg0);
1264 asm_add (-4, addreg1);
1267 /* Do low-numbered word. */
1268 return singlemove_string (operands);
1271 /* Normal case: do the two words, low-numbered first. */
1273 output_asm_insn (singlemove_string (operands), operands);
1275 /* Do the middle one of the three words for long double */
1279 asm_add (4, addreg0);
1281 asm_add (4, addreg1);
1283 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1286 /* Make any unoffsettable addresses point at high-numbered word. */
1288 asm_add (4, addreg0);
1290 asm_add (4, addreg1);
1293 output_asm_insn (singlemove_string (latehalf), latehalf);
1295 /* Undo the adds we just did. */
1297 asm_add (4-size, addreg0);
1299 asm_add (4-size, addreg1);
1305 #define MAX_TMPS 2 /* max temporary registers used */
1307 /* Output the appropriate code to move push memory on the stack */
/* Emit "push" insns pushing LENGTH bytes of the memory source
   OPERANDS[1] onto the stack, highest-addressed word first.
   OPERANDS[TMP_START..N_OPERANDS-1] may name scratch registers
   (at most MAX_TMPS used) for batching mov/push pairs.
   NOTE(review): some lines of this function are elided in this view;
   comments below describe only the visible statements.  */
1310 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1322   } tmp_info[MAX_TMPS];
1324   rtx src = operands[1];
/* stack_p: the source address involves %esp, so each push (which moves
   %esp) shifts subsequent source offsets — tracked via stack_offset.  */
1327   int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1328   int stack_offset = 0;
/* The source must be addressable at a constant offset, and the size a
   multiple of 4, or word pushes cannot be emitted.  */
1332   if (!offsettable_memref_p (src))
1333     fatal_insn ("Source is not offsettable", insn);
1335   if ((length & 3) != 0)
1336     fatal_insn ("Pushing non-word aligned size", insn);
1338   /* Figure out which temporary registers we have available */
1339   for (i = tmp_start; i < n_operands; i++)
1341       if (GET_CODE (operands[i]) == REG)
/* A scratch register that aliases the source cannot be used.  */
1343 	  if (reg_overlap_mentioned_p (operands[i], src))
1346 	  tmp_info[ max_tmps++ ].xops[1] = operands[i];
1347 	  if (max_tmps == MAX_TMPS)
/* No scratch registers found: push each word directly from memory.  */
1353       for (offset = length - 4; offset >= 0; offset -= 4)
1355 	  xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1356 	  output_asm_insn (AS1(push%L0,%0), xops);
/* With scratch registers: load up to max_tmps words, then push them.  */
1362   for (offset = length - 4; offset >= 0; )
1364       for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1366 	  tmp_info[num_tmps].load    = AS2(mov%L0,%0,%1);
1367 	  tmp_info[num_tmps].push    = AS1(push%L0,%1);
1368 	  tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1372       for (i = 0; i < num_tmps; i++)
1373 	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1375       for (i = 0; i < num_tmps; i++)
1376 	output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
/* Presumably executed only when stack_p — guard line elided; verify.  */
1379       stack_offset += 4*num_tmps;
1387 /* Output the appropriate code to move data between two memory locations */
/* Emit code copying LENGTH bytes between two memory operands,
   DEST = OPERANDS[0] and SRC = OPERANDS[1], via scratch registers
   supplied in OPERANDS[TMP_START..N_OPERANDS-1].  An odd byte count
   additionally requires a byte-addressable (QI) scratch register.
   NOTE(review): some lines of this function are elided in this view.  */
1390 output_move_memory (operands, insn, length, tmp_start, n_operands)
1401   } tmp_info[MAX_TMPS];
1403   rtx dest = operands[0];
1404   rtx src = operands[1];
1405   rtx qi_tmp = NULL_RTX;
/* A pre-increment store through the stack pointer is really a push;
   delegate to the specialized routine.  */
1411   if (GET_CODE (dest) == MEM
1412       && GET_CODE (XEXP (dest, 0)) == PRE_INC
1413       && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1414     return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1416   if (!offsettable_memref_p (src))
1417     fatal_insn ("Source is not offsettable", insn);
1419   if (!offsettable_memref_p (dest))
1420     fatal_insn ("Destination is not offsettable", insn);
1422   /* Figure out which temporary registers we have available */
1423   for (i = tmp_start; i < n_operands; i++)
1425       if (GET_CODE (operands[i]) == REG)
/* Remember a byte register for the trailing odd byte, if any.  */
1427 	  if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1428 	    qi_tmp = operands[i];
/* Scratch registers must not alias either memory address.  */
1430 	  if (reg_overlap_mentioned_p (operands[i], dest))
1431 	    fatal_insn ("Temporary register overlaps the destination", insn);
1433 	  if (reg_overlap_mentioned_p (operands[i], src))
1434 	    fatal_insn ("Temporary register overlaps the source", insn);
1436 	  tmp_info[ max_tmps++ ].xops[2] = operands[i];
1437 	  if (max_tmps == MAX_TMPS)
1443     fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1445   if ((length & 1) != 0)
1448 	fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Batch up to max_tmps loads, then the matching stores: longword moves
   while enough bytes remain, else a 16-bit word move.  */
1453       for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1457 	      tmp_info[num_tmps].load    = AS2(mov%L0,%1,%2);
1458 	      tmp_info[num_tmps].store   = AS2(mov%L0,%2,%0);
1459 	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1460 	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1464 	  else if (length >= 2)
1466 	      tmp_info[num_tmps].load    = AS2(mov%W0,%1,%2);
1467 	      tmp_info[num_tmps].store   = AS2(mov%W0,%2,%0);
1468 	      tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1469 	      tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1477       for (i = 0; i < num_tmps; i++)
1478 	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1480       for (i = 0; i < num_tmps; i++)
1481 	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Final odd byte is moved through the QI scratch register (%2).  */
1486       xops[0] = adj_offsettable_operand (dest, offset);
1487       xops[1] = adj_offsettable_operand (src, offset);
1489       output_asm_insn (AS2(mov%B0,%1,%2), xops);
1490       output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Classify the CONST_DOUBLE X: the visible lines test whether it equals
   +0.0 (excluding -0.0) or 1.0 — presumably the constants the 80387 can
   load with a dedicated instruction (fldz/fld1); confirm against the
   callers.  A float handler is installed so trapping values are handled
   safely.  NOTE(review): declarations and returns are elided here.  */
1498 standard_80387_constant_p (x)
1501 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1506       if (setjmp (handler))
1509       set_float_handler (handler);
1510       REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* -0.0 is deliberately rejected: it is not equal bit-for-bit to +0.0.  */
1511       is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1512       is1 = REAL_VALUES_EQUAL (d, dconst1);
1513       set_float_handler (NULL_PTR);
1521 /* Note that on the 80387, other constants, such as pi,
1522 are much slower to load as standard constants
1523 than to load from doubles in memory! */
/* Return the assembler template for moving a constant into OPERANDS[0].
   For an FP-register destination, first check for a special 80387
   constant.  Otherwise a CONST_DOUBLE is converted to its target
   single-precision bit pattern (a CONST_INT) and emitted as an ordinary
   one-word move.  NOTE(review): some lines are elided in this view.  */
1530 output_move_const_single (operands)
1533   if (FP_REG_P (operands[0]))
1535       int conval = standard_80387_constant_p (operands[1]);
1543   if (GET_CODE (operands[1]) == CONST_DOUBLE)
1545       REAL_VALUE_TYPE r; long l;
/* XFmode constants are singled out here — handling elided; presumably
   they cannot take the single-precision conversion below.  Verify.  */
1547       if (GET_MODE (operands[1]) == XFmode)
1550       REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1551       REAL_VALUE_TO_TARGET_SINGLE (r, l);
1552       operands[1] = GEN_INT (l);
1554   return singlemove_string (operands);
1557 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1558 reference and a constant. */
/* Predicate: nonzero if OP is a symbol/label reference or a CONST of
   the form (symbol_ref-or-label_ref + const_int); see comment above.  */
1561 symbolic_operand (op, mode)
1563      enum machine_mode mode;
1565   switch (GET_CODE (op))
/* Inside a CONST: accept only symbol/label plus an integer offset.  */
1572       return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1573 	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1574 	      && GET_CODE (XEXP (op, 1)) == CONST_INT);
1580 /* Test for a valid operand for a call instruction.
1581 Don't allow the arg pointer register or virtual regs
1582 since they may change into reg + const, which the patterns
1583 can't handle yet. */
/* Predicate for a call address: a MEM whose address is either a valid
   constant address (additionally general_operand-checked, which matters
   for PIC) or a register other than the arg pointer and the virtual
   registers — those may later turn into reg+const, which the call
   patterns cannot handle (see comment above).  */
1586 call_insn_operand (op, mode)
1588      enum machine_mode mode;
1590   if (GET_CODE (op) == MEM
1591       && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1592 	   /* This makes a difference for PIC. */
1593 	   && general_operand (XEXP (op, 0), Pmode))
1594 	  || (GET_CODE (XEXP (op, 0)) == REG
1595 	      && XEXP (op, 0) != arg_pointer_rtx
1596 	      && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1597 		   && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1602 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Like call_insn_operand, but without the general_operand check on a
   constant address — accepts (mem (symbol_ref ...)) directly, per the
   comment above; presumably used by the call expanders before the
   address is legitimized.  */
1606 expander_call_insn_operand (op, mode)
1608      enum machine_mode mode;
1610   if (GET_CODE (op) == MEM
1611       && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1612 	  || (GET_CODE (XEXP (op, 0)) == REG
1613 	      && XEXP (op, 0) != arg_pointer_rtx
1614 	      && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1615 		   && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1620 /* Return 1 if OP is a comparison operator that can use the condition code
1621 generated by an arithmetic operation. */
/* Predicate: OP is a comparison operator usable directly on the flags
   left by an arithmetic insn.  MODE must match (or be VOIDmode).  */
1624 arithmetic_comparison_operator (op, mode)
1626      enum machine_mode mode;
1630   if (mode != VOIDmode && mode != GET_MODE (op))
1632   code = GET_CODE (op);
1633   if (GET_RTX_CLASS (code) != '<')
/* GT and LE are excluded — presumably they need flag combinations that
   plain arithmetic insns do not produce; confirm against i386.md.  */
1636   return (code != GT && code != LE);
1639 /* Returns 1 if OP contains a symbol reference */
/* Recursively walk OP; return 1 if a SYMBOL_REF or LABEL_REF occurs
   anywhere inside it.  Both 'E' (vector) and 'e' (expression) operand
   slots are scanned.  */
1642 symbolic_reference_mentioned_p (op)
1648   if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1651   fmt = GET_RTX_FORMAT (GET_CODE (op));
1652   for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1658 	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1659 	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1662       else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1669 /* Attempt to expand a binary operator. Make the expansion closer to the
1670 actual machine, then just general_operand, which will allow 3 separate
1671 memory references (one output, two input) in a single insn. Return
1672 whether the insn fails, or succeeds. */
/* Expand the binary operator CODE in MODE (see comment above).
   Massages OPERANDS toward what the machine patterns accept: swaps the
   inputs of commutative codes so a constant/duplicate lands second,
   copies memory inputs into registers when optimizing (to help CSE),
   and forces the CONST_INT first input of MINUS into a register.
   NOTE(review): some lines, including the final returns, are elided.  */
1675 ix86_expand_binary_operator (code, mode, operands)
1677      enum machine_mode mode;
1684   /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1685   if (GET_RTX_CLASS (code) == 'c'
1686       && (rtx_equal_p (operands[0], operands[2])
1687 	  || immediate_operand (operands[1], mode)))
1689       rtx temp = operands[1];
1690       operands[1] = operands[2];
1694   /* If optimizing, copy to regs to improve CSE */
1695   if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1697       if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1698 	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1700       if (GET_CODE (operands[2]) == MEM)
1701 	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* (const - x): the constant must live in a register for the pattern.  */
1703       if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1705 	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1706 	  emit_move_insn (temp, operands[1]);
1712   if (!ix86_binary_operator_ok (code, mode, operands))
1714       /* If not optimizing, try to make a valid insn (optimize code previously did
1715 	 this above to improve chances of CSE) */
1717       if ((!TARGET_PSEUDO || !optimize)
1718 	  && ((reload_in_progress | reload_completed) == 0)
1719 	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1722 	  if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1724 	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1728 	  if (GET_CODE (operands[2]) == MEM)
1730 	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1734 	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1736 	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1737 	      emit_move_insn (temp, operands[1]);
/* Re-check validity only if something above was actually changed.  */
1742 	  if (modified && !ix86_binary_operator_ok (code, mode, operands))
1752 /* Return TRUE or FALSE depending on whether the binary operator meets the
1753 appropriate constraints. */
/* TRUE iff OPERANDS satisfy the binary-operator constraints: at most
   one of the two inputs is a MEM, and a CONST_INT first input is only
   allowed for commutative codes (it could then be swapped second).  */
1756 ix86_binary_operator_ok (code, mode, operands)
1758      enum machine_mode mode;
1761   return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1762     && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1765 /* Attempt to expand a unary operator. Make the expansion closer to the
1766 actual machine, then just general_operand, which will allow 2 separate
1767 memory references (one output, one input) in a single insn. Return
1768 whether the insn fails, or succeeds. */
/* Expand the unary operator CODE in MODE (see comment above).  When
   optimizing (and not reloading), a MEM input is copied into a register
   to improve CSE; otherwise the copy is made only if the operand check
   fails.  NOTE(review): some lines, including the returns, are elided.  */
1771 ix86_expand_unary_operator (code, mode, operands)
1773      enum machine_mode mode;
1778   /* If optimizing, copy to regs to improve CSE */
1781       && ((reload_in_progress | reload_completed) == 0)
1782       && GET_CODE (operands[1]) == MEM)
1784       operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1787   if (!ix86_unary_operator_ok (code, mode, operands))
1789       if ((!TARGET_PSEUDO || !optimize)
1790 	  && ((reload_in_progress | reload_completed) == 0)
1791 	  && GET_CODE (operands[1]) == MEM)
1793 	  operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1794 	  if (!ix86_unary_operator_ok (code, mode, operands))
1804 /* Return TRUE or FALSE depending on whether the unary operator meets the
1805 appropriate constraints. */
/* NOTE(review): the body of this predicate is elided in this view;
   only the signature is visible.  */
1808 ix86_unary_operator_ok (code, mode, operands)
1810      enum machine_mode mode;
/* State shared by asm_output_function_prefix and ix86_expand_prologue:
   the lazily created label (and its "LPR<n>" name) of the helper that
   loads the PIC register from the return address.  */
1818 static rtx pic_label_rtx;
1819 static char pic_label_name [256];
1820 static int pic_label_no = 0;
1822 /* This function generates code for -fpic that loads %ebx with
1823 with the return address of the caller and then returns. */
/* For -fpic with TARGET_DEEP_BRANCH_PREDICTION, emit ahead of the
   function a tiny helper that loads the PIC register from the word at
   %esp (the caller's return address) and returns; the prologue calls
   it.  The helper's "LPR<n>" label is created once and cached in
   pic_label_rtx/pic_label_name.  */
1825 asm_output_function_prefix (file, name)
1830   int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1831 				  || current_function_uses_const_pool);
1832   xops[0] = pic_offset_table_rtx;
1833   xops[1] = stack_pointer_rtx;
1835   /* deep branch prediction favors having a return for every call */
1836   if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1840       if (pic_label_rtx == 0)
1842 	  pic_label_rtx = (rtx) gen_label_rtx ();
1843 	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
1844 	  LABEL_NAME (pic_label_rtx) = pic_label_name;
/* Declare the helper under its own function label when supported.  */
1846       prologue_node = make_node (FUNCTION_DECL);
1847       DECL_RESULT (prologue_node) = 0;
1848 #ifdef ASM_DECLARE_FUNCTION_NAME
1849       ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
/* %0 = PIC reg, %1 = %esp: fetch the return address, then return.  */
1851       output_asm_insn ("movl (%1),%0", xops);
1852       output_asm_insn ("ret", xops);
1856 /* Set up the stack and frame (if desired) for the function. */
/* Emit the textual prologue: push/set up the frame pointer, allocate
   the frame, save call-saved registers, and load the PIC register when
   needed.  With TARGET_SCHEDULE_PROLOGUE the prologue is emitted as RTL
   instead (the early return whose body is elided below).
   NOTE(review): some lines of this function are elided in this view.  */
1859 function_prologue (file, size)
1866   int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1867 				  || current_function_uses_const_pool);
1868   long tsize = get_frame_size ();
1870   /* pic references don't explicitly mention pic_offset_table_rtx */
1871   if (TARGET_SCHEDULE_PROLOGUE)
1877   xops[0] = stack_pointer_rtx;
1878   xops[1] = frame_pointer_rtx;
1879   xops[2] = GEN_INT (tsize);
1881   if (frame_pointer_needed)
1883       output_asm_insn ("push%L1 %1", xops);
1884       output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* Small or unprobed frame: plain subtract from %esp.  */
1889   else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1890     output_asm_insn (AS2 (sub%L0,%2,%0), xops);
/* Large frame with stack probing: size in register 0 (%eax), then call
   _alloca to probe and allocate.  */
1893       xops[3] = gen_rtx (REG, SImode, 0);
1894       output_asm_insn (AS2 (mov%L0,%2,%3), xops);
1896       xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
1897       output_asm_insn (AS1 (call,%P3), xops);
1900   /* Note If use enter it is NOT reversed args.
1901      This one is not reversed from intel!!
1902      I think enter is slower.  Also sdb doesn't like it.
1903      But if you want it the code is:
1905      xops[3] = const0_rtx;
1906      output_asm_insn ("enter %2,%3", xops);
/* Save call-saved registers (and the PIC register if it is in use).  */
1909   limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1910   for (regno = limit - 1; regno >= 0; regno--)
1911     if ((regs_ever_live[regno] && ! call_used_regs[regno])
1912 	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1914 	xops[0] = gen_rtx (REG, SImode, regno);
1915 	output_asm_insn ("push%L0 %0", xops);
/* PIC setup, deep-branch-prediction flavor: call the LPR helper (which
   loads the return address into the PIC reg), then add the GOT base.  */
1918   if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1920       xops[0] = pic_offset_table_rtx;
1921       xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx));
1923       output_asm_insn (AS1 (call,%P1), xops);
1924       output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
/* PIC setup, classic flavor: call the next label, pop the return
   address, and add the GOT displacement relative to that label.  */
1927   else if (pic_reg_used)
1929       xops[0] = pic_offset_table_rtx;
1930       xops[1] = (rtx) gen_label_rtx ();
1932       output_asm_insn (AS1 (call,%P1), xops);
1933       ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1934       output_asm_insn (AS1 (pop%L0,%0), xops);
1935       output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1939 /* This function generates the assembly code for function entry.
1940 FILE is an stdio stream to output the code to.
1941 SIZE is an int: how many units of temporary storage to allocate. */
/* RTL twin of function_prologue: emit the prologue as insns (used when
   TARGET_SCHEDULE_PROLOGUE), marking frame-related insns with
   RTX_FRAME_RELATED_P.  Sets up the frame pointer, allocates the frame
   (via _alloca when probing large frames), saves call-saved registers,
   and loads the PIC register.  NOTE(review): some lines are elided.  */
1944 ix86_expand_prologue ()
1949   int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1950 				  || current_function_uses_const_pool);
1951   long tsize = get_frame_size ();
1954   if (!TARGET_SCHEDULE_PROLOGUE)
1957   xops[0] = stack_pointer_rtx;
1958   xops[1] = frame_pointer_rtx;
1959   xops[2] = GEN_INT (tsize);
/* push %ebp; mov %esp,%ebp — both marked frame-related.  */
1960   if (frame_pointer_needed)
1964 				  gen_rtx (MEM, SImode,
1965 					   gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1966 				  frame_pointer_rtx));
1967       RTX_FRAME_RELATED_P (insn) = 1;
1968       insn = emit_move_insn (xops[1], xops[0]);
1969       RTX_FRAME_RELATED_P (insn) = 1;
/* Small or unprobed frame: subtract the size from %esp.  */
1974   else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1976       insn = emit_insn (gen_subsi3 (xops[0], xops[0], xops[2]));
1977       RTX_FRAME_RELATED_P (insn) = 1;
/* Large probed frame: size into register 0 (%eax), call _alloca.  */
1981       xops[3] = gen_rtx (REG, SImode, 0);
1982       emit_move_insn (xops[3], xops[2]);
1983       xops[3] = gen_rtx (MEM, FUNCTION_MODE,
1984 			 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
1985       emit_call_insn (gen_rtx (CALL, VOIDmode,
1986 			       xops[3], const0_rtx));
1989   /* Note If use enter it is NOT reversed args.
1990      This one is not reversed from intel!!
1991      I think enter is slower.  Also sdb doesn't like it.
1992      But if you want it the code is:
1994      xops[3] = const0_rtx;
1995      output_asm_insn ("enter %2,%3", xops);
/* Save call-saved registers (and the PIC register if in use).  */
1998   limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1999   for (regno = limit - 1; regno >= 0; regno--)
2000     if ((regs_ever_live[regno] && ! call_used_regs[regno])
2001 	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2003 	xops[0] = gen_rtx (REG, SImode, regno);
2006 				      gen_rtx (MEM, SImode,
2007 					       gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
2010 	RTX_FRAME_RELATED_P (insn) = 1;
/* PIC setup, deep-branch-prediction flavor: get the PC via the LPR
   helper, then add the GOT base.  The label is lazily created here in
   case asm_output_function_prefix has not run yet.  */
2013   if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
2015       xops[0] = pic_offset_table_rtx;
2016       if (pic_label_rtx == 0)
2018 	  pic_label_rtx = (rtx) gen_label_rtx ();
2019 	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
2020 	  LABEL_NAME (pic_label_rtx) = pic_label_name;
2022       xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));
2024       emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
2025       emit_insn (gen_prologue_set_got (xops[0],
2026 				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2027 				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
/* PIC setup, classic flavor: call/pop to get the PC, then add the GOT
   displacement.  */
2029   else if (pic_reg_used)
2031       xops[0] = pic_offset_table_rtx;
2032       xops[1] = (rtx) gen_label_rtx ();
2034       emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
2035       emit_insn (gen_pop (xops[0]));
2036       emit_insn (gen_prologue_set_got (xops[0],
2037 				       gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
2038 				       gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
2042 /* Restore function stack, frame, and registers. */
2045 function_epilogue (file, size)
2051 /* Return 1 if it is appropriate to emit `ret' instructions in the
2052 body of a function. Do this only if the epilogue is simple, needing a
2053 couple of insns. Prior to reloading, we can't tell how many registers
2054 must be saved, so return 0 then. Return 0 if there is no frame
2055 marker to de-allocate.
2057 If NON_SAVING_SETJMP is defined and true, then it is not possible
2058 for the epilogue to be simple, so return 0. This is a special case
2059 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2060 until final, but jump_optimize may need to know sooner if a
/* See the comment above: nonzero iff a simple `ret` suffices for this
   function's epilogue.  Counts the call-saved (and PIC) registers that
   would need restoring; only a zero count, or no frame-pointer
   deallocation, permits a bare return.  Before reload the register
   set is unknown, so answer 0.  */
2064 ix86_can_use_return_insn_p ()
2068   int reglimit = (frame_pointer_needed
2069 		  ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2070   int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2071 				  || current_function_uses_const_pool);
2073 #ifdef NON_SAVING_SETJMP
2074   if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2078   if (! reload_completed)
/* Count registers the epilogue would have to restore (loop body with
   the increment is elided in this view).  */
2081   for (regno = reglimit - 1; regno >= 0; regno--)
2082     if ((regs_ever_live[regno] && ! call_used_regs[regno])
2083 	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2086   return nregs == 0 || ! frame_pointer_needed;
2090 /* This function generates the assembly code for function exit.
2091 FILE is an stdio stream to output the code to.
2092 SIZE is an int: how many units of temporary storage to deallocate. */
/* Emit the epilogue as RTL: restore call-saved registers (by pop after
   repositioning %esp, or by frame-pointer-relative moves), tear down
   the frame (leave, or mov/pop), release a frame-pointer-less frame,
   and emit the return — popping caller arguments when required.
   NOTE(review): some lines of this function are elided in this view.  */
2095 ix86_expand_epilogue ()
2098   register int nregs, limit;
2101   int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2102 				  || current_function_uses_const_pool);
2103   long tsize = get_frame_size ();
2105   /* Compute the number of registers to pop */
2107   limit = (frame_pointer_needed
2108 	   ? FRAME_POINTER_REGNUM
2109 	   : STACK_POINTER_REGNUM);
2113   for (regno = limit - 1; regno >= 0; regno--)
2114     if ((regs_ever_live[regno] && ! call_used_regs[regno])
2115 	|| (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2118   /* sp is often unreliable so we must go off the frame pointer,
2121   /* In reality, we may not care if sp is unreliable, because we can
2122      restore the register relative to the frame pointer.  In theory,
2123      since each move is the same speed as a pop, and we don't need the
2124      leal, this is faster.  For now restore multiple registers the old
/* Offset from %ebp down to the bottom of the register-save area.  */
2127   offset = -tsize - (nregs * UNITS_PER_WORD);
2129   xops[2] = stack_pointer_rtx;
/* Pop path: point %esp at the save area (lea off the frame pointer if
   there is one) and pop each saved register in ascending regno order.  */
2131   if (nregs > 1 || ! frame_pointer_needed)
2133       if (frame_pointer_needed)
2135 	  xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2136 	  emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2137 /*	  output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2140       for (regno = 0; regno < limit; regno++)
2141 	if ((regs_ever_live[regno] && ! call_used_regs[regno])
2142 	    || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2144 	    xops[0] = gen_rtx (REG, SImode, regno);
2145 	    emit_insn (gen_pop (xops[0]));
2146 /*	    output_asm_insn ("pop%L0 %0", xops);*/
/* Move path: restore each register with a frame-pointer-relative load
   (no repositioning of %esp needed).  */
2150     for (regno = 0; regno < limit; regno++)
2151       if ((regs_ever_live[regno] && ! call_used_regs[regno])
2152 	  || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2154 	  xops[0] = gen_rtx (REG, SImode, regno);
2155 	  xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2156 	  emit_move_insn (xops[0], xops[1]);
2157 /*	  output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2161   if (frame_pointer_needed)
2163       /* If not an i386, mov & pop is faster than "leave".  */
2165       if (TARGET_USE_LEAVE)
2166 	emit_insn (gen_leave());
2167 /*	output_asm_insn ("leave", xops);*/
/* Equivalent of leave: mov %ebp,%esp; pop %ebp.  */
2170 	  xops[0] = frame_pointer_rtx;
2171 	  xops[1] = stack_pointer_rtx;
2172 	  emit_insn (gen_epilogue_set_stack_ptr());
2173 /*	  output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2174 	  emit_insn (gen_pop (xops[0]));
2175 /*	  output_asm_insn ("pop%L0 %0", xops);*/
2180       /* If there is no frame pointer, we must still release the frame. */
2182       xops[0] = GEN_INT (tsize);
2183       emit_insn (gen_rtx (SET, SImode,
2185 			  gen_rtx (PLUS, SImode,
2188 /*      output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2191 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2192   if (profile_block_flag == 2)
2194       FUNCTION_BLOCK_PROFILER_EXIT(file);
2198   if (current_function_pops_args && current_function_args_size)
2200       xops[1] = GEN_INT (current_function_pops_args);
2202       /* i386 can only pop 32K bytes (maybe 64K?  Is it signed?).  If
2203 	 asked to pop more, pop return address, do explicit add, and jump
2204 	 indirectly to the caller. */
2206       if (current_function_pops_args >= 32768)
2208 	  /* ??? Which register to use here? */
2209 	  xops[0] = gen_rtx (REG, SImode, 2);
2210 	  emit_insn (gen_pop (xops[0]));
2211 /*	  output_asm_insn ("pop%L0 %0", xops);*/
2212 	  emit_insn (gen_rtx (SET, SImode,
2214 			      gen_rtx (PLUS, SImode,
2217 /*	  output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2218 	  emit_jump_insn (xops[0]);
2219 /*	  output_asm_insn ("jmp %*%0", xops);*/
/* Pop count fits in `ret n`: use the popping return.  */
2222 	emit_jump_insn (gen_return_pop_internal (xops[1]));
2223 /*	output_asm_insn ("ret %1", xops);*/
2226 /*    output_asm_insn ("ret", xops);*/
2227     emit_jump_insn (gen_return_internal ());
2231 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2232 that is a valid memory address for an instruction.
2233 The MODE argument is the machine mode for the MEM expression
2234 that wants to use this address.
2236 On x86, legitimate addresses are:
2237 base movl (base),reg
2238 displacement movl disp,reg
2239 base + displacement movl disp(base),reg
2240 index + base movl (base,index),reg
2241 (index + base) + displacement movl disp(base,index),reg
2242 index*scale movl (,index,scale),reg
2243 index*scale + disp movl disp(,index,scale),reg
2244 index*scale + base movl (base,index,scale),reg
2245 (index*scale + base) + disp movl disp(base,index,scale),reg
2247 In each case, scale can be 1, 2, 4, 8. */
2249 /* This is exactly the same as print_operand_addr, except that
2250 it recognizes addresses instead of printing them.
2252 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2253 convert common non-canonical forms to canonical form so that they will
2256 #define ADDR_INVALID(msg,insn) \
2258 if (TARGET_DEBUG_ADDR) \
2260 fprintf (stderr, msg); \
/* Recognizer for GO_IF_LEGITIMATE_ADDRESS (see the large comment
   above): decompose ADDR into base, index, scale, and displacement,
   then validate each part.  STRICT selects the strict register checks
   used after reload.  NOTE(review): some lines are elided here.  */
2266 legitimate_address_p (mode, addr, strict)
2267      enum machine_mode mode;
2271   rtx base  = NULL_RTX;
2272   rtx indx  = NULL_RTX;
2273   rtx scale = NULL_RTX;
2274   rtx disp  = NULL_RTX;
2276   if (TARGET_DEBUG_ADDR)
2279 	       "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2280 	       GET_MODE_NAME (mode), strict);
/* Decompose ADDR: a lone register is a base; a PLUS/MULT is broken into
   the canonical (base, index, scale, disp) parts; anything else is
   treated as a displacement and validated below.  */
2285   if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2286     base = addr;			/* base reg */
2288   else if (GET_CODE (addr) == PLUS)
2290       rtx op0 = XEXP (addr, 0);
2291       rtx op1 = XEXP (addr, 1);
2292       enum rtx_code code0 = GET_CODE (op0);
2293       enum rtx_code code1 = GET_CODE (op1);
2295       if (code0 == REG || code0 == SUBREG)
2297 	  if (code1 == REG || code1 == SUBREG)
2299 	      indx = op0;		/* index + base */
2305 	      base = op0;		/* base + displacement */
2310       else if (code0 == MULT)
2312 	  indx  = XEXP (op0, 0);
2313 	  scale = XEXP (op0, 1);
2315 	  if (code1 == REG || code1 == SUBREG)
2316 	    base = op1;			/* index*scale + base */
2319 	    disp = op1;			/* index*scale + disp */
2322       else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2324 	  indx  = XEXP (XEXP (op0, 0), 0);	/* index*scale + base + disp */
2325 	  scale = XEXP (XEXP (op0, 0), 1);
2326 	  base  = XEXP (op0, 1);
2330       else if (code0 == PLUS)
2332 	  indx = XEXP (op0, 0);		/* index + base + disp */
2333 	  base = XEXP (op0, 1);
2339 	  ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2344   else if (GET_CODE (addr) == MULT)
2346       indx  = XEXP (addr, 0);		/* index*scale */
2347       scale = XEXP (addr, 1);
2351     disp = addr;			/* displacement */
2353   /* Allow arg pointer and stack pointer as index if there is not scaling */
2354   if (base && indx && !scale
2355       && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2362   /* Validate base register */
2363   /* Don't allow SUBREG's here, it can lead to spill failures when the base
2364      is one word out of a two word structure, which is represented internally
2368       if (GET_CODE (base) != REG)
2370 	  ADDR_INVALID ("Base is not a register.\n", base);
2374       if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2375 	  || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2377 	  ADDR_INVALID ("Base is not valid.\n", base);
2382   /* Validate index register */
2383   /* Don't allow SUBREG's here, it can lead to spill failures when the index
2384      is one word out of a two word structure, which is represented internally
2388       if (GET_CODE (indx) != REG)
2390 	  ADDR_INVALID ("Index is not a register.\n", indx);
2394       if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2395 	  || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2397 	  ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale with no index register cannot be produced by the
   decomposition above, so this is an internal error.  */
2402     abort ();				/* scale w/o index invalid */
2404   /* Validate scale factor */
2407       HOST_WIDE_INT value;
2409       if (GET_CODE (scale) != CONST_INT)
2411 	  ADDR_INVALID ("Scale is not valid.\n", scale);
2415       value = INTVAL (scale);
/* The i386 addressing modes only support scale factors 1, 2, 4, 8.  */
2416       if (value != 1 && value != 2 && value != 4 && value != 8)
2418 	  ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2423   /* Validate displacement
2424      Constant pool addresses must be handled special.  They are
2425      considered legitimate addresses, but only if not used with regs.
2426      When printed, the output routines know to print the reference with the
2427      PIC reg, even though the PIC reg doesn't appear in the RTL. */
2430       if (GET_CODE (disp) == SYMBOL_REF
2431 	  && CONSTANT_POOL_ADDRESS_P (disp)
2436       else if (!CONSTANT_ADDRESS_P (disp))
2438 	  ADDR_INVALID ("Displacement is not valid.\n", disp);
2442       else if (GET_CODE (disp) == CONST_DOUBLE)
2444 	  ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under PIC, a symbolic displacement must be paired with the PIC
   register as base or (unscaled) index.  */
2448       else if (flag_pic && SYMBOLIC_CONST (disp)
2449 	       && base != pic_offset_table_rtx
2450 	       && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2452 	  ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2456       else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2457 	       && (base != NULL_RTX || indx != NULL_RTX))
2459 	  ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2464   if (TARGET_DEBUG_ADDR)
2465     fprintf (stderr, "Address is valid.\n");
2467   /* Everything looks valid, return true */
2472 /* Return a legitimate reference for ORIG (an address) using the
2473 register REG. If REG is 0, a new pseudo is generated.
2475 There are three types of references that must be handled:
2477 1. Global data references must load the address from the GOT, via
2478 the PIC reg. An insn is emitted to do this load, and the reg is
2481 2. Static data references must compute the address as an offset
2482 from the GOT, whose base is in the PIC reg. An insn is emitted to
2483 compute the address into a reg, and the reg is returned. Static
2484 data objects have SYMBOL_REF_FLAG set to differentiate them from
2485 global data objects.
2487 3. Constant pool addresses must be handled special. They are
2488 considered legitimate addresses, but only if not used with regs.
2489 When printed, the output routines know to print the reference with the
2490 PIC reg, even though the PIC reg doesn't appear in the RTL.
2492 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2493 reg also appears in the address (except for constant pool references,
2496 "switch" statements also require special handling when generating
2497 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* See the large comment above for the three reference kinds handled.
   ORIG is the address; REG (possibly 0) is the register to load it
   into.  NOTE(review): some lines are elided in this view.  */
2500 legitimize_pic_address (orig, reg)
2507   if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant pool references are already legitimate (case 3 above).  */
2509       if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2514 	reg = gen_reg_rtx (Pmode);
/* Static data (SYMBOL_REF_FLAG) and labels: GOT-relative offset added
   to the PIC reg.  Global data: load the address from the GOT entry.  */
2516       if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2517 	  || GET_CODE (addr) == LABEL_REF)
2518 	new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2520 	new = gen_rtx (MEM, Pmode,
2521 		       gen_rtx (PLUS, Pmode,
2522 				pic_offset_table_rtx, orig));
2524       emit_move_insn (reg, new);
2526       current_function_uses_pic_offset_table = 1;
2529   else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2533       if (GET_CODE (addr) == CONST)
2535 	  addr = XEXP (addr, 0);
2536 	  if (GET_CODE (addr) != PLUS)
/* Already PIC-reg relative: nothing more to do for this PLUS.  */
2540       if (XEXP (addr, 0) == pic_offset_table_rtx)
2544 	reg = gen_reg_rtx (Pmode);
/* Legitimize both halves; reuse REG for the second only if the first
   did not consume it.  */
2546       base = legitimize_pic_address (XEXP (addr, 0), reg);
2547       addr = legitimize_pic_address (XEXP (addr, 1),
2548 				     base == reg ? NULL_RTX : reg);
2550       if (GET_CODE (addr) == CONST_INT)
2551 	return plus_constant (base, INTVAL (addr));
/* Reassociate so the constant ends up outermost.  */
2553       if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2555 	  base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2556 	  addr = XEXP (addr, 1);
2558       return gen_rtx (PLUS, Pmode, base, addr);
2564 /* Emit insns to move operands[1] into operands[0]. */
/* Emit insns to move operands[1] into operands[0] under PIC (see
   comment above).  A symbolic source headed for a MEM destination is
   first forced into a register; the source is then legitimized with a
   scratch (the destination itself during reload, when no new pseudos
   may be created).  */
2567 emit_pic_move (operands, mode)
2569      enum machine_mode mode;
2571   rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2573   if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2574     operands[1] = (rtx) force_reg (SImode, operands[1]);
2576     operands[1] = legitimize_pic_address (operands[1], temp);
2580 /* Try machine-dependent ways of modifying an illegitimate address
2581 to be legitimate. If we find one, return the new, valid address.
2582 This macro is used in only one place: `memory_address' in explow.c.
2584 OLDX is the address as it was before break_out_memory_refs was called.
2585 In some cases it is useful to look at this to decide what needs to be done.
2587 MODE and WIN are passed so that this macro can use
2588 GO_IF_LEGITIMATE_ADDRESS.
2590 It is always safe for this macro to do nothing. It exists to recognize
2591 opportunities to optimize the output.
2593 For the 80386, we handle X+REG by loading X into a register R and
2594 using R+REG. R will go in a general reg and indexing will be used.
2595 However, if REG is a broken-out memory address or multiplication,
2596 nothing needs to be done because REG can certainly go in a general reg.
2598 When -fpic is used, special handling is needed for symbolic references.
2599 See comments by legitimize_pic_address in i386.c for details. */
2602 legitimize_address (x, oldx, mode)
2605 enum machine_mode mode;
/* NOTE(review): elided listing -- the return type, declarations of
   `x', `oldx', `log' and `changed', and many braces/returns are on
   lines not shown.  Visible tokens are preserved verbatim.  */
2610 if (TARGET_DEBUG_ADDR)
2612 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
/* Under -fpic, symbolic addresses need the full PIC treatment.  */
2616 if (flag_pic && SYMBOLIC_CONST (x))
2617 return legitimize_pic_address (x, 0);
2619 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
/* NOTE(review): `exact_log2' of the shift COUNT looks suspicious --
   (ashift x c) equals (mult x (1<<c)), so the multiplier should use c
   itself, not log2(c).  This matches the historical source; verify
   against upstream before relying on it.  */
2620 if (GET_CODE (x) == ASHIFT
2621 && GET_CODE (XEXP (x, 1)) == CONST_INT
2622 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2625 x = gen_rtx (MULT, Pmode,
2626 force_reg (Pmode, XEXP (x, 0)),
2627 GEN_INT (1 << log));
2630 if (GET_CODE (x) == PLUS)
2632 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2633 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2634 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2635 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2638 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2639 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2640 GEN_INT (1 << log));
2643 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2644 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2645 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2648 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2649 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2650 GEN_INT (1 << log));
2653 /* Put multiply first if it isn't already */
2654 if (GET_CODE (XEXP (x, 1)) == MULT)
2656 rtx tmp = XEXP (x, 0);
2657 XEXP (x, 0) = XEXP (x, 1);
2662 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2663 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2664 created by virtual register instantiation, register elimination, and
2665 similar optimizations. */
2666 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2669 x = gen_rtx (PLUS, Pmode,
2670 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2671 XEXP (XEXP (x, 1), 1));
2674 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2675 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2676 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2677 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2678 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2679 && CONSTANT_P (XEXP (x, 1)))
2681 rtx constant, other;
2683 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2685 constant = XEXP (x, 1);
2686 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2688 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2690 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2691 other = XEXP (x, 1);
2699 x = gen_rtx (PLUS, Pmode,
2700 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2701 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2702 plus_constant (other, INTVAL (constant)));
/* If the canonicalization made the address valid, stop here.  */
2706 if (changed && legitimate_address_p (mode, x, FALSE))
/* Otherwise force any MULT term into a register.  */
2709 if (GET_CODE (XEXP (x, 0)) == MULT)
2712 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2715 if (GET_CODE (XEXP (x, 1)) == MULT)
2718 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2722 && GET_CODE (XEXP (x, 1)) == REG
2723 && GET_CODE (XEXP (x, 0)) == REG)
2726 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2729 x = legitimize_pic_address (x, 0);
2732 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: compute one side into a fresh register.  */
2735 if (GET_CODE (XEXP (x, 0)) == REG)
2737 register rtx temp = gen_reg_rtx (Pmode);
2738 register rtx val = force_operand (XEXP (x, 1), temp);
2740 emit_move_insn (temp, val);
2746 else if (GET_CODE (XEXP (x, 1)) == REG)
2748 register rtx temp = gen_reg_rtx (Pmode);
2749 register rtx val = force_operand (XEXP (x, 0), temp);
2751 emit_move_insn (temp, val);
2762 /* Print an integer constant expression in assembler syntax. Addition
2763 and subtraction are the only arithmetic that may appear in these
2764 expressions. FILE is the stdio stream to write to, X is the rtx, and
2765 CODE is the operand print code from the output string. */
2768 output_pic_addr_const (file, x, code)
/* NOTE(review): elided listing -- parameter declarations, the local
   `buf', case labels and braces fall on lines not shown.  Visible
   tokens are preserved verbatim.  */
2775 switch (GET_CODE (x))
2786 if (GET_CODE (x) == SYMBOL_REF)
2787 assemble_name (file, XSTR (x, 0));
2790 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2791 CODE_LABEL_NUMBER (XEXP (x, 0)));
2792 assemble_name (asm_out_file, buf);
/* Choose the PIC relocation suffix: constant-pool entries and local
   symbols are GOT-offset relative, others go through the GOT/PLT.  */
2795 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2796 fprintf (file, "@GOTOFF(%%ebx)");
2797 else if (code == 'P')
2798 fprintf (file, "@PLT");
2799 else if (GET_CODE (x) == LABEL_REF)
2800 fprintf (file, "@GOTOFF");
2801 else if (! SYMBOL_REF_FLAG (x))
2802 fprintf (file, "@GOT");
2804 fprintf (file, "@GOTOFF");
2809 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2810 assemble_name (asm_out_file, buf);
2814 fprintf (file, "%d", INTVAL (x));
2818 /* This used to output parentheses around the expression,
2819 but that does not work on the 386 (either ATT or BSD assembler). */
2820 output_pic_addr_const (file, XEXP (x, 0), code);
2824 if (GET_MODE (x) == VOIDmode)
2826 /* We can use %d if the number is <32 bits and positive. */
2827 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2828 fprintf (file, "0x%x%08x",
2829 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2831 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2834 /* We can't handle floating point constants;
2835 PRINT_OPERAND must handle them. */
2836 output_operand_lossage ("floating constant misused");
2840 /* Some assemblers need integer constants to appear last (eg masm). */
2841 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2843 output_pic_addr_const (file, XEXP (x, 1), code);
2844 if (INTVAL (XEXP (x, 0)) >= 0)
2845 fprintf (file, "+");
2846 output_pic_addr_const (file, XEXP (x, 0), code);
2850 output_pic_addr_const (file, XEXP (x, 0), code);
2851 if (INTVAL (XEXP (x, 1)) >= 0)
2852 fprintf (file, "+");
2853 output_pic_addr_const (file, XEXP (x, 1), code);
2858 output_pic_addr_const (file, XEXP (x, 0), code);
2859 fprintf (file, "-");
2860 output_pic_addr_const (file, XEXP (x, 1), code);
2864 output_operand_lossage ("invalid expression as operand");
2868 /* Append the correct conditional move suffix which corresponds to CODE */
2871 put_condition_code (code, reverse_cc, mode, file)
2874 enum mode_class mode;
/* NOTE(review): elided listing -- declarations of `code', `reverse_cc',
   `file' and `ieee', plus the switch/case labels, are on lines not
   shown.  Visible tokens are preserved verbatim.  */
/* Under IEEE FP with a plain 387 compare (no FCOMI), unordered results
   must not be reported as a successful comparison, so the condition is
   not simply reversed; see the MODE_FLOAT ternaries below.  */
2878 ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
2879 && ! (cc_prev_status.flags & CC_FCOMI));
2880 if (reverse_cc && ! ieee)
2881 code = reverse_condition (code);
2883 if (mode == MODE_INT)
2887 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2893 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2899 fputs ("ge", file); return;
2901 fputs ("g", file); return;
2903 fputs ("le", file); return;
2905 fputs ("l", file); return;
2907 fputs ("ae", file); return;
2909 fputs ("a", file); return;
2911 fputs ("be", file); return;
2913 fputs ("b", file); return;
2914 default: output_operand_lossage ("Invalid %%C operand");
2916 else if (mode == MODE_FLOAT)
2920 fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file); return;
2922 fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file); return;
2924 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file); return;
2926 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file); return;
2928 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file); return;
2930 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file); return;
2932 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file); return;
2934 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file); return;
2936 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file); return;
2938 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file); return;
2939 default: output_operand_lossage ("Invalid %%C operand");
2944 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2945 C -- print opcode suffix for set/cmov insn.
2946 c -- like C, but print reversed condition
2947 F -- print opcode suffix for fcmov insn.
2948 f -- like C, but print reversed condition
2949 R -- print the prefix for register names.
2950 z -- print the opcode suffix for the size of the current operand.
2951 * -- print a star (in certain assembler syntax)
2952 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2953 c -- don't print special prefixes before constant operands.
2954 J -- print the appropriate jump operand.
2955 s -- print a shift double count, followed by the assembler's argument
2957 b -- print the QImode name of the register for the indicated operand.
2958 %b0 would print %al if operands[0] is reg 0.
2959 w -- likewise, print the HImode name of the register.
2960 k -- likewise, print the SImode name of the register.
2961 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2962 y -- print "st(0)" instead of "st" as a register.
2963 P -- print as a PIC constant
2967 print_operand (file, x, code)
/* NOTE(review): elided listing -- parameter declarations, the outer
   `if (code)' switch with its case labels, and many braces are on
   lines not shown.  Visible tokens are preserved verbatim.  */
2982 PUT_OP_SIZE (code, 'l', file);
2986 PUT_OP_SIZE (code, 'w', file);
2990 PUT_OP_SIZE (code, 'b', file);
2994 PUT_OP_SIZE (code, 'l', file);
2998 PUT_OP_SIZE (code, 's', file);
3002 PUT_OP_SIZE (code, 't', file);
3006 /* 387 opcodes don't get size suffixes if the operands are
3009 if (STACK_REG_P (x))
3012 /* this is the size of op from size of operand */
3013 switch (GET_MODE_SIZE (GET_MODE (x)))
3016 PUT_OP_SIZE ('B', 'b', file);
3020 PUT_OP_SIZE ('W', 'w', file);
3024 if (GET_MODE (x) == SFmode)
3026 PUT_OP_SIZE ('S', 's', file);
3030 PUT_OP_SIZE ('L', 'l', file);
3034 PUT_OP_SIZE ('T', 't', file);
3038 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
3040 #ifdef GAS_MNEMONICS
3041 PUT_OP_SIZE ('Q', 'q', file);
3044 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
3048 PUT_OP_SIZE ('Q', 'l', file);
/* %J: emit the jump mnemonic for the comparison code in X, assuming
   the flags came from an arithmetic insn, not a compare.  */
3061 switch (GET_CODE (x))
3063 /* These conditions are appropriate for testing the result
3064 of an arithmetic operation, not for a compare operation.
3065 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3066 CC_Z_IN_NOT_C false and not floating point. */
3067 case NE: fputs ("jne", file); return;
3068 case EQ: fputs ("je", file); return;
3069 case GE: fputs ("jns", file); return;
3070 case LT: fputs ("js", file); return;
3071 case GEU: fputs ("jmp", file); return;
3072 case GTU: fputs ("jne", file); return;
3073 case LEU: fputs ("je", file); return;
3074 case LTU: fputs ("#branch never", file); return;
3076 /* no matching branches for GT nor LE */
3081 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
3083 PRINT_OPERAND (file, x, 0);
3084 fputs (AS2C (,) + 1, file);
3088 /* This is used by the conditional move instructions. */
3090 put_condition_code (GET_CODE (x), 0, MODE_INT, file);
3093 /* like above, but reverse condition */
3095 put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;
3098 put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
3101 /* like above, but reverse condition */
3103 put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
3110 sprintf (str, "invalid operand code `%c'", code);
3111 output_operand_lossage (str);
/* No (or handled) operand code: print the operand itself.  */
3115 if (GET_CODE (x) == REG)
3117 PRINT_REG (x, code, file);
3119 else if (GET_CODE (x) == MEM)
3121 PRINT_PTR (x, file);
3122 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3125 output_pic_addr_const (file, XEXP (x, 0), code);
3127 output_addr_const (file, XEXP (x, 0));
3130 output_address (XEXP (x, 0));
/* SFmode immediates are emitted as their 32-bit target bit pattern.  */
3132 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3134 REAL_VALUE_TYPE r; long l;
3135 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3136 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3137 PRINT_IMMED_PREFIX (file);
3138 fprintf (file, "0x%x", l);
3140 /* These float cases don't actually occur as immediate operands. */
3141 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3143 REAL_VALUE_TYPE r; char dstr[30];
3144 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3145 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3146 fprintf (file, "%s", dstr);
3148 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3150 REAL_VALUE_TYPE r; char dstr[30];
3151 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3152 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3153 fprintf (file, "%s", dstr);
3159 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3160 PRINT_IMMED_PREFIX (file);
3161 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3162 || GET_CODE (x) == LABEL_REF)
3163 PRINT_OFFSET_PREFIX (file);
3166 output_pic_addr_const (file, x, code);
3168 output_addr_const (file, x);
3172 /* Print a memory operand whose address is ADDR. */
3175 print_operand_address (file, addr)
/* NOTE(review): elided listing -- parameter declarations, the locals
   `offset'/`scale', case labels and braces are on lines not shown.
   Visible tokens are preserved verbatim.  */
3179 register rtx reg1, reg2, breg, ireg;
3182 switch (GET_CODE (addr))
3186 fprintf (file, "%se", RP);
3187 fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: split the address into constant offset, base reg (breg) and
   index (ireg, possibly a MULT carrying the scale).  */
3197 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3199 offset = XEXP (addr, 0);
3200 addr = XEXP (addr, 1);
3202 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3204 offset = XEXP (addr, 1);
3205 addr = XEXP (addr, 0);
3207 if (GET_CODE (addr) != PLUS) ;
3208 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3210 reg1 = XEXP (addr, 0);
3211 addr = XEXP (addr, 1);
3213 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3215 reg1 = XEXP (addr, 1);
3216 addr = XEXP (addr, 0);
3218 else if (GET_CODE (XEXP (addr, 0)) == REG)
3220 reg1 = XEXP (addr, 0);
3221 addr = XEXP (addr, 1);
3223 else if (GET_CODE (XEXP (addr, 1)) == REG)
3225 reg1 = XEXP (addr, 1);
3226 addr = XEXP (addr, 0);
3228 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3230 if (reg1 == 0) reg1 = addr;
3236 if (addr != 0) abort ();
/* Decide which captured reg is the base and which the index.  */
3239 if ((reg1 && GET_CODE (reg1) == MULT)
3240 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3245 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3251 if (ireg != 0 || breg != 0)
3258 output_pic_addr_const (file, addr, 0);
3260 else if (GET_CODE (addr) == LABEL_REF)
3261 output_asm_label (addr);
3264 output_addr_const (file, addr);
3267 if (ireg != 0 && GET_CODE (ireg) == MULT)
3269 scale = INTVAL (XEXP (ireg, 1));
3270 ireg = XEXP (ireg, 0);
3273 /* The stack pointer can only appear as a base register,
3274 never an index register, so exchange the regs if it is wrong. */
3276 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3285 /* output breg+ireg*scale */
3286 PRINT_B_I_S (breg, ireg, scale, file);
/* Bare MULT: scaled index with no base register.  */
3293 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3295 scale = INTVAL (XEXP (addr, 0));
3296 ireg = XEXP (addr, 1);
3300 scale = INTVAL (XEXP (addr, 1));
3301 ireg = XEXP (addr, 0);
3303 output_addr_const (file, const0_rtx);
3304 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
3309 if (GET_CODE (addr) == CONST_INT
3310 && INTVAL (addr) < 0x8000
3311 && INTVAL (addr) >= -0x8000)
3312 fprintf (file, "%d", INTVAL (addr));
3316 output_pic_addr_const (file, addr, 0);
3318 output_addr_const (file, addr);
3323 /* Set the cc_status for the results of an insn whose pattern is EXP.
3324 On the 80386, we assume that only test and compare insns, as well
3325 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3326 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3327 Also, we assume that jumps, moves and sCOND don't affect the condition
3328 codes. All else clobbers the condition codes, by assumption.
3330 We assume that ALL integer add, minus, etc. instructions affect the
3331 condition codes. This MUST be consistent with i386.md.
3333 We don't record any float test or compare - the redundant test &
3334 compare check in final.c does not handle stack-like regs correctly. */
3337 notice_update_cc (exp)
/* Update cc_status to describe the flags left behind by the insn whose
   pattern is EXP; see the block comment above for the assumptions.
   NOTE(review): elided listing -- the `rtx exp;' declaration, braces,
   CC_STATUS_INIT calls and returns fall on lines not shown.  Visible
   tokens are preserved verbatim except the precedence fix below.  */
3340 if (GET_CODE (exp) == SET)
3342 /* Jumps do not alter the cc's. */
3343 if (SET_DEST (exp) == pc_rtx)
3345 #ifdef IS_STACK_MODE
3346 /* Moving into a memory of stack_mode may have been moved
3347 in between the use and set of cc0 by loop_spl(). So
3348 old value of cc.status must be retained */
3349 if(GET_CODE(SET_DEST(exp))==MEM
3350 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3355 /* Moving register or memory into a register:
3356 it doesn't alter the cc's, but it might invalidate
3357 the RTX's which we remember the cc's came from.
3358 (Note that moving a constant 0 or 1 MAY set the cc's). */
3359 if (REG_P (SET_DEST (exp))
3360 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3361 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3363 if (cc_status.value1
3364 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3365 cc_status.value1 = 0;
3366 if (cc_status.value2
3367 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3368 cc_status.value2 = 0;
3371 /* Moving register into memory doesn't alter the cc's.
3372 It may invalidate the RTX's which we remember the cc's came from. */
3373 if (GET_CODE (SET_DEST (exp)) == MEM
3374 && (REG_P (SET_SRC (exp))
3375 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
/* FIX: `&&' binds tighter than `||', so the old condition read
   (value && value-is-MEM) || reg_mentioned_p (dest, value), which
   calls reg_mentioned_p with a null rtx when the value is 0 and never
   drops a non-MEM value that mentions the store destination.  Add
   parentheses so the null check guards both tests, mirroring the
   value1/value2 handling in the register case above.  */
3377 if (cc_status.value1 && (GET_CODE (cc_status.value1) == MEM
3378 || reg_mentioned_p (SET_DEST (exp), cc_status.value1)))
3379 cc_status.value1 = 0;
3380 if (cc_status.value2 && (GET_CODE (cc_status.value2) == MEM
3381 || reg_mentioned_p (SET_DEST (exp), cc_status.value2)))
3382 cc_status.value2 = 0;
3385 /* Function calls clobber the cc's. */
3386 else if (GET_CODE (SET_SRC (exp)) == CALL)
3391 /* Tests and compares set the cc's in predictable ways. */
3392 else if (SET_DEST (exp) == cc0_rtx)
3395 cc_status.value1 = SET_SRC (exp);
3398 /* Certain instructions affect the condition codes. */
3399 else if (GET_MODE (SET_SRC (exp)) == SImode
3400 || GET_MODE (SET_SRC (exp)) == HImode
3401 || GET_MODE (SET_SRC (exp)) == QImode)
3402 switch (GET_CODE (SET_SRC (exp)))
3404 case ASHIFTRT: case LSHIFTRT:
3406 /* Shifts on the 386 don't set the condition codes if the
3407 shift count is zero. */
3408 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3413 /* We assume that the CONST_INT is non-zero (this rtx would
3414 have been deleted if it were zero). */
3416 case PLUS: case MINUS: case NEG:
3417 case AND: case IOR: case XOR:
3418 cc_status.flags = CC_NO_OVERFLOW;
3419 cc_status.value1 = SET_SRC (exp);
3420 cc_status.value2 = SET_DEST (exp);
3431 else if (GET_CODE (exp) == PARALLEL
3432 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3434 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3436 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
/* Float compares leave their result in the 387 status word.  */
3439 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3441 cc_status.flags |= CC_IN_80387;
3442 if (TARGET_CMOVE && stack_regs_mentioned_p
3443 (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
3444 cc_status.flags |= CC_FCOMI;
3447 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3458 /* Split one or more DImode RTL references into pairs of SImode
3459 references. The RTL can be REG, offsettable MEM, integer constant, or
3460 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3461 split and "num" is its length. lo_half and hi_half are output arrays
3462 that parallel "operands". */
/* NOTE(review): elided listing -- the loop over `num', the `operands'
   parameter declaration and an abort() for unsplittable operands fall
   on lines not shown.  Visible tokens are preserved verbatim.  */
3465 split_di (operands, num, lo_half, hi_half)
3468 rtx lo_half[], hi_half[];
/* A DImode pseudo occupies two consecutive hard/virtual registers:
   low word in REGNO, high word in REGNO + 1 (little-endian i386).  */
3472 if (GET_CODE (operands[num]) == REG)
3474 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3475 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3477 else if (CONSTANT_P (operands[num]))
3479 split_double (operands[num], &lo_half[num], &hi_half[num]);
3481 else if (offsettable_memref_p (operands[num]))
3483 lo_half[num] = operands[num];
3484 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3491 /* Return 1 if this is a valid binary operation on a 387.
3492 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): elided listing -- the `rtx op;' declaration, the
   PLUS/MINUS/MULT/DIV case labels and the default return are on lines
   not shown.  Visible tokens are preserved verbatim.  */
3495 binary_387_op (op, mode)
3497 enum machine_mode mode;
3499 if (mode != VOIDmode && mode != GET_MODE (op))
3502 switch (GET_CODE (op))
/* Only floating-point modes can be handled by the 387.  */
3508 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3516 /* Return 1 if this is a valid shift or rotate operation on a 386.
3517 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): the function header line itself is elided from this
   listing (presumably `shift_op (op, mode)' -- confirm upstream);
   visible tokens are preserved verbatim.  */
3522 enum machine_mode mode;
3524 rtx operand = XEXP (op, 0);
3526 if (mode != VOIDmode && mode != GET_MODE (op))
/* The shifted operand must have the same integer mode as the op.  */
3529 if (GET_MODE (operand) != GET_MODE (op)
3530 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3533 return (GET_CODE (op) == ASHIFT
3534 || GET_CODE (op) == ASHIFTRT
3535 || GET_CODE (op) == LSHIFTRT
3536 || GET_CODE (op) == ROTATE
3537 || GET_CODE (op) == ROTATERT);
3540 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3541 MODE is not used. */
/* NOTE(review): the `rtx op;' declaration and braces are elided.  */
3544 VOIDmode_compare_op (op, mode)
3546 enum machine_mode mode;
3548 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3551 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3552 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3553 is the expression of the binary operation. The output may either be
3554 emitted here, or returned to the caller, like all output_* functions.
3556 There is no guarantee that the operands are the same mode, as they
3557 might be within FLOAT or FLOAT_EXTEND expressions. */
/* NOTE(review): elided listing -- the `base_op' local, case labels,
   abort() defaults and several braces/returns fall on lines not
   shown.  Visible tokens are preserved verbatim.  */
3560 output_387_binary_op (insn, operands)
3566 static char buf[100];
/* Pick the base mnemonic; integer operands select the fi* forms.  */
3568 switch (GET_CODE (operands[3]))
3571 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3572 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3579 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3580 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3587 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3588 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3595 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3596 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3606 strcpy (buf, base_op);
3608 switch (GET_CODE (operands[3]))
/* Commutative ops: canonicalize so operands[2] is the "other" one.  */
3612 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3615 operands[2] = operands[1];
3619 if (GET_CODE (operands[2]) == MEM)
3620 return strcat (buf, AS1 (%z2,%2));
3622 if (NON_STACK_REG_P (operands[1]))
3624 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3627 else if (NON_STACK_REG_P (operands[2]))
3629 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* Pop the 387 stack if the source register dies here.  */
3633 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3634 return strcat (buf, AS2 (p,%2,%0));
3636 if (STACK_TOP_P (operands[0]))
3637 return strcat (buf, AS2C (%y2,%0));
3639 return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (MINUS/DIV): may need the reversed r-form.  */
3643 if (GET_CODE (operands[1]) == MEM)
3644 return strcat (buf, AS1 (r%z1,%1));
3646 if (GET_CODE (operands[2]) == MEM)
3647 return strcat (buf, AS1 (%z2,%2));
3649 if (NON_STACK_REG_P (operands[1]))
3651 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3654 else if (NON_STACK_REG_P (operands[2]))
3656 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3660 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3663 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3664 return strcat (buf, AS2 (rp,%2,%0));
3666 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3667 return strcat (buf, AS2 (p,%1,%0));
3669 if (STACK_TOP_P (operands[0]))
3671 if (STACK_TOP_P (operands[1]))
3672 return strcat (buf, AS2C (%y2,%0));
3674 return strcat (buf, AS2 (r,%y1,%0));
3676 else if (STACK_TOP_P (operands[1]))
3677 return strcat (buf, AS2C (%1,%0));
3679 return strcat (buf, AS2 (r,%2,%0));
3686 /* Output code for INSN to convert a float to a signed int. OPERANDS
3687 are the insn operands. The output may be SFmode or DFmode and the
3688 input operand may be SImode or DImode. As a special case, make sure
3689 that the 387 stack top dies if the output mode is DImode, because the
3690 hardware requires this. */
/* NOTE(review): elided listing -- the `xops' declaration, an abort()
   and several braces are on lines not shown.  Visible tokens are
   preserved verbatim.  */
3693 output_fix_trunc (insn, operands)
3697 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3700 if (! STACK_TOP_P (operands[1]) ||
3701 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
/* Save the 387 control word, then set rounding to truncate (RC bits
   0x0c00 -- set via the high byte, hence the 12-bit/`%h1' dance)
   before the fist/fistp, as the C cast semantics require.  */
3704 xops[0] = GEN_INT (12);
3705 xops[1] = operands[4];
3707 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3708 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3709 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3710 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3711 output_asm_insn (AS1 (fldc%W3,%3), operands);
3713 if (NON_STACK_REG_P (operands[0]))
3714 output_to_reg (operands[0], stack_top_dies, operands[3]);
3715 else if (GET_CODE (operands[0]) == MEM)
3718 output_asm_insn (AS1 (fistp%z0,%0), operands);
3720 output_asm_insn (AS1 (fist%z0,%0), operands);
/* Restore the caller's control word afterwards.  */
3725 return AS1 (fldc%W2,%2);
3728 /* Output code for INSN to compare OPERANDS. The two operands might
3729 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3730 expression. If the compare is in mode CCFPEQmode, use an opcode that
3731 will not fault if a qNaN is present. */
/* NOTE(review): elided listing -- the `stack_top_dies' declaration,
   braces and some returns fall on lines not shown.  Visible tokens
   are preserved verbatim.  */
3734 output_float_compare (insn, operands)
3739 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3740 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
/* With CMOV available and stack-reg operands, prefer FCOMI, which
   puts the result directly in EFLAGS.  */
3743 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3745 cc_status.flags |= CC_FCOMI;
3746 cc_prev_status.flags &= ~CC_TEST_AX;
/* The 387 can only compare against the stack top; swap operands and
   remember the reversal for the condition-code user.  */
3749 if (! STACK_TOP_P (operands[0]))
3752 operands[0] = operands[1];
3754 cc_status.flags |= CC_REVERSED;
3757 if (! STACK_TOP_P (operands[0]))
3760 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3762 if (STACK_REG_P (operands[1])
3764 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3765 && REGNO (operands[1]) != FIRST_STACK_REG)
3767 /* If both the top of the 387 stack dies, and the other operand
3768 is also a stack register that dies, then this must be a
3769 `fcompp' float compare */
3771 if (unordered_compare)
3772 if (cc_status.flags & CC_FCOMI)
3774 output_asm_insn (AS2 (fucomip,%y1,%0), operands);
3775 output_asm_insn (AS1 (fstp, %y0), operands);
3779 output_asm_insn ("fucompp", operands);
3782 if (cc_status.flags & CC_FCOMI)
3784 output_asm_insn (AS2 (fcomip, %y1,%0), operands);
3785 output_asm_insn (AS1 (fstp, %y0), operands);
3789 output_asm_insn ("fcompp", operands);
3794 static char buf[100];
3796 /* Decide if this is the integer or float compare opcode, or the
3797 unordered float compare. */
3799 if (unordered_compare)
3800 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
3801 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3802 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
3804 strcpy (buf, "ficom");
3806 /* Modify the opcode if the 387 stack is to be popped. */
3811 if (NON_STACK_REG_P (operands[1]))
3812 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3813 else if (cc_status.flags & CC_FCOMI)
3815 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
3819 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3822 /* Now retrieve the condition code. */
3824 return output_fp_cc0_set (insn);
3827 /* Output opcodes to transfer the results of FP compare or test INSN
3828 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3829 result of the compare or test is unordered, no comparison operator
3830 succeeds except NE. Return an output template, if any. */
/* NOTE(review): elided listing -- declarations of `xops', `next' and
   `code', the `sahf' fast path, case labels and default abort() are
   on lines not shown.  Visible tokens are preserved verbatim.  */
3833 output_fp_cc0_set (insn)
3837 rtx unordered_label;
/* Store the 387 status word into AX (fnstsw).  */
3841 xops[0] = gen_rtx (REG, HImode, 0);
3842 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3844 if (! TARGET_IEEE_FP)
3846 if (!(cc_status.flags & CC_REVERSED))
/* Peek at the upcoming cc0 user to learn which condition it tests.  */
3848 next = next_cc0_user (insn);
3850 if (GET_CODE (next) == JUMP_INSN
3851 && GET_CODE (PATTERN (next)) == SET
3852 && SET_DEST (PATTERN (next)) == pc_rtx
3853 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3855 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3857 else if (GET_CODE (PATTERN (next)) == SET)
3859 code = GET_CODE (SET_SRC (PATTERN (next)));
3865 if (code == GT || code == LT || code == EQ || code == NE
3866 || code == LE || code == GE)
3867 { /* We will test eax directly */
3868 cc_status.flags |= CC_TEST_AX;
/* IEEE path: mask/compare the C0/C2/C3 bits (0x45 pattern) so that
   an unordered result satisfies only NE.  */
3875 next = next_cc0_user (insn);
3876 if (next == NULL_RTX)
3879 if (GET_CODE (next) == JUMP_INSN
3880 && GET_CODE (PATTERN (next)) == SET
3881 && SET_DEST (PATTERN (next)) == pc_rtx
3882 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3884 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3886 else if (GET_CODE (PATTERN (next)) == SET)
3888 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3889 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3890 else code = GET_CODE (SET_SRC (PATTERN (next)));
3892 else if (GET_CODE (PATTERN (next)) == PARALLEL
3893 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
3895 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
3896 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
3897 else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
3902 xops[0] = gen_rtx (REG, QImode, 0);
3907 xops[1] = GEN_INT (0x45);
3908 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3913 xops[1] = GEN_INT (0x45);
3914 xops[2] = GEN_INT (0x01);
3915 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3916 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3921 xops[1] = GEN_INT (0x05);
3922 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3927 xops[1] = GEN_INT (0x45);
3928 xops[2] = GEN_INT (0x40);
3929 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3930 output_asm_insn (AS1 (dec%B0,%h0), xops);
3931 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3936 xops[1] = GEN_INT (0x45);
3937 xops[2] = GEN_INT (0x40);
3938 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3939 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3944 xops[1] = GEN_INT (0x44);
3945 xops[2] = GEN_INT (0x40);
3946 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3947 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function cache of stack slots used for 387 control-word saves
   and similar scratch storage; indexed by machine mode and slot
   number.  */
3961 #define MAX_386_STACK_LOCALS 2
3963 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3965 /* Define the structure for the machine field in struct function. */
3966 struct machine_function
3968 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3971 /* Functions to save and restore i386_stack_locals.
3972 These will be called, via pointer variables,
3973 from push_function_context and pop_function_context. */
/* NOTE(review): the `struct function *p;' declaration and braces are
   elided from this listing.  */
3976 save_386_machine_status (p)
/* xmalloc never returns NULL (it aborts on OOM), so no check here.  */
3979 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3980 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3981 sizeof i386_stack_locals);
/* Inverse of save_386_machine_status: copy the saved slots back.  */
3985 restore_386_machine_status (p)
3988 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3989 sizeof i386_stack_locals);
3993 /* Clear stack slot assignments remembered from previous functions.
3994 This is called from INIT_EXPANDERS once before RTL is emitted for each
/* NOTE(review): the rest of this comment and the declaration of `n'
   are elided from this listing.  */
3998 clear_386_stack_locals ()
4000 enum machine_mode mode;
4003 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4004 mode = (enum machine_mode) ((int) mode + 1))
4005 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4006 i386_stack_locals[(int) mode][n] = NULL_RTX;
4008 /* Arrange to save and restore i386_stack_locals around nested functions. */
4009 save_machine_status = save_386_machine_status;
4010 restore_machine_status = restore_386_machine_status;
4013 /* Return a MEM corresponding to a stack slot with mode MODE.
4014 Allocate a new slot if necessary.
4016 The RTL for a function can have several slots available: N is
4017 which slot to use. */
4020 assign_386_stack_local (mode, n)
4021 enum machine_mode mode;
/* Reject out-of-range slot numbers (elided line presumably aborts).  */
4024 if (n < 0 || n >= MAX_386_STACK_LOCALS)
/* Lazily allocate the slot on first use and cache it.  */
4027 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4028 i386_stack_locals[(int) mode][n]
4029 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4031 return i386_stack_locals[(int) mode][n];
/* NOTE(review): two small operand predicates whose names/headers are
   elided from this listing -- one matches a MULT rtx, the other a
   DIV rtx.  Confirm the names against the full source.  */
4037 enum machine_mode mode;
4039 return (GET_CODE (op) == MULT);
4044 enum machine_mode mode;
4046 return (GET_CODE (op) == DIV);
4051 /* Create a new copy of an rtx.
4052 Recursively copies the operands of the rtx,
4053 except for those few rtx codes that are sharable.
4054 Doesn't share CONST */
/* NOTE(review): the function header is elided from this listing
   (presumably `copy_all_rtx (orig)' -- confirm upstream), as are the
   shared-code early returns, declarations of `copy'/`i'/`j', and the
   final return.  Visible tokens are preserved verbatim.  */
4062 register RTX_CODE code;
4063 register char *format_ptr;
4065 code = GET_CODE (orig);
4078 /* SCRATCH must be shared because they represent distinct values. */
4083 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4084 a LABEL_REF, it isn't sharable. */
4085 if (GET_CODE (XEXP (orig, 0)) == PLUS
4086 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4087 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4091 /* A MEM with a constant address is not sharable. The problem is that
4092 the constant address may need to be reloaded. If the mem is shared,
4093 then reloading one copy of this mem will cause all copies to appear
4094 to have been reloaded. */
/* Allocate the copy and replicate the rtx flag bits.  */
4097 copy = rtx_alloc (code);
4098 PUT_MODE (copy, GET_MODE (orig));
4099 copy->in_struct = orig->in_struct;
4100 copy->volatil = orig->volatil;
4101 copy->unchanging = orig->unchanging;
4102 copy->integrated = orig->integrated;
4104 copy->is_spill_rtx = orig->is_spill_rtx;
/* Walk the operand format string, deep-copying `e' (rtx) and `E'
   (rtvec) operands and copying scalar operands directly.  */
4106 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4108 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4110 switch (*format_ptr++)
4113 XEXP (copy, i) = XEXP (orig, i);
4114 if (XEXP (orig, i) != NULL)
4115 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4120 XEXP (copy, i) = XEXP (orig, i);
4125 XVEC (copy, i) = XVEC (orig, i);
4126 if (XVEC (orig, i) != NULL)
4128 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4129 for (j = 0; j < XVECLEN (copy, i); j++)
4130 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4135 XWINT (copy, i) = XWINT (orig, i);
4139 XINT (copy, i) = XINT (orig, i);
4144 XSTR (copy, i) = XSTR (orig, i);
4155 /* try to rewrite a memory address to make it valid */
/* Decompose the address of MEM_RTX into base/index/scale/offset parts,
   fold constant displacements into a single offset, and rebuild a
   legitimate address in place (XEXP (mem_rtx, 0)), preserving the
   spill flag.
   NOTE(review): the embedded 41xx-45xx numbers skip heavily; braces,
   several early returns, obfree calls, and else-arms are missing from
   this excerpt.  Code kept byte-identical.  */
4157 rewrite_address (mem_rtx)
4160 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4162 int offset_adjust = 0;
4163 int was_only_offset = 0;
4164 rtx mem_addr = XEXP (mem_rtx, 0);
/* Obstack checkpoint so temporary rtxs can be released on failure
   (presumably via obfree in the missing lines -- verify).  */
4165 char *storage = (char *) oballoc (0);
4167 int is_spill_rtx = 0;
/* Remember the MEM's in-struct and spill flags across the rewrite.  */
4169 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4170 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
/* First special case: re-associate (x + (reg + y)) for the combiner.  */
4172 if (GET_CODE (mem_addr) == PLUS &&
4173 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
4174 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4175 { /* this part is utilized by the combiner */
4177 gen_rtx (PLUS, GET_MODE (mem_addr),
4178 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4180 XEXP (XEXP (mem_addr, 1), 0)),
4181 XEXP (XEXP (mem_addr, 1), 1));
/* Keep the re-associated form only if it is now a valid address.  */
4175b
4182 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4184 XEXP (mem_rtx, 0) = ret_rtx;
4185 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4191 /* this part is utilized by loop.c */
4192 /* If the address contains PLUS (reg,const) and this pattern is invalid
4193 in this case - try to rewrite the address to make it valid intel1
4195 storage = (char *) oballoc (0);
4196 index_rtx = base_rtx = offset_rtx = NULL;
4197 /* find the base index and offset elements of the memory address */
4198 if (GET_CODE (mem_addr) == PLUS)
4200 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
/* (reg + reg): treat operand 1 as base, operand 0 as index.  */
4202 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4204 base_rtx = XEXP (mem_addr, 1);
4205 index_rtx = XEXP (mem_addr, 0);
/* (reg + other): reg is the base, the rest is the offset.  */
4209 base_rtx = XEXP (mem_addr, 0);
4210 offset_rtx = XEXP (mem_addr, 1);
4213 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4215 index_rtx = XEXP (mem_addr, 0);
4216 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4218 base_rtx = XEXP (mem_addr, 1);
4222 offset_rtx = XEXP (mem_addr, 1);
4225 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
/* Deeply nested (((reg * const) + const) + reg) + symbol shape.  */
4228 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4229 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4230 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4231 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4232 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4233 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4234 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4236 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4237 offset_rtx = XEXP (mem_addr, 1);
4238 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4239 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4243 offset_rtx = XEXP (mem_addr, 1);
4244 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4245 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
/* (const_int + x): the address is only an adjusted offset.  */
4248 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4250 was_only_offset = 1;
4253 offset_rtx = XEXP (mem_addr, 1);
4254 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4255 if (offset_adjust == 0)
4257 XEXP (mem_rtx, 0) = offset_rtx;
4258 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4268 else if (GET_CODE (mem_addr) == MULT)
4270 index_rtx = mem_addr;
/* Split (reg * const) index into register + scale factor.  */
4277 if (index_rtx && GET_CODE (index_rtx) == MULT)
4279 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4284 scale_rtx = XEXP (index_rtx, 1);
4285 scale = INTVAL (scale_rtx);
4286 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4288 /* now find which of the elements are invalid and try to fix them */
/* Constant index with no base: fold index*scale into the offset.  */
4289 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4291 offset_adjust = INTVAL (index_rtx) * scale;
4292 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4293 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4295 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4296 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
/* Fold the adjustment into CONST (symbol + int) on a fresh copy.  */
4298 offset_rtx = copy_all_rtx (offset_rtx);
4299 XEXP (XEXP (offset_rtx, 0), 1) =
4300 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4301 if (!CONSTANT_P (offset_rtx))
4308 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
/* Wrap symbol + adjustment in a CONST.  */
4311 gen_rtx (CONST, GET_MODE (offset_rtx),
4312 gen_rtx (PLUS, GET_MODE (offset_rtx),
4314 gen_rtx (CONST_INT, 0, offset_adjust)));
4315 if (!CONSTANT_P (offset_rtx))
4321 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4323 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4325 else if (!offset_rtx)
4327 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4329 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4330 XEXP (mem_rtx, 0) = offset_rtx;
/* Fold a (reg + const) base into reg plus an offset adjustment.  */
4333 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4334 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4335 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4337 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4338 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4340 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4342 offset_adjust += INTVAL (base_rtx);
/* Likewise fold a (reg + const) index, scaling the constant.  */
4345 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4346 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4347 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4349 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4350 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* Give up if index/base are still not legitimate (bodies missing).  */
4354 if (!LEGITIMATE_INDEX_P (index_rtx)
4355 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4363 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
/* Apply any accumulated constant adjustment to the offset part.  */
4369 if (offset_adjust != 0)
4373 if (GET_CODE (offset_rtx) == CONST &&
4374 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4376 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4377 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4379 offset_rtx = copy_all_rtx (offset_rtx);
4380 XEXP (XEXP (offset_rtx, 0), 1) =
4381 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4382 if (!CONSTANT_P (offset_rtx))
4389 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4392 gen_rtx (CONST, GET_MODE (offset_rtx),
4393 gen_rtx (PLUS, GET_MODE (offset_rtx),
4395 gen_rtx (CONST_INT, 0, offset_adjust)));
4396 if (!CONSTANT_P (offset_rtx))
4402 else if (GET_CODE (offset_rtx) == CONST_INT)
4404 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4414 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
/* Rebuild the address from the parts present: the cases below cover
   base+index(+offset), index(+offset), scaled index, base+offset,
   and pure offset.  A zero CONST_INT offset is dropped each time.  */
4422 if (GET_CODE (offset_rtx) == CONST_INT &&
4423 INTVAL (offset_rtx) == 0)
4425 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4426 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4432 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4433 gen_rtx (PLUS, GET_MODE (base_rtx),
4434 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4442 if (GET_CODE (offset_rtx) == CONST_INT &&
4443 INTVAL (offset_rtx) == 0)
4445 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4449 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4450 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4460 if (GET_CODE (offset_rtx) == CONST_INT &&
4461 INTVAL (offset_rtx) == 0)
4463 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4468 gen_rtx (PLUS, GET_MODE (offset_rtx),
4469 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4476 if (GET_CODE (offset_rtx) == CONST_INT &&
4477 INTVAL (offset_rtx) == 0)
4479 ret_rtx = index_rtx;
4483 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4492 if (GET_CODE (offset_rtx) == CONST_INT &&
4493 INTVAL (offset_rtx) == 0)
4499 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4502 else if (was_only_offset)
4504 ret_rtx = offset_rtx;
/* Install the rebuilt address and restore the spill flag.  */
4512 XEXP (mem_rtx, 0) = ret_rtx;
4513 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4525 /* return 1 if the first insn to set cc before insn also sets the register
4526 reg_rtx - otherwise return 0 */
/* NOTE(review): the embedded numbers skip; the function header type,
   return statements, and loop braces are not visible in this excerpt.  */
4528 last_to_set_cc (reg_rtx, insn)
4531 rtx prev_insn = PREV_INSN (insn);
/* Scan backwards; NOTEs are skipped, ordinary INSNs are examined.  */
4535 if (GET_CODE (prev_insn) == NOTE)
4538 else if (GET_CODE (prev_insn) == INSN)
/* Only single SET patterns are analyzable here.  */
4540 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* A SET of REG_RTX succeeds only if its source also sets cc.  */
4543 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4545 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* Any other insn that might set cc terminates the backward scan.  */
4551 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4558 prev_insn = PREV_INSN (prev_insn);
/* Classify rtx PAT by code: nonzero when an operation of this code is
   known not to clobber the condition codes.  NOTE(review): the switch
   cases and returns are missing from this excerpt.  */
4566 doesnt_set_condition_code (pat)
4569 switch (GET_CODE (pat))
/* Classify rtx PAT by code: nonzero when an operation of this code
   sets the condition codes.  NOTE(review): the switch cases and
   returns are missing from this excerpt.  */
4583 sets_condition_code (pat)
4586 switch (GET_CODE (pat))
/* Predicate: nonzero when OP is a CONST_INT in the range [0, 32]
   (shift/rotate-count sized immediate).  NOTE(review): the function
   body's return statements are missing from this excerpt.  */
4610 str_immediate_operand (op, mode)
4612 enum machine_mode mode;
4614 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* Fragment of a predicate: true when INSN is a single SET whose
   destination has a floating-point mode (DF/SF/XF).
   NOTE(review): the function header and returns are missing from
   this excerpt.  */
4626 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4627 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4628 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4629 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4638 Return 1 if the mode of the SET_DEST of insn is floating point
4639 and it is not an fld or a move from memory to memory.
4640 Otherwise return 0 */
/* NOTE(review): function header and returns missing from this excerpt.  */
4645 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4646 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4647 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4648 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4649 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4650 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* NOTE(review): SET_SRC is applied to INSN here, while every sibling
   test in this file uses SET_SRC (PATTERN (insn)) -- looks like a
   bug; confirm against the full file before changing.  */
4651 && GET_CODE (SET_SRC (insn)) != MEM)
4660 Return 1 if the mode of the SET_DEST floating point and is memory
4661 and the source is a register.
/* NOTE(review): function header and returns missing from this excerpt.  */
4667 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4668 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4669 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4670 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4671 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4672 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4682 Return 1 if dep_insn sets a register which insn uses as a base
4683 or index to reference memory.
4684 otherwise return 0 */
/* Address-generation-interlock test (Pentium scheduling): DEP_INSN
   feeds an address used by INSN either through a register SET or
   through a push's implicit stack-pointer update.
   NOTE(review): header lines and the final return are missing from
   this excerpt.  */
4687 agi_dependent (insn, dep_insn)
/* Case 1: DEP_INSN writes a register; check whether INSN uses that
   register inside a memory address.  */
4690 if (GET_CODE (dep_insn) == INSN
4691 && GET_CODE (PATTERN (dep_insn)) == SET
4692 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4694 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* Case 2: DEP_INSN is a push -- it modifies the stack pointer, so
   check whether INSN addresses memory through %esp.  */
4697 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4698 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4699 && push_operand (SET_DEST (PATTERN (dep_insn)),
4700 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4702 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4710 Return 1 if reg is used in rtl as a base or index for a memory ref
4711 otherwise return 0. */
/* Recursive walk over RTL: succeeds as soon as REG appears anywhere
   inside a MEM.  NOTE(review): header lines, the leaf-code early
   returns, and the final return are missing from this excerpt.  */
4714 reg_mentioned_in_mem (reg, rtl)
4719 register enum rtx_code code;
4724 code = GET_CODE (rtl);
/* A MEM containing REG anywhere is a hit.  */
4742 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over expression ('e') and vector ('E') operands.  */
4745 fmt = GET_RTX_FORMAT (code);
4746 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4751 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4753 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4758 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4765 /* Output the approprate insns for doing strlen if not just doing repnz; scasb
4767 operands[0] = result, initialized with the startaddress
4768 operands[1] = alignment of the address.
4769 operands[2] = scratch register, initialized with the startaddress when
4770 not aligned, otherwise undefined
4772 This is just the body. It needs the initialisations mentioned above and
4773 some address computing at the end. These things are done in i386.md. */
/* NOTE(review): the embedded 47xx-49xx numbers skip, so braces,
   else-arms, and the end of this function (past 4943) are not visible
   in this excerpt.  Code kept byte-identical.  */
4776 output_strlen_unroll (operands)
/* Set up the operand array used by the %-templates below:
   constants 0..4, byte masks, and the internal labels.  */
4781 xops[0] = operands[0]; /* Result */
4782 /* operands[1]; * Alignment */
4783 xops[1] = operands[2]; /* Scratch */
4784 xops[2] = GEN_INT (0);
4785 xops[3] = GEN_INT (2);
4786 xops[4] = GEN_INT (3);
4787 xops[5] = GEN_INT (4);
4788 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4789 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4790 xops[8] = gen_label_rtx (); /* label of main loop */
4791 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4792 xops[9] = gen_label_rtx (); /* pentium optimisation */
4793 xops[10] = gen_label_rtx (); /* end label 2 */
4794 xops[11] = gen_label_rtx (); /* end label 1 */
4795 xops[12] = gen_label_rtx (); /* end label */
4796 /* xops[13] * Temporary used */
4797 xops[14] = GEN_INT (0xff);
4798 xops[15] = GEN_INT (0xff00);
4799 xops[16] = GEN_INT (0xff0000);
4800 xops[17] = GEN_INT (0xff000000);
4802 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4804 /* is there a known alignment and is it less then 4 */
4805 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4807 /* is there a known alignment and is it not 2 */
4808 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4810 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4811 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4813 /* leave just the 3 lower bits */
4814 /* if this is a q-register, then the high part is used later */
4815 /* therefore user andl rather than andb */
4816 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4817 /* is aligned to 4-byte adress when zero */
4818 output_asm_insn (AS1 (je,%l8), xops);
4819 /* side-effect even Parity when %eax == 3 */
4820 output_asm_insn (AS1 (jp,%6), xops);
4822 /* is it aligned to 2 bytes ? */
4823 if (QI_REG_P (xops[1]))
/* NOTE(review): both branches emit the same template; either the
   distinct QI-reg form was lost from this excerpt or the branch is
   redundant in the original -- verify before simplifying.  */
4824 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4826 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4827 output_asm_insn (AS1 (je,%7), xops);
4831 /* since the alignment is 2, we have to check 2 or 0 bytes */
4833 /* check if is aligned to 4 - byte */
4834 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4835 /* is aligned to 4-byte adress when zero */
4836 output_asm_insn (AS1 (je,%l8), xops);
/* Byte-at-a-time prologue: xops[13] is a QImode MEM at the current
   result pointer.  */
4839 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4840 /* now, compare the bytes */
4841 /* compare with the high part of a q-reg gives shorter code */
4842 if (QI_REG_P (xops[1]))
4844 /* compare the first n unaligned byte on a byte per byte basis */
4845 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4846 /* when zero we reached the end */
4847 output_asm_insn (AS1 (je,%l12), xops);
4848 /* increment the address */
4849 output_asm_insn (AS1 (inc%L0,%0), xops);
4851 /* not needed with an alignment of 2 */
4852 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4854 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4855 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4856 output_asm_insn (AS1 (je,%l12), xops);
4857 output_asm_insn (AS1 (inc%L0,%0), xops);
4859 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4861 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Non-q-reg variant: compare each byte against the constant 0.  */
4865 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4866 output_asm_insn (AS1 (je,%l12), xops);
4867 output_asm_insn (AS1 (inc%L0,%0), xops);
4869 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4870 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4871 output_asm_insn (AS1 (je,%l12), xops);
4872 output_asm_insn (AS1 (inc%L0,%0), xops);
4874 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4875 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4877 output_asm_insn (AS1 (je,%l12), xops);
4878 output_asm_insn (AS1 (inc%L0,%0), xops);
4881 /* Generate loop to check 4 bytes at a time */
4882 /* IMHO it is not a good idea to align this loop. It gives only */
4883 /* huge programs, but does not help to speed up */
4884 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4885 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Main loop: load a 32-bit word and test each byte for zero.  */
4887 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4888 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4890 if (QI_REG_P (xops[1]))
4892 /* On i586 it is faster to combine the hi- and lo- part as
4893 a kind of lookahead. If anding both yields zero, then one
4894 of both *could* be zero, otherwise none of both is zero;
4895 this saves one instruction, on i486 this is slower
4896 tested with P-90, i486DX2-66, AMD486DX2-66 */
4899 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4900 output_asm_insn (AS1 (jne,%l9), xops);
4903 /* check first byte */
4904 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4905 output_asm_insn (AS1 (je,%l12), xops);
4907 /* check second byte */
4908 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4909 output_asm_insn (AS1 (je,%l11), xops);
4912 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
/* Mask-based byte tests for the non-q-reg path.  */
4916 /* check first byte */
4917 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4918 output_asm_insn (AS1 (je,%l12), xops);
4920 /* check second byte */
4921 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4922 output_asm_insn (AS1 (je,%l11), xops);
4925 /* check third byte */
4926 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4927 output_asm_insn (AS1 (je,%l10), xops);
4929 /* check fourth byte and increment address */
4930 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4931 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4932 output_asm_insn (AS1 (jne,%l8), xops);
4934 /* now generate fixups when the compare stops within a 4-byte word */
4935 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4937 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4938 output_asm_insn (AS1 (inc%L0,%0), xops);
4940 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4941 output_asm_insn (AS1 (inc%L0,%0), xops);
4943 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));