1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
41 #ifdef EXTRA_CONSTRAINT
42 /* If EXTRA_CONSTRAINT is defined, then the 'S'
43 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
44 asm statements that need 'S' for class SIREG will break. */
45 error EXTRA_CONSTRAINT conflicts with S constraint letter
46 /* The previous line used to be #error, but some compilers barf
47 even if the conditional was untrue. */
50 #ifndef CHECK_STACK_LIMIT
51 #define CHECK_STACK_LIMIT -1
54 /* Type of an operand for ix86_{binary,unary}_operator_ok */
62 /* Processor costs (relative to an add) */
63 struct processor_costs i386_cost = { /* 386 specific costs */
64 1, /* cost of an add instruction */
65 1, /* cost of a lea instruction */
66 3, /* variable shift costs */
67 2, /* constant shift costs */
68 6, /* cost of starting a multiply */
69 1, /* cost of multiply per each bit set */
70 23 /* cost of a divide/mod */
73 struct processor_costs i486_cost = { /* 486 specific costs */
74 1, /* cost of an add instruction */
75 1, /* cost of a lea instruction */
76 3, /* variable shift costs */
77 2, /* constant shift costs */
78 12, /* cost of starting a multiply */
79 1, /* cost of multiply per each bit set */
80 40 /* cost of a divide/mod */
83 struct processor_costs pentium_cost = {
84 1, /* cost of an add instruction */
85 1, /* cost of a lea instruction */
86 4, /* variable shift costs */
87 1, /* constant shift costs */
88 11, /* cost of starting a multiply */
89 0, /* cost of multiply per each bit set */
90 25 /* cost of a divide/mod */
93 struct processor_costs pentiumpro_cost = {
94 1, /* cost of an add instruction */
95 1, /* cost of a lea instruction */
96 3, /* variable shift costs */
97 1, /* constant shift costs */
98 4, /* cost of starting a multiply */
99 0, /* cost of multiply per each bit set */
100 17 /* cost of a divide/mod */
103 struct processor_costs k6_cost = {
104 1, /* cost of an add instruction */
105 1, /* cost of a lea instruction */
106 1, /* variable shift costs */
107 1, /* constant shift costs */
108 2, /* cost of starting a multiply */
109 0, /* cost of multiply per each bit set */
110 18 /* cost of a divide/mod */
113 struct processor_costs *ix86_cost = &pentium_cost;
115 /* Processor feature/optimization bitmasks. */
116 #define m_386 (1<<PROCESSOR_I386)
117 #define m_486 (1<<PROCESSOR_I486)
118 #define m_PENT (1<<PROCESSOR_PENTIUM)
119 #define m_PPRO (1<<PROCESSOR_PENTIUMPRO)
120 #define m_K6 (1<<PROCESSOR_K6)
122 const int x86_use_leave = m_386 | m_K6;
123 const int x86_push_memory = m_386 | m_K6;
124 const int x86_zero_extend_with_and = m_486 | m_PENT;
125 const int x86_movx = m_386 | m_PPRO | m_K6;
126 const int x86_double_with_add = ~m_386;
127 const int x86_use_bit_test = m_386;
128 const int x86_unroll_strlen = m_486 | m_PENT | m_PPRO;
129 const int x86_use_q_reg = m_PENT | m_PPRO | m_K6;
130 const int x86_use_any_reg = m_486;
131 const int x86_cmove = m_PPRO;
132 const int x86_deep_branch = m_PPRO| m_K6;
134 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
136 extern FILE *asm_out_file;
137 extern char *strcat ();
139 static void ix86_epilogue PROTO((int));
140 static void ix86_prologue PROTO((int));
142 char *singlemove_string ();
143 char *output_move_const_single ();
144 char *output_fp_cc0_set ();
146 char *hi_reg_name[] = HI_REGISTER_NAMES;
147 char *qi_reg_name[] = QI_REGISTER_NAMES;
148 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
150 /* Array of the smallest class containing reg number REGNO, indexed by
151 REGNO. Used by REGNO_REG_CLASS in i386.h. */
153 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
156 AREG, DREG, CREG, BREG,
158 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
160 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
161 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
166 /* Test and compare insns in i386.md store the information needed to
167 generate branch and scc insns here. */
169 struct rtx_def *i386_compare_op0 = NULL_RTX;
170 struct rtx_def *i386_compare_op1 = NULL_RTX;
171 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
173 /* which cpu are we scheduling for */
174 enum processor_type ix86_cpu;
176 /* which instruction set architecture to use. */
179 /* Strings to hold which cpu and instruction set architecture to use. */
180 char *ix86_cpu_string; /* for -mcpu=<xxx> */
181 char *ix86_arch_string; /* for -march=<xxx> */
183 /* Register allocation order */
184 char *i386_reg_alloc_order;
185 static char regs_allocated[FIRST_PSEUDO_REGISTER];
187 /* # of registers to use to pass arguments. */
188 char *i386_regparm_string;
190 /* i386_regparm_string as a number */
193 /* Alignment to use for loops and jumps: */
195 /* Power of two alignment for loops. */
196 char *i386_align_loops_string;
198 /* Power of two alignment for non-loop jumps. */
199 char *i386_align_jumps_string;
201 /* Values 1-5: see jump.c */
202 int i386_branch_cost;
203 char *i386_branch_cost_string;
205 /* Power of two alignment for functions. */
206 int i386_align_funcs;
207 char *i386_align_funcs_string;
209 /* Power of two alignment for loops. */
210 int i386_align_loops;
212 /* Power of two alignment for non-loop jumps. */
213 int i386_align_jumps;
215 /* Sometimes certain combinations of command options do not make
216 sense on a particular target machine. You can define a macro
217 `OVERRIDE_OPTIONS' to take account of this. This macro, if
218 defined, is executed once just after all the command options have
221 Don't use this macro to turn on various extra optimizations for
222 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
232 char *name; /* Canonical processor name. */
233 enum processor_type processor; /* Processor type enum value. */
234 struct processor_costs *cost; /* Processor costs */
235 int target_enable; /* Target flags to enable. */
236 int target_disable; /* Target flags to disable. */
237 } processor_target_table[]
238 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
239 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
240 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
241 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
242 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
244 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
245 &pentiumpro_cost, 0, 0},
246 {PROCESSOR_K6_STRING, PROCESSOR_K6, &k6_cost, 0, 0}};
248 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
250 #ifdef SUBTARGET_OVERRIDE_OPTIONS
251 SUBTARGET_OVERRIDE_OPTIONS;
254 /* Validate registers in register allocation order. */
255 if (i386_reg_alloc_order)
257 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
263 case 'a': regno = 0; break;
264 case 'd': regno = 1; break;
265 case 'c': regno = 2; break;
266 case 'b': regno = 3; break;
267 case 'S': regno = 4; break;
268 case 'D': regno = 5; break;
269 case 'B': regno = 6; break;
271 default: fatal ("Register '%c' is unknown", ch);
274 if (regs_allocated[regno])
275 fatal ("Register '%c' already specified in allocation order", ch);
277 regs_allocated[regno] = 1;
281 if (ix86_arch_string == 0)
283 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
284 if (ix86_cpu_string == 0)
285 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
288 for (i = 0; i < ptt_size; i++)
289 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
291 ix86_arch = processor_target_table[i].processor;
292 if (ix86_cpu_string == 0)
293 ix86_cpu_string = processor_target_table[i].name;
299 error ("bad value (%s) for -march= switch", ix86_arch_string);
300 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
301 ix86_arch = PROCESSOR_DEFAULT;
304 if (ix86_cpu_string == 0)
305 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
307 for (j = 0; j < ptt_size; j++)
308 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
310 ix86_cpu = processor_target_table[j].processor;
311 ix86_cost = processor_target_table[j].cost;
312 if (i > j && (int) ix86_arch >= (int) PROCESSOR_K6)
313 error ("-mcpu=%s does not support -march=%s",
314 ix86_cpu_string, ix86_arch_string);
316 target_flags |= processor_target_table[j].target_enable;
317 target_flags &= ~processor_target_table[j].target_disable;
323 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
324 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
325 ix86_cpu = PROCESSOR_DEFAULT;
328 /* Validate -mregparm= value. */
329 if (i386_regparm_string)
331 i386_regparm = atoi (i386_regparm_string);
332 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
333 fatal ("-mregparm=%d is not between 0 and %d",
334 i386_regparm, REGPARM_MAX);
337 /* The 486 suffers more from non-aligned cache line fills, and the
338 larger code size results in a larger cache foot-print and more misses.
339 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
341 def_align = (TARGET_486) ? 4 : 2;
343 /* Validate -malign-loops= value, or provide default. */
344 if (i386_align_loops_string)
346 i386_align_loops = atoi (i386_align_loops_string);
347 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
348 fatal ("-malign-loops=%d is not between 0 and %d",
349 i386_align_loops, MAX_CODE_ALIGN);
352 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
353 i386_align_loops = 4;
355 i386_align_loops = 2;
358 /* Validate -malign-jumps= value, or provide default. */
359 if (i386_align_jumps_string)
361 i386_align_jumps = atoi (i386_align_jumps_string);
362 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
363 fatal ("-malign-jumps=%d is not between 0 and %d",
364 i386_align_jumps, MAX_CODE_ALIGN);
367 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
368 i386_align_jumps = 4;
370 i386_align_jumps = def_align;
373 /* Validate -malign-functions= value, or provide default. */
374 if (i386_align_funcs_string)
376 i386_align_funcs = atoi (i386_align_funcs_string);
377 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
378 fatal ("-malign-functions=%d is not between 0 and %d",
379 i386_align_funcs, MAX_CODE_ALIGN);
382 i386_align_funcs = def_align;
384 /* Validate -mbranch-cost= value, or provide default. */
385 if (i386_branch_cost_string)
387 i386_branch_cost = atoi (i386_branch_cost_string);
388 if (i386_branch_cost < 0 || i386_branch_cost > 5)
389 fatal ("-mbranch-cost=%d is not between 0 and 5",
393 i386_branch_cost = 1;
395 /* Keep nonleaf frame pointers. */
396 if (TARGET_OMIT_LEAF_FRAME_POINTER)
397 flag_omit_frame_pointer = 1;
400 /* A C statement (sans semicolon) to choose the order in which to
401 allocate hard registers for pseudo-registers local to a basic
404 Store the desired register order in the array `reg_alloc_order'.
405 Element 0 should be the register to allocate first; element 1, the
406 next register; and so on.
408 The macro body should not assume anything about the contents of
409 `reg_alloc_order' before execution of the macro.
411 On most machines, it is not necessary to define this macro. */
414 order_regs_for_local_alloc ()
418 /* User specified the register allocation order. */
420 if (i386_reg_alloc_order)
422 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
428 case 'a': regno = 0; break;
429 case 'd': regno = 1; break;
430 case 'c': regno = 2; break;
431 case 'b': regno = 3; break;
432 case 'S': regno = 4; break;
433 case 'D': regno = 5; break;
434 case 'B': regno = 6; break;
437 reg_alloc_order[order++] = regno;
440 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
442 if (! regs_allocated[i])
443 reg_alloc_order[order++] = i;
447 /* If user did not specify a register allocation order, use natural order. */
450 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
451 reg_alloc_order[i] = i;
456 optimization_options (level, size)
458 int size ATTRIBUTE_UNUSED;
460 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
461 make the problem with not enough registers even worse. */
462 #ifdef INSN_SCHEDULING
464 flag_schedule_insns = 0;
468 /* Sign-extend a 16-bit constant */
471 i386_sext16_if_const (op)
474 if (GET_CODE (op) == CONST_INT)
476 HOST_WIDE_INT val = INTVAL (op);
477 HOST_WIDE_INT sext_val;
479 sext_val = val | ~0xffff;
481 sext_val = val & 0xffff;
483 op = GEN_INT (sext_val);
488 /* Return nonzero if the rtx is aligned */
491 i386_aligned_reg_p (regno)
494 return (regno == STACK_POINTER_REGNUM
495 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
502 /* Registers and immediate operands are always "aligned". */
503 if (GET_CODE (op) != MEM)
506 /* Don't even try to do any aligned optimizations with volatiles. */
507 if (MEM_VOLATILE_P (op))
510 /* Get address of memory operand. */
513 switch (GET_CODE (op))
520 /* Match "reg + offset" */
522 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
524 if (INTVAL (XEXP (op, 1)) & 3)
528 if (GET_CODE (op) != REG)
531 /* ... fall through ... */
534 return i386_aligned_reg_p (REGNO (op));
543 /* Return nonzero if INSN looks like it won't compute useful cc bits
544 as a side effect. This information is only a hint. */
547 i386_cc_probably_useless_p (insn)
550 return ! next_cc0_user (insn);
553 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
554 attribute for DECL. The attributes in ATTRIBUTES have previously been
558 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
559 tree decl ATTRIBUTE_UNUSED;
560 tree attributes ATTRIBUTE_UNUSED;
561 tree identifier ATTRIBUTE_UNUSED;
562 tree args ATTRIBUTE_UNUSED;
567 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
568 attribute for TYPE. The attributes in ATTRIBUTES have previously been
572 i386_valid_type_attribute_p (type, attributes, identifier, args)
574 tree attributes ATTRIBUTE_UNUSED;
578 if (TREE_CODE (type) != FUNCTION_TYPE
579 && TREE_CODE (type) != METHOD_TYPE
580 && TREE_CODE (type) != FIELD_DECL
581 && TREE_CODE (type) != TYPE_DECL)
584 /* Stdcall attribute says callee is responsible for popping arguments
585 if they are not variable. */
586 if (is_attribute_p ("stdcall", identifier))
587 return (args == NULL_TREE);
589 /* Cdecl attribute says the callee is a normal C declaration. */
590 if (is_attribute_p ("cdecl", identifier))
591 return (args == NULL_TREE);
593 /* Regparm attribute specifies how many integer arguments are to be
594 passed in registers. */
595 if (is_attribute_p ("regparm", identifier))
599 if (! args || TREE_CODE (args) != TREE_LIST
600 || TREE_CHAIN (args) != NULL_TREE
601 || TREE_VALUE (args) == NULL_TREE)
604 cst = TREE_VALUE (args);
605 if (TREE_CODE (cst) != INTEGER_CST)
608 if (TREE_INT_CST_HIGH (cst) != 0
609 || TREE_INT_CST_LOW (cst) < 0
610 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
619 /* Return 0 if the attributes for two types are incompatible, 1 if they
620 are compatible, and 2 if they are nearly compatible (which causes a
621 warning to be generated). */
624 i386_comp_type_attributes (type1, type2)
628 /* Check for mismatch of non-default calling convention. */
629 char *rtdstr = TARGET_RTD ? "cdecl" : "stdcall";
631 if (TREE_CODE (type1) != FUNCTION_TYPE)
634 /* Check for mismatched return types (cdecl vs stdcall). */
635 if (lookup_attribute (rtdstr, TYPE_ATTRIBUTES (type1))
636 != lookup_attribute (rtdstr, TYPE_ATTRIBUTES (type2)))
642 /* Value is the number of bytes of arguments automatically
643 popped when returning from a subroutine call.
644 FUNDECL is the declaration node of the function (as a tree),
645 FUNTYPE is the data type of the function (as a tree),
646 or for a library call it is an identifier node for the subroutine name.
647 SIZE is the number of bytes of arguments passed on the stack.
649 On the 80386, the RTD insn may be used to pop them if the number
650 of args is fixed, but if the number is variable then the caller
651 must pop them all. RTD can't be used for library calls now
652 because the library is compiled with the Unix compiler.
653 Use of RTD is a selectable option, since it is incompatible with
654 standard Unix calling sequences. If the option is not selected,
655 the caller must always pop the args.
657 The attribute stdcall is equivalent to RTD on a per module basis. */
660 i386_return_pops_args (fundecl, funtype, size)
665 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
667 /* Cdecl functions override -mrtd, and never pop the stack. */
668 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
670 /* Stdcall functions will pop the stack if not variable args. */
671 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
675 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
676 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
681 /* Lose any fake structure return argument. */
682 if (aggregate_value_p (TREE_TYPE (funtype)))
683 return GET_MODE_SIZE (Pmode);
689 /* Argument support functions. */
691 /* Initialize a variable CUM of type CUMULATIVE_ARGS
692 for a call to a function whose data type is FNTYPE.
693 For a library call, FNTYPE is 0. */
696 init_cumulative_args (cum, fntype, libname)
697 CUMULATIVE_ARGS *cum; /* Argument info to initialize */
698 tree fntype; /* tree ptr for function decl */
699 rtx libname; /* SYMBOL_REF of library name or 0 */
701 static CUMULATIVE_ARGS zero_cum;
702 tree param, next_param;
704 if (TARGET_DEBUG_ARG)
706 fprintf (stderr, "\ninit_cumulative_args (");
708 fprintf (stderr, "fntype code = %s, ret code = %s",
709 tree_code_name[(int) TREE_CODE (fntype)],
710 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
712 fprintf (stderr, "no fntype");
715 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
720 /* Set up the number of registers to use for passing arguments. */
721 cum->nregs = i386_regparm;
724 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
727 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
730 /* Determine if this function has variable arguments. This is
731 indicated by the last argument being 'void_type_mode' if there
732 are no variable arguments. If there are variable arguments, then
733 we won't pass anything in registers */
737 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
738 param != 0; param = next_param)
740 next_param = TREE_CHAIN (param);
741 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
746 if (TARGET_DEBUG_ARG)
747 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
752 /* Update the data in CUM to advance over an argument
753 of mode MODE and data type TYPE.
754 (TYPE is null for libcalls where that information may not be available.) */
757 function_arg_advance (cum, mode, type, named)
758 CUMULATIVE_ARGS *cum; /* current arg information */
759 enum machine_mode mode; /* current arg mode */
760 tree type; /* type of the argument or 0 if lib support */
761 int named; /* whether or not the argument was named */
764 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
765 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
767 if (TARGET_DEBUG_ARG)
769 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
770 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
785 /* Define where to put the arguments to a function.
786 Value is zero to push the argument on the stack,
787 or a hard register in which to store the argument.
789 MODE is the argument's machine mode.
790 TYPE is the data type of the argument (as a tree).
791 This is null for libcalls where that information may
793 CUM is a variable of type CUMULATIVE_ARGS which gives info about
794 the preceding args and about the function being called.
795 NAMED is nonzero if this argument is a named parameter
796 (otherwise it is an extra parameter matching an ellipsis). */
799 function_arg (cum, mode, type, named)
800 CUMULATIVE_ARGS *cum; /* current arg information */
801 enum machine_mode mode; /* current arg mode */
802 tree type; /* type of the argument or 0 if lib support */
803 int named; /* != 0 for normal args, == 0 for ... args */
807 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
808 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
812 /* For now, pass fp/complex values on the stack. */
821 if (words <= cum->nregs)
822 ret = gen_rtx_REG (mode, cum->regno);
826 if (TARGET_DEBUG_ARG)
829 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
830 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
833 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
835 fprintf (stderr, ", stack");
837 fprintf (stderr, " )\n");
843 /* For an arg passed partly in registers and partly in memory,
844 this is the number of registers used.
845 For args passed entirely in registers or entirely in memory, zero. */
848 function_arg_partial_nregs (cum, mode, type, named)
849 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED; /* current arg information */
850 enum machine_mode mode ATTRIBUTE_UNUSED; /* current arg mode */
851 tree type ATTRIBUTE_UNUSED; /* type of the argument or 0 if lib support */
852 int named ATTRIBUTE_UNUSED; /* != 0 for normal args, == 0 for ... args */
857 /* Output an insn whose source is a 386 integer register. SRC is the
858 rtx for the register, and TEMPLATE is the op-code template. SRC may
859 be either SImode or DImode.
861 The template will be output with operands[0] as SRC, and operands[1]
862 as a pointer to the top of the 386 stack. So a call from floatsidf2
863 would look like this:
865 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
867 where %z0 corresponds to the caller's operands[1], and is used to
868 emit the proper size suffix.
870 ??? Extend this to handle HImode - a 387 can load and store HImode
874 output_op_from_reg (src, template)
879 int size = GET_MODE_SIZE (GET_MODE (src));
882 xops[1] = AT_SP (Pmode);
883 xops[2] = GEN_INT (size);
884 xops[3] = stack_pointer_rtx;
886 if (size > UNITS_PER_WORD)
890 if (size > 2 * UNITS_PER_WORD)
892 high = gen_rtx_REG (SImode, REGNO (src) + 2);
893 output_asm_insn (AS1 (push%L0,%0), &high);
896 high = gen_rtx_REG (SImode, REGNO (src) + 1);
897 output_asm_insn (AS1 (push%L0,%0), &high);
900 output_asm_insn (AS1 (push%L0,%0), &src);
901 output_asm_insn (template, xops);
902 output_asm_insn (AS2 (add%L3,%2,%3), xops);
905 /* Output an insn to pop an value from the 387 top-of-stack to 386
906 register DEST. The 387 register stack is popped if DIES is true. If
907 the mode of DEST is an integer mode, a `fist' integer store is done,
908 otherwise a `fst' float store is done. */
911 output_to_reg (dest, dies, scratch_mem)
917 int size = GET_MODE_SIZE (GET_MODE (dest));
920 xops[0] = AT_SP (Pmode);
922 xops[0] = scratch_mem;
924 xops[1] = stack_pointer_rtx;
925 xops[2] = GEN_INT (size);
929 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
931 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
934 output_asm_insn (AS1 (fistp%z3,%y0), xops);
935 else if (GET_MODE (xops[3]) == DImode && ! dies)
937 /* There is no DImode version of this without a stack pop, so
938 we must emulate it. It doesn't matter much what the second
939 instruction is, because the value being pushed on the FP stack
940 is not used except for the following stack popping store.
941 This case can only happen without optimization, so it doesn't
942 matter that it is inefficient. */
943 output_asm_insn (AS1 (fistp%z3,%0), xops);
944 output_asm_insn (AS1 (fild%z3,%0), xops);
947 output_asm_insn (AS1 (fist%z3,%y0), xops);
950 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
953 output_asm_insn (AS1 (fstp%z3,%y0), xops);
956 if (GET_MODE (dest) == XFmode)
958 output_asm_insn (AS1 (fstp%z3,%y0), xops);
959 output_asm_insn (AS1 (fld%z3,%y0), xops);
962 output_asm_insn (AS1 (fst%z3,%y0), xops);
970 output_asm_insn (AS1 (pop%L0,%0), &dest);
972 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
975 if (size > UNITS_PER_WORD)
977 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
979 output_asm_insn (AS1 (pop%L0,%0), &dest);
982 xops[0] = adj_offsettable_operand (xops[0], 4);
984 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
987 if (size > 2 * UNITS_PER_WORD)
989 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
991 output_asm_insn (AS1 (pop%L0,%0), &dest);
994 xops[0] = adj_offsettable_operand (xops[0], 4);
995 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
1002 singlemove_string (operands)
1006 if (GET_CODE (operands[0]) == MEM
1007 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
1009 if (XEXP (x, 0) != stack_pointer_rtx)
1011 return "push%L1 %1";
1013 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
1014 return output_move_const_single (operands);
1015 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
1016 return AS2 (mov%L0,%1,%0);
1017 else if (CONSTANT_P (operands[1]))
1018 return AS2 (mov%L0,%1,%0);
1021 output_asm_insn ("push%L1 %1", operands);
1026 /* Return a REG that occurs in ADDR with coefficient 1.
1027 ADDR can be effectively incremented by incrementing REG. */
1030 find_addr_reg (addr)
1033 while (GET_CODE (addr) == PLUS)
1035 if (GET_CODE (XEXP (addr, 0)) == REG)
1036 addr = XEXP (addr, 0);
1037 else if (GET_CODE (XEXP (addr, 1)) == REG)
1038 addr = XEXP (addr, 1);
1039 else if (CONSTANT_P (XEXP (addr, 0)))
1040 addr = XEXP (addr, 1);
1041 else if (CONSTANT_P (XEXP (addr, 1)))
1042 addr = XEXP (addr, 0);
1047 if (GET_CODE (addr) == REG)
1052 /* Output an insn to add the constant N to the register X. */
1063 output_asm_insn (AS1 (dec%L0,%0), xops);
1065 output_asm_insn (AS1 (inc%L0,%0), xops);
1066 else if (n < 0 || n == 128)
1068 xops[1] = GEN_INT (-n);
1069 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
1073 xops[1] = GEN_INT (n);
1074 output_asm_insn (AS2 (add%L0,%1,%0), xops);
1078 /* Output assembler code to perform a doubleword move insn
1079 with operands OPERANDS. */
1082 output_move_double (operands)
1085 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
1089 rtx addreg0 = 0, addreg1 = 0;
1090 int dest_overlapped_low = 0;
1091 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1096 /* First classify both operands. */
1098 if (REG_P (operands[0]))
1100 else if (offsettable_memref_p (operands[0]))
1102 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1104 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1106 else if (GET_CODE (operands[0]) == MEM)
1111 if (REG_P (operands[1]))
1113 else if (CONSTANT_P (operands[1]))
1115 else if (offsettable_memref_p (operands[1]))
1117 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1119 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1121 else if (GET_CODE (operands[1]) == MEM)
1126 /* Check for the cases that the operand constraints are not
1127 supposed to allow to happen. Abort if we get one,
1128 because generating code for these cases is painful. */
1130 if (optype0 == RNDOP || optype1 == RNDOP)
1133 /* If one operand is decrementing and one is incrementing
1134 decrement the former register explicitly
1135 and change that operand into ordinary indexing. */
1137 if (optype0 == PUSHOP && optype1 == POPOP)
1139 /* ??? Can this ever happen on i386? */
1140 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1141 asm_add (-size, operands[0]);
1142 if (GET_MODE (operands[1]) == XFmode)
1143 operands[0] = gen_rtx_MEM (XFmode, operands[0]);
1144 else if (GET_MODE (operands[0]) == DFmode)
1145 operands[0] = gen_rtx_MEM (DFmode, operands[0]);
1147 operands[0] = gen_rtx_MEM (DImode, operands[0]);
1151 if (optype0 == POPOP && optype1 == PUSHOP)
1153 /* ??? Can this ever happen on i386? */
1154 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1155 asm_add (-size, operands[1]);
1156 if (GET_MODE (operands[1]) == XFmode)
1157 operands[1] = gen_rtx_MEM (XFmode, operands[1]);
1158 else if (GET_MODE (operands[1]) == DFmode)
1159 operands[1] = gen_rtx_MEM (DFmode, operands[1]);
1161 operands[1] = gen_rtx_MEM (DImode, operands[1]);
1165 /* If an operand is an unoffsettable memory ref, find a register
1166 we can increment temporarily to make it refer to the second word. */
1168 if (optype0 == MEMOP)
1169 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1171 if (optype1 == MEMOP)
1172 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1174 /* Ok, we can do one word at a time.
1175 Normally we do the low-numbered word first,
1176 but if either operand is autodecrementing then we
1177 do the high-numbered word first.
1179 In either case, set up in LATEHALF the operands to use
1180 for the high-numbered word and in some cases alter the
1181 operands in OPERANDS to be suitable for the low-numbered word. */
1185 if (optype0 == REGOP)
1187 middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1188 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
1190 else if (optype0 == OFFSOP)
1192 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1193 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1197 middlehalf[0] = operands[0];
1198 latehalf[0] = operands[0];
1201 if (optype1 == REGOP)
1203 middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1204 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
1206 else if (optype1 == OFFSOP)
1208 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1209 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1211 else if (optype1 == CNSTOP)
1213 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1215 REAL_VALUE_TYPE r; long l[3];
1217 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1218 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1219 operands[1] = GEN_INT (l[0]);
1220 middlehalf[1] = GEN_INT (l[1]);
1221 latehalf[1] = GEN_INT (l[2]);
1223 else if (CONSTANT_P (operands[1]))
1224 /* No non-CONST_DOUBLE constant should ever appear here. */
1229 middlehalf[1] = operands[1];
1230 latehalf[1] = operands[1];
1236 /* Size is not 12. */
1238 if (optype0 == REGOP)
1239 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1240 else if (optype0 == OFFSOP)
1241 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1243 latehalf[0] = operands[0];
1245 if (optype1 == REGOP)
1246 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1247 else if (optype1 == OFFSOP)
1248 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1249 else if (optype1 == CNSTOP)
1250 split_double (operands[1], &operands[1], &latehalf[1]);
1252 latehalf[1] = operands[1];
1255 /* If insn is effectively movd N (sp),-(sp) then we will do the
1256 high word first. We should use the adjusted operand 1
1257 (which is N+4 (sp) or N+8 (sp))
1258 for the low word and middle word as well,
1259 to compensate for the first decrement of sp. */
1260 if (optype0 == PUSHOP
1261 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1262 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1263 middlehalf[1] = operands[1] = latehalf[1];
1265 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1266 if the upper part of reg N does not appear in the MEM, arrange to
1267 emit the move late-half first. Otherwise, compute the MEM address
1268 into the upper part of N and use that as a pointer to the memory
1270 if (optype0 == REGOP
1271 && (optype1 == OFFSOP || optype1 == MEMOP))
1273 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1274 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1276 /* If both halves of dest are used in the src memory address,
1277 compute the address into latehalf of dest. */
1279 xops[0] = latehalf[0];
1280 xops[1] = XEXP (operands[1], 0);
1281 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1282 if (GET_MODE (operands[1]) == XFmode)
1284 operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
1285 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1286 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1290 operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
1291 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1296 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1298 /* Check for two regs used by both source and dest. */
1299 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1300 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1303 /* JRV says this can't happen: */
1304 if (addreg0 || addreg1)
1307 /* Only the middle reg conflicts; simply put it last. */
1308 output_asm_insn (singlemove_string (operands), operands);
1309 output_asm_insn (singlemove_string (latehalf), latehalf);
1310 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1314 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1315 /* If the low half of dest is mentioned in the source memory
1316 address, the arrange to emit the move late half first. */
1317 dest_overlapped_low = 1;
1320 /* If one or both operands autodecrementing,
1321 do the two words, high-numbered first. */
1323 /* Likewise, the first move would clobber the source of the second one,
1324 do them in the other order. This happens only for registers;
1325 such overlap can't happen in memory unless the user explicitly
1326 sets it up, and that is an undefined circumstance. */
1329 if (optype0 == PUSHOP || optype1 == PUSHOP
1330 || (optype0 == REGOP && optype1 == REGOP
1331 && REGNO (operands[0]) == REGNO (latehalf[1]))
1332 || dest_overlapped_low)
1335 if (optype0 == PUSHOP || optype1 == PUSHOP
1336 || (optype0 == REGOP && optype1 == REGOP
1337 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1338 || REGNO (operands[0]) == REGNO (latehalf[1])))
1339 || dest_overlapped_low)
1341 /* Make any unoffsettable addresses point at high-numbered word. */
1343 asm_add (size-4, addreg0);
1345 asm_add (size-4, addreg1);
1348 output_asm_insn (singlemove_string (latehalf), latehalf);
1350 /* Undo the adds we just did. */
1352 asm_add (-4, addreg0);
1354 asm_add (-4, addreg1);
1358 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1360 asm_add (-4, addreg0);
1362 asm_add (-4, addreg1);
1365 /* Do low-numbered word. */
1366 return singlemove_string (operands);
1369 /* Normal case: do the two words, low-numbered first. */
1371 output_asm_insn (singlemove_string (operands), operands);
1373 /* Do the middle one of the three words for long double */
1377 asm_add (4, addreg0);
1379 asm_add (4, addreg1);
1381 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1384 /* Make any unoffsettable addresses point at high-numbered word. */
1386 asm_add (4, addreg0);
1388 asm_add (4, addreg1);
1391 output_asm_insn (singlemove_string (latehalf), latehalf);
1393 /* Undo the adds we just did. */
1395 asm_add (4-size, addreg0);
1397 asm_add (4-size, addreg1);
1402 #define MAX_TMPS 2 /* max temporary registers used */
1404 /* Output the appropriate code to move push memory on the stack */
1407 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1419 } tmp_info[MAX_TMPS];
1421 rtx src = operands[1];
1424 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1425 int stack_offset = 0;
1429 if (! offsettable_memref_p (src))
1430 fatal_insn ("Source is not offsettable", insn);
1432 if ((length & 3) != 0)
1433 fatal_insn ("Pushing non-word aligned size", insn);
1435 /* Figure out which temporary registers we have available */
1436 for (i = tmp_start; i < n_operands; i++)
1438 if (GET_CODE (operands[i]) == REG)
1440 if (reg_overlap_mentioned_p (operands[i], src))
1443 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1444 if (max_tmps == MAX_TMPS)
1450 for (offset = length - 4; offset >= 0; offset -= 4)
1452 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1453 output_asm_insn (AS1(push%L0,%0), xops);
1459 for (offset = length - 4; offset >= 0; )
1461 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1463 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1464 tmp_info[num_tmps].push = AS1(push%L0,%1);
1465 tmp_info[num_tmps].xops[0]
1466 = adj_offsettable_operand (src, offset + stack_offset);
1470 for (i = 0; i < num_tmps; i++)
1471 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1473 for (i = 0; i < num_tmps; i++)
1474 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1477 stack_offset += 4*num_tmps;
1483 /* Output the appropriate code to move data between two memory locations */
1486 output_move_memory (operands, insn, length, tmp_start, n_operands)
1498 } tmp_info[MAX_TMPS];
1500 rtx dest = operands[0];
1501 rtx src = operands[1];
1502 rtx qi_tmp = NULL_RTX;
1508 if (GET_CODE (dest) == MEM
1509 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1510 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1511 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1513 if (! offsettable_memref_p (src))
1514 fatal_insn ("Source is not offsettable", insn);
1516 if (! offsettable_memref_p (dest))
1517 fatal_insn ("Destination is not offsettable", insn);
1519 /* Figure out which temporary registers we have available */
1520 for (i = tmp_start; i < n_operands; i++)
1522 if (GET_CODE (operands[i]) == REG)
1524 if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
1525 qi_tmp = operands[i];
1527 if (reg_overlap_mentioned_p (operands[i], dest))
1528 fatal_insn ("Temporary register overlaps the destination", insn);
1530 if (reg_overlap_mentioned_p (operands[i], src))
1531 fatal_insn ("Temporary register overlaps the source", insn);
1533 tmp_info[max_tmps++].xops[2] = operands[i];
1534 if (max_tmps == MAX_TMPS)
1540 fatal_insn ("No scratch registers were found to do memory->memory moves",
1543 if ((length & 1) != 0)
1546 fatal_insn ("No byte register found when moving odd # of bytes.",
1552 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1556 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1557 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1558 tmp_info[num_tmps].xops[0]
1559 = adj_offsettable_operand (dest, offset);
1560 tmp_info[num_tmps].xops[1]
1561 = adj_offsettable_operand (src, offset);
1567 else if (length >= 2)
1569 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1570 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1571 tmp_info[num_tmps].xops[0]
1572 = adj_offsettable_operand (dest, offset);
1573 tmp_info[num_tmps].xops[1]
1574 = adj_offsettable_operand (src, offset);
1583 for (i = 0; i < num_tmps; i++)
1584 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1586 for (i = 0; i < num_tmps; i++)
1587 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
1592 xops[0] = adj_offsettable_operand (dest, offset);
1593 xops[1] = adj_offsettable_operand (src, offset);
1595 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1596 output_asm_insn (AS2(mov%B0,%2,%0), xops);
1603 standard_80387_constant_p (x)
1606 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1611 if (setjmp (handler))
1614 set_float_handler (handler);
1615 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1616 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1617 is1 = REAL_VALUES_EQUAL (d, dconst1);
1618 set_float_handler (NULL_PTR);
1626 /* Note that on the 80387, other constants, such as pi,
1627 are much slower to load as standard constants
1628 than to load from doubles in memory! */
1629 /* ??? Not true on K6: all constants are equal cost. */
1636 output_move_const_single (operands)
1639 if (FP_REG_P (operands[0]))
1641 int conval = standard_80387_constant_p (operands[1]);
1650 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1652 REAL_VALUE_TYPE r; long l;
1654 if (GET_MODE (operands[1]) == XFmode)
1657 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1658 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1659 operands[1] = GEN_INT (l);
1662 return singlemove_string (operands);
1665 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1666 reference and a constant. */
1669 symbolic_operand (op, mode)
1671 enum machine_mode mode ATTRIBUTE_UNUSED;
1673 switch (GET_CODE (op))
1681 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1682 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1683 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1690 /* Test for a valid operand for a call instruction.
1691 Don't allow the arg pointer register or virtual regs
1692 since they may change into reg + const, which the patterns
1693 can't handle yet. */
1696 call_insn_operand (op, mode)
1698 enum machine_mode mode ATTRIBUTE_UNUSED;
1700 if (GET_CODE (op) == MEM
1701 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1702 /* This makes a difference for PIC. */
1703 && general_operand (XEXP (op, 0), Pmode))
1704 || (GET_CODE (XEXP (op, 0)) == REG
1705 && XEXP (op, 0) != arg_pointer_rtx
1706 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1707 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1713 /* Like call_insn_operand but allow (mem (symbol_ref ...))
1717 expander_call_insn_operand (op, mode)
1719 enum machine_mode mode ATTRIBUTE_UNUSED;
1721 if (GET_CODE (op) == MEM
1722 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1723 || (GET_CODE (XEXP (op, 0)) == REG
1724 && XEXP (op, 0) != arg_pointer_rtx
1725 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1726 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1732 /* Return 1 if OP is a comparison operator that can use the condition code
1733 generated by an arithmetic operation. */
1736 arithmetic_comparison_operator (op, mode)
1738 enum machine_mode mode;
1742 if (mode != VOIDmode && mode != GET_MODE (op))
1745 code = GET_CODE (op);
1746 if (GET_RTX_CLASS (code) != '<')
1749 return (code != GT && code != LE);
1753 ix86_logical_operator (op, mode)
1755 enum machine_mode mode ATTRIBUTE_UNUSED;
1757 return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1761 /* Returns 1 if OP contains a symbol reference */
1764 symbolic_reference_mentioned_p (op)
1770 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1773 fmt = GET_RTX_FORMAT (GET_CODE (op));
1774 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1780 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1781 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1785 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1792 /* Attempt to expand a binary operator. Make the expansion closer to the
1793 actual machine, then just general_operand, which will allow 3 separate
1794 memory references (one output, two input) in a single insn. Return
1795 whether the insn fails, or succeeds. */
1798 ix86_expand_binary_operator (code, mode, operands)
1800 enum machine_mode mode;
1805 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1806 if (GET_RTX_CLASS (code) == 'c'
1807 && (rtx_equal_p (operands[0], operands[2])
1808 || immediate_operand (operands[1], mode)))
1810 rtx temp = operands[1];
1811 operands[1] = operands[2];
1815 /* If optimizing, copy to regs to improve CSE */
1816 if (TARGET_PSEUDO && optimize
1817 && ((reload_in_progress | reload_completed) == 0))
1819 if (GET_CODE (operands[1]) == MEM
1820 && ! rtx_equal_p (operands[0], operands[1]))
1821 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1823 if (GET_CODE (operands[2]) == MEM)
1824 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1826 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1828 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1830 emit_move_insn (temp, operands[1]);
1836 if (!ix86_binary_operator_ok (code, mode, operands))
1838 /* If not optimizing, try to make a valid insn (optimize code
1839 previously did this above to improve chances of CSE) */
1841 if ((! TARGET_PSEUDO || !optimize)
1842 && ((reload_in_progress | reload_completed) == 0)
1843 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1846 if (GET_CODE (operands[1]) == MEM
1847 && ! rtx_equal_p (operands[0], operands[1]))
1849 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1853 if (GET_CODE (operands[2]) == MEM)
1855 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1859 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1861 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1863 emit_move_insn (temp, operands[1]);
1868 if (modified && ! ix86_binary_operator_ok (code, mode, operands))
1878 /* Return TRUE or FALSE depending on whether the binary operator meets the
1879 appropriate constraints. */
1882 ix86_binary_operator_ok (code, mode, operands)
1884 enum machine_mode mode ATTRIBUTE_UNUSED;
1887 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1888 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1891 /* Attempt to expand a unary operator. Make the expansion closer to the
1892 actual machine, then just general_operand, which will allow 2 separate
1893 memory references (one output, one input) in a single insn. Return
1894 whether the insn fails, or succeeds. */
1897 ix86_expand_unary_operator (code, mode, operands)
1899 enum machine_mode mode;
1902 /* If optimizing, copy to regs to improve CSE */
1905 && ((reload_in_progress | reload_completed) == 0)
1906 && GET_CODE (operands[1]) == MEM)
1907 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1909 if (! ix86_unary_operator_ok (code, mode, operands))
1911 if ((! TARGET_PSEUDO || optimize == 0)
1912 && ((reload_in_progress | reload_completed) == 0)
1913 && GET_CODE (operands[1]) == MEM)
1915 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1916 if (! ix86_unary_operator_ok (code, mode, operands))
1926 /* Return TRUE or FALSE depending on whether the unary operator meets the
1927 appropriate constraints. */
1930 ix86_unary_operator_ok (code, mode, operands)
1931 enum rtx_code code ATTRIBUTE_UNUSED;
1932 enum machine_mode mode ATTRIBUTE_UNUSED;
1933 rtx operands[2] ATTRIBUTE_UNUSED;
1938 static rtx pic_label_rtx;
1939 static char pic_label_name [256];
1940 static int pic_label_no = 0;
1942 /* This function generates code for -fpic that loads %ebx with
1943 the return address of the caller and then returns. */
1946 asm_output_function_prefix (file, name)
1948 char *name ATTRIBUTE_UNUSED;
1951 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1952 || current_function_uses_const_pool);
1953 xops[0] = pic_offset_table_rtx;
1954 xops[1] = stack_pointer_rtx;
1956 /* Deep branch prediction favors having a return for every call. */
1957 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1961 if (pic_label_rtx == 0)
1963 pic_label_rtx = gen_label_rtx ();
1964 ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
1965 LABEL_NAME (pic_label_rtx) = pic_label_name;
1968 prologue_node = make_node (FUNCTION_DECL);
1969 DECL_RESULT (prologue_node) = 0;
1971 /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
1972 internal (non-global) label that's being emitted, it didn't make
1973 sense to have .type information for local labels. This caused
1974 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
1975 me debug info for a label that you're declaring non-global?) this
1976 was changed to call ASM_OUTPUT_LABEL() instead. */
1979 ASM_OUTPUT_LABEL (file, pic_label_name);
1980 output_asm_insn ("movl (%1),%0", xops);
1981 output_asm_insn ("ret", xops);
1985 /* Generate the assembly code for function entry.
1986 FILE is an stdio stream to output the code to.
1987 SIZE is an int: how many units of temporary storage to allocate. */
1990 function_prologue (file, size)
1991 FILE *file ATTRIBUTE_UNUSED;
1992 int size ATTRIBUTE_UNUSED;
1994 if (TARGET_SCHEDULE_PROLOGUE)
2003 /* Expand the prologue into a bunch of separate insns. */
2006 ix86_expand_prologue ()
2008 if (! TARGET_SCHEDULE_PROLOGUE)
2015 load_pic_register (do_rtl)
2020 if (TARGET_DEEP_BRANCH_PREDICTION)
2022 xops[0] = pic_offset_table_rtx;
2023 if (pic_label_rtx == 0)
2025 pic_label_rtx = gen_label_rtx ();
2026 ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
2027 LABEL_NAME (pic_label_rtx) = pic_label_name;
2030 xops[1] = gen_rtx_MEM (QImode,
2031 gen_rtx (SYMBOL_REF, Pmode,
2032 LABEL_NAME (pic_label_rtx)));
2036 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
2037 emit_insn (gen_prologue_set_got (xops[0],
2038 gen_rtx (SYMBOL_REF, Pmode,
2039 "$_GLOBAL_OFFSET_TABLE_"),
2044 output_asm_insn (AS1 (call,%X1), xops);
2045 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
2052 xops[0] = pic_offset_table_rtx;
2053 xops[1] = gen_label_rtx ();
2057 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
2058 a new CODE_LABEL after reload, so we need a single pattern to
2059 emit the 3 necessary instructions. */
2060 emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
2064 output_asm_insn (AS1 (call,%P1), xops);
2065 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
2066 CODE_LABEL_NUMBER (xops[1]));
2067 output_asm_insn (AS1 (pop%L0,%0), xops);
2068 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
2072 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2073 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2074 moved before any instruction which implicitly uses the got. */
2077 emit_insn (gen_blockage ());
2081 ix86_prologue (do_rtl)
2087 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2088 || current_function_uses_const_pool);
2089 long tsize = get_frame_size ();
2091 int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;
2093 xops[0] = stack_pointer_rtx;
2094 xops[1] = frame_pointer_rtx;
2095 xops[2] = GEN_INT (tsize);
2097 if (frame_pointer_needed)
2101 insn = emit_insn (gen_rtx (SET, VOIDmode,
2102 gen_rtx_MEM (SImode,
2103 gen_rtx (PRE_DEC, SImode,
2104 stack_pointer_rtx)),
2105 frame_pointer_rtx));
2107 RTX_FRAME_RELATED_P (insn) = 1;
2108 insn = emit_move_insn (xops[1], xops[0]);
2109 RTX_FRAME_RELATED_P (insn) = 1;
2114 output_asm_insn ("push%L1 %1", xops);
2115 #ifdef INCOMING_RETURN_ADDR_RTX
2116 if (dwarf2out_do_frame ())
2118 char *l = dwarf2out_cfi_label ();
2120 cfa_store_offset += 4;
2121 cfa_offset = cfa_store_offset;
2122 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2123 dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
2127 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
2128 #ifdef INCOMING_RETURN_ADDR_RTX
2129 if (dwarf2out_do_frame ())
2130 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
2137 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
2141 insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
2142 RTX_FRAME_RELATED_P (insn) = 1;
2146 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
2147 #ifdef INCOMING_RETURN_ADDR_RTX
2148 if (dwarf2out_do_frame ())
2150 cfa_store_offset += tsize;
2151 if (! frame_pointer_needed)
2153 cfa_offset = cfa_store_offset;
2154 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
2162 xops[3] = gen_rtx_REG (SImode, 0);
2164 emit_move_insn (xops[3], xops[2]);
2166 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
2168 xops[3] = gen_rtx_MEM (FUNCTION_MODE,
2169 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
2172 emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
2174 output_asm_insn (AS1 (call,%P3), xops);
2177 /* Note If use enter it is NOT reversed args.
2178 This one is not reversed from intel!!
2179 I think enter is slower. Also sdb doesn't like it.
2180 But if you want it the code is:
2182 xops[3] = const0_rtx;
2183 output_asm_insn ("enter %2,%3", xops);
2187 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2188 for (regno = limit - 1; regno >= 0; regno--)
2189 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2190 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2192 xops[0] = gen_rtx_REG (SImode, regno);
2195 insn = emit_insn (gen_rtx (SET, VOIDmode,
2196 gen_rtx_MEM (SImode,
2197 gen_rtx (PRE_DEC, SImode,
2198 stack_pointer_rtx)),
2201 RTX_FRAME_RELATED_P (insn) = 1;
2205 output_asm_insn ("push%L0 %0", xops);
2206 #ifdef INCOMING_RETURN_ADDR_RTX
2207 if (dwarf2out_do_frame ())
2209 char *l = dwarf2out_cfi_label ();
2211 cfa_store_offset += 4;
2212 if (! frame_pointer_needed)
2214 cfa_offset = cfa_store_offset;
2215 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2218 dwarf2out_reg_save (l, regno, - cfa_store_offset);
2224 #ifdef SUBTARGET_PROLOGUE
2229 load_pic_register (do_rtl);
2231 /* If we are profiling, make sure no instructions are scheduled before
2232 the call to mcount. However, if -fpic, the above call will have
2234 if ((profile_flag || profile_block_flag)
2235 && ! pic_reg_used && do_rtl)
2236 emit_insn (gen_blockage ());
2239 /* Return 1 if it is appropriate to emit `ret' instructions in the
2240 body of a function. Do this only if the epilogue is simple, needing a
2241 couple of insns. Prior to reloading, we can't tell how many registers
2242 must be saved, so return 0 then. Return 0 if there is no frame
2243 marker to de-allocate.
2245 If NON_SAVING_SETJMP is defined and true, then it is not possible
2246 for the epilogue to be simple, so return 0. This is a special case
2247 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2248 until final, but jump_optimize may need to know sooner if a
2252 ix86_can_use_return_insn_p ()
2256 int reglimit = (frame_pointer_needed
2257 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2258 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2259 || current_function_uses_const_pool);
2261 #ifdef NON_SAVING_SETJMP
2262 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
2266 if (! reload_completed)
2269 for (regno = reglimit - 1; regno >= 0; regno--)
2270 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2271 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2274 return nregs == 0 || ! frame_pointer_needed;
2277 /* This function generates the assembly code for function exit.
2278 FILE is an stdio stream to output the code to.
2279 SIZE is an int: how many units of temporary storage to deallocate. */
2282 function_epilogue (file, size)
2283 FILE *file ATTRIBUTE_UNUSED;
2284 int size ATTRIBUTE_UNUSED;
2289 /* Restore function stack, frame, and registers. */
2292 ix86_expand_epilogue ()
2298 ix86_epilogue (do_rtl)
2302 register int nregs, limit;
2305 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2306 || current_function_uses_const_pool);
2307 int sp_valid = !frame_pointer_needed || current_function_sp_is_unchanging;
2308 long tsize = get_frame_size ();
2310 /* Compute the number of registers to pop */
2312 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2316 for (regno = limit - 1; regno >= 0; regno--)
2317 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2318 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2321 /* sp is often unreliable so we may have to go off the frame pointer. */
2323 offset = - tsize - (nregs * UNITS_PER_WORD);
2325 xops[2] = stack_pointer_rtx;
2327 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2328 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2329 moved before any instruction which implicitly uses the got. This
2330 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2332 Alternatively, this could be fixed by making the dependence on the
2333 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2335 if (flag_pic || profile_flag || profile_block_flag)
2336 emit_insn (gen_blockage ());
2338 /* If we're only restoring one register and sp is not valid then
2339 using a move instruction to restore the register since it's
2340 less work than reloading sp and popping the register. Otherwise,
2341 restore sp (if necessary) and pop the registers. */
2343 if (nregs > 1 || sp_valid)
2347 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2349 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2351 output_asm_insn (AS2 (lea%L2,%0,%2), xops);
2354 for (regno = 0; regno < limit; regno++)
2355 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2356 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2358 xops[0] = gen_rtx_REG (SImode, regno);
2361 emit_insn (gen_pop (xops[0]));
2363 output_asm_insn ("pop%L0 %0", xops);
2368 for (regno = 0; regno < limit; regno++)
2369 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2370 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2372 xops[0] = gen_rtx_REG (SImode, regno);
2373 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2376 emit_move_insn (xops[0], xops[1]);
2378 output_asm_insn (AS2 (mov%L0,%1,%0), xops);
2383 if (frame_pointer_needed)
2385 /* If not an i386, mov & pop is faster than "leave". */
2387 if (TARGET_USE_LEAVE)
2390 emit_insn (gen_leave());
2392 output_asm_insn ("leave", xops);
2396 xops[0] = frame_pointer_rtx;
2397 xops[1] = stack_pointer_rtx;
2401 emit_insn (gen_epilogue_set_stack_ptr());
2402 emit_insn (gen_pop (xops[0]));
2406 output_asm_insn (AS2 (mov%L2,%0,%2), xops);
2407 output_asm_insn ("pop%L0 %0", xops);
2414 /* Intel's docs say that for 4 or 8 bytes of stack frame one should
2415 use `pop' and not `add'. */
2416 int use_pop = tsize == 4;
2418 /* Use two pops only for the Pentium processors. */
2419 if (tsize == 8 && !TARGET_386 && !TARGET_486)
2421 rtx retval = current_function_return_rtx;
2423 xops[1] = gen_rtx_REG (SImode, 1); /* %edx */
2425 /* This case is a bit more complex. Since we cannot pop into
2426 %ecx twice we need a second register. But this is only
2427 available if the return value is not of DImode in which
2428 case the %edx register is not available. */
2429 use_pop = (retval == NULL
2430 || ! reg_overlap_mentioned_p (xops[1], retval));
2435 xops[0] = gen_rtx_REG (SImode, 2); /* %ecx */
2439 /* We have to prevent the two pops here from being scheduled.
2440 GCC otherwise would try in some situation to put other
2441 instructions in between them which has a bad effect. */
2442 emit_insn (gen_blockage ());
2443 emit_insn (gen_pop (xops[0]));
2445 emit_insn (gen_pop (xops[1]));
2449 output_asm_insn ("pop%L0 %0", xops);
2451 output_asm_insn ("pop%L1 %1", xops);
2456 /* If there is no frame pointer, we must still release the frame. */
2457 xops[0] = GEN_INT (tsize);
2460 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2461 gen_rtx (PLUS, SImode, xops[2], xops[0])));
2463 output_asm_insn (AS2 (add%L2,%0,%2), xops);
2467 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2468 if (profile_block_flag == 2)
2470 FUNCTION_BLOCK_PROFILER_EXIT(file);
2474 if (current_function_pops_args && current_function_args_size)
2476 xops[1] = GEN_INT (current_function_pops_args);
2478 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2479 asked to pop more, pop return address, do explicit add, and jump
2480 indirectly to the caller. */
2482 if (current_function_pops_args >= 32768)
2484 /* ??? Which register to use here? */
2485 xops[0] = gen_rtx_REG (SImode, 2);
2489 emit_insn (gen_pop (xops[0]));
2490 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2491 gen_rtx (PLUS, SImode, xops[1], xops[2])));
2492 emit_jump_insn (xops[0]);
2496 output_asm_insn ("pop%L0 %0", xops);
2497 output_asm_insn (AS2 (add%L2,%1,%2), xops);
2498 output_asm_insn ("jmp %*%0", xops);
2504 emit_jump_insn (gen_return_pop_internal (xops[1]));
2506 output_asm_insn ("ret %1", xops);
2512 emit_jump_insn (gen_return_internal ());
2514 output_asm_insn ("ret", xops);
2518 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2519 that is a valid memory address for an instruction.
2520 The MODE argument is the machine mode for the MEM expression
2521 that wants to use this address.
2523 On x86, legitimate addresses are:
2524 base movl (base),reg
2525 displacement movl disp,reg
2526 base + displacement movl disp(base),reg
2527 index + base movl (base,index),reg
2528 (index + base) + displacement movl disp(base,index),reg
2529 index*scale movl (,index,scale),reg
2530 index*scale + disp movl disp(,index,scale),reg
2531 index*scale + base movl (base,index,scale),reg
2532 (index*scale + base) + disp movl disp(base,index,scale),reg
2534 In each case, scale can be 1, 2, 4, 8. */
2536 /* This is exactly the same as print_operand_addr, except that
2537 it recognizes addresses instead of printing them.
2539 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2540 convert common non-canonical forms to canonical form so that they will
2543 #define ADDR_INVALID(msg,insn) \
2545 if (TARGET_DEBUG_ADDR) \
2547 fprintf (stderr, msg); \
2553 legitimate_pic_address_disp_p (disp)
2556 if (GET_CODE (disp) != CONST)
2558 disp = XEXP (disp, 0);
2560 if (GET_CODE (disp) == PLUS)
2562 if (GET_CODE (XEXP (disp, 1)) != CONST_INT)
2564 disp = XEXP (disp, 0);
2567 if (GET_CODE (disp) != UNSPEC
2568 || XVECLEN (disp, 0) != 1)
2571 /* Must be @GOT or @GOTOFF. */
2572 if (XINT (disp, 1) != 6
2573 && XINT (disp, 1) != 7)
2576 if (GET_CODE (XVECEXP (disp, 0, 0)) != SYMBOL_REF
2577 && GET_CODE (XVECEXP (disp, 0, 0)) != LABEL_REF)
2584 legitimate_address_p (mode, addr, strict)
2585 enum machine_mode mode;
2589 rtx base = NULL_RTX;
2590 rtx indx = NULL_RTX;
2591 rtx scale = NULL_RTX;
2592 rtx disp = NULL_RTX;
2594 if (TARGET_DEBUG_ADDR)
2597 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2598 GET_MODE_NAME (mode), strict);
2603 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2606 else if (GET_CODE (addr) == PLUS)
2608 rtx op0 = XEXP (addr, 0);
2609 rtx op1 = XEXP (addr, 1);
2610 enum rtx_code code0 = GET_CODE (op0);
2611 enum rtx_code code1 = GET_CODE (op1);
2613 if (code0 == REG || code0 == SUBREG)
2615 if (code1 == REG || code1 == SUBREG)
2617 indx = op0; /* index + base */
2623 base = op0; /* base + displacement */
2628 else if (code0 == MULT)
2630 indx = XEXP (op0, 0);
2631 scale = XEXP (op0, 1);
2633 if (code1 == REG || code1 == SUBREG)
2634 base = op1; /* index*scale + base */
2637 disp = op1; /* index*scale + disp */
2640 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2642 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2643 scale = XEXP (XEXP (op0, 0), 1);
2644 base = XEXP (op0, 1);
2648 else if (code0 == PLUS)
2650 indx = XEXP (op0, 0); /* index + base + disp */
2651 base = XEXP (op0, 1);
2657 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2662 else if (GET_CODE (addr) == MULT)
2664 indx = XEXP (addr, 0); /* index*scale */
2665 scale = XEXP (addr, 1);
2669 disp = addr; /* displacement */
2671 /* Allow arg pointer and stack pointer as index if there is not scaling */
2672 if (base && indx && !scale
2673 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2680 /* Validate base register:
2682 Don't allow SUBREG's here, it can lead to spill failures when the base
2683 is one word out of a two word structure, which is represented internally
2688 if (GET_CODE (base) != REG)
2690 ADDR_INVALID ("Base is not a register.\n", base);
2694 if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
2695 || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
2697 ADDR_INVALID ("Base is not valid.\n", base);
2702 /* Validate index register:
2704 Don't allow SUBREG's here, it can lead to spill failures when the index
2705 is one word out of a two word structure, which is represented internally
2709 if (GET_CODE (indx) != REG)
2711 ADDR_INVALID ("Index is not a register.\n", indx);
2715 if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
2716 || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2718 ADDR_INVALID ("Index is not valid.\n", indx);
2723 abort (); /* scale w/o index invalid */
2725 /* Validate scale factor: */
2728 HOST_WIDE_INT value;
2730 if (GET_CODE (scale) != CONST_INT)
2732 ADDR_INVALID ("Scale is not valid.\n", scale);
2736 value = INTVAL (scale);
2737 if (value != 1 && value != 2 && value != 4 && value != 8)
2739 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2744 /* Validate displacement. */
2747 if (!CONSTANT_ADDRESS_P (disp))
2749 ADDR_INVALID ("Displacement is not valid.\n", disp);
2753 else if (GET_CODE (disp) == CONST_DOUBLE)
2755 ADDR_INVALID ("Displacement is a const_double.\n", disp);
2759 if (flag_pic && SYMBOLIC_CONST (disp))
2761 if (! legitimate_pic_address_disp_p (disp))
2763 ADDR_INVALID ("Displacement is an invalid PIC construct.\n",
2768 if (base != pic_offset_table_rtx
2769 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2771 ADDR_INVALID ("PIC displacement against invalid base.\n", disp);
2776 else if (HALF_PIC_P ())
2778 if (! HALF_PIC_ADDRESS_P (disp)
2779 || (base != NULL_RTX || indx != NULL_RTX))
2781 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2788 if (TARGET_DEBUG_ADDR)
2789 fprintf (stderr, "Address is valid.\n");
2791 /* Everything looks valid, return true */
2795 /* Return a legitimate reference for ORIG (an address) using the
2796 register REG. If REG is 0, a new pseudo is generated.
2798 There are two types of references that must be handled:
2800 1. Global data references must load the address from the GOT, via
2801 the PIC reg. An insn is emitted to do this load, and the reg is
2804 2. Static data references, constant pool addresses, and code labels
2805 compute the address as an offset from the GOT, whose base is in
2806 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
2807 differentiate them from global data objects. The returned
2808 address is the PIC reg + an unspec constant.
2810 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2811 reg also appears in the address. */
2814 legitimize_pic_address (orig, reg)
2822 if (GET_CODE (addr) == LABEL_REF
2823 || (GET_CODE (addr) == SYMBOL_REF
2824 && (CONSTANT_POOL_ADDRESS_P (addr)
2825 || SYMBOL_REF_FLAG (addr))))
2827 /* This symbol may be referenced via a displacement from the PIC
2828 base address (@GOTOFF). */
2830 current_function_uses_pic_offset_table = 1;
2831 new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 7);
2832 new = gen_rtx_CONST (VOIDmode, new);
2833 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2837 emit_move_insn (reg, new);
2841 else if (GET_CODE (addr) == SYMBOL_REF)
2843 /* This symbol must be referenced via a load from the
2844 Global Offset Table (@GOT). */
2846 current_function_uses_pic_offset_table = 1;
2847 new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, addr), 6);
2848 new = gen_rtx_CONST (VOIDmode, new);
2849 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2850 new = gen_rtx_MEM (Pmode, new);
2851 RTX_UNCHANGING_P (new) = 1;
2854 reg = gen_reg_rtx (Pmode);
2855 emit_move_insn (reg, new);
2860 if (GET_CODE (addr) == CONST)
2862 addr = XEXP (addr, 0);
2863 if (GET_CODE (addr) == UNSPEC)
2865 /* Check that the unspec is one of the ones we generate? */
2867 else if (GET_CODE (addr) != PLUS)
2870 if (GET_CODE (addr) == PLUS)
2872 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2874 /* Check first to see if this is a constant offset from a @GOTOFF
2875 symbol reference. */
2876 if ((GET_CODE (op0) == LABEL_REF
2877 || (GET_CODE (op0) == SYMBOL_REF
2878 && (CONSTANT_POOL_ADDRESS_P (op0)
2879 || SYMBOL_REF_FLAG (op0))))
2880 && GET_CODE (op1) == CONST_INT)
2882 current_function_uses_pic_offset_table = 1;
2883 new = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, op0), 7);
2884 new = gen_rtx_PLUS (VOIDmode, new, op1);
2885 new = gen_rtx_CONST (VOIDmode, new);
2886 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2890 emit_move_insn (reg, new);
2896 base = legitimize_pic_address (XEXP (addr, 0), reg);
2897 new = legitimize_pic_address (XEXP (addr, 1),
2898 base == reg ? NULL_RTX : reg);
2900 if (GET_CODE (new) == CONST_INT)
2901 new = plus_constant (base, INTVAL (new));
2904 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2906 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2907 new = XEXP (new, 1);
2909 new = gen_rtx_PLUS (Pmode, base, new);
2917 /* Emit insns to move operands[1] into operands[0]. */
2920 emit_pic_move (operands, mode)
2922 enum machine_mode mode ATTRIBUTE_UNUSED;
2924 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2926 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2927 operands[1] = force_reg (Pmode, operands[1]);
2929 operands[1] = legitimize_pic_address (operands[1], temp);
2932 /* Try machine-dependent ways of modifying an illegitimate address
2933 to be legitimate. If we find one, return the new, valid address.
2934 This macro is used in only one place: `memory_address' in explow.c.
2936 OLDX is the address as it was before break_out_memory_refs was called.
2937 In some cases it is useful to look at this to decide what needs to be done.
2939 MODE and WIN are passed so that this macro can use
2940 GO_IF_LEGITIMATE_ADDRESS.
2942 It is always safe for this macro to do nothing. It exists to recognize
2943 opportunities to optimize the output.
2945 For the 80386, we handle X+REG by loading X into a register R and
2946 using R+REG. R will go in a general reg and indexing will be used.
2947 However, if REG is a broken-out memory address or multiplication,
2948 nothing needs to be done because REG can certainly go in a general reg.
2950 When -fpic is used, special handling is needed for symbolic references.
2951 See comments by legitimize_pic_address in i386.c for details. */
2954 legitimize_address (x, oldx, mode)
2956 register rtx oldx ATTRIBUTE_UNUSED;
2957 enum machine_mode mode;
2962 if (TARGET_DEBUG_ADDR)
2964 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2965 GET_MODE_NAME (mode));
2969 if (flag_pic && SYMBOLIC_CONST (x))
2970 return legitimize_pic_address (x, 0);
2972 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2973 if (GET_CODE (x) == ASHIFT
2974 && GET_CODE (XEXP (x, 1)) == CONST_INT
2975 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2978 x = gen_rtx_MULT (Pmode, force_reg (Pmode, XEXP (x, 0)),
2979 GEN_INT (1 << log));
2982 if (GET_CODE (x) == PLUS)
2984 /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2986 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2987 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2988 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2991 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2992 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2993 GEN_INT (1 << log));
2996 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2997 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2998 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
3001 XEXP (x, 1) = gen_rtx (MULT, Pmode,
3002 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
3003 GEN_INT (1 << log));
3006 /* Put multiply first if it isn't already. */
3007 if (GET_CODE (XEXP (x, 1)) == MULT)
3009 rtx tmp = XEXP (x, 0);
3010 XEXP (x, 0) = XEXP (x, 1);
3015 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
3016 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
3017 created by virtual register instantiation, register elimination, and
3018 similar optimizations. */
3019 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
3022 x = gen_rtx (PLUS, Pmode,
3023 gen_rtx (PLUS, Pmode, XEXP (x, 0),
3024 XEXP (XEXP (x, 1), 0)),
3025 XEXP (XEXP (x, 1), 1));
3029 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
3030 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
3031 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
3032 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
3033 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
3034 && CONSTANT_P (XEXP (x, 1)))
3037 rtx other = NULL_RTX;
3039 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3041 constant = XEXP (x, 1);
3042 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
3044 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
3046 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
3047 other = XEXP (x, 1);
3055 x = gen_rtx (PLUS, Pmode,
3056 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
3057 XEXP (XEXP (XEXP (x, 0), 1), 0)),
3058 plus_constant (other, INTVAL (constant)));
3062 if (changed && legitimate_address_p (mode, x, FALSE))
3065 if (GET_CODE (XEXP (x, 0)) == MULT)
3068 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
3071 if (GET_CODE (XEXP (x, 1)) == MULT)
3074 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
3078 && GET_CODE (XEXP (x, 1)) == REG
3079 && GET_CODE (XEXP (x, 0)) == REG)
3082 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
3085 x = legitimize_pic_address (x, 0);
3088 if (changed && legitimate_address_p (mode, x, FALSE))
3091 if (GET_CODE (XEXP (x, 0)) == REG)
3093 register rtx temp = gen_reg_rtx (Pmode);
3094 register rtx val = force_operand (XEXP (x, 1), temp);
3096 emit_move_insn (temp, val);
3102 else if (GET_CODE (XEXP (x, 1)) == REG)
3104 register rtx temp = gen_reg_rtx (Pmode);
3105 register rtx val = force_operand (XEXP (x, 0), temp);
3107 emit_move_insn (temp, val);
3117 /* Print an integer constant expression in assembler syntax. Addition
3118 and subtraction are the only arithmetic that may appear in these
3119 expressions. FILE is the stdio stream to write to, X is the rtx, and
3120 CODE is the operand print code from the output string. */
3123 output_pic_addr_const (file, x, code)
3130 switch (GET_CODE (x))
3140 assemble_name (file, XSTR (x, 0));
3141 if (code == 'P' && ! SYMBOL_REF_FLAG (x))
3142 fputs ("@PLT", file);
3149 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3150 assemble_name (asm_out_file, buf);
3154 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3158 /* This used to output parentheses around the expression,
3159 but that does not work on the 386 (either ATT or BSD assembler). */
3160 output_pic_addr_const (file, XEXP (x, 0), code);
3164 if (GET_MODE (x) == VOIDmode)
3166 /* We can use %d if the number is <32 bits and positive. */
3167 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
3168 fprintf (file, "0x%lx%08lx",
3169 (unsigned long) CONST_DOUBLE_HIGH (x),
3170 (unsigned long) CONST_DOUBLE_LOW (x));
3172 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3175 /* We can't handle floating point constants;
3176 PRINT_OPERAND must handle them. */
3177 output_operand_lossage ("floating constant misused");
3181 /* Some assemblers need integer constants to appear first. */
3182 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3184 output_pic_addr_const (file, XEXP (x, 0), code);
3185 fprintf (file, "+");
3186 output_pic_addr_const (file, XEXP (x, 1), code);
3188 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3190 output_pic_addr_const (file, XEXP (x, 1), code);
3191 fprintf (file, "+");
3192 output_pic_addr_const (file, XEXP (x, 0), code);
3199 output_pic_addr_const (file, XEXP (x, 0), code);
3200 fprintf (file, "-");
3201 output_pic_addr_const (file, XEXP (x, 1), code);
3205 if (XVECLEN (x, 0) != 1)
3207 output_pic_addr_const (file, XVECEXP (x, 0, 0), code);
3208 switch (XINT (x, 1))
3211 fputs ("@GOT", file);
3214 fputs ("@GOTOFF", file);
3217 fputs ("@PLT", file);
3220 output_operand_lossage ("invalid UNSPEC as operand");
3226 output_operand_lossage ("invalid expression as operand");
3231 put_jump_code (code, reverse, file)
3236 int flags = cc_prev_status.flags;
3237 int ieee = (TARGET_IEEE_FP && (flags & CC_IN_80387));
3240 if (flags & CC_Z_IN_NOT_C)
3244 fputs (reverse ? "c" : "nc", file);
3248 fputs (reverse ? "nc" : "c", file);
3259 suffix = reverse ? "ae" : "b";
3264 suffix = reverse ? "ne" : "e";
3267 suffix = reverse ? "ne" : "e";
3270 suffix = reverse ? "e" : "ne";
3275 fputs (suffix, file);
3278 if (flags & CC_TEST_AX)
3280 if ((flags & CC_NO_OVERFLOW) && (code == LE || code == GT))
3283 code = reverse_condition (code);
3295 suffix = flags & CC_IN_80387 ? "a" : "g";
3303 if (flags & CC_NO_OVERFLOW)
3306 suffix = flags & CC_IN_80387 ? "b" : "l";
3314 if (flags & CC_NO_OVERFLOW)
3317 suffix = flags & CC_IN_80387 ? "ae" : "ge";
3325 suffix = flags & CC_IN_80387 ? "be" : "le";
3335 fputs (suffix, file);
3338 /* Append the correct conditional move suffix which corresponds to CODE. */
3341 put_condition_code (code, reverse_cc, mode, file)
3344 enum mode_class mode;
3347 int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
3348 && ! (cc_prev_status.flags & CC_FCOMI));
3349 if (reverse_cc && ! ieee)
3350 code = reverse_condition (code);
3352 if (mode == MODE_INT)
3356 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
3363 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
3370 if (cc_prev_status.flags & CC_NO_OVERFLOW)
3385 if (cc_prev_status.flags & CC_NO_OVERFLOW)
3408 output_operand_lossage ("Invalid %%C operand");
3411 else if (mode == MODE_FLOAT)
3415 fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
3418 fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
3421 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
3424 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
3427 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
3430 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
3433 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
3436 fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
3439 fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
3442 fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
3445 output_operand_lossage ("Invalid %%C operand");
3450 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3451 C -- print opcode suffix for set/cmov insn.
3452 c -- like C, but print reversed condition
3453 F -- print opcode suffix for fcmov insn.
3454 f -- like F, but print reversed condition
3455 D -- print the opcode suffix for a jump
3456 d -- like D, but print reversed condition
3457 R -- print the prefix for register names.
3458 z -- print the opcode suffix for the size of the current operand.
3459 * -- print a star (in certain assembler syntax)
3460 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3461 J -- print the appropriate jump operand.
3462 s -- print a shift double count, followed by the assemblers argument
3464 b -- print the QImode name of the register for the indicated operand.
3465 %b0 would print %al if operands[0] is reg 0.
3466 w -- likewise, print the HImode name of the register.
3467 k -- likewise, print the SImode name of the register.
3468 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3469 y -- print "st(0)" instead of "st" as a register.
3470 P -- print as a PIC constant */
3473 print_operand (file, x, code)
3488 PUT_OP_SIZE (code, 'l', file);
3492 PUT_OP_SIZE (code, 'w', file);
3496 PUT_OP_SIZE (code, 'b', file);
3500 PUT_OP_SIZE (code, 'l', file);
3504 PUT_OP_SIZE (code, 's', file);
3508 PUT_OP_SIZE (code, 't', file);
3512 /* 387 opcodes don't get size suffixes if the operands are
3515 if (STACK_REG_P (x))
3518 /* this is the size of op from size of operand */
3519 switch (GET_MODE_SIZE (GET_MODE (x)))
3522 PUT_OP_SIZE ('B', 'b', file);
3526 PUT_OP_SIZE ('W', 'w', file);
3530 if (GET_MODE (x) == SFmode)
3532 PUT_OP_SIZE ('S', 's', file);
3536 PUT_OP_SIZE ('L', 'l', file);
3540 PUT_OP_SIZE ('T', 't', file);
3544 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
3546 #ifdef GAS_MNEMONICS
3547 PUT_OP_SIZE ('Q', 'q', file);
3550 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
3554 PUT_OP_SIZE ('Q', 'l', file);
3568 switch (GET_CODE (x))
3570 /* These conditions are appropriate for testing the result
3571 of an arithmetic operation, not for a compare operation.
3572 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3573 CC_Z_IN_NOT_C false and not floating point. */
3574 case NE: fputs ("jne", file); return;
3575 case EQ: fputs ("je", file); return;
3576 case GE: fputs ("jns", file); return;
3577 case LT: fputs ("js", file); return;
3578 case GEU: fputs ("jmp", file); return;
3579 case GTU: fputs ("jne", file); return;
3580 case LEU: fputs ("je", file); return;
3581 case LTU: fputs ("#branch never", file); return;
3583 /* no matching branches for GT nor LE */
3590 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
3592 PRINT_OPERAND (file, x, 0);
3593 fputs (AS2C (,) + 1, file);
3599 put_jump_code (GET_CODE (x), 0, file);
3603 put_jump_code (GET_CODE (x), 1, file);
3606 /* This is used by the conditional move instructions. */
3608 put_condition_code (GET_CODE (x), 0, MODE_INT, file);
3611 /* Like above, but reverse condition */
3613 put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;
3616 put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
3619 /* Like above, but reverse condition */
3621 put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
3628 sprintf (str, "invalid operand code `%c'", code);
3629 output_operand_lossage (str);
3634 if (GET_CODE (x) == REG)
3636 PRINT_REG (x, code, file);
3639 else if (GET_CODE (x) == MEM)
3641 PRINT_PTR (x, file);
3642 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3645 output_pic_addr_const (file, XEXP (x, 0), code);
3647 output_addr_const (file, XEXP (x, 0));
3650 output_address (XEXP (x, 0));
3653 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3658 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3659 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3660 PRINT_IMMED_PREFIX (file);
3661 fprintf (file, "0x%lx", l);
3664 /* These float cases don't actually occur as immediate operands. */
3665 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3670 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3671 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3672 fprintf (file, "%s", dstr);
3675 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3680 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3681 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3682 fprintf (file, "%s", dstr);
3688 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3689 PRINT_IMMED_PREFIX (file);
3690 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3691 || GET_CODE (x) == LABEL_REF)
3692 PRINT_OFFSET_PREFIX (file);
3695 output_pic_addr_const (file, x, code);
3697 output_addr_const (file, x);
3701 /* Print a memory operand whose address is ADDR. */
3704 print_operand_address (file, addr)
3708 register rtx reg1, reg2, breg, ireg;
3711 switch (GET_CODE (addr))
3715 fprintf (file, "%se", RP);
3716 fputs (hi_reg_name[REGNO (addr)], file);
3726 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3728 offset = XEXP (addr, 0);
3729 addr = XEXP (addr, 1);
3731 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3733 offset = XEXP (addr, 1);
3734 addr = XEXP (addr, 0);
3737 if (GET_CODE (addr) != PLUS)
3739 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3740 reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
3741 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3742 reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
3743 else if (GET_CODE (XEXP (addr, 0)) == REG)
3744 reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
3745 else if (GET_CODE (XEXP (addr, 1)) == REG)
3746 reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
3748 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3765 if ((reg1 && GET_CODE (reg1) == MULT)
3766 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3771 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3777 if (ireg != 0 || breg != 0)
3784 output_pic_addr_const (file, addr, 0);
3785 else if (GET_CODE (addr) == LABEL_REF)
3786 output_asm_label (addr);
3788 output_addr_const (file, addr);
3791 if (ireg != 0 && GET_CODE (ireg) == MULT)
3793 scale = INTVAL (XEXP (ireg, 1));
3794 ireg = XEXP (ireg, 0);
3797 /* The stack pointer can only appear as a base register,
3798 never an index register, so exchange the regs if it is wrong. */
3800 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3809 /* output breg+ireg*scale */
3810 PRINT_B_I_S (breg, ireg, scale, file);
3818 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3820 scale = INTVAL (XEXP (addr, 0));
3821 ireg = XEXP (addr, 1);
3825 scale = INTVAL (XEXP (addr, 1));
3826 ireg = XEXP (addr, 0);
3829 output_addr_const (file, const0_rtx);
3830 PRINT_B_I_S (NULL_RTX, ireg, scale, file);
3835 if (GET_CODE (addr) == CONST_INT
3836 && INTVAL (addr) < 0x8000
3837 && INTVAL (addr) >= -0x8000)
3838 fprintf (file, "%d", (int) INTVAL (addr));
3842 output_pic_addr_const (file, addr, 0);
3844 output_addr_const (file, addr);
3849 /* Set the cc_status for the results of an insn whose pattern is EXP.
3850 On the 80386, we assume that only test and compare insns, as well
3851 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
3852 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3853 Also, we assume that jumps, moves and sCOND don't affect the condition
3854 codes. All else clobbers the condition codes, by assumption.
3856 We assume that ALL integer add, minus, etc. instructions effect the
3857 condition codes. This MUST be consistent with i386.md.
3859 We don't record any float test or compare - the redundant test &
3860 compare check in final.c does not handle stack-like regs correctly. */
3863 notice_update_cc (exp)
3866 if (GET_CODE (exp) == SET)
3868 /* Jumps do not alter the cc's. */
3869 if (SET_DEST (exp) == pc_rtx)
3872 /* Moving register or memory into a register:
3873 it doesn't alter the cc's, but it might invalidate
3874 the RTX's which we remember the cc's came from.
3875 (Note that moving a constant 0 or 1 MAY set the cc's). */
3876 if (REG_P (SET_DEST (exp))
3877 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3878 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
3879 || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
3880 && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
3882 if (cc_status.value1
3883 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3884 cc_status.value1 = 0;
3886 if (cc_status.value2
3887 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3888 cc_status.value2 = 0;
3893 /* Moving register into memory doesn't alter the cc's.
3894 It may invalidate the RTX's which we remember the cc's came from. */
3895 if (GET_CODE (SET_DEST (exp)) == MEM
3896 && (REG_P (SET_SRC (exp))
3897 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3899 if (cc_status.value1
3900 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3901 cc_status.value1 = 0;
3902 if (cc_status.value2
3903 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3904 cc_status.value2 = 0;
3909 /* Function calls clobber the cc's. */
3910 else if (GET_CODE (SET_SRC (exp)) == CALL)
3916 /* Tests and compares set the cc's in predictable ways. */
3917 else if (SET_DEST (exp) == cc0_rtx)
3920 cc_status.value1 = SET_SRC (exp);
3924 /* Certain instructions effect the condition codes. */
3925 else if (GET_MODE (SET_SRC (exp)) == SImode
3926 || GET_MODE (SET_SRC (exp)) == HImode
3927 || GET_MODE (SET_SRC (exp)) == QImode)
3928 switch (GET_CODE (SET_SRC (exp)))
3930 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3931 /* Shifts on the 386 don't set the condition codes if the
3932 shift count is zero. */
3933 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3939 /* We assume that the CONST_INT is non-zero (this rtx would
3940 have been deleted if it were zero. */
3942 case PLUS: case MINUS: case NEG:
3943 case AND: case IOR: case XOR:
3944 cc_status.flags = CC_NO_OVERFLOW;
3945 cc_status.value1 = SET_SRC (exp);
3946 cc_status.value2 = SET_DEST (exp);
3949 /* This is the bsf pattern used by ffs. */
3951 if (XINT (SET_SRC (exp), 1) == 5)
3953 /* Only the Z flag is defined after bsf. */
3955 = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
3956 cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
3957 cc_status.value2 = 0;
3970 else if (GET_CODE (exp) == PARALLEL
3971 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3973 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3975 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3979 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3981 cc_status.flags |= CC_IN_80387;
3982 if (0 && TARGET_CMOVE && stack_regs_mentioned_p
3983 (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
3984 cc_status.flags |= CC_FCOMI;
3987 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3999 /* Split one or more DImode RTL references into pairs of SImode
4000 references. The RTL can be REG, offsettable MEM, integer constant, or
4001 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
4002 split and "num" is its length. lo_half and hi_half are output arrays
4003 that parallel "operands". */
4006 split_di (operands, num, lo_half, hi_half)
4009 rtx lo_half[], hi_half[];
4013 rtx op = operands[num];
4014 if (! reload_completed)
4016 lo_half[num] = gen_lowpart (SImode, op);
4017 hi_half[num] = gen_highpart (SImode, op);
4019 else if (GET_CODE (op) == REG)
4021 lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
4022 hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
4024 else if (CONSTANT_P (op))
4025 split_double (op, &lo_half[num], &hi_half[num]);
4026 else if (offsettable_memref_p (op))
4028 rtx lo_addr = XEXP (op, 0);
4029 rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
4030 lo_half[num] = change_address (op, SImode, lo_addr);
4031 hi_half[num] = change_address (op, SImode, hi_addr);
4038 /* Return 1 if this is a valid binary operation on a 387.
4039 OP is the expression matched, and MODE is its mode. */
4042 binary_387_op (op, mode)
4044 enum machine_mode mode;
4046 if (mode != VOIDmode && mode != GET_MODE (op))
4049 switch (GET_CODE (op))
4055 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
4062 /* Return 1 if this is a valid shift or rotate operation on a 386.
4063 OP is the expression matched, and MODE is its mode. */
4068 enum machine_mode mode;
4070 rtx operand = XEXP (op, 0);
4072 if (mode != VOIDmode && mode != GET_MODE (op))
4075 if (GET_MODE (operand) != GET_MODE (op)
4076 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
4079 return (GET_CODE (op) == ASHIFT
4080 || GET_CODE (op) == ASHIFTRT
4081 || GET_CODE (op) == LSHIFTRT
4082 || GET_CODE (op) == ROTATE
4083 || GET_CODE (op) == ROTATERT);
4086 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
4087 MODE is not used. */
4090 VOIDmode_compare_op (op, mode)
4092 enum machine_mode mode ATTRIBUTE_UNUSED;
4094 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
4097 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
4098 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
4099 is the expression of the binary operation. The output may either be
4100 emitted here, or returned to the caller, like all output_* functions.
4102 There is no guarantee that the operands are the same mode, as they
4103 might be within FLOAT or FLOAT_EXTEND expressions. */
4106 output_387_binary_op (insn, operands)
4112 static char buf[100];
4114 switch (GET_CODE (operands[3]))
4117 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
4118 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
4125 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
4126 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
4133 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
4134 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
4141 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
4142 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
4152 strcpy (buf, base_op);
4154 switch (GET_CODE (operands[3]))
4158 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
4161 operands[2] = operands[1];
4165 if (GET_CODE (operands[2]) == MEM)
4166 return strcat (buf, AS1 (%z2,%2));
4168 if (NON_STACK_REG_P (operands[1]))
4170 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
4174 else if (NON_STACK_REG_P (operands[2]))
4176 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
4180 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
4182 if (STACK_TOP_P (operands[0]))
4183 return strcat (buf, AS2 (p,%0,%2));
4185 return strcat (buf, AS2 (p,%2,%0));
4188 if (STACK_TOP_P (operands[0]))
4189 return strcat (buf, AS2C (%y2,%0));
4191 return strcat (buf, AS2C (%2,%0));
4195 if (GET_CODE (operands[1]) == MEM)
4196 return strcat (buf, AS1 (r%z1,%1));
4198 if (GET_CODE (operands[2]) == MEM)
4199 return strcat (buf, AS1 (%z2,%2));
4201 if (NON_STACK_REG_P (operands[1]))
4203 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
4207 else if (NON_STACK_REG_P (operands[2]))
4209 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
4213 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
4216 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
4218 if (STACK_TOP_P (operands[0]))
4219 return strcat (buf, AS2 (p,%0,%2));
4221 return strcat (buf, AS2 (rp,%2,%0));
4224 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
4226 if (STACK_TOP_P (operands[0]))
4227 return strcat (buf, AS2 (rp,%0,%1));
4229 return strcat (buf, AS2 (p,%1,%0));
4232 if (STACK_TOP_P (operands[0]))
4234 if (STACK_TOP_P (operands[1]))
4235 return strcat (buf, AS2C (%y2,%0));
4237 return strcat (buf, AS2 (r,%y1,%0));
4239 else if (STACK_TOP_P (operands[1]))
4240 return strcat (buf, AS2C (%1,%0));
4242 return strcat (buf, AS2 (r,%2,%0));
4249 /* Output code for INSN to convert a float to a signed int. OPERANDS
4250 are the insn operands. The output may be SFmode or DFmode and the
4251 input operand may be SImode or DImode. As a special case, make sure
4252 that the 387 stack top dies if the output mode is DImode, because the
4253 hardware requires this. */
4256 output_fix_trunc (insn, operands)
4260 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
4263 if (! STACK_TOP_P (operands[1]))
4266 xops[0] = GEN_INT (12);
4267 xops[1] = operands[4];
4269 output_asm_insn (AS1 (fnstc%W2,%2), operands);
4270 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
4271 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
4272 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
4273 output_asm_insn (AS1 (fldc%W3,%3), operands);
4275 if (NON_STACK_REG_P (operands[0]))
4276 output_to_reg (operands[0], stack_top_dies, operands[3]);
4278 else if (GET_CODE (operands[0]) == MEM)
4281 output_asm_insn (AS1 (fistp%z0,%0), operands);
4282 else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
4284 /* There is no DImode version of this without a stack pop, so
4285 we must emulate it. It doesn't matter much what the second
4286 instruction is, because the value being pushed on the FP stack
4287 is not used except for the following stack popping store.
4288 This case can only happen without optimization, so it doesn't
4289 matter that it is inefficient. */
4290 output_asm_insn (AS1 (fistp%z0,%0), operands);
4291 output_asm_insn (AS1 (fild%z0,%0), operands);
4294 output_asm_insn (AS1 (fist%z0,%0), operands);
4299 return AS1 (fldc%W2,%2);
4302 /* Output code for INSN to compare OPERANDS. The two operands might
4303 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
4304 expression. If the compare is in mode CCFPEQmode, use an opcode that
4305 will not fault if a qNaN is present. */
4308 output_float_compare (insn, operands)
4313 rtx body = XVECEXP (PATTERN (insn), 0, 0);
4314 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
4317 if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
4319 cc_status.flags |= CC_FCOMI;
4320 cc_prev_status.flags &= ~CC_TEST_AX;
4323 if (! STACK_TOP_P (operands[0]))
4326 operands[0] = operands[1];
4328 cc_status.flags |= CC_REVERSED;
4331 if (! STACK_TOP_P (operands[0]))
4334 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
4336 if (STACK_REG_P (operands[1])
4338 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
4339 && REGNO (operands[1]) != FIRST_STACK_REG)
4341 /* If both the top of the 387 stack dies, and the other operand
4342 is also a stack register that dies, then this must be a
4343 `fcompp' float compare */
4345 if (unordered_compare)
4347 if (cc_status.flags & CC_FCOMI)
4349 output_asm_insn (AS2 (fucomip,%y1,%0), operands);
4350 output_asm_insn (AS1 (fstp, %y0), operands);
4354 output_asm_insn ("fucompp", operands);
4358 if (cc_status.flags & CC_FCOMI)
4360 output_asm_insn (AS2 (fcomip, %y1,%0), operands);
4361 output_asm_insn (AS1 (fstp, %y0), operands);
4365 output_asm_insn ("fcompp", operands);
4370 static char buf[100];
4372 /* Decide if this is the integer or float compare opcode, or the
4373 unordered float compare. */
4375 if (unordered_compare)
4376 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
4377 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
4378 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
4380 strcpy (buf, "ficom");
4382 /* Modify the opcode if the 387 stack is to be popped. */
4387 if (NON_STACK_REG_P (operands[1]))
4388 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
4389 else if (cc_status.flags & CC_FCOMI)
4391 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
4395 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
4398 /* Now retrieve the condition code. */
4400 return output_fp_cc0_set (insn);
4403 /* Output opcodes to transfer the results of FP compare or test INSN
4404 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4405 result of the compare or test is unordered, no comparison operator
4406 succeeds except NE. Return an output template, if any. */
4409 output_fp_cc0_set (insn)
4416 xops[0] = gen_rtx_REG (HImode, 0);
4417 output_asm_insn (AS1 (fnsts%W0,%0), xops);
4419 if (! TARGET_IEEE_FP)
4421 if (!(cc_status.flags & CC_REVERSED))
4423 next = next_cc0_user (insn);
4425 if (GET_CODE (next) == JUMP_INSN
4426 && GET_CODE (PATTERN (next)) == SET
4427 && SET_DEST (PATTERN (next)) == pc_rtx
4428 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4429 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4430 else if (GET_CODE (PATTERN (next)) == SET)
4431 code = GET_CODE (SET_SRC (PATTERN (next)));
4435 if (code == GT || code == LT || code == EQ || code == NE
4436 || code == LE || code == GE)
4438 /* We will test eax directly. */
4439 cc_status.flags |= CC_TEST_AX;
4447 next = next_cc0_user (insn);
4448 if (next == NULL_RTX)
4451 if (GET_CODE (next) == JUMP_INSN
4452 && GET_CODE (PATTERN (next)) == SET
4453 && SET_DEST (PATTERN (next)) == pc_rtx
4454 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4455 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4456 else if (GET_CODE (PATTERN (next)) == SET)
4458 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4459 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4461 code = GET_CODE (SET_SRC (PATTERN (next)));
4464 else if (GET_CODE (PATTERN (next)) == PARALLEL
4465 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
4467 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
4468 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
4470 code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
4475 xops[0] = gen_rtx_REG (QImode, 0);
4480 xops[1] = GEN_INT (0x45);
4481 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4486 xops[1] = GEN_INT (0x45);
4487 xops[2] = GEN_INT (0x01);
4488 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4489 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4494 xops[1] = GEN_INT (0x05);
4495 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4500 xops[1] = GEN_INT (0x45);
4501 xops[2] = GEN_INT (0x40);
4502 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4503 output_asm_insn (AS1 (dec%B0,%h0), xops);
4504 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4509 xops[1] = GEN_INT (0x45);
4510 xops[2] = GEN_INT (0x40);
4511 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4512 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4517 xops[1] = GEN_INT (0x44);
4518 xops[2] = GEN_INT (0x40);
4519 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4520 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
4535 #define MAX_386_STACK_LOCALS 2
4537 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4539 /* Define the structure for the machine field in struct function. */
4540 struct machine_function
4542 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4544 char pic_label_name[256];
4547 /* Functions to save and restore i386_stack_locals.
4548 These will be called, via pointer variables,
4549 from push_function_context and pop_function_context. */
4552 save_386_machine_status (p)
4556 = (struct machine_function *) xmalloc (sizeof (struct machine_function));
4557 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4558 sizeof i386_stack_locals);
4559 p->machine->pic_label_rtx = pic_label_rtx;
4560 bcopy (pic_label_name, p->machine->pic_label_name, 256);
4564 restore_386_machine_status (p)
4567 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4568 sizeof i386_stack_locals);
4569 pic_label_rtx = p->machine->pic_label_rtx;
4570 bcopy (p->machine->pic_label_name, pic_label_name, 256);
4575 /* Clear stack slot assignments remembered from previous functions.
4576 This is called from INIT_EXPANDERS once before RTL is emitted for each
4580 clear_386_stack_locals ()
4582 enum machine_mode mode;
4585 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4586 mode = (enum machine_mode) ((int) mode + 1))
4587 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4588 i386_stack_locals[(int) mode][n] = NULL_RTX;
4590 pic_label_rtx = NULL_RTX;
4591 bzero (pic_label_name, 256);
4592 /* Arrange to save and restore i386_stack_locals around nested functions. */
4593 save_machine_status = save_386_machine_status;
4594 restore_machine_status = restore_386_machine_status;
4597 /* Return a MEM corresponding to a stack slot with mode MODE.
4598 Allocate a new slot if necessary.
4600 The RTL for a function can have several slots available: N is
4601 which slot to use. */
4604 assign_386_stack_local (mode, n)
4605 enum machine_mode mode;
4608 if (n < 0 || n >= MAX_386_STACK_LOCALS)
4611 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4612 i386_stack_locals[(int) mode][n]
4613 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4615 return i386_stack_locals[(int) mode][n];
4620 enum machine_mode mode ATTRIBUTE_UNUSED;
4622 return (GET_CODE (op) == MULT);
4627 enum machine_mode mode ATTRIBUTE_UNUSED;
4629 return (GET_CODE (op) == DIV);
4633 /* Create a new copy of an rtx.
4634 Recursively copies the operands of the rtx,
4635 except for those few rtx codes that are sharable.
4636 Doesn't share CONST */
4644 register RTX_CODE code;
4645 register char *format_ptr;
4647 code = GET_CODE (orig);
4660 /* SCRATCH must be shared because they represent distinct values. */
4665 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4666 a LABEL_REF, it isn't sharable. */
4667 if (GET_CODE (XEXP (orig, 0)) == PLUS
4668 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4669 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4673 /* A MEM with a constant address is not sharable. The problem is that
4674 the constant address may need to be reloaded. If the mem is shared,
4675 then reloading one copy of this mem will cause all copies to appear
4676 to have been reloaded. */
4679 copy = rtx_alloc (code);
4680 PUT_MODE (copy, GET_MODE (orig));
4681 copy->in_struct = orig->in_struct;
4682 copy->volatil = orig->volatil;
4683 copy->unchanging = orig->unchanging;
4684 copy->integrated = orig->integrated;
4686 copy->is_spill_rtx = orig->is_spill_rtx;
4688 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4690 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4692 switch (*format_ptr++)
4695 XEXP (copy, i) = XEXP (orig, i);
4696 if (XEXP (orig, i) != NULL)
4697 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4702 XEXP (copy, i) = XEXP (orig, i);
4707 XVEC (copy, i) = XVEC (orig, i);
4708 if (XVEC (orig, i) != NULL)
4710 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4711 for (j = 0; j < XVECLEN (copy, i); j++)
4712 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4717 XWINT (copy, i) = XWINT (orig, i);
4721 XINT (copy, i) = XINT (orig, i);
4726 XSTR (copy, i) = XSTR (orig, i);
4737 /* Try to rewrite a memory address to make it valid */
4740 rewrite_address (mem_rtx)
4743 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4745 int offset_adjust = 0;
4746 int was_only_offset = 0;
4747 rtx mem_addr = XEXP (mem_rtx, 0);
4748 char *storage = oballoc (0);
4750 int is_spill_rtx = 0;
4752 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4753 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
4755 if (GET_CODE (mem_addr) == PLUS
4756 && GET_CODE (XEXP (mem_addr, 1)) == PLUS
4757 && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4759 /* This part is utilized by the combiner. */
4761 = gen_rtx (PLUS, GET_MODE (mem_addr),
4762 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4763 XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
4764 XEXP (XEXP (mem_addr, 1), 1));
4766 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4768 XEXP (mem_rtx, 0) = ret_rtx;
4769 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4776 /* This part is utilized by loop.c.
4777 If the address contains PLUS (reg,const) and this pattern is invalid
4778 in this case - try to rewrite the address to make it valid. */
4779 storage = oballoc (0);
4780 index_rtx = base_rtx = offset_rtx = NULL;
4782 /* Find the base index and offset elements of the memory address. */
4783 if (GET_CODE (mem_addr) == PLUS)
4785 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4787 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4788 base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
4790 base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
4793 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4795 index_rtx = XEXP (mem_addr, 0);
4796 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4797 base_rtx = XEXP (mem_addr, 1);
4799 offset_rtx = XEXP (mem_addr, 1);
4802 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4804 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
4805 && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
4806 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
4808 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
4810 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
4812 && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
4813 && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4815 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4816 offset_rtx = XEXP (mem_addr, 1);
4817 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4818 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4822 offset_rtx = XEXP (mem_addr, 1);
4823 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4824 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4828 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4830 was_only_offset = 1;
4833 offset_rtx = XEXP (mem_addr, 1);
4834 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4835 if (offset_adjust == 0)
4837 XEXP (mem_rtx, 0) = offset_rtx;
4838 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4848 else if (GET_CODE (mem_addr) == MULT)
4849 index_rtx = mem_addr;
4856 if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
4858 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4864 scale_rtx = XEXP (index_rtx, 1);
4865 scale = INTVAL (scale_rtx);
4866 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4869 /* Now find which of the elements are invalid and try to fix them. */
4870 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4872 offset_adjust = INTVAL (index_rtx) * scale;
4874 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4875 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4876 else if (offset_rtx == 0)
4877 offset_rtx = const0_rtx;
4879 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4880 XEXP (mem_rtx, 0) = offset_rtx;
4884 if (base_rtx && GET_CODE (base_rtx) == PLUS
4885 && GET_CODE (XEXP (base_rtx, 0)) == REG
4886 && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4888 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4889 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4892 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4894 offset_adjust += INTVAL (base_rtx);
4898 if (index_rtx && GET_CODE (index_rtx) == PLUS
4899 && GET_CODE (XEXP (index_rtx, 0)) == REG
4900 && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4902 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4903 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4908 if (! LEGITIMATE_INDEX_P (index_rtx)
4909 && ! (index_rtx == stack_pointer_rtx && scale == 1
4910 && base_rtx == NULL))
4919 if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4926 if (offset_adjust != 0)
4928 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4929 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4931 offset_rtx = const0_rtx;
4939 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4940 gen_rtx (MULT, GET_MODE (index_rtx),
4941 index_rtx, scale_rtx),
4944 if (GET_CODE (offset_rtx) != CONST_INT
4945 || INTVAL (offset_rtx) != 0)
4946 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4947 ret_rtx, offset_rtx);
4951 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4952 index_rtx, base_rtx);
4954 if (GET_CODE (offset_rtx) != CONST_INT
4955 || INTVAL (offset_rtx) != 0)
4956 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4957 ret_rtx, offset_rtx);
4964 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
4965 index_rtx, scale_rtx);
4967 if (GET_CODE (offset_rtx) != CONST_INT
4968 || INTVAL (offset_rtx) != 0)
4969 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4970 ret_rtx, offset_rtx);
4974 if (GET_CODE (offset_rtx) == CONST_INT
4975 && INTVAL (offset_rtx) == 0)
4976 ret_rtx = index_rtx;
4978 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4979 index_rtx, offset_rtx);
4987 if (GET_CODE (offset_rtx) == CONST_INT
4988 && INTVAL (offset_rtx) == 0)
4991 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
4994 else if (was_only_offset)
4995 ret_rtx = offset_rtx;
5003 XEXP (mem_rtx, 0) = ret_rtx;
5004 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
5015 /* Return 1 if the first insn to set cc before INSN also sets the register
5016 REG_RTX; otherwise return 0. */
5018 last_to_set_cc (reg_rtx, insn)
5021 rtx prev_insn = PREV_INSN (insn);
5025 if (GET_CODE (prev_insn) == NOTE)
5028 else if (GET_CODE (prev_insn) == INSN)
5030 if (GET_CODE (PATTERN (prev_insn)) != SET)
5033 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
5035 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
5041 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
5048 prev_insn = PREV_INSN (prev_insn);
5055 doesnt_set_condition_code (pat)
5058 switch (GET_CODE (pat))
5071 sets_condition_code (pat)
5074 switch (GET_CODE (pat))
5096 str_immediate_operand (op, mode)
5098 enum machine_mode mode ATTRIBUTE_UNUSED;
5100 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
5110 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5111 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5112 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5113 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
5119 /* Return 1 if the mode of the SET_DEST of insn is floating point
5120 and it is not an fld or a move from memory to memory.
5121 Otherwise return 0 */
5127 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5128 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5129 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5130 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5131 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5132 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
5133 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
5139 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
5140 memory and the source is a register. */
5146 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
5147 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
5148 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
5149 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
5150 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
5151 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
5157 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
5158 or index to reference memory.
5159 otherwise return 0 */
5162 agi_dependent (insn, dep_insn)
5165 if (GET_CODE (dep_insn) == INSN
5166 && GET_CODE (PATTERN (dep_insn)) == SET
5167 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
5168 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
5170 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
5171 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
5172 && push_operand (SET_DEST (PATTERN (dep_insn)),
5173 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
5174 return reg_mentioned_in_mem (stack_pointer_rtx, insn);
5179 /* Return 1 if reg is used in rtl as a base or index for a memory ref
5180 otherwise return 0. */
5183 reg_mentioned_in_mem (reg, rtl)
5188 register enum rtx_code code;
5193 code = GET_CODE (rtl);
5211 if (code == MEM && reg_mentioned_p (reg, rtl))
5214 fmt = GET_RTX_FORMAT (code);
5215 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5219 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
5220 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
5224 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
5231 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
5233 operands[0] = result, initialized with the startaddress
5234 operands[1] = alignment of the address.
5235 operands[2] = scratch register, initialized with the startaddress when
5236 not aligned, otherwise undefined
5238 This is just the body. It needs the initialisations mentioned above and
5239 some address computing at the end. These things are done in i386.md. */
5242 output_strlen_unroll (operands)
5247 xops[0] = operands[0]; /* Result */
5248 /* operands[1]; * Alignment */
5249 xops[1] = operands[2]; /* Scratch */
5250 xops[2] = GEN_INT (0);
5251 xops[3] = GEN_INT (2);
5252 xops[4] = GEN_INT (3);
5253 xops[5] = GEN_INT (4);
5254 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
5255 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
5256 xops[8] = gen_label_rtx (); /* label of main loop */
5258 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
5259 xops[9] = gen_label_rtx (); /* pentium optimisation */
5261 xops[10] = gen_label_rtx (); /* end label 2 */
5262 xops[11] = gen_label_rtx (); /* end label 1 */
5263 xops[12] = gen_label_rtx (); /* end label */
5264 /* xops[13] * Temporary used */
5265 xops[14] = GEN_INT (0xff);
5266 xops[15] = GEN_INT (0xff00);
5267 xops[16] = GEN_INT (0xff0000);
5268 xops[17] = GEN_INT (0xff000000);
5270 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
5272 /* Is there a known alignment and is it less than 4? */
5273 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
5275 /* Is there a known alignment and is it not 2? */
5276 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5278 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5279 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5281 /* Leave just the 3 lower bits.
5282 If this is a q-register, then the high part is used later
5283 therefore use andl rather than andb. */
5284 output_asm_insn (AS2 (and%L1,%4,%1), xops);
5286 /* Is aligned to 4-byte address when zero */
5287 output_asm_insn (AS1 (je,%l8), xops);
5289 /* Side-effect even Parity when %eax == 3 */
5290 output_asm_insn (AS1 (jp,%6), xops);
5292 /* Is it aligned to 2 bytes ? */
5293 if (QI_REG_P (xops[1]))
5294 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5296 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5298 output_asm_insn (AS1 (je,%7), xops);
5302 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5303 check if is aligned to 4 - byte. */
5304 output_asm_insn (AS2 (and%L1,%3,%1), xops);
5306 /* Is aligned to 4-byte address when zero */
5307 output_asm_insn (AS1 (je,%l8), xops);
5310 xops[13] = gen_rtx_MEM (QImode, xops[0]);
5312 /* Now compare the bytes; compare with the high part of a q-reg
5313 gives shorter code. */
5314 if (QI_REG_P (xops[1]))
5316 /* Compare the first n unaligned byte on a byte per byte basis. */
5317 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5319 /* When zero we reached the end. */
5320 output_asm_insn (AS1 (je,%l12), xops);
5322 /* Increment the address. */
5323 output_asm_insn (AS1 (inc%L0,%0), xops);
5325 /* Not needed with an alignment of 2 */
5326 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5328 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5329 CODE_LABEL_NUMBER (xops[7]));
5330 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5331 output_asm_insn (AS1 (je,%l12), xops);
5332 output_asm_insn (AS1 (inc%L0,%0), xops);
5334 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5335 CODE_LABEL_NUMBER (xops[6]));
5338 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5342 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5343 output_asm_insn (AS1 (je,%l12), xops);
5344 output_asm_insn (AS1 (inc%L0,%0), xops);
5346 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5347 CODE_LABEL_NUMBER (xops[7]));
5348 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5349 output_asm_insn (AS1 (je,%l12), xops);
5350 output_asm_insn (AS1 (inc%L0,%0), xops);
5352 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5353 CODE_LABEL_NUMBER (xops[6]));
5354 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5357 output_asm_insn (AS1 (je,%l12), xops);
5358 output_asm_insn (AS1 (inc%L0,%0), xops);
5361 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5362 align this loop. It gives only huge programs, but does not help to
5364 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
5366 xops[13] = gen_rtx_MEM (SImode, xops[0]);
5367 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5369 if (QI_REG_P (xops[1]))
5371 /* On i586 it is faster to combine the hi- and lo- part as
5372 a kind of lookahead. If anding both yields zero, then one
5373 of both *could* be zero, otherwise none of both is zero;
5374 this saves one instruction, on i486 this is slower
5375 tested with P-90, i486DX2-66, AMD486DX2-66 */
5378 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5379 output_asm_insn (AS1 (jne,%l9), xops);
5382 /* Check first byte. */
5383 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5384 output_asm_insn (AS1 (je,%l12), xops);
5386 /* Check second byte. */
5387 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5388 output_asm_insn (AS1 (je,%l11), xops);
5391 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5392 CODE_LABEL_NUMBER (xops[9]));
5397 /* Check first byte. */
5398 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5399 output_asm_insn (AS1 (je,%l12), xops);
5401 /* Check second byte. */
5402 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5403 output_asm_insn (AS1 (je,%l11), xops);
5406 /* Check third byte. */
5407 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5408 output_asm_insn (AS1 (je,%l10), xops);
5410 /* Check fourth byte and increment address. */
5411 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5412 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5413 output_asm_insn (AS1 (jne,%l8), xops);
5415 /* Now generate fixups when the compare stops within a 4-byte word. */
5416 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5418 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5419 output_asm_insn (AS1 (inc%L0,%0), xops);
5421 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5422 output_asm_insn (AS1 (inc%L0,%0), xops);
5424 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
5430 output_fp_conditional_move (which_alternative, operands)
5431 int which_alternative;
5434 switch (which_alternative)
5437 /* r <- cond ? arg : r */
5438 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5442 /* r <- cond ? r : arg */
5443 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5447 /* r <- cond ? r : arg */
5448 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5449 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5460 output_int_conditional_move (which_alternative, operands)
5461 int which_alternative;
5464 int code = GET_CODE (operands[1]);
5465 enum machine_mode mode;
5468 /* This is very tricky. We have to do it right. For a code segement
5477 final_scan_insn () may delete the insn which sets CC. We have to
5478 tell final_scan_insn () if it should be reinserted. When CODE is
5479 GT or LE, we have to check the CC_NO_OVERFLOW bit and return
5480 NULL_PTR to tell final to reinsert the test insn because the
5481 conditional move cannot be handled properly without it. */
5482 if ((code == GT || code == LE)
5483 && (cc_prev_status.flags & CC_NO_OVERFLOW))
5486 mode = GET_MODE (operands [0]);
5489 xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
5490 xops [1] = operands [1];
5491 xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
5492 xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
5495 switch (which_alternative)
5498 /* r <- cond ? arg : r */
5499 output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
5501 output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
5505 /* r <- cond ? r : arg */
5506 output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
5508 output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
5512 /* rm <- cond ? arg1 : arg2 */
5513 output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
5514 output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
5517 output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
5518 output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
5530 x86_adjust_cost (insn, link, dep_insn, cost)
5531 rtx insn, link, dep_insn;
5536 if (GET_CODE (dep_insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
5539 if (GET_CODE (dep_insn) == INSN
5540 && GET_CODE (PATTERN (dep_insn)) == SET
5541 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG
5542 && GET_CODE (insn) == INSN
5543 && GET_CODE (PATTERN (insn)) == SET
5544 && !reg_overlap_mentioned_p (SET_DEST (PATTERN (dep_insn)),
5545 SET_SRC (PATTERN (insn))))
5551 case PROCESSOR_PENTIUM:
5552 if (cost != 0 && is_fp_insn (insn) && is_fp_insn (dep_insn)
5553 && !is_fp_dest (dep_insn))
5556 if (agi_dependent (insn, dep_insn))
5559 if (GET_CODE (insn) == INSN
5560 && GET_CODE (PATTERN (insn)) == SET
5561 && SET_DEST (PATTERN (insn)) == cc0_rtx
5562 && (next_inst = next_nonnote_insn (insn))
5563 && GET_CODE (next_inst) == JUMP_INSN)
5564 /* compare probably paired with jump */
5570 if (!is_fp_dest (dep_insn))
5572 if(!agi_dependent (insn, dep_insn))
5578 if (is_fp_store (insn) && is_fp_insn (dep_insn)
5579 && NEXT_INSN (insn) && NEXT_INSN (NEXT_INSN (insn))
5580 && NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))
5581 && (GET_CODE (NEXT_INSN (insn)) == INSN)
5582 && (GET_CODE (NEXT_INSN (NEXT_INSN (insn))) == JUMP_INSN)
5583 && (GET_CODE (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn)))) == NOTE)
5584 && (NOTE_LINE_NUMBER (NEXT_INSN (NEXT_INSN (NEXT_INSN (insn))))
5585 == NOTE_INSN_LOOP_END))