1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 92, 94, 95, 96, 97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
/* NOTE(review): this listing is an excerpt -- the matching #endif lines for
   both conditionals below are not visible here; verify against the full file.
   The first conditional deliberately emits a plain (non-#error) bogus token
   so old compilers that choke on #error still diagnose the conflict.  */
41 #ifdef EXTRA_CONSTRAINT
42 /* If EXTRA_CONSTRAINT is defined, then the 'S'
43 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
44 asm statements that need 'S' for class SIREG will break. */
45 error EXTRA_CONSTRAINT conflicts with S constraint letter
46 /* The previous line used to be #error, but some compilers barf
47 even if the conditional was untrue. */
/* Default stack-probe limit: -1 disables the check unless the target
   configuration overrides CHECK_STACK_LIMIT.  */
50 #ifndef CHECK_STACK_LIMIT
51 #define CHECK_STACK_LIMIT -1
/* Per-processor instruction cost tables, all relative to the cost of an
   integer add (= 1).  Used by rtx-cost computations to steer instruction
   selection (e.g. lea vs. add, shift vs. multiply).
   NOTE(review): the closing "};" of each initializer is not visible in this
   excerpt, and the struct processor_costs declaration itself is elsewhere.  */
54 /* Type of an operand for ix86_{binary,unary}_operator_ok */
62 /* Processor costs (relative to an add) */
63 struct processor_costs i386_cost = { /* 386 specific costs */
64 1, /* cost of an add instruction */
65 1, /* cost of a lea instruction */
66 3, /* variable shift costs */
67 2, /* constant shift costs */
68 6, /* cost of starting a multiply */
69 1, /* cost of multiply per each bit set */
70 23 /* cost of a divide/mod */
73 struct processor_costs i486_cost = { /* 486 specific costs */
74 1, /* cost of an add instruction */
75 1, /* cost of a lea instruction */
76 3, /* variable shift costs */
77 2, /* constant shift costs */
78 12, /* cost of starting a multiply */
79 1, /* cost of multiply per each bit set */
80 40 /* cost of a divide/mod */
83 struct processor_costs pentium_cost = {
84 1, /* cost of an add instruction */
85 1, /* cost of a lea instruction */
86 4, /* variable shift costs */
87 1, /* constant shift costs */
88 11, /* cost of starting a multiply */
89 0, /* cost of multiply per each bit set */
90 25 /* cost of a divide/mod */
93 struct processor_costs pentiumpro_cost = {
94 1, /* cost of an add instruction */
95 1, /* cost of a lea instruction */
96 3, /* variable shift costs */
97 1, /* constant shift costs */
98 4, /* cost of starting a multiply */
99 0, /* cost of multiply per each bit set */
100 17 /* cost of a divide/mod */
/* Active cost table; defaults to Pentium and is re-pointed by the -mcpu=
   handling below (see the processor_target_table loop).  */
103 struct processor_costs *ix86_cost = &pentium_cost;
/* Convenience macro: a MEM rtx of the given mode addressed through the
   frame pointer (%ebp).  */
105 #define AT_BP(mode) (gen_rtx_MEM ((mode), frame_pointer_rtx))
107 extern FILE *asm_out_file;
108 extern char *strcat ();
/* Forward declarations for the prologue/epilogue emitters defined later
   in this file (PROTO is the pre-ANSI/K&R compatibility macro).  */
110 static void ix86_epilogue PROTO((int));
111 static void ix86_prologue PROTO((int));
113 char *singlemove_string ();
114 char *output_move_const_single ();
115 char *output_fp_cc0_set ();
/* Assembler register-name spellings for word, byte-low and byte-high
   access; the initializer macros come from i386.h.  */
117 char *hi_reg_name[] = HI_REGISTER_NAMES;
118 char *qi_reg_name[] = QI_REGISTER_NAMES;
119 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
121 /* Array of the smallest class containing reg number REGNO, indexed by
122 REGNO. Used by REGNO_REG_CLASS in i386.h. */
/* NOTE(review): excerpt -- some initializer rows and the closing "};" are
   not visible here; order must match the hard register numbering.  */
124 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
127 AREG, DREG, CREG, BREG,
129 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
131 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
132 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
137 /* Test and compare insns in i386.md store the information needed to
138 generate branch and scc insns here. */
/* Saved compare operands plus the rtx generator functions used when the
   following branch/scc insn is emitted.  */
140 struct rtx_def *i386_compare_op0 = NULL_RTX;
141 struct rtx_def *i386_compare_op1 = NULL_RTX;
142 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
/* Globals holding the values of the -mcpu=/-march=/-m* command-line
   switches.  The *_string variables receive the raw option text; the
   corresponding numeric variables are filled in by the validation code
   below (presumably OVERRIDE_OPTIONS / override_options).  */
144 /* which cpu are we scheduling for */
145 enum processor_type ix86_cpu;
147 /* which instruction set architecture to use. */
150 /* Strings to hold which cpu and instruction set architecture to use. */
151 char *ix86_cpu_string; /* for -mcpu=<xxx> */
152 char *ix86_arch_string; /* for -march=<xxx> */
154 /* Register allocation order */
155 char *i386_reg_alloc_order;
/* Scratch map: which hard regs were named in -mreg-alloc=; shared by the
   validation pass and order_regs_for_local_alloc below.  */
156 static char regs_allocated[FIRST_PSEUDO_REGISTER];
158 /* # of registers to use to pass arguments. */
159 char *i386_regparm_string;
161 /* i386_regparm_string as a number */
164 /* Alignment to use for loops and jumps: */
166 /* Power of two alignment for loops. */
167 char *i386_align_loops_string;
169 /* Power of two alignment for non-loop jumps. */
170 char *i386_align_jumps_string;
172 /* Values 1-5: see jump.c */
173 int i386_branch_cost;
174 char *i386_branch_cost_string;
176 /* Power of two alignment for functions. */
177 int i386_align_funcs;
178 char *i386_align_funcs_string;
180 /* Power of two alignment for loops. */
181 int i386_align_loops;
183 /* Power of two alignment for non-loop jumps. */
184 int i386_align_jumps;
186 /* Sometimes certain combinations of command options do not make
187 sense on a particular target machine. You can define a macro
188 `OVERRIDE_OPTIONS' to take account of this. This macro, if
189 defined, is executed once just after all the command options have
192 Don't use this macro to turn on various extra optimizations for
193 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* Table mapping each recognized -mcpu=/-march= name to its processor enum,
   cost table, and target_flags to force on/off.  NOTE(review): the
   "static struct ptt {" opening line is not visible in this excerpt.  */
203 char *name; /* Canonical processor name. */
204 enum processor_type processor; /* Processor type enum value. */
205 struct processor_costs *cost; /* Processor costs */
206 int target_enable; /* Target flags to enable. */
207 int target_disable; /* Target flags to disable. */
208 } processor_target_table[]
209 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
210 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
211 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
212 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
213 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentiumpro_cost,
215 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO,
216 &pentiumpro_cost, 0, 0}};
/* Number of entries in processor_target_table.  */
218 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
/* NOTE(review): the lines below are the interior of a function whose
   header is not visible in this excerpt -- presumably override_options(),
   invoked once from the OVERRIDE_OPTIONS target macro after option parsing.
   It validates -mreg-alloc, resolves -march=/-mcpu= against
   processor_target_table, and range-checks the -mregparm/-malign-*/
   -mbranch-cost switches, supplying defaults where the user gave none.  */
220 #ifdef SUBTARGET_OVERRIDE_OPTIONS
221 SUBTARGET_OVERRIDE_OPTIONS;
224 /* Validate registers in register allocation order. */
225 if (i386_reg_alloc_order)
227 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
233 case 'a': regno = 0; break;
234 case 'd': regno = 1; break;
235 case 'c': regno = 2; break;
236 case 'b': regno = 3; break;
237 case 'S': regno = 4; break;
238 case 'D': regno = 5; break;
239 case 'B': regno = 6; break;
241 default: fatal ("Register '%c' is unknown", ch);
244 if (regs_allocated[regno])
245 fatal ("Register '%c' already specified in allocation order", ch);
247 regs_allocated[regno] = 1;
/* -march= defaults to pentium; -mcpu= defaults to the configured CPU.  */
251 if (ix86_arch_string == 0)
253 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
254 if (ix86_cpu_string == 0)
255 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -march= in the processor table; if found, let it also provide
   the default -mcpu= name.  */
258 for (i = 0; i < ptt_size; i++)
259 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
261 ix86_arch = processor_target_table[i].processor;
262 if (ix86_cpu_string == 0)
263 ix86_cpu_string = processor_target_table[i].name;
269 error ("bad value (%s) for -march= switch", ix86_arch_string);
270 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
271 ix86_arch = PROCESSOR_DEFAULT;
274 if (ix86_cpu_string == 0)
275 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Look up -mcpu=: set the scheduling model and the active cost table, and
   reject an -mcpu older than a ppro-or-newer -march (i > j means the cpu
   entry precedes the arch entry in the table).  */
277 for (j = 0; j < ptt_size; j++)
278 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
280 ix86_cpu = processor_target_table[j].processor;
281 ix86_cost = processor_target_table[j].cost;
282 if (i > j && (int) ix86_arch >= (int) PROCESSOR_PENTIUMPRO)
283 error ("-mcpu=%s does not support -march=%s",
284 ix86_cpu_string, ix86_arch_string);
286 target_flags |= processor_target_table[j].target_enable;
287 target_flags &= ~processor_target_table[j].target_disable;
293 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
294 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
295 ix86_cpu = PROCESSOR_DEFAULT;
298 /* Validate -mregparm= value. */
299 if (i386_regparm_string)
301 i386_regparm = atoi (i386_regparm_string);
302 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
303 fatal ("-mregparm=%d is not between 0 and %d",
304 i386_regparm, REGPARM_MAX);
307 /* The 486 suffers more from non-aligned cache line fills, and the
308 larger code size results in a larger cache foot-print and more misses.
309 The 486 has a 16 byte cache line, pentium and pentiumpro have a 32 byte
311 def_align = (TARGET_486) ? 4 : 2;
313 /* Validate -malign-loops= value, or provide default. */
314 if (i386_align_loops_string)
316 i386_align_loops = atoi (i386_align_loops_string);
317 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
318 fatal ("-malign-loops=%d is not between 0 and %d",
319 i386_align_loops, MAX_CODE_ALIGN);
322 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
323 i386_align_loops = 4;
325 i386_align_loops = 2;
328 /* Validate -malign-jumps= value, or provide default. */
329 if (i386_align_jumps_string)
331 i386_align_jumps = atoi (i386_align_jumps_string);
332 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
333 fatal ("-malign-jumps=%d is not between 0 and %d",
334 i386_align_jumps, MAX_CODE_ALIGN);
337 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
338 i386_align_jumps = 4;
340 i386_align_jumps = def_align;
343 /* Validate -malign-functions= value, or provide default. */
344 if (i386_align_funcs_string)
346 i386_align_funcs = atoi (i386_align_funcs_string);
347 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
348 fatal ("-malign-functions=%d is not between 0 and %d",
349 i386_align_funcs, MAX_CODE_ALIGN);
352 i386_align_funcs = def_align;
354 /* Validate -mbranch-cost= value, or provide default. */
355 if (i386_branch_cost_string)
357 i386_branch_cost = atoi (i386_branch_cost_string);
358 if (i386_branch_cost < 0 || i386_branch_cost > 5)
359 fatal ("-mbranch-cost=%d is not between 0 and 5",
363 i386_branch_cost = 1;
365 /* Keep nonleaf frame pointers. */
366 if (TARGET_OMIT_LEAF_FRAME_POINTER)
367 flag_omit_frame_pointer = 1;
370 /* A C statement (sans semicolon) to choose the order in which to
371 allocate hard registers for pseudo-registers local to a basic
374 Store the desired register order in the array `reg_alloc_order'.
375 Element 0 should be the register to allocate first; element 1, the
376 next register; and so on.
378 The macro body should not assume anything about the contents of
379 `reg_alloc_order' before execution of the macro.
381 On most machines, it is not necessary to define this macro. */
/* NOTE(review): excerpt -- the return type, local declarations and some
   braces of this function are not visible here.  If the user gave
   -mreg-alloc=, place those registers first (regs_allocated was filled in
   during option validation), then append the rest in natural order;
   otherwise use plain natural order.  */
384 order_regs_for_local_alloc ()
388 /* User specified the register allocation order. */
390 if (i386_reg_alloc_order)
392 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
398 case 'a': regno = 0; break;
399 case 'd': regno = 1; break;
400 case 'c': regno = 2; break;
401 case 'b': regno = 3; break;
402 case 'S': regno = 4; break;
403 case 'D': regno = 5; break;
404 case 'B': regno = 6; break;
407 reg_alloc_order[order++] = regno;
410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
412 if (! regs_allocated[i])
413 reg_alloc_order[order++] = i;
417 /* If user did not specify a register allocation order, use natural order. */
420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
421 reg_alloc_order[i] = i;
/* Per-optimization-level option tweaks (OPTIMIZATION_OPTIONS hook).
   NOTE(review): excerpt -- return type, the `level' parameter declaration
   and the condition guarding the assignment are not visible here.  */
426 optimization_options (level, size)
428 int size ATTRIBUTE_UNUSED;
430 /* For -O2 and beyond, turn off -fschedule-insns by default. It tends to
431 make the problem with not enough registers even worse. */
432 #ifdef INSN_SCHEDULING
434 flag_schedule_insns = 0;
438 /* Sign-extend a 16-bit constant */
/* If OP is a CONST_INT, replace it with its value sign-extended from 16
   bits; non-constant rtx are presumably returned unchanged.
   NOTE(review): excerpt -- the return type, parameter declaration, the
   sign-bit test choosing between the two sext_val forms, and the return
   statement are not visible here.  */
441 i386_sext16_if_const (op)
444 if (GET_CODE (op) == CONST_INT)
446 HOST_WIDE_INT val = INTVAL (op);
447 HOST_WIDE_INT sext_val;
449 sext_val = val | ~0xffff;
451 sext_val = val & 0xffff;
453 op = GEN_INT (sext_val);
458 /* Return nonzero if the rtx is aligned */
/* i386_aligned_reg_p: a register is considered word-aligned only if it is
   the stack pointer, or the frame pointer when a frame pointer is kept.  */
461 i386_aligned_reg_p (regno)
464 return (regno == STACK_POINTER_REGNUM
465 || (! flag_omit_frame_pointer && regno == FRAME_POINTER_REGNUM));
/* NOTE(review): the lines below are the interior of a second function
   (presumably i386_aligned_p) whose header is not visible in this excerpt.
   Registers/immediates count as aligned; volatile MEMs never do; for a
   "reg + const" address both the register and the low 2 offset bits are
   checked.  */
472 /* Registers and immediate operands are always "aligned". */
473 if (GET_CODE (op) != MEM)
476 /* Don't even try to do any aligned optimizations with volatiles. */
477 if (MEM_VOLATILE_P (op))
480 /* Get address of memory operand. */
483 switch (GET_CODE (op))
490 /* Match "reg + offset" */
492 if (GET_CODE (XEXP (op, 1)) != CONST_INT)
494 if (INTVAL (XEXP (op, 1)) & 3)
498 if (GET_CODE (op) != REG)
501 /* ... fall through ... */
504 return i386_aligned_reg_p (REGNO (op));
513 /* Return nonzero if INSN looks like it won't compute useful cc bits
514 as a side effect. This information is only a hint. */
/* True when no later insn consumes cc0 set by INSN.  */
517 i386_cc_probably_useless_p (insn)
520 return ! next_cc0_user (insn);
523 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
524 attribute for DECL. The attributes in ATTRIBUTES have previously been
/* No decl-level machine attributes are recognized on i386; the body
   (not visible in this excerpt) presumably just returns 0 -- all
   parameters are marked unused.  */
528 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
529 tree decl ATTRIBUTE_UNUSED;
530 tree attributes ATTRIBUTE_UNUSED;
531 tree identifier ATTRIBUTE_UNUSED;
532 tree args ATTRIBUTE_UNUSED;
537 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
538 attribute for TYPE. The attributes in ATTRIBUTES have previously been
/* Recognizes the stdcall, cdecl and regparm(N) type attributes on
   function types; regparm's single argument must be an INTEGER_CST in
   [0, REGPARM_MAX].  NOTE(review): excerpt -- some returns/braces are not
   visible here.  */
542 i386_valid_type_attribute_p (type, attributes, identifier, args)
544 tree attributes ATTRIBUTE_UNUSED;
548 if (TREE_CODE (type) != FUNCTION_TYPE
549 && TREE_CODE (type) != FIELD_DECL
550 && TREE_CODE (type) != TYPE_DECL)
553 /* Stdcall attribute says callee is responsible for popping arguments
554 if they are not variable. */
555 if (is_attribute_p ("stdcall", identifier))
556 return (args == NULL_TREE);
558 /* Cdecl attribute says the callee is a normal C declaration. */
559 if (is_attribute_p ("cdecl", identifier))
560 return (args == NULL_TREE);
562 /* Regparm attribute specifies how many integer arguments are to be
563 passed in registers. */
564 if (is_attribute_p ("regparm", identifier))
568 if (! args || TREE_CODE (args) != TREE_LIST
569 || TREE_CHAIN (args) != NULL_TREE
570 || TREE_VALUE (args) == NULL_TREE)
573 cst = TREE_VALUE (args);
574 if (TREE_CODE (cst) != INTEGER_CST)
577 if (TREE_INT_CST_HIGH (cst) != 0
578 || TREE_INT_CST_LOW (cst) < 0
579 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
588 /* Return 0 if the attributes for two types are incompatible, 1 if they
589 are compatible, and 2 if they are nearly compatible (which causes a
590 warning to be generated). */
/* On i386 all attribute combinations are treated as compatible; the body
   (not visible in this excerpt) presumably returns 1 unconditionally.  */
593 i386_comp_type_attributes (type1, type2)
594 tree type1 ATTRIBUTE_UNUSED;
595 tree type2 ATTRIBUTE_UNUSED;
601 /* Value is the number of bytes of arguments automatically
602 popped when returning from a subroutine call.
603 FUNDECL is the declaration node of the function (as a tree),
604 FUNTYPE is the data type of the function (as a tree),
605 or for a library call it is an identifier node for the subroutine name.
606 SIZE is the number of bytes of arguments passed on the stack.
608 On the 80386, the RTD insn may be used to pop them if the number
609 of args is fixed, but if the number is variable then the caller
610 must pop them all. RTD can't be used for library calls now
611 because the library is compiled with the Unix compiler.
612 Use of RTD is a selectable option, since it is incompatible with
613 standard Unix calling sequences. If the option is not selected,
614 the caller must always pop the args.
616 The attribute stdcall is equivalent to RTD on a per module basis. */
/* NOTE(review): excerpt -- the declarations of fundecl/funtype/size, some
   conditions and returns are not visible here.  */
619 i386_return_pops_args (fundecl, funtype, size)
624 int rtd = TARGET_RTD && (!fundecl || TREE_CODE (fundecl) != IDENTIFIER_NODE);
626 /* Cdecl functions override -mrtd, and never pop the stack. */
627 if (! lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
629 /* Stdcall functions will pop the stack if not variable args. */
630 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* Pop only when the arg list is fixed (no trailing "..."): either there
   is no prototype, or the last listed type is void.  */
634 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
635 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype)))
640 /* Lose any fake structure return argument. */
641 if (aggregate_value_p (TREE_TYPE (funtype)))
642 return GET_MODE_SIZE (Pmode);
648 /* Argument support functions. */
650 /* Initialize a variable CUM of type CUMULATIVE_ARGS
651 for a call to a function whose data type is FNTYPE.
652 For a library call, FNTYPE is 0. */
/* NOTE(review): excerpt -- the "*cum = zero_cum;" reset, some braces and
   the nregs=0 assignment for varargs are not visible here.  */
655 init_cumulative_args (cum, fntype, libname)
656 CUMULATIVE_ARGS *cum; /* Argument info to initialize */
657 tree fntype; /* tree ptr for function decl */
658 rtx libname; /* SYMBOL_REF of library name or 0 */
660 static CUMULATIVE_ARGS zero_cum;
661 tree param, next_param;
663 if (TARGET_DEBUG_ARG)
665 fprintf (stderr, "\ninit_cumulative_args (");
667 fprintf (stderr, "fntype code = %s, ret code = %s",
668 tree_code_name[(int) TREE_CODE (fntype)],
669 tree_code_name[(int) TREE_CODE (TREE_TYPE (fntype))]);
671 fprintf (stderr, "no fntype");
674 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
679 /* Set up the number of registers to use for passing arguments. */
680 cum->nregs = i386_regparm;
/* A regparm(N) attribute on the function type overrides -mregparm.  */
683 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
686 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
689 /* Determine if this function has variable arguments. This is
690 indicated by the last argument being 'void_type_mode' if there
691 are no variable arguments. If there are variable arguments, then
692 we won't pass anything in registers */
696 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
697 param != 0; param = next_param)
699 next_param = TREE_CHAIN (param);
700 if (next_param == 0 && TREE_VALUE (param) != void_type_node)
705 if (TARGET_DEBUG_ARG)
706 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
711 /* Update the data in CUM to advance over an argument
712 of mode MODE and data type TYPE.
713 (TYPE is null for libcalls where that information may not be available.) */
/* NOTE(review): excerpt -- the "int bytes" declaration line and the code
   that actually advances cum->words/cum->nregs are not visible here.  */
716 function_arg_advance (cum, mode, type, named)
717 CUMULATIVE_ARGS *cum; /* current arg information */
718 enum machine_mode mode; /* current arg mode */
719 tree type; /* type of the argument or 0 if lib support */
720 int named; /* whether or not the argument was named */
723 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
724 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
726 if (TARGET_DEBUG_ARG)
728 "function_adv (sz=%d, wds=%2d, nregs=%d, mode=%s, named=%d)\n\n",
729 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
744 /* Define where to put the arguments to a function.
745 Value is zero to push the argument on the stack,
746 or a hard register in which to store the argument.
748 MODE is the argument's machine mode.
749 TYPE is the data type of the argument (as a tree).
750 This is null for libcalls where that information may
752 CUM is a variable of type CUMULATIVE_ARGS which gives info about
753 the preceding args and about the function being called.
754 NAMED is nonzero if this argument is a named parameter
755 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): excerpt -- the rtx ret declaration, the switch over mode,
   and the final return are not visible here.  Integral args that fit in
   the remaining regparm registers go in a REG; everything else (including
   fp/complex) goes on the stack (ret == 0).  */
758 function_arg (cum, mode, type, named)
759 CUMULATIVE_ARGS *cum; /* current arg information */
760 enum machine_mode mode; /* current arg mode */
761 tree type; /* type of the argument or 0 if lib support */
762 int named; /* != 0 for normal args, == 0 for ... args */
766 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
767 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
771 /* For now, pass fp/complex values on the stack. */
780 if (words <= cum->nregs)
781 ret = gen_rtx_REG (mode, cum->regno);
785 if (TARGET_DEBUG_ARG)
788 "function_arg (size=%d, wds=%2d, nregs=%d, mode=%4s, named=%d",
789 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
792 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
794 fprintf (stderr, ", stack");
796 fprintf (stderr, " )\n");
802 /* For an arg passed partly in registers and partly in memory,
803 this is the number of registers used.
804 For args passed entirely in registers or entirely in memory, zero. */
/* i386 never splits an argument between registers and memory; the body
   (not visible in this excerpt) presumably returns 0.  */
807 function_arg_partial_nregs (cum, mode, type, named)
808 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED; /* current arg information */
809 enum machine_mode mode ATTRIBUTE_UNUSED; /* current arg mode */
810 tree type ATTRIBUTE_UNUSED; /* type of the argument or 0 if lib support */
811 int named ATTRIBUTE_UNUSED; /* != 0 for normal args, == 0 for ... args */
816 /* Output an insn whose source is a 386 integer register. SRC is the
817 rtx for the register, and TEMPLATE is the op-code template. SRC may
818 be either SImode or DImode.
820 The template will be output with operands[0] as SRC, and operands[1]
821 as a pointer to the top of the 386 stack. So a call from floatsidf2
822 would look like this:
824 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
826 where %z0 corresponds to the caller's operands[1], and is used to
827 emit the proper size suffix.
829 ??? Extend this to handle HImode - a 387 can load and store HImode
/* Strategy: push SRC (word by word, high word first for multiword values)
   onto the 386 stack, run TEMPLATE against the stacked copy, then restore
   %esp.  NOTE(review): excerpt -- the parameter declarations, xops[0]
   setup and some braces are not visible here.  */
833 output_op_from_reg (src, template)
838 int size = GET_MODE_SIZE (GET_MODE (src));
841 xops[1] = AT_SP (Pmode);
842 xops[2] = GEN_INT (size);
843 xops[3] = stack_pointer_rtx;
845 if (size > UNITS_PER_WORD)
849 if (size > 2 * UNITS_PER_WORD)
851 high = gen_rtx_REG (SImode, REGNO (src) + 2);
852 output_asm_insn (AS1 (push%L0,%0), &high);
855 high = gen_rtx_REG (SImode, REGNO (src) + 1);
856 output_asm_insn (AS1 (push%L0,%0), &high);
859 output_asm_insn (AS1 (push%L0,%0), &src);
860 output_asm_insn (template, xops);
/* Pop the temporary by adding the pushed size back to %esp.  */
861 output_asm_insn (AS2 (add%L3,%2,%3), xops);
864 /* Output an insn to pop an value from the 387 top-of-stack to 386
865 register DEST. The 387 register stack is popped if DIES is true. If
866 the mode of DEST is an integer mode, a `fist' integer store is done,
867 otherwise a `fst' float store is done. */
/* Strategy: store the 387 top-of-stack either into SCRATCH_MEM (if given)
   or into freshly reserved stack space, then move the words into DEST
   (pops when using the stack, movs when using scratch memory).
   NOTE(review): excerpt -- parameter declarations, the scratch_mem test,
   several else/brace lines and the abort paths are not visible here.  */
870 output_to_reg (dest, dies, scratch_mem)
876 int size = GET_MODE_SIZE (GET_MODE (dest));
879 xops[0] = AT_SP (Pmode);
881 xops[0] = scratch_mem;
883 xops[1] = stack_pointer_rtx;
884 xops[2] = GEN_INT (size);
/* Reserve stack room for the store when no scratch memory was supplied.  */
888 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
890 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
893 output_asm_insn (AS1 (fistp%z3,%y0), xops);
895 output_asm_insn (AS1 (fist%z3,%y0), xops);
898 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
901 output_asm_insn (AS1 (fstp%z3,%y0), xops);
904 if (GET_MODE (dest) == XFmode)
/* No non-popping XFmode store exists, so store-pop then reload.  */
906 output_asm_insn (AS1 (fstp%z3,%y0), xops);
907 output_asm_insn (AS1 (fld%z3,%y0), xops);
910 output_asm_insn (AS1 (fst%z3,%y0), xops);
918 output_asm_insn (AS1 (pop%L0,%0), &dest);
920 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
923 if (size > UNITS_PER_WORD)
925 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
927 output_asm_insn (AS1 (pop%L0,%0), &dest);
930 xops[0] = adj_offsettable_operand (xops[0], 4);
932 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
935 if (size > 2 * UNITS_PER_WORD)
937 dest = gen_rtx_REG (SImode, REGNO (dest) + 1);
939 output_asm_insn (AS1 (pop%L0,%0), &dest);
942 xops[0] = adj_offsettable_operand (xops[0], 4);
943 output_asm_insn (AS2 (mov%L0,%0,%3), xops);
/* Return the assembler template for one single-word (SImode) move between
   OPERANDS[1] (src) and OPERANDS[0] (dest): push for pre-decrement of the
   stack pointer, a constant-load helper for CONST_DOUBLE, plain mov
   otherwise.  NOTE(review): excerpt -- the function comment/header lines,
   rtx x declaration, abort path and trailing return are not visible.  */
950 singlemove_string (operands)
954 if (GET_CODE (operands[0]) == MEM
955 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
957 if (XEXP (x, 0) != stack_pointer_rtx)
961 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
962 return output_move_const_single (operands);
963 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
964 return AS2 (mov%L0,%1,%0);
965 else if (CONSTANT_P (operands[1]))
966 return AS2 (mov%L0,%1,%0);
969 output_asm_insn ("push%L1 %1", operands);
974 /* Return a REG that occurs in ADDR with coefficient 1.
975 ADDR can be effectively incremented by incrementing REG. */
/* Walks down nested PLUS rtx, preferring the register/non-constant side
   at each level, until a bare REG is reached.  NOTE(review): excerpt --
   the function header, abort branches and the final return are not
   visible here.  */
981 while (GET_CODE (addr) == PLUS)
983 if (GET_CODE (XEXP (addr, 0)) == REG)
984 addr = XEXP (addr, 0);
985 else if (GET_CODE (XEXP (addr, 1)) == REG)
986 addr = XEXP (addr, 1);
987 else if (CONSTANT_P (XEXP (addr, 0)))
988 addr = XEXP (addr, 1);
989 else if (CONSTANT_P (XEXP (addr, 1)))
990 addr = XEXP (addr, 0);
995 if (GET_CODE (addr) == REG)
1000 /* Output an insn to add the constant N to the register X. */
/* Emits the cheapest form: dec/inc for +-1, sub of -n for negatives and
   128 (sub $-128 encodes in one byte where add $128 does not), add
   otherwise.  NOTE(review): excerpt -- the function header, xops[0]
   setup and the n == -1 / n == 1 conditions are not visible here.  */
1011 output_asm_insn (AS1 (dec%L0,%0), xops);
1013 output_asm_insn (AS1 (inc%L0,%0), xops);
1014 else if (n < 0 || n == 128)
1016 xops[1] = GEN_INT (-n);
1017 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
1021 xops[1] = GEN_INT (n);
1022 output_asm_insn (AS2 (add%L0,%1,%0), xops);
1026 /* Output assembler code to perform a doubleword move insn
1027 with operands OPERANDS. */
/* Moves a 2- or 3-word value (DImode/DFmode or XFmode) one SImode word at
   a time.  Classifies each operand, sets up middlehalf/latehalf operand
   pairs, then emits singlemove_string() for each word in an order that
   avoids clobbering: high-word-first for pushes and for register/address
   overlap, low-word-first otherwise.  NOTE(review): excerpt -- the
   return type, several locals (latehalf/middlehalf/xops declarations),
   many braces, optype assignments and abort calls are not visible here;
   do not restructure without the full file.  */
1030 output_move_double (operands)
1033 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
1037 rtx addreg0 = 0, addreg1 = 0;
1038 int dest_overlapped_low = 0;
1039 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
1044 /* First classify both operands. */
1046 if (REG_P (operands[0]))
1048 else if (offsettable_memref_p (operands[0]))
1050 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1052 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1054 else if (GET_CODE (operands[0]) == MEM)
1059 if (REG_P (operands[1]))
1061 else if (CONSTANT_P (operands[1]))
1063 else if (offsettable_memref_p (operands[1]))
1065 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
1067 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
1069 else if (GET_CODE (operands[1]) == MEM)
1074 /* Check for the cases that the operand constraints are not
1075 supposed to allow to happen. Abort if we get one,
1076 because generating code for these cases is painful. */
1078 if (optype0 == RNDOP || optype1 == RNDOP)
1081 /* If one operand is decrementing and one is incrementing
1082 decrement the former register explicitly
1083 and change that operand into ordinary indexing. */
1085 if (optype0 == PUSHOP && optype1 == POPOP)
1087 /* ??? Can this ever happen on i386? */
1088 operands[0] = XEXP (XEXP (operands[0], 0), 0);
1089 asm_add (-size, operands[0]);
1090 if (GET_MODE (operands[1]) == XFmode)
1091 operands[0] = gen_rtx_MEM (XFmode, operands[0]);
1092 else if (GET_MODE (operands[0]) == DFmode)
1093 operands[0] = gen_rtx_MEM (DFmode, operands[0]);
1095 operands[0] = gen_rtx_MEM (DImode, operands[0]);
1099 if (optype0 == POPOP && optype1 == PUSHOP)
1101 /* ??? Can this ever happen on i386? */
1102 operands[1] = XEXP (XEXP (operands[1], 0), 0);
1103 asm_add (-size, operands[1]);
1104 if (GET_MODE (operands[1]) == XFmode)
1105 operands[1] = gen_rtx_MEM (XFmode, operands[1]);
1106 else if (GET_MODE (operands[1]) == DFmode)
1107 operands[1] = gen_rtx_MEM (DFmode, operands[1]);
1109 operands[1] = gen_rtx_MEM (DImode, operands[1]);
1113 /* If an operand is an unoffsettable memory ref, find a register
1114 we can increment temporarily to make it refer to the second word. */
1116 if (optype0 == MEMOP)
1117 addreg0 = find_addr_reg (XEXP (operands[0], 0));
1119 if (optype1 == MEMOP)
1120 addreg1 = find_addr_reg (XEXP (operands[1], 0));
1122 /* Ok, we can do one word at a time.
1123 Normally we do the low-numbered word first,
1124 but if either operand is autodecrementing then we
1125 do the high-numbered word first.
1127 In either case, set up in LATEHALF the operands to use
1128 for the high-numbered word and in some cases alter the
1129 operands in OPERANDS to be suitable for the low-numbered word. */
/* Three-word (XFmode, size == 12) case: build both middle and late
   halves.  The guarding "if (size == 12)" line is not visible here.  */
1133 if (optype0 == REGOP)
1135 middlehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1136 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 2);
1138 else if (optype0 == OFFSOP)
1140 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
1141 latehalf[0] = adj_offsettable_operand (operands[0], 8);
1145 middlehalf[0] = operands[0];
1146 latehalf[0] = operands[0];
1149 if (optype1 == REGOP)
1151 middlehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1152 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 2);
1154 else if (optype1 == OFFSOP)
1156 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1157 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1159 else if (optype1 == CNSTOP)
1161 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1163 REAL_VALUE_TYPE r; long l[3];
1165 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1166 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1167 operands[1] = GEN_INT (l[0]);
1168 middlehalf[1] = GEN_INT (l[1]);
1169 latehalf[1] = GEN_INT (l[2]);
1171 else if (CONSTANT_P (operands[1]))
1172 /* No non-CONST_DOUBLE constant should ever appear here. */
1177 middlehalf[1] = operands[1];
1178 latehalf[1] = operands[1];
1184 /* Size is not 12. */
1186 if (optype0 == REGOP)
1187 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
1188 else if (optype0 == OFFSOP)
1189 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1191 latehalf[0] = operands[0];
1193 if (optype1 == REGOP)
1194 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
1195 else if (optype1 == OFFSOP)
1196 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1197 else if (optype1 == CNSTOP)
1198 split_double (operands[1], &operands[1], &latehalf[1]);
1200 latehalf[1] = operands[1];
1203 /* If insn is effectively movd N (sp),-(sp) then we will do the
1204 high word first. We should use the adjusted operand 1
1205 (which is N+4 (sp) or N+8 (sp))
1206 for the low word and middle word as well,
1207 to compensate for the first decrement of sp. */
1208 if (optype0 == PUSHOP
1209 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1210 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1211 middlehalf[1] = operands[1] = latehalf[1];
1213 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1214 if the upper part of reg N does not appear in the MEM, arrange to
1215 emit the move late-half first. Otherwise, compute the MEM address
1216 into the upper part of N and use that as a pointer to the memory
1218 if (optype0 == REGOP
1219 && (optype1 == OFFSOP || optype1 == MEMOP))
1221 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1222 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1224 /* If both halves of dest are used in the src memory address,
1225 compute the address into latehalf of dest. */
1227 xops[0] = latehalf[0];
1228 xops[1] = XEXP (operands[1], 0);
1229 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1230 if (GET_MODE (operands[1]) == XFmode)
1232 operands[1] = gen_rtx_MEM (XFmode, latehalf[0]);
1233 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1234 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1238 operands[1] = gen_rtx_MEM (DImode, latehalf[0]);
1239 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1244 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1246 /* Check for two regs used by both source and dest. */
1247 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1248 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1251 /* JRV says this can't happen: */
1252 if (addreg0 || addreg1)
1255 /* Only the middle reg conflicts; simply put it last. */
1256 output_asm_insn (singlemove_string (operands), operands);
1257 output_asm_insn (singlemove_string (latehalf), latehalf);
1258 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1262 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1263 /* If the low half of dest is mentioned in the source memory
1264 address, the arrange to emit the move late half first. */
1265 dest_overlapped_low = 1;
1268 /* If one or both operands autodecrementing,
1269 do the two words, high-numbered first. */
1271 /* Likewise, the first move would clobber the source of the second one,
1272 do them in the other order. This happens only for registers;
1273 such overlap can't happen in memory unless the user explicitly
1274 sets it up, and that is an undefined circumstance. */
1277 if (optype0 == PUSHOP || optype1 == PUSHOP
1278 || (optype0 == REGOP && optype1 == REGOP
1279 && REGNO (operands[0]) == REGNO (latehalf[1]))
1280 || dest_overlapped_low)
1283 if (optype0 == PUSHOP || optype1 == PUSHOP
1284 || (optype0 == REGOP && optype1 == REGOP
1285 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1286 || REGNO (operands[0]) == REGNO (latehalf[1])))
1287 || dest_overlapped_low)
1289 /* Make any unoffsettable addresses point at high-numbered word. */
1291 asm_add (size-4, addreg0);
1293 asm_add (size-4, addreg1);
1296 output_asm_insn (singlemove_string (latehalf), latehalf);
1298 /* Undo the adds we just did. */
1300 asm_add (-4, addreg0);
1302 asm_add (-4, addreg1);
1306 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1308 asm_add (-4, addreg0);
1310 asm_add (-4, addreg1);
1313 /* Do low-numbered word. */
1314 return singlemove_string (operands);
1317 /* Normal case: do the two words, low-numbered first. */
1319 output_asm_insn (singlemove_string (operands), operands);
1321 /* Do the middle one of the three words for long double */
1325 asm_add (4, addreg0);
1327 asm_add (4, addreg1);
1329 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1332 /* Make any unoffsettable addresses point at high-numbered word. */
1334 asm_add (4, addreg0);
1336 asm_add (4, addreg1);
1339 output_asm_insn (singlemove_string (latehalf), latehalf);
1341 /* Undo the adds we just did. */
1343 asm_add (4-size, addreg0);
1345 asm_add (4-size, addreg1);
1350 #define MAX_TMPS 2 /* max temporary registers used */
1352 /* Output the appropriate code to move push memory on the stack */
/* Emit assembly that pushes LENGTH bytes of the memory source operand
   (operands[1]) onto the stack, one 32-bit word at a time, highest
   offset first.  Register operands in positions TMP_START..N_OPERANDS-1
   that do not overlap the source may be used as scratch registers (at
   most MAX_TMPS of them).  LENGTH must be a multiple of 4.
   NOTE(review): several lines are missing from this view (declarations,
   braces, the branch between the direct-push and scratch-register
   paths) — confirm control flow against the full source.  */
1355 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1367 } tmp_info[MAX_TMPS];
1369 rtx src = operands[1];
/* Nonzero when the source address involves %esp; each push moves %esp,
   so source offsets are then biased by stack_offset as we go.  */
1372 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1373 int stack_offset = 0;
/* The source must be addressable as base+offset so we can step through it. */
1377 if (! offsettable_memref_p (src))
1378 fatal_insn ("Source is not offsettable", insn);
1380 if ((length & 3) != 0)
1381 fatal_insn ("Pushing non-word aligned size", insn);
1383 /* Figure out which temporary registers we have available */
1384 for (i = tmp_start; i < n_operands; i++)
1386 if (GET_CODE (operands[i]) == REG)
/* A scratch register that overlaps the source cannot be used.  */
1388 if (reg_overlap_mentioned_p (operands[i], src))
1391 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1392 if (max_tmps == MAX_TMPS)
/* Direct path: push straight from memory, last word down to the first.  */
1398 for (offset = length - 4; offset >= 0; offset -= 4)
1400 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1401 output_asm_insn (AS1(push%L0,%0), xops);
/* Scratch path: load up to max_tmps words into registers, then push
   them as a group.  */
1407 for (offset = length - 4; offset >= 0; )
1409 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1411 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1412 tmp_info[num_tmps].push = AS1(push%L0,%1);
1413 tmp_info[num_tmps].xops[0]
1414 = adj_offsettable_operand (src, offset + stack_offset);
1418 for (i = 0; i < num_tmps; i++)
1419 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1421 for (i = 0; i < num_tmps; i++)
1422 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
/* Each push lowered %esp by 4; presumably only accumulated when stack_p
   holds — TODO confirm against the full source.  */
1425 stack_offset += 4*num_tmps;
1431 /* Output the appropriate code to move data between two memory locations */
/* Emit a LENGTH-byte memory-to-memory copy from operands[1] (src) to
   operands[0] (dest), staged through scratch registers found in
   operands TMP_START..N_OPERANDS-1.  Copies 4-byte words first, then a
   2-byte word, then a final odd byte via a byte-capable register.
   NOTE(review): lines are missing from this view (declarations, braces,
   loop framing) — confirm structure against the full source.  */
1434 output_move_memory (operands, insn, length, tmp_start, n_operands)
1446 } tmp_info[MAX_TMPS];
1448 rtx dest = operands[0];
1449 rtx src = operands[1];
/* Byte-capable (QImode) scratch register, needed only for odd LENGTH.  */
1450 rtx qi_tmp = NULL_RTX;
/* A pre-increment store through %esp is really a push; delegate.  */
1456 if (GET_CODE (dest) == MEM
1457 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1458 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1459 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1461 if (! offsettable_memref_p (src))
1462 fatal_insn ("Source is not offsettable", insn);
1464 if (! offsettable_memref_p (dest))
1465 fatal_insn ("Destination is not offsettable", insn);
1467 /* Figure out which temporary registers we have available */
1468 for (i = tmp_start; i < n_operands; i++)
1470 if (GET_CODE (operands[i]) == REG)
/* Remember the first byte-addressable register for the trailing byte.  */
1472 if ((length & 1) != 0 && qi_tmp == 0 && QI_REG_P (operands[i]))
1473 qi_tmp = operands[i];
1475 if (reg_overlap_mentioned_p (operands[i], dest))
1476 fatal_insn ("Temporary register overlaps the destination", insn);
1478 if (reg_overlap_mentioned_p (operands[i], src))
1479 fatal_insn ("Temporary register overlaps the source", insn);
1481 tmp_info[max_tmps++].xops[2] = operands[i];
1482 if (max_tmps == MAX_TMPS)
1488 fatal_insn ("No scratch registers were found to do memory->memory moves",
1491 if ((length & 1) != 0)
1494 fatal_insn ("No byte register found when moving odd # of bytes.",
/* Fill the scratch registers: 4-byte chunks while possible, then one
   2-byte chunk; loads are batched before the matching stores.  */
1500 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1504 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1505 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1506 tmp_info[num_tmps].xops[0]
1507 = adj_offsettable_operand (dest, offset);
1508 tmp_info[num_tmps].xops[1]
1509 = adj_offsettable_operand (src, offset);
1515 else if (length >= 2)
1517 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1518 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1519 tmp_info[num_tmps].xops[0]
1520 = adj_offsettable_operand (dest, offset);
1521 tmp_info[num_tmps].xops[1]
1522 = adj_offsettable_operand (src, offset);
1531 for (i = 0; i < num_tmps; i++)
1532 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1534 for (i = 0; i < num_tmps; i++)
1535 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Final odd byte: copy through the byte-capable scratch register.  */
1540 xops[0] = adj_offsettable_operand (dest, offset)
1541 xops[1] = adj_offsettable_operand (src, offset);
1543 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1544 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Return nonzero if X is a CONST_DOUBLE holding one of the constants the
   80387 can load directly (visible tests: +0.0 excluding -0.0, and 1.0,
   i.e. the fldz/fld1 candidates).  The distinct return codes for each
   case are on lines missing from this view — TODO confirm.  Floating
   traps during the comparison are caught via setjmp/set_float_handler
   so an out-of-range constant is simply rejected.  */
1551 standard_80387_constant_p (x)
1554 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* If a float exception occurs while examining the value, bail out.  */
1559 if (setjmp (handler))
1562 set_float_handler (handler);
1563 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1564 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1565 is1 = REAL_VALUES_EQUAL (d, dconst1);
1566 set_float_handler (NULL_PTR);
1574 /* Note that on the 80387, other constants, such as pi,
1575 are much slower to load as standard constants
1576 than to load from doubles in memory! */
/* Output the assembler template to move a floating constant into
   operands[0].  For a 387 register destination, standard constants
   (see standard_80387_constant_p) get their special load insn (on lines
   missing from this view).  Otherwise a CONST_DOUBLE is converted to
   its 32-bit target bit pattern and moved as an integer.  */
1583 output_move_const_single (operands)
1586 if (FP_REG_P (operands[0]))
1588 int conval = standard_80387_constant_p (operands[1]);
1597 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1599 REAL_VALUE_TYPE r; long l;
/* XFmode (80-bit) constants cannot be handled this way; the branch body
   is on lines missing from this view.  */
1601 if (GET_MODE (operands[1]) == XFmode)
/* Reinterpret the constant as its IEEE single bit pattern.  */
1604 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1605 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1606 operands[1] = GEN_INT (l);
1609 return singlemove_string (operands);
1612 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1613 reference and a constant. */
/* Predicate for the machine description; MODE is ignored.  The visible
   arm matches (plus (symbol_ref|label_ref) (const_int)); the direct
   SYMBOL_REF/LABEL_REF/CONST cases are on lines missing from this view.  */
1616 symbolic_operand (op, mode)
1618 enum machine_mode mode ATTRIBUTE_UNUSED;
1620 switch (GET_CODE (op))
1628 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1629 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1630 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1637 /* Test for a valid operand for a call instruction.
1638 Don't allow the arg pointer register or virtual regs
1639 since they may change into reg + const, which the patterns
1640 can't handle yet. */
/* Accepts a MEM whose address is either a constant address that also
   passes general_operand (the extra check matters under PIC), or a
   register other than the arg pointer and the virtual registers.  */
1643 call_insn_operand (op, mode)
1645 enum machine_mode mode ATTRIBUTE_UNUSED;
1647 if (GET_CODE (op) == MEM
1648 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1649 /* This makes a difference for PIC. */
1650 && general_operand (XEXP (op, 0), Pmode))
1651 || (GET_CODE (XEXP (op, 0)) == REG
1652 && XEXP (op, 0) != arg_pointer_rtx
1653 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1654 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1660 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Identical to call_insn_operand except the constant-address case skips
   the general_operand check, so a plain (mem (symbol_ref ...)) is
   accepted even when PIC would later force it through a register.  */
1664 expander_call_insn_operand (op, mode)
1666 enum machine_mode mode ATTRIBUTE_UNUSED;
1668 if (GET_CODE (op) == MEM
1669 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1670 || (GET_CODE (XEXP (op, 0)) == REG
1671 && XEXP (op, 0) != arg_pointer_rtx
1672 && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1673 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1679 /* Return 1 if OP is a comparison operator that can use the condition code
1680 generated by an arithmetic operation. */
/* MODE, when not VOIDmode, must match OP's mode.  OP must be an RTX in
   the comparison class.  GT and LE are rejected — presumably because
   they cannot be tested from the flags an arithmetic insn sets; TODO
   confirm the exact rationale against the full source.  */
1683 arithmetic_comparison_operator (op, mode)
1685 enum machine_mode mode;
1689 if (mode != VOIDmode && mode != GET_MODE (op))
1692 code = GET_CODE (op);
1693 if (GET_RTX_CLASS (code) != '<')
1696 return (code != GT && code != LE);
/* Return nonzero if OP is a bitwise logical operation: AND, IOR or XOR.
   MODE is ignored.  */
1700 ix86_logical_operator (op, mode)
1702 enum machine_mode mode ATTRIBUTE_UNUSED;
1704 return GET_CODE (op) == AND || GET_CODE (op) == IOR || GET_CODE (op) == XOR;
1708 /* Returns 1 if OP contains a symbol reference */
/* Recursively walks OP's rtx structure (both 'e' sub-expressions and
   'E' vectors) looking for any SYMBOL_REF or LABEL_REF.  */
1711 symbolic_reference_mentioned_p (op)
1717 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1720 fmt = GET_RTX_FORMAT (GET_CODE (op));
1721 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
/* 'E' entries are vectors of rtx; scan every element.  */
1727 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1728 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1732 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1739 /* Attempt to expand a binary operator. Make the expansion closer to the
1740 actual machine, then just general_operand, which will allow 3 separate
1741 memory references (one output, two input) in a single insn. Return
1742 whether the insn fails, or succeeds. */
1745 ix86_expand_binary_operator (code, mode, operands)
1747 enum machine_mode mode;
1752 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
/* Canonicalize by swapping operands 1 and 2 so the reused/immediate
   operand lands where the patterns expect it.  */
1753 if (GET_RTX_CLASS (code) == 'c'
1754 && (rtx_equal_p (operands[0], operands[2])
1755 || immediate_operand (operands[1], mode)))
1757 rtx temp = operands[1];
1758 operands[1] = operands[2];
1762 /* If optimizing, copy to regs to improve CSE */
1763 if (TARGET_PSEUDO && optimize
1764 && ((reload_in_progress | reload_completed) == 0)
/* Avoid two memory inputs: force memory operands into registers.  */
1766 if (GET_CODE (operands[1]) == MEM
1767 && ! rtx_equal_p (operands[0], operands[1]))
1768 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1770 if (GET_CODE (operands[2]) == MEM)
1771 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* const - x cannot be emitted directly; materialize the constant.  */
1773 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1775 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1777 emit_move_insn (temp, operands[1]);
1783 if (!ix86_binary_operator_ok (code, mode, operands))
1785 /* If not optimizing, try to make a valid insn (optimize code
1786 previously did this above to improve chances of CSE) */
1788 if ((! TARGET_PSEUDO || !optimize)
1789 && ((reload_in_progress | reload_completed) == 0)
1790 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1793 if (GET_CODE (operands[1]) == MEM
1794 && ! rtx_equal_p (operands[0], operands[1]))
1796 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1800 if (GET_CODE (operands[2]) == MEM)
1802 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1806 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1808 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1810 emit_move_insn (temp, operands[1]);
/* If the fallback fixups still do not yield a valid insn, fail.  */
1815 if (modified && ! ix86_binary_operator_ok (code, mode, operands))
1825 /* Return TRUE or FALSE depending on whether the binary operator meets the
1826 appropriate constraints. */
/* Rejects two memory inputs, and a constant first operand unless the
   operator is commutative (so the constant could be swapped to the
   second position).  MODE is ignored.  */
1829 ix86_binary_operator_ok (code, mode, operands)
1831 enum machine_mode mode ATTRIBUTE_UNUSED;
1834 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1835 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1838 /* Attempt to expand a unary operator. Make the expansion closer to the
1839 actual machine, then just general_operand, which will allow 2 separate
1840 memory references (one output, one input) in a single insn. Return
1841 whether the insn fails, or succeeds. */
1844 ix86_expand_unary_operator (code, mode, operands)
1846 enum machine_mode mode;
1849 /* If optimizing, copy to regs to improve CSE */
1852 && ((reload_in_progress | reload_completed) == 0)
1853 && GET_CODE (operands[1]) == MEM)
1854 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1856 if (! ix86_unary_operator_ok (code, mode, operands))
/* Fallback mirrors ix86_expand_binary_operator: when not optimizing,
   force a memory input into a register and re-check validity.  */
1858 if ((! TARGET_PSEUDO || optimize == 0)
1859 && ((reload_in_progress | reload_completed) == 0)
1860 && GET_CODE (operands[1]) == MEM)
1862 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1863 if (! ix86_unary_operator_ok (code, mode, operands))
1873 /* Return TRUE or FALSE depending on whether the unary operator meets the
1874 appropriate constraints. */
/* All three parameters are unused, so the result is a constant
   (the return statement is on a line missing from this view —
   presumably TRUE; confirm against the full source).  */
1877 ix86_unary_operator_ok (code, mode, operands)
1878 enum rtx_code code ATTRIBUTE_UNUSED;
1879 enum machine_mode mode ATTRIBUTE_UNUSED;
1880 rtx operands[2] ATTRIBUTE_UNUSED;
1885 static rtx pic_label_rtx;
1886 static char pic_label_name [256];
1887 static int pic_label_no = 0;
1889 /* This function generates code for -fpic that loads %ebx with
1890 the return address of the caller and then returns. */
/* Emitted once before the function body, only when the PIC register is
   needed and deep-branch-prediction mode pairs every call with a ret:
   a tiny local thunk labelled pic_label_name that loads the return
   address from the stack into the PIC register and returns.  */
1893 asm_output_function_prefix (file, name)
1895 char *name ATTRIBUTE_UNUSED;
1898 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1899 || current_function_uses_const_pool);
1900 xops[0] = pic_offset_table_rtx;
1901 xops[1] = stack_pointer_rtx;
1903 /* Deep branch prediction favors having a return for every call. */
1904 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Create the shared thunk label lazily; load_pic_register uses the
   same pic_label_rtx/pic_label_name state.  */
1908 if (pic_label_rtx == 0)
1910 pic_label_rtx = gen_label_rtx ();
1911 ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
1912 LABEL_NAME (pic_label_rtx) = pic_label_name;
1915 prologue_node = make_node (FUNCTION_DECL);
1916 DECL_RESULT (prologue_node) = 0;
1918 /* This used to call ASM_DECLARE_FUNCTION_NAME() but since it's an
1919 internal (non-global) label that's being emitted, it didn't make
1920 sense to have .type information for local labels. This caused
1921 the SCO OpenServer 5.0.4 ELF assembler grief (why are you giving
1922 me debug info for a label that you're declaring non-global?) this
1923 was changed to call ASM_OUTPUT_LABEL() instead. */
1926 ASM_OUTPUT_LABEL (file, pic_label_name);
/* The caller's return address sits at (%esp); copy it into the PIC reg. */
1927 output_asm_insn ("movl (%1),%0", xops);
1928 output_asm_insn ("ret", xops);
1932 /* Generate the assembly code for function entry.
1933 FILE is an stdio stream to output the code to.
1934 SIZE is an int: how many units of temporary storage to allocate. */
/* When the prologue is scheduled as RTL (TARGET_SCHEDULE_PROLOGUE),
   ix86_expand_prologue has already emitted it and this hook does
   nothing; the text-mode fallback is on lines missing from this view.  */
1937 function_prologue (file, size)
1938 FILE *file ATTRIBUTE_UNUSED;
1939 int size ATTRIBUTE_UNUSED;
1941 if (TARGET_SCHEDULE_PROLOGUE)
1950 /* Expand the prologue into a bunch of separate insns. */
/* RTL counterpart of function_prologue: only active when the prologue
   is emitted as schedulable insns (the call into the shared prologue
   worker is on a line missing from this view).  */
1953 ix86_expand_prologue ()
1955 if (! TARGET_SCHEDULE_PROLOGUE)
/* Emit code to load the PIC register (%ebx) with the address of the
   GOT.  DO_RTL selects between emitting RTL insns and printing
   assembler text directly.  Two strategies: with deep branch
   prediction, call the local thunk emitted by
   asm_output_function_prefix, then add $_GLOBAL_OFFSET_TABLE_;
   otherwise use the classic call-next-insn/pop sequence.  */
1962 load_pic_register (do_rtl)
1967 if (TARGET_DEEP_BRANCH_PREDICTION)
1969 xops[0] = pic_offset_table_rtx;
/* Shares pic_label_rtx/pic_label_name with asm_output_function_prefix;
   create the label here if the prefix has not been emitted yet.  */
1970 if (pic_label_rtx == 0)
1972 pic_label_rtx = gen_label_rtx ();
1973 ASM_GENERATE_INTERNAL_LABEL (pic_label_name, "LPR", pic_label_no++);
1974 LABEL_NAME (pic_label_rtx) = pic_label_name;
1977 xops[1] = gen_rtx_MEM (QImode,
1978 gen_rtx (SYMBOL_REF, Pmode,
1979 LABEL_NAME (pic_label_rtx)));
1983 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
1984 emit_insn (gen_prologue_set_got (xops[0],
1985 gen_rtx (SYMBOL_REF, Pmode,
1986 "$_GLOBAL_OFFSET_TABLE_"),
1991 output_asm_insn (AS1 (call,%X1), xops);
1992 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1999 xops[0] = pic_offset_table_rtx;
2000 xops[1] = gen_label_rtx ();
2004 /* We can't put a raw CODE_LABEL into the RTL, and we can't emit
2005 a new CODE_LABEL after reload, so we need a single pattern to
2006 emit the 3 necessary instructions. */
2007 emit_insn (gen_prologue_get_pc_and_set_got (xops[0]));
/* Text fallback: call the next instruction, pop the return address,
   then add the label-relative GOT displacement.  */
2011 output_asm_insn (AS1 (call,%P1), xops);
2012 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
2013 CODE_LABEL_NUMBER (xops[1]));
2014 output_asm_insn (AS1 (pop%L0,%0), xops);
2015 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
2019 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2020 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2021 moved before any instruction which implicitly uses the got. */
2024 emit_insn (gen_blockage ());
/* Shared prologue worker: push/set up the frame pointer (if needed),
   allocate TSIZE bytes of frame (via sub, or a call to _alloca when
   stack probing is enabled and the frame is large), push call-saved
   registers, and load the PIC register.  DO_RTL chooses between
   emitting RTL insns and printing assembler text; the text paths also
   emit dwarf2 CFA notes.  NOTE(review): many framing lines (braces,
   else arms) are missing from this view.  */
2028 ix86_prologue (do_rtl)
2034 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2035 || current_function_uses_const_pool);
2036 long tsize = get_frame_size ();
/* cfa_offset tracks the CFA; cfa_store_offset tracks where the next
   save lands, for the dwarf2 unwind notes below.  */
2038 int cfa_offset = INCOMING_FRAME_SP_OFFSET, cfa_store_offset = cfa_offset;
2040 xops[0] = stack_pointer_rtx;
2041 xops[1] = frame_pointer_rtx;
2042 xops[2] = GEN_INT (tsize);
2044 if (frame_pointer_needed)
/* push %ebp, expressed as a store through a pre-decremented %esp.  */
2048 insn = emit_insn (gen_rtx (SET, VOIDmode,
2049 gen_rtx_MEM (SImode,
2050 gen_rtx (PRE_DEC, SImode,
2051 stack_pointer_rtx)),
2052 frame_pointer_rtx));
2054 RTX_FRAME_RELATED_P (insn) = 1;
/* movl %esp,%ebp */
2055 insn = emit_move_insn (xops[1], xops[0]);
2056 RTX_FRAME_RELATED_P (insn) = 1;
2061 output_asm_insn ("push%L1 %1", xops);
2062 #ifdef INCOMING_RETURN_ADDR_RTX
2063 if (dwarf2out_do_frame ())
2065 char *l = dwarf2out_cfi_label ();
2067 cfa_store_offset += 4;
2068 cfa_offset = cfa_store_offset;
2069 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2070 dwarf2out_reg_save (l, FRAME_POINTER_REGNUM, - cfa_store_offset);
2074 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
2075 #ifdef INCOMING_RETURN_ADDR_RTX
2076 if (dwarf2out_do_frame ())
2077 dwarf2out_def_cfa ("", FRAME_POINTER_REGNUM, cfa_offset);
/* Small frame (or no stack probing): just subtract TSIZE from %esp.  */
2084 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
2088 insn = emit_insn (gen_prologue_set_stack_ptr (xops[2]));
2089 RTX_FRAME_RELATED_P (insn) = 1;
2093 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
2094 #ifdef INCOMING_RETURN_ADDR_RTX
2095 if (dwarf2out_do_frame ())
2097 cfa_store_offset += tsize;
2098 if (! frame_pointer_needed)
2100 cfa_offset = cfa_store_offset;
2101 dwarf2out_def_cfa ("", STACK_POINTER_REGNUM, cfa_offset);
/* Large frame with stack probing: pass TSIZE in %eax and call _alloca
   so each page gets touched.  */
2109 xops[3] = gen_rtx_REG (SImode, 0);
2111 emit_move_insn (xops[3], xops[2]);
2113 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
2115 xops[3] = gen_rtx_MEM (FUNCTION_MODE,
2116 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
2119 emit_call_insn (gen_rtx (CALL, VOIDmode, xops[3], const0_rtx));
2121 output_asm_insn (AS1 (call,%P3), xops);
2124 /* Note If use enter it is NOT reversed args.
2125 This one is not reversed from intel!!
2126 I think enter is slower. Also sdb doesn't like it.
2127 But if you want it the code is:
2129 xops[3] = const0_rtx;
2130 output_asm_insn ("enter %2,%3", xops);
/* Push every call-saved register that is live, plus %ebx when PIC
   needs it; scanned top-down so the epilogue pops in reverse.  */
2134 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2135 for (regno = limit - 1; regno >= 0; regno--)
2136 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2137 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2139 xops[0] = gen_rtx_REG (SImode, regno);
2142 insn = emit_insn (gen_rtx (SET, VOIDmode,
2143 gen_rtx_MEM (SImode,
2144 gen_rtx (PRE_DEC, SImode,
2145 stack_pointer_rtx)),
2148 RTX_FRAME_RELATED_P (insn) = 1;
2152 output_asm_insn ("push%L0 %0", xops);
2153 #ifdef INCOMING_RETURN_ADDR_RTX
2154 if (dwarf2out_do_frame ())
2156 char *l = dwarf2out_cfi_label ();
2158 cfa_store_offset += 4;
2159 if (! frame_pointer_needed)
2161 cfa_offset = cfa_store_offset;
2162 dwarf2out_def_cfa (l, STACK_POINTER_REGNUM, cfa_offset);
2165 dwarf2out_reg_save (l, regno, - cfa_store_offset);
2172 load_pic_register (do_rtl);
2174 /* If we are profiling, make sure no instructions are scheduled before
2175 the call to mcount. However, if -fpic, the above call will have
2177 if ((profile_flag || profile_block_flag)
2178 && ! pic_reg_used && do_rtl)
2179 emit_insn (gen_blockage ());
2182 /* Return 1 if it is appropriate to emit `ret' instructions in the
2183 body of a function. Do this only if the epilogue is simple, needing a
2184 couple of insns. Prior to reloading, we can't tell how many registers
2185 must be saved, so return 0 then. Return 0 if there is no frame
2186 marker to de-allocate.
2188 If NON_SAVING_SETJMP is defined and true, then it is not possible
2189 for the epilogue to be simple, so return 0. This is a special case
2190 since NON_SAVING_SETJMP will not cause regs_ever_live to change
2191 until final, but jump_optimize may need to know sooner if a
2195 ix86_can_use_return_insn_p ()
2199 int reglimit = (frame_pointer_needed
2200 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
2201 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2202 || current_function_uses_const_pool);
2204 #ifdef NON_SAVING_SETJMP
2205 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
/* Register-save needs are unknown until after reload.  */
2209 if (! reload_completed)
/* Count saved registers with the same predicate the prologue uses.  */
2212 for (regno = reglimit - 1; regno >= 0; regno--)
2213 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2214 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2217 return nregs == 0 || ! frame_pointer_needed;
2220 /* This function generates the assembly code for function exit.
2221 FILE is an stdio stream to output the code to.
2222 SIZE is an int: how many units of temporary storage to deallocate. */
/* Counterpart of function_prologue; the body (delegating to the shared
   epilogue worker when not scheduled as RTL) is on lines missing from
   this view.  */
2225 function_epilogue (file, size)
2226 FILE *file ATTRIBUTE_UNUSED;
2227 int size ATTRIBUTE_UNUSED;
2232 /* Restore function stack, frame, and registers. */
/* RTL counterpart of function_epilogue; presumably calls ix86_epilogue
   with do_rtl set — the body is on lines missing from this view.  */
2235 ix86_expand_epilogue ()
/* Shared epilogue worker: restore call-saved registers, tear down the
   frame (leave, or mov+pop, or add/pop to release the frame), pop any
   callee-popped argument bytes, and emit the return.  DO_RTL chooses
   between emitting RTL insns and printing assembler text.
   NOTE(review): framing lines (braces, else arms) are missing from
   this view.  */
2241 ix86_epilogue (do_rtl)
2245 register int nregs, limit;
2248 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
2249 || current_function_uses_const_pool);
2250 long tsize = get_frame_size ();
2252 /* Compute the number of registers to pop */
2254 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
/* Same save predicate as the prologue, so counts stay in sync.  */
2258 for (regno = limit - 1; regno >= 0; regno--)
2259 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2260 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2263 /* sp is often unreliable so we must go off the frame pointer.
2265 In reality, we may not care if sp is unreliable, because we can restore
2266 the register relative to the frame pointer. In theory, since each move
2267 is the same speed as a pop, and we don't need the leal, this is faster.
2268 For now restore multiple registers the old way. */
2270 offset = - tsize - (nregs * UNITS_PER_WORD);
2272 xops[2] = stack_pointer_rtx;
2274 /* When -fpic, we must emit a scheduling barrier, so that the instruction
2275 that restores %ebx (which is PIC_OFFSET_TABLE_REGNUM), does not get
2276 moved before any instruction which implicitly uses the got. This
2277 includes any instruction which uses a SYMBOL_REF or a LABEL_REF.
2279 Alternatively, this could be fixed by making the dependence on the
2280 PIC_OFFSET_TABLE_REGNUM explicit in the RTL. */
2282 if (flag_pic || profile_flag || profile_block_flag)
2283 emit_insn (gen_blockage ());
2285 if (nregs > 1 || ! frame_pointer_needed)
/* Point %esp at the saved-register block (lea off the frame pointer)
   and pop the registers back.  */
2287 if (frame_pointer_needed)
2289 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2291 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2293 output_asm_insn (AS2 (lea%L2,%0,%2), xops);
2296 for (regno = 0; regno < limit; regno++)
2297 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2298 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2300 xops[0] = gen_rtx_REG (SImode, regno);
2303 emit_insn (gen_pop (xops[0]));
2305 output_asm_insn ("pop%L0 %0", xops);
/* Alternate path: restore registers by frame-pointer-relative moves.  */
2310 for (regno = 0; regno < limit; regno++)
2311 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2312 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2314 xops[0] = gen_rtx_REG (SImode, regno);
2315 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2318 emit_move_insn (xops[0], xops[1]);
2320 output_asm_insn (AS2 (mov%L0,%1,%0), xops);
2325 if (frame_pointer_needed)
2327 /* If not an i386, mov & pop is faster than "leave". */
2329 if (TARGET_USE_LEAVE)
2332 emit_insn (gen_leave())
2334 output_asm_insn ("leave", xops);
/* Manual equivalent of leave: movl %ebp,%esp then pop %ebp.  */
2338 xops[0] = frame_pointer_rtx;
2339 xops[1] = stack_pointer_rtx;
2343 emit_insn (gen_epilogue_set_stack_ptr());
2344 emit_insn (gen_pop (xops[0]));
2348 output_asm_insn (AS2 (mov%L2,%0,%2), xops);
2349 output_asm_insn ("pop%L0 %0", xops);
2356 /* Intel's docs say that for 4 or 8 bytes of stack frame one should
2357 use `pop' and not `add'. */
2358 int use_pop = tsize == 4;
2360 /* Use two pops only for the Pentium processors. */
2361 if (tsize == 8 && !TARGET_386 && !TARGET_486)
2363 rtx retval = current_function_return_rtx;
2365 xops[1] = gen_rtx_REG (SImode, 1); /* %edx */
2367 /* This case is a bit more complex. Since we cannot pop into
2368 %ecx twice we need a second register. But this is only
2369 available if the return value is not of DImode in which
2370 case the %edx register is not available. */
2371 use_pop = (retval == NULL
2372 || ! reg_overlap_mentioned_p (xops[1], retval));
2377 xops[0] = gen_rtx_REG (SImode, 2); /* %ecx */
2381 /* We have to prevent the two pops here from being scheduled.
2382 GCC otherwise would try in some situation to put other
2383 instructions in between them which has a bad effect. */
2384 emit_insn (gen_blockage ());
2385 emit_insn (gen_pop (xops[0]));
2387 emit_insn (gen_pop (xops[1]));
2391 output_asm_insn ("pop%L0 %0", xops);
2393 output_asm_insn ("pop%L1 %1", xops);
2398 /* If there is no frame pointer, we must still release the frame. */
2399 xops[0] = GEN_INT (tsize);
2402 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2403 gen_rtx (PLUS, SImode, xops[2], xops[0])));
2405 output_asm_insn (AS2 (add%L2,%0,%2), xops);
2409 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2410 if (profile_block_flag == 2)
2412 FUNCTION_BLOCK_PROFILER_EXIT(file);
2416 if (current_function_pops_args && current_function_args_size)
2418 xops[1] = GEN_INT (current_function_pops_args);
2420 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2421 asked to pop more, pop return address, do explicit add, and jump
2422 indirectly to the caller. */
2424 if (current_function_pops_args >= 32768)
2426 /* ??? Which register to use here? */
2427 xops[0] = gen_rtx_REG (SImode, 2);
2431 emit_insn (gen_pop (xops[0]));
2432 emit_insn (gen_rtx (SET, VOIDmode, xops[2],
2433 gen_rtx (PLUS, SImode, xops[1], xops[2])));
2434 emit_jump_insn (xops[0]);
2438 output_asm_insn ("pop%L0 %0", xops);
2439 output_asm_insn (AS2 (add%L2,%1,%2), xops);
2440 output_asm_insn ("jmp %*%0", xops);
/* Normal case: ret $N pops the arguments for the caller.  */
2446 emit_jump_insn (gen_return_pop_internal (xops[1]));
2448 output_asm_insn ("ret %1", xops);
2454 emit_jump_insn (gen_return_internal ());
2456 output_asm_insn ("ret", xops);
2460 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2461 that is a valid memory address for an instruction.
2462 The MODE argument is the machine mode for the MEM expression
2463 that wants to use this address.
2465 On x86, legitimate addresses are:
2466 base movl (base),reg
2467 displacement movl disp,reg
2468 base + displacement movl disp(base),reg
2469 index + base movl (base,index),reg
2470 (index + base) + displacement movl disp(base,index),reg
2471 index*scale movl (,index,scale),reg
2472 index*scale + disp movl disp(,index,scale),reg
2473 index*scale + base movl (base,index,scale),reg
2474 (index*scale + base) + disp movl disp(base,index,scale),reg
2476 In each case, scale can be 1, 2, 4, 8. */
2478 /* This is exactly the same as print_operand_addr, except that
2479 it recognizes addresses instead of printing them.
2481 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2482 convert common non-canonical forms to canonical form so that they will
2485 #define ADDR_INVALID(msg,insn) \
2487 if (TARGET_DEBUG_ADDR) \
2489 fprintf (stderr, msg); \
/* Return nonzero if ADDR is a valid x86 memory address for MODE.
   Decomposes ADDR into base register, index register, scale factor and
   displacement, then validates each part.  STRICT selects the strict
   (post-reload, hard regs only) vs. nonstrict register checks.
   NOTE(review): framing lines (braces, returns) are missing from this
   view.  */
2495 legitimate_address_p (mode, addr, strict)
2496 enum machine_mode mode;
2500 rtx base = NULL_RTX;
2501 rtx indx = NULL_RTX;
2502 rtx scale = NULL_RTX;
2503 rtx disp = NULL_RTX;
2505 if (TARGET_DEBUG_ADDR)
2508 "\n======\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2509 GET_MODE_NAME (mode), strict);
/* Decompose ADDR by its outermost rtx code.  */
2514 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2517 else if (GET_CODE (addr) == PLUS)
2519 rtx op0 = XEXP (addr, 0);
2520 rtx op1 = XEXP (addr, 1);
2521 enum rtx_code code0 = GET_CODE (op0);
2522 enum rtx_code code1 = GET_CODE (op1);
2524 if (code0 == REG || code0 == SUBREG)
2526 if (code1 == REG || code1 == SUBREG)
2528 indx = op0; /* index + base */
2534 base = op0; /* base + displacement */
2539 else if (code0 == MULT)
2541 indx = XEXP (op0, 0);
2542 scale = XEXP (op0, 1);
2544 if (code1 == REG || code1 == SUBREG)
2545 base = op1; /* index*scale + base */
2548 disp = op1; /* index*scale + disp */
2551 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2553 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2554 scale = XEXP (XEXP (op0, 0), 1);
2555 base = XEXP (op0, 1);
2559 else if (code0 == PLUS)
2561 indx = XEXP (op0, 0); /* index + base + disp */
2562 base = XEXP (op0, 1);
2568 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2573 else if (GET_CODE (addr) == MULT)
2575 indx = XEXP (addr, 0); /* index*scale */
2576 scale = XEXP (addr, 1);
2580 disp = addr; /* displacement */
2582 /* Allow arg pointer and stack pointer as index if there is not scaling */
/* %esp cannot be an index in x86 addressing, so swap it into the base
   position (the swap itself is on lines missing from this view).  */
2583 if (base && indx && !scale
2584 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2591 /* Validate base register:
2593 Don't allow SUBREG's here, it can lead to spill failures when the base
2594 is one word out of a two word structure, which is represented internally
2599 if (GET_CODE (base) != REG)
2601 ADDR_INVALID ("Base is not a register.\n", base);
2605 if ((strict && ! REG_OK_FOR_BASE_STRICT_P (base))
2606 || (! strict && ! REG_OK_FOR_BASE_NONSTRICT_P (base)))
2608 ADDR_INVALID ("Base is not valid.\n", base);
2613 /* Validate index register:
2615 Don't allow SUBREG's here, it can lead to spill failures when the index
2616 is one word out of a two word structure, which is represented internally
2620 if (GET_CODE (indx) != REG)
2622 ADDR_INVALID ("Index is not a register.\n", indx);
2626 if ((strict && ! REG_OK_FOR_INDEX_STRICT_P (indx))
2627 || (! strict && ! REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2629 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale was parsed without an index; the decomposition above can
   never produce that, so it indicates an internal error.  */
2634 abort (); /* scale w/o index invalid */
2636 /* Validate scale factor: */
2639 HOST_WIDE_INT value;
2641 if (GET_CODE (scale) != CONST_INT)
2643 ADDR_INVALID ("Scale is not valid.\n", scale);
2647 value = INTVAL (scale);
/* x86 addressing only supports scale factors of 1, 2, 4 and 8.  */
2648 if (value != 1 && value != 2 && value != 4 && value != 8)
2650 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2655 /* Validate displacement
2656 Constant pool addresses must be handled special. They are
2657 considered legitimate addresses, but only if not used with regs.
2658 When printed, the output routines know to print the reference with the
2659 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2662 if (GET_CODE (disp) == SYMBOL_REF
2663 && CONSTANT_POOL_ADDRESS_P (disp)
2668 else if (!CONSTANT_ADDRESS_P (disp))
2670 ADDR_INVALID ("Displacement is not valid.\n", disp);
2674 else if (GET_CODE (disp) == CONST_DOUBLE)
2676 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under PIC, a symbolic displacement is only legal when the PIC
   register is the base, or the unscaled index.  */
2680 else if (flag_pic && SYMBOLIC_CONST (disp)
2681 && base != pic_offset_table_rtx
2682 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2684 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2688 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2689 && (base != NULL_RTX || indx != NULL_RTX))
2691 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n",
2697 if (TARGET_DEBUG_ADDR)
2698 fprintf (stderr, "Address is valid.\n");
2700 /* Everything looks valid, return true */
2704 /* Return a legitimate reference for ORIG (an address) using the
2705 register REG. If REG is 0, a new pseudo is generated.
2707 There are three types of references that must be handled:
2709 1. Global data references must load the address from the GOT, via
2710 the PIC reg. An insn is emitted to do this load, and the reg is
2713 2. Static data references must compute the address as an offset
2714 from the GOT, whose base is in the PIC reg. An insn is emitted to
2715 compute the address into a reg, and the reg is returned. Static
2716 data objects have SYMBOL_REF_FLAG set to differentiate them from
2717 global data objects.
2719 3. Constant pool addresses must be handled special. They are
2720 considered legitimate addresses, but only if not used with regs.
2721 When printed, the output routines know to print the reference with the
2722 PIC reg, even though the PIC reg doesn't appear in the RTL.
2724 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2725 reg also appears in the address (except for constant pool references,
2728 "switch" statements also require special handling when generating
2729 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* See the large comment above: rewrite address ORIG for PIC, using REG
   (or a fresh pseudo when REG is 0) to hold intermediate results.
   Global symbols load their address from the GOT; static symbols and
   labels are formed as PIC-register-relative sums; constant-pool
   references pass through unchanged.  */
2732 legitimize_pic_address (orig, reg)
2739 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant pool entries are already handled by the output routines.  */
2741 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2746 reg = gen_reg_rtx (Pmode);
/* Static data (SYMBOL_REF_FLAG) and labels: pic_reg + offset.
   Global data: load the address from the GOT slot at pic_reg + orig.  */
2748 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2749 || GET_CODE (addr) == LABEL_REF)
2750 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2752 new = gen_rtx_MEM (Pmode,
2753 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig));
2755 emit_move_insn (reg, new);
2757 current_function_uses_pic_offset_table = 1;
2761 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2765 if (GET_CODE (addr) == CONST)
2767 addr = XEXP (addr, 0);
2768 if (GET_CODE (addr) != PLUS)
/* Already PIC-register relative; nothing to rewrite.  */
2772 if (XEXP (addr, 0) == pic_offset_table_rtx)
2776 reg = gen_reg_rtx (Pmode);
/* Legitimize the two halves of the sum separately, then recombine.  */
2778 base = legitimize_pic_address (XEXP (addr, 0), reg);
2779 addr = legitimize_pic_address (XEXP (addr, 1),
2780 base == reg ? NULL_RTX : reg);
2782 if (GET_CODE (addr) == CONST_INT)
2783 return plus_constant (base, INTVAL (addr));
/* Keep any constant term outermost in the resulting sum.  */
2785 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2787 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2788 addr = XEXP (addr, 1);
2791 return gen_rtx (PLUS, Pmode, base, addr);
2796 /* Emit insns to move operands[1] into operands[0].
   Used for PIC moves: the source is legitimized first, and a symbolic
   constant stored to memory is forced through a register, since that
   store cannot be done directly under PIC.
   NOTE(review): some original lines are elided in this excerpt.  */
2799 emit_pic_move (operands, mode)
2801      enum machine_mode mode ATTRIBUTE_UNUSED;
/* During reload no new pseudos may be created; reuse the destination.  */
2803   rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
2805   if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2806     operands[1] = force_reg (SImode, operands[1]);
2808     operands[1] = legitimize_pic_address (operands[1], temp);
2811 /* Try machine-dependent ways of modifying an illegitimate address
2812 to be legitimate. If we find one, return the new, valid address.
2813 This macro is used in only one place: `memory_address' in explow.c.
2815 OLDX is the address as it was before break_out_memory_refs was called.
2816 In some cases it is useful to look at this to decide what needs to be done.
2818 MODE and WIN are passed so that this macro can use
2819 GO_IF_LEGITIMATE_ADDRESS.
2821 It is always safe for this macro to do nothing. It exists to recognize
2822 opportunities to optimize the output.
2824 For the 80386, we handle X+REG by loading X into a register R and
2825 using R+REG. R will go in a general reg and indexing will be used.
2826 However, if REG is a broken-out memory address or multiplication,
2827 nothing needs to be done because REG can certainly go in a general reg.
2829 When -fpic is used, special handling is needed for symbolic references.
2830 See comments by legitimize_pic_address in i386.c for details. */
/* Try machine-dependent ways of making address X legitimate for MODE
   (see the block comment above for the target-macro contract).  Shifts
   by small constants are canonicalized into MULT so they can match the
   x86 scaled-index addressing forms; PLUS trees are re-associated into
   (plus (plus (mult reg const) reg) const) shape.
   NOTE(review): several original lines (declarations, braces, some
   else-arms and returns) are elided in this excerpt.  */
2833 legitimize_address (x, oldx, mode)
2835      register rtx oldx ATTRIBUTE_UNUSED;
2836      enum machine_mode mode;
2841   if (TARGET_DEBUG_ADDR)
2843       fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n",
2844 	       GET_MODE_NAME (mode));
/* Symbolic addresses under PIC need the GOT machinery.  */
2848   if (flag_pic && SYMBOLIC_CONST (x))
2849     return legitimize_pic_address (x, 0);
2851   /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2852   if (GET_CODE (x) == ASHIFT
2853       && GET_CODE (XEXP (x, 1)) == CONST_INT
2854       && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2857       x = gen_rtx (MULT, Pmode, force_reg (Pmode, XEXP (x, 0)),
2858 		   GEN_INT (1 << log));
2861   if (GET_CODE (x) == PLUS)
2863       /* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
2865       if (GET_CODE (XEXP (x, 0)) == ASHIFT
2866 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2867 	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2870 	  XEXP (x, 0) = gen_rtx (MULT, Pmode,
2871 				 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2872 				 GEN_INT (1 << log));
2875       if (GET_CODE (XEXP (x, 1)) == ASHIFT
2876 	  && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2877 	  && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2880 	  XEXP (x, 1) = gen_rtx (MULT, Pmode,
2881 				 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2882 				 GEN_INT (1 << log));
2885       /* Put multiply first if it isn't already. */
2886       if (GET_CODE (XEXP (x, 1)) == MULT)
2888 	  rtx tmp = XEXP (x, 0);
2889 	  XEXP (x, 0) = XEXP (x, 1);
2894       /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2895 	 into (plus (plus (mult (reg) (const)) (reg)) (const)).  This can be
2896 	 created by virtual register instantiation, register elimination, and
2897 	 similar optimizations.  */
2898       if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2901 	  x = gen_rtx (PLUS, Pmode,
2902 		       gen_rtx (PLUS, Pmode, XEXP (x, 0),
2903 				XEXP (XEXP (x, 1), 0)),
2904 		       XEXP (XEXP (x, 1), 1));
2908 	 (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2909 	 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2910       else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2911 	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2912 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2913 	       && CONSTANT_P (XEXP (x, 1)))
2916 	  rtx other = NULL_RTX;
/* Exactly one of the two constants is a CONST_INT; fold it into the
   other constant with plus_constant below.  */
2918 	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2920 	      constant = XEXP (x, 1);
2921 	      other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2923 	  else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2925 	      constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2926 	      other = XEXP (x, 1);
2934 	  x = gen_rtx (PLUS, Pmode,
2935 		       gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2936 				XEXP (XEXP (XEXP (x, 0), 1), 0)),
2937 		       plus_constant (other, INTVAL (constant)));
/* If any canonicalization above produced a valid address, stop.  */
2941       if (changed && legitimate_address_p (mode, x, FALSE))
2944       if (GET_CODE (XEXP (x, 0)) == MULT)
2947 	  XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2950       if (GET_CODE (XEXP (x, 1)) == MULT)
2953 	  XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2957 	  && GET_CODE (XEXP (x, 1)) == REG
2958 	  && GET_CODE (XEXP (x, 0)) == REG)
2961       if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2964 	  x = legitimize_pic_address (x, 0);
2967       if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: force one side into a register.  */
2970       if (GET_CODE (XEXP (x, 0)) == REG)
2972 	  register rtx temp = gen_reg_rtx (Pmode);
2973 	  register rtx val = force_operand (XEXP (x, 1), temp);
2975 	  emit_move_insn (temp, val);
2981       else if (GET_CODE (XEXP (x, 1)) == REG)
2983 	  register rtx temp = gen_reg_rtx (Pmode);
2984 	  register rtx val = force_operand (XEXP (x, 0), temp);
2986 	  emit_move_insn (temp, val);
2996 /* Print an integer constant expression in assembler syntax. Addition
2997 and subtraction are the only arithmetic that may appear in these
2998 expressions. FILE is the stdio stream to write to, X is the rtx, and
2999 CODE is the operand print code from the output string. */
/* Print the integer constant expression X (only + and - arithmetic may
   appear) to FILE in assembler syntax, adding the PIC relocation suffix
   (@GOT, @GOTOFF, @PLT) appropriate for the symbol kind and the operand
   print CODE.
   NOTE(review): case labels and some braces are elided in this excerpt.  */
3002 output_pic_addr_const (file, x, code)
3009   switch (GET_CODE (x))
3020       if (GET_CODE (x) == SYMBOL_REF)
3021 	assemble_name (file, XSTR (x, 0));
3024 	  ASM_GENERATE_INTERNAL_LABEL (buf, "L",
3025 				       CODE_LABEL_NUMBER (XEXP (x, 0)));
3026 	  assemble_name (asm_out_file, buf);
3030 	; /* No suffix, dammit. */
/* Constant-pool entries are addressed PIC-relative.  */
3031       else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3032 	fprintf (file, "@GOTOFF(%%ebx)");
3033       else if (code == 'P')
3034 	fprintf (file, "@PLT");
3035       else if (GET_CODE (x) == LABEL_REF)
3036 	fprintf (file, "@GOTOFF");
/* SYMBOL_REF_FLAG clear => global data, referenced through the GOT.  */
3037       else if (! SYMBOL_REF_FLAG (x))
3038 	fprintf (file, "@GOT");
3040 	fprintf (file, "@GOTOFF");
3045       ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3046       assemble_name (asm_out_file, buf);
3050       fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3054       /* This used to output parentheses around the expression,
3055 	 but that does not work on the 386 (either ATT or BSD assembler). */
3056       output_pic_addr_const (file, XEXP (x, 0), code);
3060       if (GET_MODE (x) == VOIDmode)
3062 	  /* We can use %d if the number is <32 bits and positive. */
3063 	  if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
3064 	    fprintf (file, "0x%lx%08lx",
3065 		     (unsigned long) CONST_DOUBLE_HIGH (x),
3066 		     (unsigned long) CONST_DOUBLE_LOW (x));
3068 	    fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3071 	/* We can't handle floating point constants;
3072 	   PRINT_OPERAND must handle them. */
3073 	output_operand_lossage ("floating constant misused");
3077       /* Some assemblers need integer constants to appear first. */
3078       if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3080 	  output_pic_addr_const (file, XEXP (x, 0), code);
3081 	  if (INTVAL (XEXP (x, 1)) >= 0)
3082 	    fprintf (file, "+");
3083 	  output_pic_addr_const (file, XEXP (x, 1), code);
3087 	  output_pic_addr_const (file, XEXP (x, 1), code);
3088 	  if (INTVAL (XEXP (x, 0)) >= 0)
3089 	    fprintf (file, "+");
3090 	  output_pic_addr_const (file, XEXP (x, 0), code);
/* MINUS: emit "a-b".  */
3095       output_pic_addr_const (file, XEXP (x, 0), code);
3096       fprintf (file, "-");
3097       output_pic_addr_const (file, XEXP (x, 1), code);
3101       output_operand_lossage ("invalid expression as operand");
3105 /* Append the correct conditional move suffix which corresponds to CODE.
   REVERSE_CC asks for the reversed condition; MODE selects between the
   integer and x87-float flag mappings.  Under TARGET_IEEE_FP with the
   flags coming from the 387 (and no FCOMI), the condition is NOT simply
   reversed — an unordered result must fail every test except NE.
   NOTE(review): case labels between the fputs calls are elided here.  */
3108 put_condition_code (code, reverse_cc, mode, file)
3111      enum mode_class mode;
3114   int ieee = (TARGET_IEEE_FP && (cc_prev_status.flags & CC_IN_80387)
3115 	      && ! (cc_prev_status.flags & CC_FCOMI));
3116   if (reverse_cc && ! ieee)
3117     code = reverse_condition (code);
3119   if (mode == MODE_INT)
3123       if (cc_prev_status.flags & CC_Z_IN_NOT_C)
3130       if (cc_prev_status.flags & CC_Z_IN_NOT_C)
3137       if (cc_prev_status.flags & CC_NO_OVERFLOW)
3152       if (cc_prev_status.flags & CC_NO_OVERFLOW)
3175 	output_operand_lossage ("Invalid %%C operand");
3178   else if (mode == MODE_FLOAT)
/* Float suffixes assume the 387 status word has been transferred to the
   CPU flags (see output_fp_cc0_set).  */
3182       fputs (ieee ? (reverse_cc ? "ne" : "e") : "ne", file);
3185       fputs (ieee ? (reverse_cc ? "ne" : "e") : "e", file);
3188       fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
3191       fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
3194       fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
3197       fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
3200       fputs (ieee ? (reverse_cc ? "ne" : "e") : "nb", file);
3203       fputs (ieee ? (reverse_cc ? "ne" : "e") : "nbe", file);
3206       fputs (ieee ? (reverse_cc ? "nb" : "b") : "be", file);
3209       fputs (ieee ? (reverse_cc ? "ne" : "e") : "b", file);
3212       output_operand_lossage ("Invalid %%C operand");
3217 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
3218 C -- print opcode suffix for set/cmov insn.
3219 c -- like C, but print reversed condition
3220 F -- print opcode suffix for fcmov insn.
3221 f -- like C, but print reversed condition
3222 R -- print the prefix for register names.
3223 z -- print the opcode suffix for the size of the current operand.
3224 * -- print a star (in certain assembler syntax)
3225 w -- print the operand as if it's a "word" (HImode) even if it isn't.
3226 c -- don't print special prefixes before constant operands.
3227 J -- print the appropriate jump operand.
3228 s -- print a shift double count, followed by the assemblers argument
3230 b -- print the QImode name of the register for the indicated operand.
3231 %b0 would print %al if operands[0] is reg 0.
3232 w -- likewise, print the HImode name of the register.
3233 k -- likewise, print the SImode name of the register.
3234 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
3235 y -- print "st(0)" instead of "st" as a register.
3236 P -- print as a PIC constant */
/* Print operand X to FILE using the print code CODE (see the large
   comment above for the meaning of each code letter).
   NOTE(review): many case labels and braces are elided in this excerpt;
   comments below describe only what the visible lines establish.  */
3239 print_operand (file, x, code)
3254 	  PUT_OP_SIZE (code, 'l', file);
3258 	  PUT_OP_SIZE (code, 'w', file);
3262 	  PUT_OP_SIZE (code, 'b', file);
3266 	  PUT_OP_SIZE (code, 'l', file);
3270 	  PUT_OP_SIZE (code, 's', file);
3274 	  PUT_OP_SIZE (code, 't', file);
3278 	  /* 387 opcodes don't get size suffixes if the operands are
3281 	  if (STACK_REG_P (x))
3284 	  /* this is the size of op from size of operand */
3285 	  switch (GET_MODE_SIZE (GET_MODE (x)))
3288 	      PUT_OP_SIZE ('B', 'b', file);
3292 	      PUT_OP_SIZE ('W', 'w', file);
3296 	      if (GET_MODE (x) == SFmode)
3298 		  PUT_OP_SIZE ('S', 's', file);
3302 		PUT_OP_SIZE ('L', 'l', file);
3306 	      PUT_OP_SIZE ('T', 't', file);
3310 	      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
3312 #ifdef GAS_MNEMONICS
3313 		  PUT_OP_SIZE ('Q', 'q', file);
3316 		  PUT_OP_SIZE ('Q', 'l', file);	/* Fall through */
3320 	      PUT_OP_SIZE ('Q', 'l', file);
/* 'J': print the jump mnemonic for the comparison code in X.  */
3334 	    switch (GET_CODE (x))
3336 	      /* These conditions are appropriate for testing the result
3337 		 of an arithmetic operation, not for a compare operation.
3338 	         Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
3339 		 CC_Z_IN_NOT_C false and not floating point. */
3340 	    case NE:  fputs ("jne", file); return;
3341 	    case EQ:  fputs ("je",  file); return;
3342 	    case GE:  fputs ("jns", file); return;
3343 	    case LT:  fputs ("js",  file); return;
3344 	    case GEU: fputs ("jmp", file); return;
3345 	    case GTU: fputs ("jne",  file); return;
3346 	    case LEU: fputs ("je", file); return;
3347 	    case LTU: fputs ("#branch never",  file); return;
3349 	      /* no matching branches for GT nor LE */
3356 	  if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
3358 	      PRINT_OPERAND (file, x, 0);
3359 	      fputs (AS2C (,) + 1, file);
3364 	  /* This is used by the conditional move instructions.  */
3366 	  put_condition_code (GET_CODE (x), 0, MODE_INT, file);
3369 	  /* Like above, but reverse condition */
3371 	  put_condition_code (GET_CODE (x), 1, MODE_INT, file); return;
3374 	  put_condition_code (GET_CODE (x), 0, MODE_FLOAT, file);
3377 	  /* Like above, but reverse condition */
3379 	  put_condition_code (GET_CODE (x), 1, MODE_FLOAT, file);
3386 	    sprintf (str, "invalid operand code `%c'", code);
3387 	    output_operand_lossage (str);
/* No print code (or fall-through): print the operand itself.  */
3392   if (GET_CODE (x) == REG)
3394       PRINT_REG (x, code, file);
3397   else if (GET_CODE (x) == MEM)
3399       PRINT_PTR (x, file);
3400       if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3403 	    output_pic_addr_const (file, XEXP (x, 0), code);
3405 	    output_addr_const (file, XEXP (x, 0));
3408 	output_address (XEXP (x, 0));
/* SFmode constants are emitted as their 32-bit bit pattern.  */
3411   else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3416       REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3417       REAL_VALUE_TO_TARGET_SINGLE (r, l);
3418       PRINT_IMMED_PREFIX (file);
3419       fprintf (file, "0x%lx", l);
3422   /* These float cases don't actually occur as immediate operands. */
3423   else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3428       REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3429       REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3430       fprintf (file, "%s", dstr);
3433   else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3438       REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3439       REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3440       fprintf (file, "%s", dstr);
/* Constants: prefix immediates/offsets per the assembler dialect.  */
3446       if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3447 	PRINT_IMMED_PREFIX (file);
3448       else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3449 	       || GET_CODE (x) == LABEL_REF)
3450 	PRINT_OFFSET_PREFIX (file);
3453 	output_pic_addr_const (file, x, code);
3455 	output_addr_const (file, x);
3459 /* Print a memory operand whose address is ADDR. */
/* Print to FILE a memory operand whose address is ADDR, decomposing a
   PLUS/MULT tree into displacement, base register, index register and
   scale, then emitting them with PRINT_B_I_S.
   NOTE(review): some case labels, braces and else-arms are elided in
   this excerpt.  */
3462 print_operand_address (file, addr)
3466   register rtx reg1, reg2, breg, ireg;
3469   switch (GET_CODE (addr))
/* Plain register: print it with the register prefix.  */
3473       fprintf (file, "%se", RP);
3474       fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: peel off a constant displacement first, from either side.  */
3484       if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3486 	  offset = XEXP (addr, 0);
3487 	  addr = XEXP (addr, 1);
3489       else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3491 	  offset = XEXP (addr, 1);
3492 	  addr = XEXP (addr, 0);
/* Then pick out a MULT (scaled index) or a REG from what remains.  */
3495       if (GET_CODE (addr) != PLUS)
3497       else if (GET_CODE (XEXP (addr, 0)) == MULT)
3498 	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
3499       else if (GET_CODE (XEXP (addr, 1)) == MULT)
3500 	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
3501       else if (GET_CODE (XEXP (addr, 0)) == REG)
3502 	reg1 = XEXP (addr, 0), addr = XEXP (addr, 1);
3503       else if (GET_CODE (XEXP (addr, 1)) == REG)
3504 	reg1 = XEXP (addr, 1), addr = XEXP (addr, 0);
3506       if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
/* Decide which of reg1/reg2 is the index and which the base.  */
3523       if ((reg1 && GET_CODE (reg1) == MULT)
3524 	  || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3529       else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3535       if (ireg != 0 || breg != 0)
/* Print the displacement part, PIC-aware.  */
3542 	      output_pic_addr_const (file, addr, 0);
3543 	  else if (GET_CODE (addr) == LABEL_REF)
3544 	    output_asm_label (addr);
3546 	    output_addr_const (file, addr);
3549 	  if (ireg != 0 && GET_CODE (ireg) == MULT)
3551 	      scale = INTVAL (XEXP (ireg, 1));
3552 	      ireg = XEXP (ireg, 0);
3555 	  /* The stack pointer can only appear as a base register,
3556 	     never an index register, so exchange the regs if it is wrong. */
3558 	  if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3567 	  /* output breg+ireg*scale */
3568 	  PRINT_B_I_S (breg, ireg, scale, file);
/* MULT with no base: print "0(,reg,scale)".  */
3576       if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3578 	  scale = INTVAL (XEXP (addr, 0));
3579 	  ireg = XEXP (addr, 1);
3583 	  scale = INTVAL (XEXP (addr, 1));
3584 	  ireg = XEXP (addr, 0);
3587       output_addr_const (file, const0_rtx);
3588       PRINT_B_I_S (NULL_RTX, ireg, scale, file);
/* Default: a bare constant address.  */
3593       if (GET_CODE (addr) == CONST_INT
3594 	  && INTVAL (addr) < 0x8000
3595 	  && INTVAL (addr) >= -0x8000)
3596 	fprintf (file, "%d", (int) INTVAL (addr));
3600 	  output_pic_addr_const (file, addr, 0);
3602 	  output_addr_const (file, addr);
3607 /* Set the cc_status for the results of an insn whose pattern is EXP.
3608 On the 80386, we assume that only test and compare insns, as well
3609 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, BSF, ASHIFT,
3610 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3611 Also, we assume that jumps, moves and sCOND don't affect the condition
3612 codes. All else clobbers the condition codes, by assumption.
3614 We assume that ALL integer add, minus, etc. instructions effect the
3615 condition codes. This MUST be consistent with i386.md.
3617 We don't record any float test or compare - the redundant test &
3618 compare check in final.c does not handle stack-like regs correctly. */
/* Update cc_status to reflect the effect of the insn pattern EXP on the
   condition codes (see the contract in the block comment above).
   NOTE(review): some lines (braces, CC_STATUS_INIT calls, defaults) are
   elided in this excerpt.  */
3621 notice_update_cc (exp)
3624   if (GET_CODE (exp) == SET)
3626       /* Jumps do not alter the cc's. */
3627       if (SET_DEST (exp) == pc_rtx)
3630       /* Moving register or memory into a register:
3631 	 it doesn't alter the cc's, but it might invalidate
3632 	 the RTX's which we remember the cc's came from.
3633 	 (Note that moving a constant 0 or 1 MAY set the cc's). */
3634       if (REG_P (SET_DEST (exp))
3635 	  && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3636 	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'
3637 	      || (GET_CODE (SET_SRC (exp)) == IF_THEN_ELSE
3638 		  && GET_MODE_CLASS (GET_MODE (SET_DEST (exp))) == MODE_INT)))
3640 	  if (cc_status.value1
3641 	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3642 	    cc_status.value1 = 0;
3644 	  if (cc_status.value2
3645 	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3646 	    cc_status.value2 = 0;
3651       /* Moving register into memory doesn't alter the cc's.
3652 	 It may invalidate the RTX's which we remember the cc's came from. */
3653       if (GET_CODE (SET_DEST (exp)) == MEM
3654 	  && (REG_P (SET_SRC (exp))
3655 	      || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3657 	  if (cc_status.value1
3658 	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3659 	    cc_status.value1 = 0;
3660 	  if (cc_status.value2
3661 	      && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3662 	    cc_status.value2 = 0;
3667       /* Function calls clobber the cc's.  */
3668       else if (GET_CODE (SET_SRC (exp)) == CALL)
3674       /* Tests and compares set the cc's in predictable ways.  */
3675       else if (SET_DEST (exp) == cc0_rtx)
3678 	  cc_status.value1 = SET_SRC (exp);
3682       /* Certain instructions effect the condition codes. */
3683       else if (GET_MODE (SET_SRC (exp)) == SImode
3684 	       || GET_MODE (SET_SRC (exp)) == HImode
3685 	       || GET_MODE (SET_SRC (exp)) == QImode)
3686 	switch (GET_CODE (SET_SRC (exp)))
3688 	  case ASHIFTRT: case LSHIFTRT: case ASHIFT:
3689 	    /* Shifts on the 386 don't set the condition codes if the
3690 	       shift count is zero. */
3691 	    if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3697 	    /* We assume that the CONST_INT is non-zero (this rtx would
3698 	       have been deleted if it were zero. */
3700 	  case PLUS: case MINUS: case NEG:
3701 	  case AND: case IOR: case XOR:
3702 	    cc_status.flags = CC_NO_OVERFLOW;
3703 	    cc_status.value1 = SET_SRC (exp);
3704 	    cc_status.value2 = SET_DEST (exp);
3707 	    /* This is the bsf pattern used by ffs.  */
3709 	    if (XINT (SET_SRC (exp), 1) == 5)
3711 		/* Only the Z flag is defined after bsf.  */
3713 		  = CC_NOT_POSITIVE | CC_NOT_NEGATIVE | CC_NO_OVERFLOW;
3714 		cc_status.value1 = XVECEXP (SET_SRC (exp), 0, 0);
3715 		cc_status.value2 = 0;
/* PARALLEL whose first element is a SET: handle like the SET cases.  */
3728   else if (GET_CODE (exp) == PARALLEL
3729 	   && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3731       if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3733       if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3737 	  if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3739 	      cc_status.flags |= CC_IN_80387;
/* The "0 &&" disables the FCOMI path; kept for reference.  */
3740 	      if (0 && TARGET_CMOVE && stack_regs_mentioned_p
3741 		  (XEXP (SET_SRC (XVECEXP (exp, 0, 0)), 1)))
3742 		cc_status.flags |= CC_FCOMI;
3745 	  cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3757 /* Split one or more DImode RTL references into pairs of SImode
3758 references. The RTL can be REG, offsettable MEM, integer constant, or
3759 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3760 split and "num" is its length. lo_half and hi_half are output arrays
3761 that parallel "operands". */
/* Split NUM DImode operands into SImode lo/hi pairs (see comment above).
   Registers split into consecutive hard/pseudo register numbers;
   constants via split_double; offsettable MEMs by adjusting the address
   by 4 for the high half.
   NOTE(review): the loop header and failure branch are elided here.  */
3764 split_di (operands, num, lo_half, hi_half)
3767      rtx lo_half[], hi_half[];
3771       rtx op = operands[num];
3772       if (GET_CODE (op) == REG)
3774 	  lo_half[num] = gen_rtx_REG (SImode, REGNO (op));
3775 	  hi_half[num] = gen_rtx_REG (SImode, REGNO (op) + 1);
3777       else if (CONSTANT_P (op))
3778 	split_double (op, &lo_half[num], &hi_half[num]);
3779       else if (offsettable_memref_p (op))
3781 	  rtx lo_addr = XEXP (op, 0);
3782 	  rtx hi_addr = XEXP (adj_offsettable_operand (op, 4), 0);
3783 	  lo_half[num] = change_address (op, SImode, lo_addr);
3784 	  hi_half[num] = change_address (op, SImode, hi_addr);
3791 /* Return 1 if this is a valid binary operation on a 387.
3792 OP is the expression matched, and MODE is its mode. */
/* Predicate: return 1 if OP is a valid 387 binary float operation
   (the elided cases are PLUS/MINUS/MULT/DIV) in MODE, i.e. its mode
   class is MODE_FLOAT; 0 otherwise.  */
3795 binary_387_op (op, mode)
3797      enum machine_mode mode;
3799   if (mode != VOIDmode && mode != GET_MODE (op))
3802   switch (GET_CODE (op))
3808       return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3815 /* Return 1 if this is a valid shift or rotate operation on a 386.
3816 OP is the expression matched, and MODE is its mode. */
/* Predicate body for a valid 386 shift/rotate operation (the defining
   line naming this function is elided in this excerpt — presumably
   `shift_op (op, mode)`; confirm against the full file).  Requires an
   integer-mode op whose first operand matches the op's mode.  */
3821      enum machine_mode mode;
3823   rtx operand = XEXP (op, 0);
3825   if (mode != VOIDmode && mode != GET_MODE (op))
3828   if (GET_MODE (operand) != GET_MODE (op)
3829       || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3832   return (GET_CODE (op) == ASHIFT
3833 	  || GET_CODE (op) == ASHIFTRT
3834 	  || GET_CODE (op) == LSHIFTRT
3835 	  || GET_CODE (op) == ROTATE
3836 	  || GET_CODE (op) == ROTATERT);
3839 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3840 MODE is not used. */
/* Predicate: nonzero iff OP is a COMPARE rtx with mode VOIDmode.  */
3843 VOIDmode_compare_op (op, mode)
3845      enum machine_mode mode ATTRIBUTE_UNUSED;
3847   return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3850 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3851 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3852 is the expression of the binary operation. The output may either be
3853 emitted here, or returned to the caller, like all output_* functions.
3855 There is no guarantee that the operands are the same mode, as they
3856 might be within FLOAT or FLOAT_EXTEND expressions. */
/* Emit/return assembler for the 387 binary operation in operands[3] of
   INSN (see the contract in the comment above).  The integer forms
   (fiadd etc.) are chosen when either source operand has integer mode;
   a trailing "p" pops the stack when an operand register dies.
   NOTE(review): the mnemonic assignments, case labels and abort paths
   are elided in this excerpt.  */
3859 output_387_binary_op (insn, operands)
3865   static char buf[100];
3867   switch (GET_CODE (operands[3]))
3870       if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3871 	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3878       if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3879 	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3886       if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3887 	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3894       if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3895 	  || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3905   strcpy (buf, base_op);
3907   switch (GET_CODE (operands[3]))
/* Commutative ops (PLUS/MULT): may swap operands freely.  */
3911       if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3914 	operands[2] = operands[1];
3918       if (GET_CODE (operands[2]) == MEM)
3919 	return strcat (buf, AS1 (%z2,%2));
3921       if (NON_STACK_REG_P (operands[1]))
3923 	  output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3927       else if (NON_STACK_REG_P (operands[2]))
3929 	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* Pop the stack if the dying operand is a stack register.  */
3933       if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3935 	  if (STACK_TOP_P (operands[0]))
3936 	    return strcat (buf, AS2 (p,%0,%2));
3938 	    return strcat (buf, AS2 (p,%2,%0));
3941       if (STACK_TOP_P (operands[0]))
3942 	return strcat (buf, AS2C (%y2,%0));
3944 	return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (MINUS/DIV): use the reversed "r" forms when the
   operand order requires it.  */
3948       if (GET_CODE (operands[1]) == MEM)
3949 	return strcat (buf, AS1 (r%z1,%1));
3951       if (GET_CODE (operands[2]) == MEM)
3952 	return strcat (buf, AS1 (%z2,%2));
3954       if (NON_STACK_REG_P (operands[1]))
3956 	  output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3960       else if (NON_STACK_REG_P (operands[2]))
3962 	  output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3966       if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3969       if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3971 	  if (STACK_TOP_P (operands[0]))
3972 	    return strcat (buf, AS2 (p,%0,%2));
3974 	    return strcat (buf, AS2 (rp,%2,%0));
3977       if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3979 	  if (STACK_TOP_P (operands[0]))
3980 	    return strcat (buf, AS2 (rp,%0,%1));
3982 	    return strcat (buf, AS2 (p,%1,%0));
3985       if (STACK_TOP_P (operands[0]))
3987 	  if (STACK_TOP_P (operands[1]))
3988 	    return strcat (buf, AS2C (%y2,%0));
3990 	    return strcat (buf, AS2 (r,%y1,%0));
3992       else if (STACK_TOP_P (operands[1]))
3993 	  return strcat (buf, AS2C (%1,%0));
3995 	  return strcat (buf, AS2 (r,%2,%0));
4002 /* Output code for INSN to convert a float to a signed int. OPERANDS
4003 are the insn operands. The output may be SFmode or DFmode and the
4004 input operand may be SImode or DImode. As a special case, make sure
4005 that the 387 stack top dies if the output mode is DImode, because the
4006 hardware requires this. */
/* Output code for INSN to convert an x87 float to a signed int (see the
   comment above).  The rounding-control bits of the FP control word are
   temporarily set (the 0x0c pushed into the high byte selects
   truncation toward zero), the fist/fistp is emitted, then the saved
   control word is restored by the returned template.
   NOTE(review): declarations and some lines are elided in this excerpt.  */
4009 output_fix_trunc (insn, operands)
4013   int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
4016   if (! STACK_TOP_P (operands[1]))
/* Save the control word, OR in the round-toward-zero bits, reload it.  */
4019   xops[0] = GEN_INT (12);
4020   xops[1] = operands[4];
4022   output_asm_insn (AS1 (fnstc%W2,%2), operands);
4023   output_asm_insn (AS2 (mov%L2,%2,%4), operands);
4024   output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
4025   output_asm_insn (AS2 (mov%L4,%4,%3), operands);
4026   output_asm_insn (AS1 (fldc%W3,%3), operands);
4028   if (NON_STACK_REG_P (operands[0]))
4029     output_to_reg (operands[0], stack_top_dies, operands[3]);
4031   else if (GET_CODE (operands[0]) == MEM)
4034 	output_asm_insn (AS1 (fistp%z0,%0), operands);
4035       else if (GET_MODE (operands[0]) == DImode && ! stack_top_dies)
4037 	  /* There is no DImode version of this without a stack pop, so
4038 	     we must emulate it.  It doesn't matter much what the second
4039 	     instruction is, because the value being pushed on the FP stack
4040 	     is not used except for the following stack popping store.
4041 	     This case can only happen without optimization, so it doesn't
4042 	     matter that it is inefficient.  */
4043 	  output_asm_insn (AS1 (fistp%z0,%0), operands);
4044 	  output_asm_insn (AS1 (fild%z0,%0), operands);
4047 	output_asm_insn (AS1 (fist%z0,%0), operands);
/* Returned template restores the original FP control word.  */
4052   return AS1 (fldc%W2,%2);
4055 /* Output code for INSN to compare OPERANDS. The two operands might
4056 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
4057 expression. If the compare is in mode CCFPEQmode, use an opcode that
4058 will not fault if a qNaN is present. */
/* Output assembler for the float compare INSN (see the contract in the
   comment above).  CCFPEQmode selects the unordered (fucom*) opcodes,
   which do not fault on quiet NaNs.  If both operands are dying stack
   registers the double-pop fcompp/fucompp form is used.
   NOTE(review): declarations and some lines are elided in this excerpt.  */
4061 output_float_compare (insn, operands)
4066   rtx body = XVECEXP (PATTERN (insn), 0, 0);
4067   int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
/* The "0 &&" disables the FCOMI fast path; kept for reference.  */
4070   if (0 && TARGET_CMOVE && STACK_REG_P (operands[1]))
4072       cc_status.flags |= CC_FCOMI;
4073       cc_prev_status.flags &= ~CC_TEST_AX;
/* The 387 can only compare against the stack top; swap operands and
   remember the comparison was reversed.  */
4076   if (! STACK_TOP_P (operands[0]))
4079       operands[0] = operands[1];
4081       cc_status.flags |= CC_REVERSED;
4084   if (! STACK_TOP_P (operands[0]))
4087   stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
4089   if (STACK_REG_P (operands[1])
4091       && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
4092       && REGNO (operands[1]) != FIRST_STACK_REG)
4094       /* If both the top of the 387 stack dies, and the other operand
4095 	 is also a stack register that dies, then this must be a
4096 	 `fcompp' float compare */
4098       if (unordered_compare)
4100 	  if (cc_status.flags & CC_FCOMI)
4102 	      output_asm_insn (AS2 (fucomip,%y1,%0), operands);
4103 	      output_asm_insn (AS1 (fstp, %y0), operands);
4107 	    output_asm_insn ("fucompp", operands);
4111 	  if (cc_status.flags & CC_FCOMI)
4113 	      output_asm_insn (AS2 (fcomip, %y1,%0), operands);
4114 	      output_asm_insn (AS1 (fstp, %y0), operands);
4118 	    output_asm_insn ("fcompp", operands);
4123       static char buf[100];
4125       /* Decide if this is the integer or float compare opcode, or the
4126 	 unordered float compare.  */
4128       if (unordered_compare)
4129 	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
4130       else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
4131 	strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
4133 	strcpy (buf, "ficom");
4135       /* Modify the opcode if the 387 stack is to be popped.  */
4140       if (NON_STACK_REG_P (operands[1]))
4141 	output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
4142       else if (cc_status.flags & CC_FCOMI)
4144 	  output_asm_insn (strcat (buf, AS2 (%z1,%y1,%0)), operands);
4148 	output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
4151   /* Now retrieve the condition code. */
4153   return output_fp_cc0_set (insn);
4156 /* Output opcodes to transfer the results of FP compare or test INSN
4157 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
4158 result of the compare or test is unordered, no comparison operator
4159 succeeds except NE. Return an output template, if any. */
/* Transfer the FP compare/test result from the FPU to the CPU flags
   (see the comment above): emit "fnstsw %ax", then either arrange to
   test %ax directly (non-IEEE case) or mask/compare the C0/C2/C3 status
   bits so that, per IEEE, an unordered result fails every comparison
   except NE.  The masks 0x45/0x44/0x05 select combinations of the
   x87 status-word condition bits as seen in the high byte of %ax.
   NOTE(review): declarations, returns and case labels are elided in
   this excerpt.  */
4162 output_fp_cc0_set (insn)
4169   xops[0] = gen_rtx_REG (HImode, 0);
4170   output_asm_insn (AS1 (fnsts%W0,%0), xops);
4172   if (! TARGET_IEEE_FP)
4174       if (!(cc_status.flags & CC_REVERSED))
/* Peek at the cc0 user to learn which condition will be tested.  */
4176           next = next_cc0_user (insn);
4178 	  if (GET_CODE (next) == JUMP_INSN
4179 	      && GET_CODE (PATTERN (next)) == SET
4180 	      && SET_DEST (PATTERN (next)) == pc_rtx
4181 	      && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4182 	      code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4183 	  else if (GET_CODE (PATTERN (next)) == SET)
4184 	    code = GET_CODE (SET_SRC (PATTERN (next)));
4188 	  if (code == GT || code == LT || code == EQ || code == NE
4189 	      || code == LE || code == GE)
4191 	      /* We will test eax directly */
4192 	      cc_status.flags |= CC_TEST_AX;
/* IEEE path: find the condition, then mask/compare the status bits.  */
4200   next = next_cc0_user (insn);
4201   if (next == NULL_RTX)
4204   if (GET_CODE (next) == JUMP_INSN
4205       && GET_CODE (PATTERN (next)) == SET
4206       && SET_DEST (PATTERN (next)) == pc_rtx
4207       && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4208     code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4209   else if (GET_CODE (PATTERN (next)) == SET)
4211       if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
4212 	code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
4214 	code = GET_CODE (SET_SRC (PATTERN (next)));
4217   else if (GET_CODE (PATTERN (next)) == PARALLEL
4218 	   && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
4220       if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
4221 	code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
4223 	code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
4228   xops[0] = gen_rtx_REG (QImode, 0);
4233       xops[1] = GEN_INT (0x45);
4234       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4239       xops[1] = GEN_INT (0x45);
4240       xops[2] = GEN_INT (0x01);
4241       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4242       output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4247       xops[1] = GEN_INT (0x05);
4248       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4253       xops[1] = GEN_INT (0x45);
4254       xops[2] = GEN_INT (0x40);
4255       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4256       output_asm_insn (AS1 (dec%B0,%h0), xops);
4257       output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4262       xops[1] = GEN_INT (0x45);
4263       xops[2] = GEN_INT (0x40);
4264       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4265       output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
4270       xops[1] = GEN_INT (0x44);
4271       xops[2] = GEN_INT (0x40);
4272       output_asm_insn (AS2 (and%B0,%1,%h0), xops);
4273       output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function stack-slot cache: up to MAX_386_STACK_LOCALS slots per
   machine mode (see assign_386_stack_local below).  */
4288 #define MAX_386_STACK_LOCALS 2
4290 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4292 /* Define the structure for the machine field in struct function.  */
4293 struct machine_function
/* Saved copy of the slot cache plus the PIC label state, preserved
   across nested-function context switches.  */
4295   rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
4297   char pic_label_name[256];
4300 /* Functions to save and restore i386_stack_locals.
4301 These will be called, via pointer variables,
4302 from push_function_context and pop_function_context. */

/* Snapshot the global i386 per-function state (the stack-slot cache and
   the PIC label rtx/name) into P->machine before a nested function is
   compiled.  */
4305 save_386_machine_status (p)
/* Allocate the machine_function record that receives the snapshot. */
4309 = (struct machine_function *) xmalloc (sizeof (struct machine_function));
4310 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
4311 sizeof i386_stack_locals);
4312 p->machine->pic_label_rtx = pic_label_rtx;
/* 256 matches the declared size of machine_function.pic_label_name. */
4313 bcopy (pic_label_name, p->machine->pic_label_name, 256);
/* Inverse of save_386_machine_status: copy the state saved in P->machine
   back into the globals when the outer function's context is restored.  */
4317 restore_386_machine_status (p)
4320 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
4321 sizeof i386_stack_locals);
4322 pic_label_rtx = p->machine->pic_label_rtx;
/* 256 matches the declared size of machine_function.pic_label_name. */
4323 bcopy (p->machine->pic_label_name, pic_label_name, 256);
4328 /* Clear stack slot assignments remembered from previous functions.
4329 This is called from INIT_EXPANDERS once before RTL is emitted for each
4333 clear_386_stack_locals ()
4335 enum machine_mode mode;
/* Walk every machine mode and drop any cached stack-slot rtx. */
4338 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
4339 mode = (enum machine_mode) ((int) mode + 1))
4340 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
4341 i386_stack_locals[(int) mode][n] = NULL_RTX;
/* Reset the per-function PIC label state as well. */
4343 pic_label_rtx = NULL_RTX;
4344 bzero (pic_label_name, 256);
4345 /* Arrange to save and restore i386_stack_locals around nested functions. */
4346 save_machine_status = save_386_machine_status;
4347 restore_machine_status = restore_386_machine_status;
4350 /* Return a MEM corresponding to a stack slot with mode MODE.
4351 Allocate a new slot if necessary.
4353 The RTL for a function can have several slots available: N is
4354 which slot to use. */
4357 assign_386_stack_local (mode, n)
4358 enum machine_mode mode;
/* An out-of-range slot number is a caller bug. */
4361 if (n < 0 || n >= MAX_386_STACK_LOCALS)
/* Allocate the slot lazily on first use; subsequent calls with the same
   (mode, n) return the cached MEM.  */
4364 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
4365 i386_stack_locals[(int) mode][n]
4366 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
4368 return i386_stack_locals[(int) mode][n];
/* Predicate: nonzero iff OP is a MULT rtx.  MODE is ignored. */
4373 enum machine_mode mode ATTRIBUTE_UNUSED;
4375 return (GET_CODE (op) == MULT);
/* Predicate: nonzero iff OP is a DIV rtx.  MODE is ignored. */
4380 enum machine_mode mode ATTRIBUTE_UNUSED;
4382 return (GET_CODE (op) == DIV);
4386 /* Create a new copy of an rtx.
4387 Recursively copies the operands of the rtx,
4388 except for those few rtx codes that are sharable.
4389 Doesn't share CONST */
4397 register RTX_CODE code;
4398 register char *format_ptr;
4400 code = GET_CODE (orig);
4413 /* SCRATCH must be shared because they represent distinct values. */
4418 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4419 a LABEL_REF, it isn't sharable. */
4420 if (GET_CODE (XEXP (orig, 0)) == PLUS
4421 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4422 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4426 /* A MEM with a constant address is not sharable. The problem is that
4427 the constant address may need to be reloaded. If the mem is shared,
4428 then reloading one copy of this mem will cause all copies to appear
4429 to have been reloaded. */
/* Shallow header copy: allocate a node of the same code and replicate
   the mode and the rtx flag bits.  */
4432 copy = rtx_alloc (code);
4433 PUT_MODE (copy, GET_MODE (orig));
4434 copy->in_struct = orig->in_struct;
4435 copy->volatil = orig->volatil;
4436 copy->unchanging = orig->unchanging;
4437 copy->integrated = orig->integrated;
4439 copy->is_spill_rtx = orig->is_spill_rtx;
/* Walk the operand format string and copy each operand according to its
   kind ('e' = subexpression, 'E'/'V' = vector, 'w'/'i'/'s' = scalar).
   NOTE(review): subexpressions recurse through copy_rtx, not
   copy_all_rtx, so sharable sub-rtxes are still shared — confirm this
   is intended.  */
4441 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4443 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4445 switch (*format_ptr++)
/* Expression operand: copy, recursing unless it is NULL. */
4448 XEXP (copy, i) = XEXP (orig, i);
4449 if (XEXP (orig, i) != NULL)
4450 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
4455 XEXP (copy, i) = XEXP (orig, i);
/* Vector operand: allocate a fresh rtvec and copy each element. */
4460 XVEC (copy, i) = XVEC (orig, i);
4461 if (XVEC (orig, i) != NULL)
4463 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4464 for (j = 0; j < XVECLEN (copy, i); j++)
4465 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
/* Scalar operands are copied directly. */
4470 XWINT (copy, i) = XWINT (orig, i);
4474 XINT (copy, i) = XINT (orig, i);
4479 XSTR (copy, i) = XSTR (orig, i);
4490 /* Try to rewrite a memory address to make it valid */
/* Rewrite XEXP (mem_rtx, 0) in place, trying to turn an address the i386
   cannot encode into an equivalent legitimate base/index/scale/offset
   form.  Preserves the MEM's in-struct and spill flags.  */
4493 rewrite_address (mem_rtx)
4496 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4498 int offset_adjust = 0;
4499 int was_only_offset = 0;
4500 rtx mem_addr = XEXP (mem_rtx, 0);
4501 char *storage = oballoc (0);
4503 int is_spill_rtx = 0;
/* Remember flag bits so the rewritten address keeps them. */
4505 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4506 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
/* First attempt: reassociate (a + (reg + c)) into ((a + reg) + c). */
4508 if (GET_CODE (mem_addr) == PLUS
4509 && GET_CODE (XEXP (mem_addr, 1)) == PLUS
4510 && GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4512 /* This part is utilized by the combiner. */
4514 = gen_rtx (PLUS, GET_MODE (mem_addr),
4515 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4516 XEXP (mem_addr, 0), XEXP (XEXP (mem_addr, 1), 0)),
4517 XEXP (XEXP (mem_addr, 1), 1));
/* If the reassociated form is already legitimate, install it and stop. */
4519 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4521 XEXP (mem_rtx, 0) = ret_rtx;
4522 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4529 /* This part is utilized by loop.c.
4530 If the address contains PLUS (reg,const) and this pattern is invalid
4531 in this case - try to rewrite the address to make it valid. */
4532 storage = oballoc (0);
4533 index_rtx = base_rtx = offset_rtx = NULL;
4535 /* Find the base index and offset elements of the memory address. */
4536 if (GET_CODE (mem_addr) == PLUS)
4538 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4540 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4541 base_rtx = XEXP (mem_addr, 1), index_rtx = XEXP (mem_addr, 0);
4543 base_rtx = XEXP (mem_addr, 0), offset_rtx = XEXP (mem_addr, 1);
4546 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4548 index_rtx = XEXP (mem_addr, 0);
4549 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4550 base_rtx = XEXP (mem_addr, 1);
4552 offset_rtx = XEXP (mem_addr, 1);
4555 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
/* Deeply nested form: (((mult + const) + reg) + symbol). */
4557 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS
4558 && GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT
4559 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0))
4561 && (GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1))
4563 && (GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1))
4565 && GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG
4566 && GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4568 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4569 offset_rtx = XEXP (mem_addr, 1);
4570 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4571 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4575 offset_rtx = XEXP (mem_addr, 1);
4576 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4577 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4581 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4583 was_only_offset = 1;
4586 offset_rtx = XEXP (mem_addr, 1);
4587 offset_adjust = INTVAL (XEXP (mem_addr, 0));
/* A zero constant folds away entirely. */
4588 if (offset_adjust == 0)
4590 XEXP (mem_rtx, 0) = offset_rtx;
4591 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4601 else if (GET_CODE (mem_addr) == MULT)
4602 index_rtx = mem_addr;
/* Separate a MULT index into its register and scale factor. */
4609 if (index_rtx != 0 && GET_CODE (index_rtx) == MULT)
4611 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4617 scale_rtx = XEXP (index_rtx, 1);
4618 scale = INTVAL (scale_rtx);
4619 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4622 /* Now find which of the elements are invalid and try to fix them. */
/* A constant index with no base degenerates to a pure offset. */
4623 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4625 offset_adjust = INTVAL (index_rtx) * scale;
4627 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4628 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4629 else if (offset_rtx == 0)
4630 offset_rtx = const0_rtx;
4632 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4633 XEXP (mem_rtx, 0) = offset_rtx;
/* Fold (reg + const) bases and bare constant bases into offset_adjust. */
4637 if (base_rtx && GET_CODE (base_rtx) == PLUS
4638 && GET_CODE (XEXP (base_rtx, 0)) == REG
4639 && GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4641 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4642 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4645 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4647 offset_adjust += INTVAL (base_rtx);
/* Likewise fold (reg + const) indexes, scaling the constant. */
4651 if (index_rtx && GET_CODE (index_rtx) == PLUS
4652 && GET_CODE (XEXP (index_rtx, 0)) == REG
4653 && GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4655 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4656 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* Give up unless the index is encodable (or is %esp usable unscaled as
   a base).  */
4661 if (! LEGITIMATE_INDEX_P (index_rtx)
4662 && ! (index_rtx == stack_pointer_rtx && scale == 1
4663 && base_rtx == NULL))
4672 if (! LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
/* Merge the accumulated constant adjustment into the offset term. */
4679 if (offset_adjust != 0)
4681 if (offset_rtx != 0 && CONSTANT_P (offset_rtx))
4682 offset_rtx = plus_constant (offset_rtx, offset_adjust);
4684 offset_rtx = const0_rtx;
/* Rebuild the address from the surviving parts:
   base + index*scale [+ offset].  */
4692 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4693 gen_rtx (MULT, GET_MODE (index_rtx),
4694 index_rtx, scale_rtx),
4697 if (GET_CODE (offset_rtx) != CONST_INT
4698 || INTVAL (offset_rtx) != 0)
4699 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4700 ret_rtx, offset_rtx);
/* base + index [+ offset] (scale of 1). */
4704 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4705 index_rtx, base_rtx);
4707 if (GET_CODE (offset_rtx) != CONST_INT
4708 || INTVAL (offset_rtx) != 0)
4709 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4710 ret_rtx, offset_rtx);
/* index*scale [+ offset], no base. */
4717 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx),
4718 index_rtx, scale_rtx);
4720 if (GET_CODE (offset_rtx) != CONST_INT
4721 || INTVAL (offset_rtx) != 0)
4722 ret_rtx = gen_rtx (PLUS, GET_MODE (ret_rtx),
4723 ret_rtx, offset_rtx);
/* index alone, or index + offset. */
4727 if (GET_CODE (offset_rtx) == CONST_INT
4728 && INTVAL (offset_rtx) == 0)
4729 ret_rtx = index_rtx;
4731 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx),
4732 index_rtx, offset_rtx);
/* base alone, or base + offset. */
4740 if (GET_CODE (offset_rtx) == CONST_INT
4741 && INTVAL (offset_rtx) == 0)
4744 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx,
4747 else if (was_only_offset)
4748 ret_rtx = offset_rtx;
/* Install the rewritten address and restore the spill flag. */
4756 XEXP (mem_rtx, 0) = ret_rtx;
4757 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4768 /* Return 1 if the first insn to set cc before INSN also sets the register
4769 REG_RTX; otherwise return 0. */
4771 last_to_set_cc (reg_rtx, insn)
4774 rtx prev_insn = PREV_INSN (insn);
/* Scan backwards over the insn chain; NOTEs are skipped. */
4778 if (GET_CODE (prev_insn) == NOTE)
4781 else if (GET_CODE (prev_insn) == INSN)
4783 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* Found an insn that sets REG_RTX: the answer is whether its source
   expression also sets the condition code.  */
4786 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4788 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* An intervening insn that may change cc terminates the search. */
4794 else if (! doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4801 prev_insn = PREV_INSN (prev_insn);
/* Return nonzero if evaluating PAT is known to leave the condition code
   unchanged; classifies on the rtx code of PAT.  Used by
   last_to_set_cc when scanning backwards.  */
4808 doesnt_set_condition_code (pat)
4811 switch (GET_CODE (pat))
/* Return nonzero if computing PAT sets the condition code as a side
   effect; classifies on the rtx code of PAT.  Used by last_to_set_cc.  */
4824 sets_condition_code (pat)
4827 switch (GET_CODE (pat))
/* Predicate: OP is a CONST_INT in the range 0..32 inclusive; MODE is
   ignored.  Presumably bounds immediate counts for string-insn patterns
   in i386.md — TODO confirm against the md file.  */
4849 str_immediate_operand (op, mode)
4851 enum machine_mode mode ATTRIBUTE_UNUSED;
4853 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* Test for a single-SET insn whose destination has floating-point mode
   (SFmode, DFmode or XFmode).  */
4863 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4864 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4865 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4866 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4872 /* Return 1 if the mode of the SET_DEST of insn is floating point
4873 and it is not an fld or a move from memory to memory.
4874 Otherwise return 0 */
/* Destination must be a floating-point stack register (regno >=
   FIRST_FLOAT_REG) and the source must not be a MEM, which would make
   this an fld.  */
4880 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4881 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4882 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4883 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4884 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4885 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
4886 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4892 /* Return 1 if the mode of the SET_DEST of INSN is floating point and is
4893 memory and the source is a register. */
/* I.e. a floating-point store: REG -> MEM in SF/DF/XF mode. */
4899 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4900 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4901 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4902 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4903 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4904 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4910 /* Return 1 if DEP_INSN sets a register which INSN uses as a base
4911 or index to reference memory.
4912 otherwise return 0 */
/* "AGI" = address-generation interlock; this detects the dependency the
   scheduler must account for.  */
4915 agi_dependent (insn, dep_insn)
/* Case 1: DEP_INSN writes a plain register; check whether INSN uses it
   inside a memory address.  */
4918 if (GET_CODE (dep_insn) == INSN
4919 && GET_CODE (PATTERN (dep_insn)) == SET
4920 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4921 return reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn);
/* Case 2: DEP_INSN is a push, which implicitly modifies %esp; check for
   stack-pointer uses in INSN's addresses.  */
4923 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4924 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4925 && push_operand (SET_DEST (PATTERN (dep_insn)),
4926 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4927 return reg_mentioned_in_mem (stack_pointer_rtx, insn);
4932 /* Return 1 if reg is used in rtl as a base or index for a memory ref
4933 otherwise return 0. */
4936 reg_mentioned_in_mem (reg, rtl)
4941 register enum rtx_code code;
4946 code = GET_CODE (rtl);
/* A MEM that mentions REG anywhere in its address counts as a use. */
4964 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over all subexpressions ('e') and vectors. */
4967 fmt = GET_RTX_FORMAT (code);
4968 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4972 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4973 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4977 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4984 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4986 operands[0] = result, initialized with the startaddress
4987 operands[1] = alignment of the address.
4988 operands[2] = scratch register, initialized with the startaddress when
4989 not aligned, otherwise undefined
4991 This is just the body. It needs the initialisations mentioned above and
4992 some address computing at the end. These things are done in i386.md. */
/* Strategy: byte-scan up to the next 4-byte boundary, then loop testing a
   whole word per iteration, finally fix up the pointer to the exact NUL
   position.  xops[] collects every operand/label the templates reference.  */
4995 output_strlen_unroll (operands)
5000 xops[0] = operands[0]; /* Result */
5001 /* operands[1]; * Alignment */
5002 xops[1] = operands[2]; /* Scratch */
5003 xops[2] = GEN_INT (0);
5004 xops[3] = GEN_INT (2);
5005 xops[4] = GEN_INT (3);
5006 xops[5] = GEN_INT (4);
5007 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
5008 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
5009 xops[8] = gen_label_rtx (); /* label of main loop */
5011 if (TARGET_USE_Q_REG && QI_REG_P (xops[1]))
5012 xops[9] = gen_label_rtx (); /* pentium optimisation */
5014 xops[10] = gen_label_rtx (); /* end label 2 */
5015 xops[11] = gen_label_rtx (); /* end label 1 */
5016 xops[12] = gen_label_rtx (); /* end label */
5017 /* xops[13] * Temporary used */
/* Per-byte masks for testing each byte of the loaded word. */
5018 xops[14] = GEN_INT (0xff);
5019 xops[15] = GEN_INT (0xff00);
5020 xops[16] = GEN_INT (0xff0000);
5021 xops[17] = GEN_INT (0xff000000);
5023 /* Loop to check 1..3 bytes for null to get an aligned pointer. */
5025 /* Is there a known alignment and is it less than 4? */
5026 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
5028 /* Is there a known alignment and is it not 2? */
5029 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5031 xops[6] = gen_label_rtx (); /* Label when aligned to 3-byte */
5032 xops[7] = gen_label_rtx (); /* Label when aligned to 2-byte */
5034 /* Leave just the 3 lower bits.
5035 If this is a q-register, then the high part is used later
5036 therefore use andl rather than andb. */
5037 output_asm_insn (AS2 (and%L1,%4,%1), xops);
5039 /* Is aligned to 4-byte address when zero */
5040 output_asm_insn (AS1 (je,%l8), xops);
5042 /* Side-effect even Parity when %eax == 3 */
5043 output_asm_insn (AS1 (jp,%6), xops);
5045 /* Is it aligned to 2 bytes ? */
5046 if (QI_REG_P (xops[1]))
5047 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5049 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
5051 output_asm_insn (AS1 (je,%7), xops);
5055 /* Since the alignment is 2, we have to check 2 or 0 bytes;
5056 check if is aligned to 4 - byte. */
5057 output_asm_insn (AS2 (and%L1,%3,%1), xops);
5059 /* Is aligned to 4-byte address when zero */
5060 output_asm_insn (AS1 (je,%l8), xops);
/* Byte-wise scan of the 1..3 unaligned leading bytes. */
5063 xops[13] = gen_rtx_MEM (QImode, xops[0]);
5065 /* Now compare the bytes; compare with the high part of a q-reg
5066 gives shorter code. */
5067 if (QI_REG_P (xops[1]))
5069 /* Compare the first n unaligned byte on a byte per byte basis. */
5070 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5072 /* When zero we reached the end. */
5073 output_asm_insn (AS1 (je,%l12), xops);
5075 /* Increment the address. */
5076 output_asm_insn (AS1 (inc%L0,%0), xops);
5078 /* Not needed with an alignment of 2 */
5079 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
5081 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5082 CODE_LABEL_NUMBER (xops[7]));
5083 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
5084 output_asm_insn (AS1 (je,%l12), xops);
5085 output_asm_insn (AS1 (inc%L0,%0), xops);
5087 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5088 CODE_LABEL_NUMBER (xops[6]));
5091 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Same byte-wise scan when the scratch is not a q-register: compare
   against an explicit zero immediate instead.  */
5095 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5096 output_asm_insn (AS1 (je,%l12), xops);
5097 output_asm_insn (AS1 (inc%L0,%0), xops);
5099 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5100 CODE_LABEL_NUMBER (xops[7]));
5101 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5102 output_asm_insn (AS1 (je,%l12), xops);
5103 output_asm_insn (AS1 (inc%L0,%0), xops);
5105 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5106 CODE_LABEL_NUMBER (xops[6]));
5107 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
5110 output_asm_insn (AS1 (je,%l12), xops);
5111 output_asm_insn (AS1 (inc%L0,%0), xops);
5114 /* Generate loop to check 4 bytes at a time. It is not a good idea to
5115 align this loop. It gives only huge programs, but does not help to
5117 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Load the next aligned word into the scratch register. */
5119 xops[13] = gen_rtx_MEM (SImode, xops[0]);
5120 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
5122 if (QI_REG_P (xops[1]))
5124 /* On i586 it is faster to combine the hi- and lo- part as
5125 a kind of lookahead. If anding both yields zero, then one
5126 of both *could* be zero, otherwise none of both is zero;
5127 this saves one instruction, on i486 this is slower
5128 tested with P-90, i486DX2-66, AMD486DX2-66 */
5131 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
5132 output_asm_insn (AS1 (jne,%l9), xops);
5135 /* Check first byte. */
5136 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
5137 output_asm_insn (AS1 (je,%l12), xops);
5139 /* Check second byte. */
5140 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
5141 output_asm_insn (AS1 (je,%l11), xops);
5144 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L",
5145 CODE_LABEL_NUMBER (xops[9]));
/* Non-q-register path: test each byte with a full-width mask. */
5150 /* Check first byte. */
5151 output_asm_insn (AS2 (test%L1,%14,%1), xops);
5152 output_asm_insn (AS1 (je,%l12), xops);
5154 /* Check second byte. */
5155 output_asm_insn (AS2 (test%L1,%15,%1), xops);
5156 output_asm_insn (AS1 (je,%l11), xops);
5159 /* Check third byte. */
5160 output_asm_insn (AS2 (test%L1,%16,%1), xops);
5161 output_asm_insn (AS1 (je,%l10), xops);
5163 /* Check fourth byte and increment address. */
5164 output_asm_insn (AS2 (add%L0,%5,%0), xops);
5165 output_asm_insn (AS2 (test%L1,%17,%1), xops);
5166 output_asm_insn (AS1 (jne,%l8), xops);
5168 /* Now generate fixups when the compare stops within a 4-byte word. */
5169 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
5171 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
5172 output_asm_insn (AS1 (inc%L0,%0), xops);
5174 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
5175 output_asm_insn (AS1 (inc%L0,%0), xops);
5177 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));
/* Emit the FCMOVcc sequence for a floating-point conditional move.
   WHICH_ALTERNATIVE selects which operand(s) must be conditionally
   loaded into the destination register operands[0]; operands[1] is the
   comparison, operands[2]/operands[3] the two source arms.  */
5183 output_fp_conditional_move (which_alternative, operands)
5184 int which_alternative;
5187 switch (which_alternative)
5190 /* r <- cond ? arg : r */
5191 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5195 /* r <- cond ? r : arg */
5196 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5200 /* r <- cond ? arg1 : arg2 */
5201 output_asm_insn (AS2 (fcmov%F1,%2,%0), operands);
5202 output_asm_insn (AS2 (fcmov%f1,%3,%0), operands);
5213 output_int_conditional_move (which_alternative, operands)
5214 int which_alternative;
5217 int code = GET_CODE (operands[1]);
5218 enum machine_mode mode;
5221 /* This is very tricky. We have to do it right. For a code segement
5230 final_scan_insn () may delete the insn which sets CC. We have to
5231 tell final_scan_insn () if it should be reinserted. When CODE is
5232 GT or LE, we have to check the CC_NO_OVERFLOW bit and return
5233 NULL_PTR to tell final to reinsert the test insn because the
5234 conditional move cannot be handled properly without it. */
5235 if ((code == GT || code == LE)
5236 && (cc_prev_status.flags & CC_NO_OVERFLOW))
5239 mode = GET_MODE (operands [0]);
5242 xops [0] = gen_rtx_SUBREG (SImode, operands [0], 1);
5243 xops [1] = operands [1];
5244 xops [2] = gen_rtx_SUBREG (SImode, operands [2], 1);
5245 xops [3] = gen_rtx_SUBREG (SImode, operands [3], 1);
5248 switch (which_alternative)
5251 /* r <- cond ? arg : r */
5252 output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
5254 output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
5258 /* r <- cond ? r : arg */
5259 output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
5261 output_asm_insn (AS2 (cmov%c1,%3,%0), xops);
5265 /* rm <- cond ? arg1 : arg2 */
5266 output_asm_insn (AS2 (cmov%C1,%2,%0), operands);
5267 output_asm_insn (AS2 (cmov%c1,%3,%0), operands);
5270 output_asm_insn (AS2 (cmov%C1,%2,%0), xops);
5271 output_asm_insn (AS2 (cmov%c1,%3,%0), xops);