1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
/* Deliberately a bare `error' token rather than #error: it only gets
   compiled (and so only fails) when EXTRA_CONSTRAINT is defined.  */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
48 #ifndef CHECK_STACK_LIMIT
/* NOTE(review): -1 presumably means "no stack-limit checking" here --
   confirm against the users of CHECK_STACK_LIMIT in this port.  */
49 #define CHECK_STACK_LIMIT -1
52 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
/* Pentium specific costs.  */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 3, /* variable shift costs */
84 1, /* constant shift costs */
85 12, /* cost of starting a multiply */
86 1, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
/* The cost table actually consulted by the cost hooks.  Defaults to the
   Pentium table; presumably re-pointed from processor_target_table[].cost
   when -mcpu= is processed -- confirm in OVERRIDE_OPTIONS.  */
90 struct processor_costs *ix86_cost = &pentium_cost;
/* A memory reference through the frame pointer, in the given mode.  */
92 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
94 extern FILE *asm_out_file;
95 extern char *strcat ();
/* Forward declarations for output helpers defined later in this file.  */
97 char *singlemove_string ();
98 char *output_move_const_single ();
99 char *output_fp_cc0_set ();
/* Register-name tables, indexed by hard register number.  */
101 char *hi_reg_name[] = HI_REGISTER_NAMES;
102 char *qi_reg_name[] = QI_REGISTER_NAMES;
103 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
105 /* Array of the smallest class containing reg number REGNO, indexed by
106 REGNO. Used by REGNO_REG_CLASS in i386.h. */
108 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
/* ax, dx, cx, bx */
111 AREG, DREG, CREG, BREG,
113 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
/* FP registers: top of stack, second from top, then the rest.  */
115 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
116 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
121 /* Test and compare insns in i386.md store the information needed to
122 generate branch and scc insns here. */
124 struct rtx_def *i386_compare_op0 = NULL_RTX;
125 struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator functions used to emit the branch/scc from a saved compare.  */
126 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
128 /* which cpu are we scheduling for */
129 enum processor_type ix86_cpu;
131 /* which instruction set architecture to use. */
134 /* Strings to hold which cpu and instruction set architecture to use. */
135 char *ix86_cpu_string; /* for -mcpu=<xxx> */
136 char *ix86_arch_string; /* for -march=<xxx> */
138 /* Register allocation order */
139 char *i386_reg_alloc_order;
/* Marks registers already consumed while parsing -mreg-alloc=.  */
140 static char regs_allocated[FIRST_PSEUDO_REGISTER];
142 /* # of registers to use to pass arguments. */
143 char *i386_regparm_string; /* # registers to use to pass args */
144 int i386_regparm; /* i386_regparm_string as a number */
146 /* Alignment to use for loops and jumps */
/* Raw option strings; parsed into the ints below by OVERRIDE_OPTIONS.  */
147 char *i386_align_loops_string; /* power of two alignment for loops */
148 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
149 char *i386_align_funcs_string; /* power of two alignment for functions */
150 char *i386_branch_cost_string; /* values 1-5: see jump.c */
152 int i386_align_loops; /* power of two alignment for loops */
153 int i386_align_jumps; /* power of two alignment for non-loop jumps */
154 int i386_align_funcs; /* power of two alignment for functions */
155 int i386_branch_cost; /* values 1-5: see jump.c */
157 /* Sometimes certain combinations of command options do not make
158 sense on a particular target machine. You can define a macro
159 `OVERRIDE_OPTIONS' to take account of this. This macro, if
160 defined, is executed once just after all the command options have
163 Don't use this macro to turn on various extra optimizations for
164 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* Maps the canonical -mcpu=/-march= names to the processor enum, its cost
   table, and target flags to toggle for that processor.  */
175 char *name; /* Canonical processor name. */
176 enum processor_type processor; /* Processor type enum value. */
177 struct processor_costs *cost; /* Processor costs */
178 int target_enable; /* Target flags to enable. */
179 int target_disable; /* Target flags to disable. */
180 } processor_target_table[]
181 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
182 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
183 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
184 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
/* Note: the PentiumPro entries reuse pentium_cost; no dedicated table.  */
185 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
186 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
188 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
190 #ifdef SUBTARGET_OVERRIDE_OPTIONS
191 SUBTARGET_OVERRIDE_OPTIONS;
194 /* Validate registers in register allocation order */
195 if (i386_reg_alloc_order)
/* Each character of -mreg-alloc= names one hard register.  */
197 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
201 case 'a': regno = 0; break;
202 case 'd': regno = 1; break;
203 case 'c': regno = 2; break;
204 case 'b': regno = 3; break;
205 case 'S': regno = 4; break;
206 case 'D': regno = 5; break;
207 case 'B': regno = 6; break;
209 default: fatal ("Register '%c' is unknown", ch);
212 if (regs_allocated[regno])
213 fatal ("Register '%c' was already specified in the allocation order", ch);
215 regs_allocated[regno] = 1;
219 /* Get the architectural level. */
/* Default architecture is pentium when -march= was not given.  */
220 if (ix86_arch_string == (char *)0)
221 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
223 for (i = 0; i < ptt_size; i++)
224 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
226 ix86_arch = processor_target_table[i].processor;
/* -march= implies the same -mcpu= unless the user overrides it.  */
227 if (ix86_cpu_string == (char *)0)
228 ix86_cpu_string = processor_target_table[i].name;
234 error ("bad value (%s) for -march= switch", ix86_arch_string);
235 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
236 ix86_arch = PROCESSOR_DEFAULT;
239 if (ix86_cpu_string == (char *)0)
240 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
242 for (j = 0; j < ptt_size; j++)
243 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
245 ix86_cpu = processor_target_table[j].processor;
/* Reject a -mcpu older (i > j in table order) than a ppro+ -march.  */
246 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
247 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
249 target_flags |= processor_target_table[j].target_enable;
250 target_flags &= ~processor_target_table[j].target_disable;
256 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
257 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
258 ix86_cpu = PROCESSOR_DEFAULT;
261 /* Validate -mregparm= value */
262 if (i386_regparm_string)
264 i386_regparm = atoi (i386_regparm_string);
265 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
266 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
/* Default code alignment: 2^2 on a plain 386, 2^4 otherwise.  */
269 def_align = (TARGET_386) ? 2 : 4;
271 /* Validate -malign-loops= value, or provide default */
272 if (i386_align_loops_string)
274 i386_align_loops = atoi (i386_align_loops_string);
275 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
276 fatal ("-malign-loops=%d is not between 0 and %d",
277 i386_align_loops, MAX_CODE_ALIGN);
280 i386_align_loops = 2;
282 /* Validate -malign-jumps= value, or provide default */
283 if (i386_align_jumps_string)
285 i386_align_jumps = atoi (i386_align_jumps_string);
286 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
287 fatal ("-malign-jumps=%d is not between 0 and %d",
288 i386_align_jumps, MAX_CODE_ALIGN);
291 i386_align_jumps = def_align;
293 /* Validate -malign-functions= value, or provide default */
294 if (i386_align_funcs_string)
296 i386_align_funcs = atoi (i386_align_funcs_string);
297 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
298 fatal ("-malign-functions=%d is not between 0 and %d",
299 i386_align_funcs, MAX_CODE_ALIGN);
302 i386_align_funcs = def_align;
304 /* Validate -mbranch-cost= value, or provide default */
305 if (i386_branch_cost_string)
307 i386_branch_cost = atoi (i386_branch_cost_string);
308 if (i386_branch_cost < 0 || i386_branch_cost > 5)
309 fatal ("-mbranch-cost=%d is not between 0 and 5",
313 i386_branch_cost = 1;
315 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
316 flag_omit_frame_pointer = 1;
318 /* pic references don't explicitly mention pic_offset_table_rtx */
319 /* code threaded into the prologue may conflict with profiling */
320 if (flag_pic || profile_flag || profile_block_flag)
321 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
324 /* A C statement (sans semicolon) to choose the order in which to
325 allocate hard registers for pseudo-registers local to a basic
328 Store the desired register order in the array `reg_alloc_order'.
329 Element 0 should be the register to allocate first; element 1, the
330 next register; and so on.
332 The macro body should not assume anything about the contents of
333 `reg_alloc_order' before execution of the macro.
335 On most machines, it is not necessary to define this macro. */
/* Implements ORDER_REGS_FOR_LOCAL_ALLOC: honor -mreg-alloc= if given,
   otherwise use the natural register order.  */
338 order_regs_for_local_alloc ()
340 int i, ch, order, regno;
342 /* User specified the register allocation order */
343 if (i386_reg_alloc_order)
345 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
349 case 'a': regno = 0; break;
350 case 'd': regno = 1; break;
351 case 'c': regno = 2; break;
352 case 'b': regno = 3; break;
353 case 'S': regno = 4; break;
354 case 'D': regno = 5; break;
355 case 'B': regno = 6; break;
358 reg_alloc_order[order++] = regno;
/* Append every register the user did not explicitly order.  */
361 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
363 if (!regs_allocated[i])
364 reg_alloc_order[order++] = i;
368 /* If users did not specify a register allocation order, use natural order */
371 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
372 reg_alloc_order[i] = i;
/* Implements OPTIMIZATION_OPTIONS: per-level optimization defaults.  */
378 optimization_options (level)
381 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
382 make the problem with not enough registers even worse */
383 #ifdef INSN_SCHEDULING
385 flag_schedule_insns = 0;
389 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
390 attribute for DECL. The attributes in ATTRIBUTES have previously been
/* K&R parameter list; the TYPE analogue is i386_valid_type_attribute_p.  */
394 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
403 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
404 attribute for TYPE. The attributes in ATTRIBUTES have previously been
408 i386_valid_type_attribute_p (type, attributes, identifier, args)
/* Only function-ish nodes may carry these calling-convention attributes.  */
414 if (TREE_CODE (type) != FUNCTION_TYPE
415 && TREE_CODE (type) != FIELD_DECL
416 && TREE_CODE (type) != TYPE_DECL)
419 /* Stdcall attribute says callee is responsible for popping arguments
420 if they are not variable. */
421 if (is_attribute_p ("stdcall", identifier))
422 return (args == NULL_TREE);
424 /* Cdecl attribute says the callee is a normal C declaration */
425 if (is_attribute_p ("cdecl", identifier))
426 return (args == NULL_TREE);
428 /* Regparm attribute specifies how many integer arguments are to be
429 passed in registers */
430 if (is_attribute_p ("regparm", identifier))
/* regparm takes exactly one argument...  */
434 if (!args || TREE_CODE (args) != TREE_LIST
435 || TREE_CHAIN (args) != NULL_TREE
436 || TREE_VALUE (args) == NULL_TREE)
439 cst = TREE_VALUE (args);
440 if (TREE_CODE (cst) != INTEGER_CST)
/* ... and it must be an integer constant in [0, REGPARM_MAX].  */
443 if (TREE_INT_CST_HIGH (cst) != 0
444 || TREE_INT_CST_LOW (cst) < 0
445 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
454 /* Return 0 if the attributes for two types are incompatible, 1 if they
455 are compatible, and 2 if they are nearly compatible (which causes a
456 warning to be generated). */
457 i386_comp_type_attributes (type1, type2)
467 /* Value is the number of bytes of arguments automatically
468 popped when returning from a subroutine call.
469 FUNDECL is the declaration node of the function (as a tree),
470 FUNTYPE is the data type of the function (as a tree),
471 or for a library call it is an identifier node for the subroutine name.
472 SIZE is the number of bytes of arguments passed on the stack.
474 On the 80386, the RTD insn may be used to pop them if the number
475 of args is fixed, but if the number is variable then the caller
476 must pop them all. RTD can't be used for library calls now
477 because the library is compiled with the Unix compiler.
478 Use of RTD is a selectable option, since it is incompatible with
479 standard Unix calling sequences. If the option is not selected,
480 the caller must always pop the args.
482 The attribute stdcall is equivalent to RTD on a per module basis. */
485 i386_return_pops_args (fundecl, funtype, size)
490 int rtd = TARGET_RTD;
492 /* Cdecl functions override -mrtd, and never pop the stack */
493 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
495 /* Stdcall functions will pop the stack if not variable args */
496 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* Pop only when the arg list is fixed (last arg is void_type_node).  */
500 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
501 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
505 /* Lose any fake structure return argument */
/* Callee pops the hidden struct-return pointer: one Pmode word.  */
506 if (aggregate_value_p (TREE_TYPE (funtype)))
507 return GET_MODE_SIZE (Pmode);
513 /* Argument support functions. */
515 /* Initialize a variable CUM of type CUMULATIVE_ARGS
516 for a call to a function whose data type is FNTYPE.
517 For a library call, FNTYPE is 0. */
520 init_cumulative_args (cum, fntype, libname)
521 CUMULATIVE_ARGS *cum; /* argument info to initialize */
522 tree fntype; /* tree ptr for function decl */
523 rtx libname; /* SYMBOL_REF of library name or 0 */
525 static CUMULATIVE_ARGS zero_cum;
526 tree param, next_param;
/* Optional tracing of argument-passing decisions (-mdebug-arg).  */
528 if (TARGET_DEBUG_ARG)
530 fprintf (stderr, "\ninit_cumulative_args (");
533 tree ret_type = TREE_TYPE (fntype);
534 fprintf (stderr, "fntype code = %s, ret code = %s",
535 tree_code_name[ (int)TREE_CODE (fntype) ],
536 tree_code_name[ (int)TREE_CODE (ret_type) ]);
539 fprintf (stderr, "no fntype");
542 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
547 /* Set up the number of registers to use for passing arguments. */
/* Start from -mregparm; a regparm attribute on the type overrides it.  */
548 cum->nregs = i386_regparm;
551 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
553 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
556 /* Determine if this function has variable arguments. This is
557 indicated by the last argument being 'void_type_mode' if there
558 are no variable arguments. If there are variable arguments, then
559 we won't pass anything in registers */
563 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
567 next_param = TREE_CHAIN (param);
568 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
573 if (TARGET_DEBUG_ARG)
574 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
579 /* Update the data in CUM to advance over an argument
580 of mode MODE and data type TYPE.
581 (TYPE is null for libcalls where that information may not be available.) */
584 function_arg_advance (cum, mode, type, named)
585 CUMULATIVE_ARGS *cum; /* current arg information */
586 enum machine_mode mode; /* current arg mode */
587 tree type; /* type of the argument or 0 if lib support */
588 int named; /* whether or not the argument was named */
/* Size of this argument, rounded up to whole words.  */
590 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
591 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
593 if (TARGET_DEBUG_ARG)
595 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
596 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
611 /* Define where to put the arguments to a function.
612 Value is zero to push the argument on the stack,
613 or a hard register in which to store the argument.
615 MODE is the argument's machine mode.
616 TYPE is the data type of the argument (as a tree).
617 This is null for libcalls where that information may
619 CUM is a variable of type CUMULATIVE_ARGS which gives info about
620 the preceding args and about the function being called.
621 NAMED is nonzero if this argument is a named parameter
622 (otherwise it is an extra parameter matching an ellipsis). */
625 function_arg (cum, mode, type, named)
626 CUMULATIVE_ARGS *cum; /* current arg information */
627 enum machine_mode mode; /* current arg mode */
628 tree type; /* type of the argument or 0 if lib support */
629 int named; /* != 0 for normal args, == 0 for ... args */
632 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
633 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
637 default: /* for now, pass fp/complex values on the stack */
/* Use a register only if the whole argument fits in what remains.  */
645 if (words <= cum->nregs)
646 ret = gen_rtx (REG, mode, cum->regno);
650 if (TARGET_DEBUG_ARG)
653 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
654 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
657 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
659 fprintf (stderr, ", stack");
661 fprintf (stderr, " )\n");
667 /* For an arg passed partly in registers and partly in memory,
668 this is the number of registers used.
669 For args passed entirely in registers or entirely in memory, zero. */
672 function_arg_partial_nregs (cum, mode, type, named)
673 CUMULATIVE_ARGS *cum; /* current arg information */
674 enum machine_mode mode; /* current arg mode */
675 tree type; /* type of the argument or 0 if lib support */
676 int named; /* != 0 for normal args, == 0 for ... args */
682 /* Output an insn whose source is a 386 integer register. SRC is the
683 rtx for the register, and TEMPLATE is the op-code template. SRC may
684 be either SImode or DImode.
686 The template will be output with operands[0] as SRC, and operands[1]
687 as a pointer to the top of the 386 stack. So a call from floatsidf2
688 would look like this:
690 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
692 where %z0 corresponds to the caller's operands[1], and is used to
693 emit the proper size suffix.
695 ??? Extend this to handle HImode - a 387 can load and store HImode
699 output_op_from_reg (src, template)
704 int size = GET_MODE_SIZE (GET_MODE (src));
707 xops[1] = AT_SP (Pmode);
708 xops[2] = GEN_INT (size);
709 xops[3] = stack_pointer_rtx;
/* Push the value, most-significant word first, so it sits at the top
   of the stack in memory order for the 387 to consume.  */
711 if (size > UNITS_PER_WORD)
714 if (size > 2 * UNITS_PER_WORD)
716 high = gen_rtx (REG, SImode, REGNO (src) + 2);
717 output_asm_insn (AS1 (push%L0,%0), &high);
719 high = gen_rtx (REG, SImode, REGNO (src) + 1);
720 output_asm_insn (AS1 (push%L0,%0), &high);
722 output_asm_insn (AS1 (push%L0,%0), &src);
724 output_asm_insn (template, xops);
/* Discard the pushed temporary by re-adjusting the stack pointer.  */
726 output_asm_insn (AS2 (add%L3,%2,%3), xops);
729 /* Output an insn to pop an value from the 387 top-of-stack to 386
730 register DEST. The 387 register stack is popped if DIES is true. If
731 the mode of DEST is an integer mode, a `fist' integer store is done,
732 otherwise a `fst' float store is done. */
735 output_to_reg (dest, dies)
740 int size = GET_MODE_SIZE (GET_MODE (dest));
742 xops[0] = AT_SP (Pmode);
743 xops[1] = stack_pointer_rtx;
744 xops[2] = GEN_INT (size);
/* Make room on the 386 stack for the stored value.  */
747 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
749 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
752 output_asm_insn (AS1 (fistp%z3,%y0), xops);
754 output_asm_insn (AS1 (fist%z3,%y0), xops);
756 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
759 output_asm_insn (AS1 (fstp%z3,%y0), xops);
/* XFmode has no non-popping store insn: store with fstp, then reload
   the value with fld if it must stay live on the 387 stack.  */
762 if (GET_MODE (dest) == XFmode)
764 output_asm_insn (AS1 (fstp%z3,%y0), xops);
765 output_asm_insn (AS1 (fld%z3,%y0), xops);
768 output_asm_insn (AS1 (fst%z3,%y0), xops);
/* Pop the stored words, low word first, into the destination regs.  */
774 output_asm_insn (AS1 (pop%L0,%0), &dest);
776 if (size > UNITS_PER_WORD)
778 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
779 output_asm_insn (AS1 (pop%L0,%0), &dest);
780 if (size > 2 * UNITS_PER_WORD)
782 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
783 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* Return the one-insn assembler template for a single-word move between
   OPERANDS[0] (dest) and OPERANDS[1] (src).  */
789 singlemove_string (operands)
/* A store through (pre_dec sp) is really a push.  */
793 if (GET_CODE (operands[0]) == MEM
794 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
796 if (XEXP (x, 0) != stack_pointer_rtx)
800 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
802 return output_move_const_single (operands);
804 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
805 return AS2 (mov%L0,%1,%0);
806 else if (CONSTANT_P (operands[1]))
807 return AS2 (mov%L0,%1,%0);
/* Memory-to-memory: go through the stack with a push/pop pair.  */
810 output_asm_insn ("push%L1 %1", operands);
815 /* Return a REG that occurs in ADDR with coefficient 1.
816 ADDR can be effectively incremented by incrementing REG. */
/* Walk down nested PLUS rtx's, always following the register (or the
   non-constant) operand, until a bare REG is reached.  */
822 while (GET_CODE (addr) == PLUS)
824 if (GET_CODE (XEXP (addr, 0)) == REG)
825 addr = XEXP (addr, 0);
826 else if (GET_CODE (XEXP (addr, 1)) == REG)
827 addr = XEXP (addr, 1);
828 else if (CONSTANT_P (XEXP (addr, 0)))
829 addr = XEXP (addr, 1);
830 else if (CONSTANT_P (XEXP (addr, 1)))
831 addr = XEXP (addr, 0);
835 if (GET_CODE (addr) == REG)
841 /* Output an insn to add the constant N to the register X. */
/* +/-1 uses the shorter inc/dec encodings; otherwise add/sub an
   immediate (negated for subtraction).  */
852 output_asm_insn (AS1 (dec%L0,%0), xops);
854 output_asm_insn (AS1 (inc%L0,%0), xops);
857 xops[1] = GEN_INT (-n);
858 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
862 xops[1] = GEN_INT (n);
863 output_asm_insn (AS2 (add%L0,%1,%0), xops);
868 /* Output assembler code to perform a doubleword move insn
869 with operands OPERANDS. */
/* Handles both 8-byte (DImode/DFmode, two SImode words) and 12-byte
   (XFmode, three SImode words: low, middle, late) moves.  */
872 output_move_double (operands)
875 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
879 rtx addreg0 = 0, addreg1 = 0;
880 int dest_overlapped_low = 0;
881 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
886 /* First classify both operands. */
888 if (REG_P (operands[0]))
890 else if (offsettable_memref_p (operands[0]))
892 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
894 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
896 else if (GET_CODE (operands[0]) == MEM)
901 if (REG_P (operands[1]))
903 else if (CONSTANT_P (operands[1]))
905 else if (offsettable_memref_p (operands[1]))
907 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
909 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
911 else if (GET_CODE (operands[1]) == MEM)
916 /* Check for the cases that the operand constraints are not
917 supposed to allow to happen. Abort if we get one,
918 because generating code for these cases is painful. */
920 if (optype0 == RNDOP || optype1 == RNDOP)
923 /* If one operand is decrementing and one is incrementing
924 decrement the former register explicitly
925 and change that operand into ordinary indexing. */
927 if (optype0 == PUSHOP && optype1 == POPOP)
929 /* ??? Can this ever happen on i386? */
930 operands[0] = XEXP (XEXP (operands[0], 0), 0);
931 asm_add (-size, operands[0]);
932 if (GET_MODE (operands[1]) == XFmode)
933 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
934 else if (GET_MODE (operands[0]) == DFmode)
935 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
937 operands[0] = gen_rtx (MEM, DImode, operands[0]);
941 if (optype0 == POPOP && optype1 == PUSHOP)
943 /* ??? Can this ever happen on i386? */
944 operands[1] = XEXP (XEXP (operands[1], 0), 0);
945 asm_add (-size, operands[1]);
946 if (GET_MODE (operands[1]) == XFmode)
947 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
948 else if (GET_MODE (operands[1]) == DFmode)
949 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
951 operands[1] = gen_rtx (MEM, DImode, operands[1]);
955 /* If an operand is an unoffsettable memory ref, find a register
956 we can increment temporarily to make it refer to the second word. */
958 if (optype0 == MEMOP)
959 addreg0 = find_addr_reg (XEXP (operands[0], 0));
961 if (optype1 == MEMOP)
962 addreg1 = find_addr_reg (XEXP (operands[1], 0));
964 /* Ok, we can do one word at a time.
965 Normally we do the low-numbered word first,
966 but if either operand is autodecrementing then we
967 do the high-numbered word first.
969 In either case, set up in LATEHALF the operands to use
970 for the high-numbered word and in some cases alter the
971 operands in OPERANDS to be suitable for the low-numbered word. */
/* Three-word (XFmode) case: build middlehalf and latehalf operands.  */
975 if (optype0 == REGOP)
977 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
978 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
980 else if (optype0 == OFFSOP)
982 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
983 latehalf[0] = adj_offsettable_operand (operands[0], 8);
987 middlehalf[0] = operands[0];
988 latehalf[0] = operands[0];
991 if (optype1 == REGOP)
993 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
994 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
996 else if (optype1 == OFFSOP)
998 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
999 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1001 else if (optype1 == CNSTOP)
/* Split an XFmode constant into its three target-format words.  */
1003 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1005 REAL_VALUE_TYPE r; long l[3];
1007 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1008 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1009 operands[1] = GEN_INT (l[0]);
1010 middlehalf[1] = GEN_INT (l[1]);
1011 latehalf[1] = GEN_INT (l[2]);
1013 else if (CONSTANT_P (operands[1]))
1014 /* No non-CONST_DOUBLE constant should ever appear here. */
1019 middlehalf[1] = operands[1];
1020 latehalf[1] = operands[1];
1023 else /* size is not 12: */
/* Two-word case: only a latehalf is needed.  */
1025 if (optype0 == REGOP)
1026 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1027 else if (optype0 == OFFSOP)
1028 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1030 latehalf[0] = operands[0];
1032 if (optype1 == REGOP)
1033 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1034 else if (optype1 == OFFSOP)
1035 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1036 else if (optype1 == CNSTOP)
1037 split_double (operands[1], &operands[1], &latehalf[1]);
1039 latehalf[1] = operands[1];
1042 /* If insn is effectively movd N (sp),-(sp) then we will do the
1043 high word first. We should use the adjusted operand 1
1044 (which is N+4 (sp) or N+8 (sp))
1045 for the low word and middle word as well,
1046 to compensate for the first decrement of sp. */
1047 if (optype0 == PUSHOP
1048 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1049 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1050 middlehalf[1] = operands[1] = latehalf[1];
1052 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1053 if the upper part of reg N does not appear in the MEM, arrange to
1054 emit the move late-half first. Otherwise, compute the MEM address
1055 into the upper part of N and use that as a pointer to the memory
1057 if (optype0 == REGOP
1058 && (optype1 == OFFSOP || optype1 == MEMOP))
1060 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1061 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1063 /* If both halves of dest are used in the src memory address,
1064 compute the address into latehalf of dest. */
1066 xops[0] = latehalf[0];
1067 xops[1] = XEXP (operands[1], 0);
1068 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1069 if( GET_MODE (operands[1]) == XFmode )
1072 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1073 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1074 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1078 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1079 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1083 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1085 /* Check for two regs used by both source and dest. */
1086 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1087 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1090 /* JRV says this can't happen: */
1091 if (addreg0 || addreg1)
1094 /* Only the middle reg conflicts; simply put it last. */
1095 output_asm_insn (singlemove_string (operands), operands);
1096 output_asm_insn (singlemove_string (latehalf), latehalf);
1097 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1100 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1101 /* If the low half of dest is mentioned in the source memory
1102 address, the arrange to emit the move late half first. */
1103 dest_overlapped_low = 1;
1106 /* If one or both operands autodecrementing,
1107 do the two words, high-numbered first. */
1109 /* Likewise, the first move would clobber the source of the second one,
1110 do them in the other order. This happens only for registers;
1111 such overlap can't happen in memory unless the user explicitly
1112 sets it up, and that is an undefined circumstance. */
1115 if (optype0 == PUSHOP || optype1 == PUSHOP
1116 || (optype0 == REGOP && optype1 == REGOP
1117 && REGNO (operands[0]) == REGNO (latehalf[1]))
1118 || dest_overlapped_low)
1120 if (optype0 == PUSHOP || optype1 == PUSHOP
1121 || (optype0 == REGOP && optype1 == REGOP
1122 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1123 || REGNO (operands[0]) == REGNO (latehalf[1])))
1124 || dest_overlapped_low)
1126 /* Make any unoffsettable addresses point at high-numbered word. */
1128 asm_add (size-4, addreg0);
1130 asm_add (size-4, addreg1);
/* Reverse order: high word, (middle word for XFmode), then low word.  */
1133 output_asm_insn (singlemove_string (latehalf), latehalf);
1135 /* Undo the adds we just did. */
1137 asm_add (-4, addreg0);
1139 asm_add (-4, addreg1);
1143 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1145 asm_add (-4, addreg0);
1147 asm_add (-4, addreg1);
1150 /* Do low-numbered word. */
1151 return singlemove_string (operands);
1154 /* Normal case: do the two words, low-numbered first. */
1156 output_asm_insn (singlemove_string (operands), operands);
1158 /* Do the middle one of the three words for long double */
1162 asm_add (4, addreg0);
1164 asm_add (4, addreg1);
1166 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1169 /* Make any unoffsettable addresses point at high-numbered word. */
1171 asm_add (4, addreg0);
1173 asm_add (4, addreg1);
1176 output_asm_insn (singlemove_string (latehalf), latehalf);
1178 /* Undo the adds we just did. */
1180 asm_add (4-size, addreg0);
1182 asm_add (4-size, addreg1);
1188 #define MAX_TMPS 2 /* max temporary registers used */
1190 /* Output the appropriate code to move push memory on the stack */
1193 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1205 } tmp_info[MAX_TMPS];
1207 rtx src = operands[1];
/* If SRC mentions the stack pointer, its offsets shift as we push.  */
1210 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1211 int stack_offset = 0;
1215 if (!offsettable_memref_p (src))
1216 fatal_insn ("Source is not offsettable", insn);
1218 if ((length & 3) != 0)
1219 fatal_insn ("Pushing non-word aligned size", insn);
1221 /* Figure out which temporary registers we have available */
1222 for (i = tmp_start; i < n_operands; i++)
1224 if (GET_CODE (operands[i]) == REG)
/* A scratch that overlaps the source cannot be used.  */
1226 if (reg_overlap_mentioned_p (operands[i], src))
1229 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1230 if (max_tmps == MAX_TMPS)
/* Push words highest-offset first so they land in memory order.  */
1236 for (offset = length - 4; offset >= 0; offset -= 4)
1238 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1239 output_asm_insn (AS1(push%L0,%0), xops);
/* With scratch registers: batch loads, then batch pushes.  */
1245 for (offset = length - 4; offset >= 0; )
1247 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1249 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1250 tmp_info[num_tmps].push = AS1(push%L0,%1);
1251 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1255 for (i = 0; i < num_tmps; i++)
1256 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1258 for (i = 0; i < num_tmps; i++)
1259 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
/* Account for the stack-pointer movement caused by the pushes.  */
1262 stack_offset += 4*num_tmps;
1270 /* Output the appropriate code to move data between two memory locations */
/* Emit assembler to copy LENGTH bytes from memory operand operands[1]
   to memory operand operands[0], staging through scratch registers
   taken from operands[tmp_start .. n_operands-1].
   NOTE(review): many intermediate lines are elided from this excerpt;
   the code lines below are byte-identical to the original.  */
1273 output_move_memory (operands, insn, length, tmp_start, n_operands)
1284 } tmp_info[MAX_TMPS];
1286 rtx dest = operands[0];
1287 rtx src = operands[1];
/* Byte-addressable scratch register; needed only when LENGTH is odd.  */
1288 rtx qi_tmp = NULL_RTX;
/* A store through (pre_inc %esp) is really a push; delegate it.  */
1294 if (GET_CODE (dest) == MEM
1295 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1296 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1297 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1299 if (!offsettable_memref_p (src))
1300 fatal_insn ("Source is not offsettable", insn);
1302 if (!offsettable_memref_p (dest))
1303 fatal_insn ("Destination is not offsettable", insn);
1305 /* Figure out which temporary registers we have available */
1306 for (i = tmp_start; i < n_operands; i++)
1308 if (GET_CODE (operands[i]) == REG)
/* Remember one QImode-capable reg for the trailing odd byte.  */
1310 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1311 qi_tmp = operands[i];
1313 if (reg_overlap_mentioned_p (operands[i], dest))
1314 fatal_insn ("Temporary register overlaps the destination", insn);
1316 if (reg_overlap_mentioned_p (operands[i], src))
1317 fatal_insn ("Temporary register overlaps the source", insn);
1319 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1320 if (max_tmps == MAX_TMPS)
1326 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1328 if ((length & 1) != 0)
1331 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Copy in 4-byte (mov%L) then 2-byte (mov%W) chunks, batching loads
   and stores through the available scratch registers.  */
1336 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1340 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1341 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1342 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1343 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1347 else if (length >= 2)
1349 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1350 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1351 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1352 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
/* All loads before all stores, so overlapping chunks are safe.  */
1360 for (i = 0; i < num_tmps; i++)
1361 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1363 for (i = 0; i < num_tmps; i++)
1364 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Move the final odd byte through the byte-capable scratch (qi_tmp,
   bound to %2 in the elided lines — confirm against full source).  */
1369 xops[0] = adj_offsettable_operand (dest, offset)
1370 xops[1] = adj_offsettable_operand (src, offset);
1372 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1373 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Test whether X is a floating constant the 80387 can materialize
   directly: the visible checks are for +0.0 (excluding -0.0) and 1.0,
   presumably corresponding to fldz/fld1 — confirm against full source.
   NOTE(review): the return statements are elided in this excerpt.  */
1381 standard_80387_constant_p (x)
1384 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* Install a float trap handler so a bad constant cannot crash the
   compiler; setjmp returning nonzero means the evaluation trapped.  */
1389 if (setjmp (handler))
1392 set_float_handler (handler);
1393 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1394 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1395 is1 = REAL_VALUES_EQUAL (d, dconst1);
/* Done evaluating; remove the trap handler.  */
1396 set_float_handler (NULL_PTR);
1404 /* Note that on the 80387, other constants, such as pi,
1405 are much slower to load as standard constants
1406 than to load from doubles in memory! */
/* Return the assembler template that moves the single-precision
   constant operands[1] into operands[0].  For a CONST_DOUBLE source,
   the value is converted to its target single-float bit pattern so it
   can be moved as an ordinary integer immediate.
   NOTE(review): several lines (FP-reg fast path body, XFmode branch)
   are elided from this excerpt.  */
1413 output_move_const_single (operands)
/* FP-register destination: check for a 387 "standard" constant
   (0.0 / 1.0) that can be loaded without a memory operand.  */
1416 if (FP_REG_P (operands[0]))
1418 int conval = standard_80387_constant_p (operands[1]);
1426 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1428 REAL_VALUE_TYPE r; long l;
1430 if (GET_MODE (operands[1]) == XFmode)
1433 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1434 REAL_VALUE_TO_TARGET_SINGLE (r, l);
/* Replace the CONST_DOUBLE by its 32-bit image for singlemove_string.  */
1435 operands[1] = GEN_INT (l);
1437 return singlemove_string (operands);
1440 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1441 reference and a constant. */
/* Predicate: nonzero when OP is a symbol/label reference, or a CONST
   sum of such a reference and a CONST_INT (per the file comment above).
   MODE is unused here.  NOTE(review): the switch cases other than the
   visible return are elided in this excerpt.  */
1444 symbolic_operand (op, mode)
1446 enum machine_mode mode;
1448 switch (GET_CODE (op))
/* (plus (symbol_ref|label_ref) (const_int)) inside a CONST.  */
1455 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1456 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1457 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1463 /* Test for a valid operand for a call instruction.
1464 Don't allow the arg pointer register or virtual regs
1465 since they may change into reg + const, which the patterns
1466 can't handle yet. */
/* Predicate for call targets: a MEM whose address is either a constant
   address that also passes general_operand (matters for PIC), or a
   register other than the arg pointer and the virtual registers
   (which may later turn into reg + const — see comment above).  */
1469 call_insn_operand (op, mode)
1471 enum machine_mode mode;
1473 if (GET_CODE (op) == MEM
1474 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1475 /* This makes a difference for PIC. */
1476 && general_operand (XEXP (op, 0), Pmode))
1477 || (GET_CODE (XEXP (op, 0)) == REG
1478 && XEXP (op, 0) != arg_pointer_rtx
/* Exclude the virtual register range (pseudo numbers up to
   LAST_VIRTUAL_REGISTER).  */
1479 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1480 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1485 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Like call_insn_operand, but without the general_operand test on a
   constant address — so (mem (symbol_ref ...)) is accepted even when
   PIC would make it fail general_operand.  */
1489 expander_call_insn_operand (op, mode)
1491 enum machine_mode mode;
1493 if (GET_CODE (op) == MEM
1494 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1495 || (GET_CODE (XEXP (op, 0)) == REG
1496 && XEXP (op, 0) != arg_pointer_rtx
1497 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1498 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1503 /* Return 1 if OP is a comparison operator that can use the condition code
1504 generated by an arithmetic operation. */
/* Predicate: OP is a comparison operator whose result can be derived
   from the condition codes set by an arithmetic insn.  */
1507 arithmetic_comparison_operator (op, mode)
1509 enum machine_mode mode;
/* Reject a mode mismatch unless the caller passed VOIDmode.  */
1513 if (mode != VOIDmode && mode != GET_MODE (op))
1515 code = GET_CODE (op);
1516 if (GET_RTX_CLASS (code) != '<')
/* GT and LE are excluded — presumably they cannot be recovered from
   arithmetic-set flags here; confirm rationale against i386.md.  */
1519 return (code != GT && code != LE);
1522 /* Returns 1 if OP contains a symbol reference */
/* Return 1 if OP contains a SYMBOL_REF or LABEL_REF anywhere inside it,
   searching recursively through all rtx operands and vectors.  */
1525 symbolic_reference_mentioned_p (op)
/* Direct hit: OP itself is a symbol or label reference.  */
1531 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Otherwise walk the operand format string and recurse into every
   'E' (vector) and 'e' (expression) slot.  */
1534 fmt = GET_RTX_FORMAT (GET_CODE (op));
1535 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1541 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1542 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1545 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1552 /* Attempt to expand a binary operator. Make the expansion closer to the
1553 actual machine, then just general_operand, which will allow 3 separate
1554 memory references (one output, two input) in a single insn. Return
1555 whether the insn fails, or succeeds. */
/* Massage operands so a three-operand binary rtx can be matched by the
   machine's two-address insns (see comment above).  Returns whether the
   expansion succeeds.  NOTE(review): several lines (returns, braces,
   `modified` bookkeeping) are elided from this excerpt; code lines
   below are byte-identical.  */
1558 ix86_expand_binary_operator (code, mode, operands)
1560 enum machine_mode mode;
1567 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1568 if (GET_RTX_CLASS (code) == 'c'
1569 && (rtx_equal_p (operands[0], operands[2])
1570 || immediate_operand (operands[1], mode)))
/* Swap sources so the duplicate/immediate ends up in operands[2].  */
1572 rtx temp = operands[1];
1573 operands[1] = operands[2];
1577 /* If optimizing, copy to regs to improve CSE */
1578 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1580 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1581 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1583 if (GET_CODE (operands[2]) == MEM)
1584 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* const - x: a constant cannot be the first operand of a subtract, so
   materialize it into a fresh register.  */
1586 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1588 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1589 emit_move_insn (temp, operands[1]);
1595 if (!ix86_binary_operator_ok (code, mode, operands))
1597 /* If not optimizing, try to make a valid insn (optimize code previously did
1598 this above to improve chances of CSE) */
1600 if ((!TARGET_PSEUDO || !optimize)
1601 && ((reload_in_progress | reload_completed) == 0)
1602 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1605 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1607 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1611 if (GET_CODE (operands[2]) == MEM)
1613 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1617 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1619 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1620 emit_move_insn (temp, operands[1]);
/* Re-validate only if this fallback path actually changed something.  */
1625 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1635 /* Return TRUE or FALSE depending on whether the binary operator meets the
1636 appropriate constraints. */
/* TRUE when the operands fit the machine's two-address binary insns:
   at most one of the two sources may be a MEM, and a CONST_INT first
   source is allowed only for commutative codes (it can be swapped).  */
1639 ix86_binary_operator_ok (code, mode, operands)
1641 enum machine_mode mode;
1644 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1645 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1648 /* Attempt to expand a unary operator. Make the expansion closer to the
1649 actual machine, then just general_operand, which will allow 2 separate
1650 memory references (one output, one input) in a single insn. Return
1651 whether the insn fails, or succeeds. */
/* Massage the single source operand of a unary rtx so it matches the
   machine (see comment above); returns whether the expansion succeeds.
   NOTE(review): returns and some condition lines are elided from this
   excerpt; code lines below are byte-identical.  */
1654 ix86_expand_unary_operator (code, mode, operands)
1656 enum machine_mode mode;
1661 /* If optimizing, copy to regs to improve CSE */
/* Only before reload may new pseudos be created.  */
1664 && ((reload_in_progress | reload_completed) == 0)
1665 && GET_CODE (operands[1]) == MEM)
1667 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1670 if (!ix86_unary_operator_ok (code, mode, operands))
/* Fallback path when the optimizing copy above was skipped.  */
1672 if ((!TARGET_PSEUDO || !optimize)
1673 && ((reload_in_progress | reload_completed) == 0)
1674 && GET_CODE (operands[1]) == MEM)
1676 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1677 if (!ix86_unary_operator_ok (code, mode, operands))
1687 /* Return TRUE or FALSE depending on whether the unary operator meets the
1688 appropriate constraints. */
/* TRUE/FALSE whether the unary operator's operands meet the machine
   constraints (see comment above).  NOTE(review): the body is entirely
   elided from this excerpt.  */
1691 ix86_unary_operator_ok (code, mode, operands)
1693 enum machine_mode mode;
/* State for the -fpic return-address thunk emitted by
   asm_output_function_prefix: the thunk's label rtx, its assembler
   name ("LPR<n>"), and a counter that keeps the names unique.  */
1701 static rtx pic_label_rtx;
1702 static char pic_label_name [256];
1703 static int pic_label_no = 0;
1705 /* This function generates code for -fpic that loads %ebx with
1706 with the return address of the caller and then returns. */
/* Emit, before the function proper, the tiny -fpic thunk that loads
   the return address of its caller into the PIC register and returns
   (per the comment above).  Only emitted when the PIC register is used
   and deep-branch-prediction mode wants a matching ret for every call.
   NOTE(review): some lines are elided from this excerpt.  */
1708 asm_output_function_prefix (file, name)
1713 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1714 || current_function_uses_const_pool);
1715 xops[0] = pic_offset_table_rtx;
1716 xops[1] = stack_pointer_rtx;
1718 /* deep branch prediction favors having a return for every call */
1719 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Lazily create the thunk's label the first time it is needed.  */
1723 if (pic_label_rtx == 0)
1725 pic_label_rtx = (rtx) gen_label_rtx ();
1726 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1727 LABEL_NAME (pic_label_rtx) = pic_label_name;
/* Declare the thunk as a (result-less) function so the assembler
   output hooks treat it like real code.  */
1729 prologue_node = make_node (FUNCTION_DECL);
1730 DECL_RESULT (prologue_node) = 0;
1731 #ifdef ASM_DECLARE_FUNCTION_NAME
1732 ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
/* Thunk body: load the caller's return address (at (%esp)) into the
   PIC register, then return.  */
1734 output_asm_insn ("movl (%1),%0", xops);
1735 output_asm_insn ("ret", xops);
1739 /* Set up the stack and frame (if desired) for the function. */
/* Textual prologue: sets up the frame pointer, allocates the frame,
   saves call-saved registers, and loads the PIC register if needed.
   Used when TARGET_SCHEDULE_PROLOGUE is off (the rtl version is
   ix86_expand_prologue below).  NOTE(review): several lines (braces,
   returns, probe threshold branch) are elided from this excerpt.  */
1742 function_prologue (file, size)
1749 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1750 || current_function_uses_const_pool);
1751 long tsize = get_frame_size ();
1753 /* pic references don't explicitly mention pic_offset_table_rtx */
1754 if (TARGET_SCHEDULE_PROLOGUE)
1760 xops[0] = stack_pointer_rtx;
1761 xops[1] = frame_pointer_rtx;
1762 xops[2] = GEN_INT (tsize);
/* push %ebp; mov %esp,%ebp — establish the frame pointer.  */
1764 if (frame_pointer_needed)
1766 output_asm_insn ("push%L1 %1", xops);
1767 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* Small frames: just subtract the size from %esp.  */
1772 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1773 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
/* Large frames with stack probing: pass the size in %eax and call
   _alloca to extend the stack a page at a time.  */
1776 xops[3] = gen_rtx (REG, SImode, 0);
1777 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
1779 xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
1780 output_asm_insn (AS1 (call,%P3), xops);
1783 /* Note If use enter it is NOT reversed args.
1784 This one is not reversed from intel!!
1785 I think enter is slower. Also sdb doesn't like it.
1786 But if you want it the code is:
1788 xops[3] = const0_rtx;
1789 output_asm_insn ("enter %2,%3", xops);
/* Push every call-saved register that is live, plus the PIC reg.  */
1792 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1793 for (regno = limit - 1; regno >= 0; regno--)
1794 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1795 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1797 xops[0] = gen_rtx (REG, SImode, regno);
1798 output_asm_insn ("push%L0 %0", xops);
/* PIC setup, deep-branch-prediction flavor: call the LPR thunk
   (which loads the return address) then add the GOT offset.  */
1801 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1803 xops[0] = pic_offset_table_rtx;
1804 xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx))
1806 output_asm_insn (AS1 (call,%P1), xops);
1807 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
/* Classic PIC setup: call next label / pop to get the pc, then add
   the GOT displacement relative to that label.  */
1810 else if (pic_reg_used)
1812 xops[0] = pic_offset_table_rtx;
1813 xops[1] = (rtx) gen_label_rtx ();
1815 output_asm_insn (AS1 (call,%P1), xops);
1816 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1817 output_asm_insn (AS1 (pop%L0,%0), xops);
1818 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1822 /* This function generates the assembly code for function entry.
1823 FILE is an stdio stream to output the code to.
1824 SIZE is an int: how many units of temporary storage to allocate. */
/* RTL prologue: emits the same sequence as function_prologue above but
   as insns (so the scheduler can move them), when
   TARGET_SCHEDULE_PROLOGUE is set.  NOTE(review): braces, returns, and
   the stack-probe branch interior are elided from this excerpt.  */
1827 ix86_expand_prologue ()
1832 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1833 || current_function_uses_const_pool);
1834 long tsize = get_frame_size ();
1836 if (!TARGET_SCHEDULE_PROLOGUE)
1839 xops[0] = stack_pointer_rtx;
1840 xops[1] = frame_pointer_rtx;
1841 xops[2] = GEN_INT (tsize);
/* (set (mem (pre_dec sp)) fp) == push %ebp; then mov %esp,%ebp.  */
1842 if (frame_pointer_needed)
1844 emit_insn (gen_rtx (SET, 0,
1845 gen_rtx (MEM, SImode,
1846 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1847 frame_pointer_rtx));
1848 emit_move_insn (xops[1], xops[0]);
/* Small frames: plain subtract from %esp.  */
1853 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1854 emit_insn (gen_subsi3 (xops[0], xops[0], xops[2]));
/* Large probed frames: size in %eax, then call _alloca.  */
1857 xops[3] = gen_rtx (REG, SImode, 0);
1858 emit_move_insn (xops[3], xops[2]);
1859 xops[3] = gen_rtx (MEM, FUNCTION_MODE,
1860 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
1861 emit_call_insn (gen_rtx (CALL, VOIDmode,
1862 xops[3], const0_rtx));
1865 /* Note If use enter it is NOT reversed args.
1866 This one is not reversed from intel!!
1867 I think enter is slower. Also sdb doesn't like it.
1868 But if you want it the code is:
1870 xops[3] = const0_rtx;
1871 output_asm_insn ("enter %2,%3", xops);
/* Push each live call-saved register (and the PIC reg if used).  */
1874 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1875 for (regno = limit - 1; regno >= 0; regno--)
1876 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1877 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1879 xops[0] = gen_rtx (REG, SImode, regno);
1880 emit_insn (gen_rtx (SET, 0,
1881 gen_rtx (MEM, SImode,
1882 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
/* PIC setup via the shared LPR thunk (deep-branch-prediction mode);
   the label is created lazily, mirroring asm_output_function_prefix.  */
1886 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1888 xops[0] = pic_offset_table_rtx;
1889 if (pic_label_rtx == 0)
1891 pic_label_rtx = (rtx) gen_label_rtx ();
1892 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1893 LABEL_NAME (pic_label_rtx) = pic_label_name;
1895 xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));
1897 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
1898 emit_insn (gen_prologue_set_got (xops[0],
1899 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1900 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
/* Classic PIC setup: get pc via call/pop, then add GOT offset.  */
1902 else if (pic_reg_used)
1904 xops[0] = pic_offset_table_rtx;
1905 xops[1] = (rtx) gen_label_rtx ();
1907 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1908 emit_insn (gen_pop (xops[0]));
1909 emit_insn (gen_prologue_set_got (xops[0],
1910 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1911 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
1915 /* Restore function stack, frame, and registers. */
/* Textual epilogue hook.  NOTE(review): the body is elided from this
   excerpt; the rtl restoration work appears in ix86_expand_epilogue
   below — confirm against the full source.  */
1918 function_epilogue (file, size)
1924 /* Return 1 if it is appropriate to emit `ret' instructions in the
1925 body of a function. Do this only if the epilogue is simple, needing a
1926 couple of insns. Prior to reloading, we can't tell how many registers
1927 must be saved, so return 0 then. Return 0 if there is no frame
1928 marker to de-allocate.
1930 If NON_SAVING_SETJMP is defined and true, then it is not possible
1931 for the epilogue to be simple, so return 0. This is a special case
1932 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1933 until final, but jump_optimize may need to know sooner if a
/* Decide whether a bare `ret` is usable for this function's epilogue
   (see the long comment above).  NOTE(review): nregs declaration/
   increment lines are elided from this excerpt.  */
1937 ix86_can_use_return_insn_p ()
1941 int reglimit = (frame_pointer_needed
1942 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1943 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1944 || current_function_uses_const_pool);
/* Setjmp that does not save registers makes a simple epilogue
   impossible (see comment above).  */
1946 #ifdef NON_SAVING_SETJMP
1947 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
/* Before reload we cannot know which registers must be restored.  */
1951 if (! reload_completed)
/* Count call-saved registers (plus the PIC reg, if used) that the
   epilogue would have to pop.  */
1954 for (regno = reglimit - 1; regno >= 0; regno--)
1955 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1956 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
/* Simple enough only if nothing to pop, or no frame marker at all.  */
1959 return nregs == 0 || ! frame_pointer_needed;
1963 /* This function generates the assembly code for function exit.
1964 FILE is an stdio stream to output the code to.
1965 SIZE is an int: how many units of temporary storage to deallocate. */
/* RTL epilogue: restore call-saved registers, tear down the frame, and
   emit the return (popping caller arguments when required).  The
   commented-out output_asm_insn calls record the textual equivalent of
   each emitted insn.  NOTE(review): braces and some declarations are
   elided from this excerpt; code lines below are byte-identical.  */
1968 ix86_expand_epilogue ()
1971 register int nregs, limit;
1974 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1975 || current_function_uses_const_pool);
1976 long tsize = get_frame_size ();
1978 /* Compute the number of registers to pop */
1980 limit = (frame_pointer_needed
1981 ? FRAME_POINTER_REGNUM
1982 : STACK_POINTER_REGNUM);
1986 for (regno = limit - 1; regno >= 0; regno--)
1987 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1988 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1991 /* sp is often unreliable so we must go off the frame pointer,
1994 /* In reality, we may not care if sp is unreliable, because we can
1995 restore the register relative to the frame pointer. In theory,
1996 since each move is the same speed as a pop, and we don't need the
1997 leal, this is faster. For now restore multiple registers the old
/* Offset from %ebp down to the bottom of the saved-register block.  */
2000 offset = -tsize - (nregs * UNITS_PER_WORD);
2002 xops[2] = stack_pointer_rtx;
/* Pop path: point %esp at the saved registers (via lea off the frame
   pointer when one exists) and pop them in ascending regno order.  */
2004 if (nregs > 1 || ! frame_pointer_needed)
2006 if (frame_pointer_needed)
2008 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2009 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2010 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2013 for (regno = 0; regno < limit; regno++)
2014 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2015 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2017 xops[0] = gen_rtx (REG, SImode, regno);
2018 emit_insn (gen_pop (xops[0]));
2019 /* output_asm_insn ("pop%L0 %0", xops);*/
/* Move path: restore each register with a frame-pointer-relative
   load, leaving %esp untouched.  */
2023 for (regno = 0; regno < limit; regno++)
2024 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2025 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2027 xops[0] = gen_rtx (REG, SImode, regno);
2028 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2029 emit_move_insn (xops[0], xops[1]);
2030 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2034 if (frame_pointer_needed)
2036 /* If not an i386, mov & pop is faster than "leave". */
2038 if (TARGET_USE_LEAVE)
2039 emit_insn (gen_leave());
2040 /* output_asm_insn ("leave", xops);*/
/* Equivalent of leave: mov %ebp,%esp; pop %ebp.  */
2043 xops[0] = frame_pointer_rtx;
2044 xops[1] = stack_pointer_rtx;
2045 emit_insn (gen_epilogue_set_stack_ptr());
2046 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2047 emit_insn (gen_pop (xops[0]));
2048 /* output_asm_insn ("pop%L0 %0", xops);*/
2053 /* If there is no frame pointer, we must still release the frame. */
2055 xops[0] = GEN_INT (tsize);
2056 emit_insn (gen_rtx (SET, SImode,
2058 gen_rtx (PLUS, SImode,
2061 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2064 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2065 if (profile_block_flag == 2)
2067 FUNCTION_BLOCK_PROFILER_EXIT(file);
/* Callee-pops calling convention: return with `ret $n` when possible.  */
2071 if (current_function_pops_args && current_function_args_size)
2073 xops[1] = GEN_INT (current_function_pops_args);
2075 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2076 asked to pop more, pop return address, do explicit add, and jump
2077 indirectly to the caller. */
2079 if (current_function_pops_args >= 32768)
2081 /* ??? Which register to use here? */
2082 xops[0] = gen_rtx (REG, SImode, 2);
2083 emit_insn (gen_pop (xops[0]));
2084 /* output_asm_insn ("pop%L0 %0", xops);*/
2085 emit_insn (gen_rtx (SET, SImode,
2087 gen_rtx (PLUS, SImode,
2090 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2091 emit_jump_insn (xops[0]);
2092 /* output_asm_insn ("jmp %*%0", xops);*/
2095 emit_jump_insn (gen_return_pop_internal (xops[1]));
2096 /* output_asm_insn ("ret %1", xops);*/
2099 /* output_asm_insn ("ret", xops);*/
2100 emit_jump_insn (gen_return_internal ());
2104 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2105 that is a valid memory address for an instruction.
2106 The MODE argument is the machine mode for the MEM expression
2107 that wants to use this address.
2109 On x86, legitimate addresses are:
2110 base movl (base),reg
2111 displacement movl disp,reg
2112 base + displacement movl disp(base),reg
2113 index + base movl (base,index),reg
2114 (index + base) + displacement movl disp(base,index),reg
2115 index*scale movl (,index,scale),reg
2116 index*scale + disp movl disp(,index,scale),reg
2117 index*scale + base movl (base,index,scale),reg
2118 (index*scale + base) + disp movl disp(base,index,scale),reg
2120 In each case, scale can be 1, 2, 4, 8. */
2122 /* This is exactly the same as print_operand_addr, except that
2123 it recognizes addresses instead of printing them.
2125 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2126 convert common non-canonical forms to canonical form so that they will
/* Debug helper for the address recognizers below: when
   TARGET_DEBUG_ADDR is set, print MSG (and, in lines elided from this
   excerpt, presumably the offending rtx) to stderr before rejecting.  */
2129 #define ADDR_INVALID(msg,insn) \
2131 if (TARGET_DEBUG_ADDR) \
2133 fprintf (stderr, msg); \
/* Recognize a valid x86 address of the form
   disp(base, index, scale) per the table in the comment above:
   decompose ADDR into base/indx/scale/disp, then validate each part.
   STRICT selects the strict (hard-reg-only) register checks.
   NOTE(review): returns, braces, and a few branches are elided from
   this excerpt; code lines below are byte-identical.  */
2139 legitimate_address_p (mode, addr, strict)
2140 enum machine_mode mode;
2144 rtx base = NULL_RTX;
2145 rtx indx = NULL_RTX;
2146 rtx scale = NULL_RTX;
2147 rtx disp = NULL_RTX;
2149 if (TARGET_DEBUG_ADDR)
2152 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2153 GET_MODE_NAME (mode), strict);
/* --- Decomposition: classify ADDR by its top-level code --- */
2158 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2159 base = addr; /* base reg */
2161 else if (GET_CODE (addr) == PLUS)
2163 rtx op0 = XEXP (addr, 0);
2164 rtx op1 = XEXP (addr, 1);
2165 enum rtx_code code0 = GET_CODE (op0);
2166 enum rtx_code code1 = GET_CODE (op1);
2168 if (code0 == REG || code0 == SUBREG)
2170 if (code1 == REG || code1 == SUBREG)
2172 indx = op0; /* index + base */
2178 base = op0; /* base + displacement */
2183 else if (code0 == MULT)
2185 indx = XEXP (op0, 0);
2186 scale = XEXP (op0, 1);
2188 if (code1 == REG || code1 == SUBREG)
2189 base = op1; /* index*scale + base */
2192 disp = op1; /* index*scale + disp */
2195 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2197 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2198 scale = XEXP (XEXP (op0, 0), 1);
2199 base = XEXP (op0, 1);
2203 else if (code0 == PLUS)
2205 indx = XEXP (op0, 0); /* index + base + disp */
2206 base = XEXP (op0, 1);
2212 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2217 else if (GET_CODE (addr) == MULT)
2219 indx = XEXP (addr, 0); /* index*scale */
2220 scale = XEXP (addr, 1);
2224 disp = addr; /* displacement */
2226 /* Allow arg pointer and stack pointer as index if there is not scaling */
/* (They can only encode as base, so swap — swap lines elided.)  */
2227 if (base && indx && !scale
2228 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
/* --- Validation of each extracted component --- */
2235 /* Validate base register */
2236 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2237 is one word out of a two word structure, which is represented internally
2241 if (GET_CODE (base) != REG)
2243 ADDR_INVALID ("Base is not a register.\n", base);
2247 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2248 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2250 ADDR_INVALID ("Base is not valid.\n", base);
2255 /* Validate index register */
2256 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2257 is one word out of a two word structure, which is represented internally
2261 if (GET_CODE (indx) != REG)
2263 ADDR_INVALID ("Index is not a register.\n", indx);
2267 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2268 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2270 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale without an index can never be built by the decomposition
   above, so it indicates an internal error.  */
2275 abort (); /* scale w/o index invalid */
2277 /* Validate scale factor */
2280 HOST_WIDE_INT value;
2282 if (GET_CODE (scale) != CONST_INT)
2284 ADDR_INVALID ("Scale is not valid.\n", scale);
2288 value = INTVAL (scale);
2289 if (value != 1 && value != 2 && value != 4 && value != 8)
2291 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2296 /* Validate displacement
2297 Constant pool addresses must be handled special. They are
2298 considered legitimate addresses, but only if not used with regs.
2299 When printed, the output routines know to print the reference with the
2300 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2303 if (GET_CODE (disp) == SYMBOL_REF
2304 && CONSTANT_POOL_ADDRESS_P (disp)
2309 else if (!CONSTANT_ADDRESS_P (disp))
2311 ADDR_INVALID ("Displacement is not valid.\n", disp);
2315 else if (GET_CODE (disp) == CONST_DOUBLE)
2317 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under -fpic, a symbolic displacement is only legal when the PIC
   register participates in the address (unscaled).  */
2321 else if (flag_pic && SYMBOLIC_CONST (disp)
2322 && base != pic_offset_table_rtx
2323 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2325 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2329 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2330 && (base != NULL_RTX || indx != NULL_RTX))
2332 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2337 if (TARGET_DEBUG_ADDR)
2338 fprintf (stderr, "Address is valid.\n");
2340 /* Everything looks valid, return true */
2345 /* Return a legitimate reference for ORIG (an address) using the
2346 register REG. If REG is 0, a new pseudo is generated.
2348 There are three types of references that must be handled:
2350 1. Global data references must load the address from the GOT, via
2351 the PIC reg. An insn is emitted to do this load, and the reg is
2354 2. Static data references must compute the address as an offset
2355 from the GOT, whose base is in the PIC reg. An insn is emitted to
2356 compute the address into a reg, and the reg is returned. Static
2357 data objects have SYMBOL_REF_FLAG set to differentiate them from
2358 global data objects.
2360 3. Constant pool addresses must be handled special. They are
2361 considered legitimate addresses, but only if not used with regs.
2362 When printed, the output routines know to print the reference with the
2363 PIC reg, even though the PIC reg doesn't appear in the RTL.
2365 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2366 reg also appears in the address (except for constant pool references,
2369 "switch" statements also require special handling when generating
2370 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* Build a PIC-legitimate reference for address ORIG in register REG
   (a fresh pseudo if REG is 0), handling the three reference kinds
   described in the comment above.  NOTE(review): returns and braces
   are elided from this excerpt; code lines below are byte-identical.  */
2373 legitimize_pic_address (orig, reg)
2380 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant pool entries are already legitimate as-is (case 3).  */
2382 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2387 reg = gen_reg_rtx (Pmode);
/* Static data / labels (case 2): address = PIC reg + GOT offset.
   Global data (case 1): load the address from the GOT slot.  */
2389 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2390 || GET_CODE (addr) == LABEL_REF)
2391 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2393 new = gen_rtx (MEM, Pmode,
2394 gen_rtx (PLUS, Pmode,
2395 pic_offset_table_rtx, orig));
2397 emit_move_insn (reg, new);
2399 current_function_uses_pic_offset_table = 1;
/* CONST / PLUS: legitimize both halves and recombine.  */
2402 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2406 if (GET_CODE (addr) == CONST)
2408 addr = XEXP (addr, 0);
2409 if (GET_CODE (addr) != PLUS)
/* Already PIC-reg-relative: nothing more to do (body elided).  */
2413 if (XEXP (addr, 0) == pic_offset_table_rtx)
2417 reg = gen_reg_rtx (Pmode);
2419 base = legitimize_pic_address (XEXP (addr, 0), reg);
2420 addr = legitimize_pic_address (XEXP (addr, 1),
2421 base == reg ? NULL_RTX : reg);
2423 if (GET_CODE (addr) == CONST_INT)
2424 return plus_constant (base, INTVAL (addr));
/* Re-associate so the constant term stays outermost.  */
2426 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2428 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2429 addr = XEXP (addr, 1);
2431 return gen_rtx (PLUS, Pmode, base, addr);
2437 /* Emit insns to move operands[1] into operands[0]. */
/* Rewrite operands[1] into a PIC-legitimate form before emitting a
   move into operands[0].  During reload no new pseudo may be created,
   so the destination itself serves as the temporary.  */
2440 emit_pic_move (operands, mode)
2442 enum machine_mode mode;
2444 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic constant cannot be stored directly into memory; force it
   through a register first.  */
2446 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2447 operands[1] = (rtx) force_reg (SImode, operands[1]);
2449 operands[1] = legitimize_pic_address (operands[1], temp);
2453 /* Try machine-dependent ways of modifying an illegitimate address
2454 to be legitimate. If we find one, return the new, valid address.
2455 This macro is used in only one place: `memory_address' in explow.c.
2457 OLDX is the address as it was before break_out_memory_refs was called.
2458 In some cases it is useful to look at this to decide what needs to be done.
2460 MODE and WIN are passed so that this macro can use
2461 GO_IF_LEGITIMATE_ADDRESS.
2463 It is always safe for this macro to do nothing. It exists to recognize
2464 opportunities to optimize the output.
2466 For the 80386, we handle X+REG by loading X into a register R and
2467 using R+REG. R will go in a general reg and indexing will be used.
2468 However, if REG is a broken-out memory address or multiplication,
2469 nothing needs to be done because REG can certainly go in a general reg.
2471 When -fpic is used, special handling is needed for symbolic references.
2472 See comments by legitimize_pic_address in i386.c for details. */
/* Try machine-dependent rewrites that turn an illegitimate address X
   into a legitimate one (see the long comment above): canonicalize
   small shifts to multiplies, reassociate PLUS trees so the MULT comes
   first and constants last, and force awkward subexpressions into
   registers.  NOTE(review): braces, returns, and the `changed` flag
   updates are elided from this excerpt; code lines below are
   byte-identical.  */
2475 legitimize_address (x, oldx, mode)
2478 enum machine_mode mode;
2483 if (TARGET_DEBUG_ADDR)
2485 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
/* PIC symbolic addresses get the full PIC treatment immediately.  */
2489 if (flag_pic && SYMBOLIC_CONST (x))
2490 return legitimize_pic_address (x, 0);
2492 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2493 if (GET_CODE (x) == ASHIFT
2494 && GET_CODE (XEXP (x, 1)) == CONST_INT
2495 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2498 x = gen_rtx (MULT, Pmode,
2499 force_reg (Pmode, XEXP (x, 0)),
2500 GEN_INT (1 << log));
2503 if (GET_CODE (x) == PLUS)
2505 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2506 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2507 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2508 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2511 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2512 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2513 GEN_INT (1 << log));
2516 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2517 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2518 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2521 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2522 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2523 GEN_INT (1 << log));
2526 /* Put multiply first if it isn't already */
2527 if (GET_CODE (XEXP (x, 1)) == MULT)
2529 rtx tmp = XEXP (x, 0);
2530 XEXP (x, 0) = XEXP (x, 1);
2535 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2536 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2537 created by virtual register instantiation, register elimination, and
2538 similar optimizations. */
2539 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2542 x = gen_rtx (PLUS, Pmode,
2543 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2544 XEXP (XEXP (x, 1), 1));
2547 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2548 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2549 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2550 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2551 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2552 && CONSTANT_P (XEXP (x, 1)))
2554 rtx constant, other;
/* Exactly one of the two constant positions must be a CONST_INT.  */
2556 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2558 constant = XEXP (x, 1);
2559 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2561 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2563 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2564 other = XEXP (x, 1);
2572 x = gen_rtx (PLUS, Pmode,
2573 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2574 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2575 plus_constant (other, INTVAL (constant)));
/* If the rewrites above produced a legitimate address, stop here.  */
2579 if (changed && legitimate_address_p (mode, x, FALSE))
/* Otherwise force any MULT subterm into a register.  */
2582 if (GET_CODE (XEXP (x, 0)) == MULT)
2585 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2588 if (GET_CODE (XEXP (x, 1)) == MULT)
2591 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2595 && GET_CODE (XEXP (x, 1)) == REG
2596 && GET_CODE (XEXP (x, 0)) == REG)
2599 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2602 x = legitimize_pic_address (x, 0);
2605 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: compute one side into a fresh register so the result
   is reg + reg (or reg + const), which is always encodable.  */
2608 if (GET_CODE (XEXP (x, 0)) == REG)
2610 register rtx temp = gen_reg_rtx (Pmode);
2611 register rtx val = force_operand (XEXP (x, 1), temp);
2613 emit_move_insn (temp, val);
2619 else if (GET_CODE (XEXP (x, 1)) == REG)
2621 register rtx temp = gen_reg_rtx (Pmode);
2622 register rtx val = force_operand (XEXP (x, 0), temp);
2624 emit_move_insn (temp, val);
2635 /* Print an integer constant expression in assembler syntax. Addition
2636 and subtraction are the only arithmetic that may appear in these
2637 expressions. FILE is the stdio stream to write to, X is the rtx, and
2638 CODE is the operand print code from the output string. */
2641 output_pic_addr_const (file, x, code)
2648 switch (GET_CODE (x))
2659 if (GET_CODE (x) == SYMBOL_REF)
2660 assemble_name (file, XSTR (x, 0));
2663 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2664 CODE_LABEL_NUMBER (XEXP (x, 0)));
2665 assemble_name (asm_out_file, buf);
2668 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2669 fprintf (file, "@GOTOFF(%%ebx)");
2670 else if (code == 'P')
2671 fprintf (file, "@PLT");
2672 else if (GET_CODE (x) == LABEL_REF)
2673 fprintf (file, "@GOTOFF");
2674 else if (! SYMBOL_REF_FLAG (x))
2675 fprintf (file, "@GOT");
2677 fprintf (file, "@GOTOFF");
2682 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2683 assemble_name (asm_out_file, buf);
2687 fprintf (file, "%d", INTVAL (x));
2691 /* This used to output parentheses around the expression,
2692 but that does not work on the 386 (either ATT or BSD assembler). */
2693 output_pic_addr_const (file, XEXP (x, 0), code);
2697 if (GET_MODE (x) == VOIDmode)
2699 /* We can use %d if the number is <32 bits and positive. */
2700 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2701 fprintf (file, "0x%x%08x",
2702 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2704 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2707 /* We can't handle floating point constants;
2708 PRINT_OPERAND must handle them. */
2709 output_operand_lossage ("floating constant misused");
2713 /* Some assemblers need integer constants to appear last (eg masm). */
2714 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2716 output_pic_addr_const (file, XEXP (x, 1), code);
2717 if (INTVAL (XEXP (x, 0)) >= 0)
2718 fprintf (file, "+");
2719 output_pic_addr_const (file, XEXP (x, 0), code);
2723 output_pic_addr_const (file, XEXP (x, 0), code);
2724 if (INTVAL (XEXP (x, 1)) >= 0)
2725 fprintf (file, "+");
2726 output_pic_addr_const (file, XEXP (x, 1), code);
2731 output_pic_addr_const (file, XEXP (x, 0), code);
2732 fprintf (file, "-");
2733 output_pic_addr_const (file, XEXP (x, 1), code);
2737 output_operand_lossage ("invalid expression as operand");
2741 /* Append the correct conditional move suffix which corresponds to CODE */
/* NOTE(review): this is an elided listing -- the function's return type,
   the declarations of CODE and FILE, the braces, and the switch/case
   labels that select each fputs below are missing from this excerpt.
   Each fputs emits one i386 condition suffix (e.g. "ge", "b") to FILE. */
2744 put_condition_code (code, mode, file)
2746 enum mode_class mode;
/* Integer comparisons: pick the suffix from the rtx comparison code. */
2749 if (mode == MODE_INT)
/* CC_Z_IN_NOT_C: the zero result is encoded in the complemented carry
   flag, so the equality suffix differs -- the affected case labels are
   elided here; TODO confirm which codes these guards belong to. */
2753 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2759 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2765 fputs ("ge", file); return;
2767 fputs ("g", file); return;
2769 fputs ("le", file); return;
2771 fputs ("l", file); return;
/* Unsigned comparisons use the above/below family of suffixes. */
2773 fputs ("ae", file); return;
2775 fputs ("a", file); return;
2777 fputs ("be", file); return;
2779 fputs ("b", file); return;
2780 default: output_operand_lossage ("Invalid %%C operand");
/* Floating point: the 387 flags only yield the above/below family,
   so signed FP codes map onto nb/nbe/be/b. */
2782 else if (mode == MODE_FLOAT)
2786 fputs ("ne", file); return;
2788 fputs ("e", file); return;
2790 fputs ("nb", file); return;
2792 fputs ("nbe", file); return;
2794 fputs ("be", file); return;
2796 fputs ("b", file); return;
2798 fputs ("nb", file); return;
2800 fputs ("nbe", file); return;
2802 fputs ("be", file); return;
2804 fputs ("b", file); return;
2805 default: output_operand_lossage ("Invalid %%C operand");
2810 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2811 C -- print opcode suffix for set/cmov insn.
2812 c -- like C, but print reversed condition
2813 F -- print opcode suffix for fcmov insn.
2814 f -- like F, but print reversed condition
2815 R -- print the prefix for register names.
2816 z -- print the opcode suffix for the size of the current operand.
2817 * -- print a star (in certain assembler syntax)
2818 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2819 c -- don't print special prefixes before constant operands.
2820 J -- print the appropriate jump operand.
2821 s -- print a shift double count, followed by the assembler's argument
2823 b -- print the QImode name of the register for the indicated operand.
2824 %b0 would print %al if operands[0] is reg 0.
2825 w -- likewise, print the HImode name of the register.
2826 k -- likewise, print the SImode name of the register.
2827 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2828 y -- print "st(0)" instead of "st" as a register.
2829 P -- print as a PIC constant
2833 print_operand (file, x, code)
2848 PUT_OP_SIZE (code, 'l', file);
2852 PUT_OP_SIZE (code, 'w', file);
2856 PUT_OP_SIZE (code, 'b', file);
2860 PUT_OP_SIZE (code, 'l', file);
2864 PUT_OP_SIZE (code, 's', file);
2868 PUT_OP_SIZE (code, 't', file);
2872 /* 387 opcodes don't get size suffixes if the operands are
2875 if (STACK_REG_P (x))
2878 /* this is the size of op from size of operand */
2879 switch (GET_MODE_SIZE (GET_MODE (x)))
2882 PUT_OP_SIZE ('B', 'b', file);
2886 PUT_OP_SIZE ('W', 'w', file);
2890 if (GET_MODE (x) == SFmode)
2892 PUT_OP_SIZE ('S', 's', file);
2896 PUT_OP_SIZE ('L', 'l', file);
2900 PUT_OP_SIZE ('T', 't', file);
2904 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
2906 #ifdef GAS_MNEMONICS
2907 PUT_OP_SIZE ('Q', 'q', file);
2910 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2914 PUT_OP_SIZE ('Q', 'l', file);
2927 switch (GET_CODE (x))
2929 /* These conditions are appropriate for testing the result
2930 of an arithmetic operation, not for a compare operation.
2931 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2932 CC_Z_IN_NOT_C false and not floating point. */
2933 case NE: fputs ("jne", file); return;
2934 case EQ: fputs ("je", file); return;
2935 case GE: fputs ("jns", file); return;
2936 case LT: fputs ("js", file); return;
2937 case GEU: fputs ("jmp", file); return;
2938 case GTU: fputs ("jne", file); return;
2939 case LEU: fputs ("je", file); return;
2940 case LTU: fputs ("#branch never", file); return;
2942 /* no matching branches for GT nor LE */
2947 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2949 PRINT_OPERAND (file, x, 0);
2950 fputs (AS2C (,) + 1, file);
2954 /* This is used by the conditional move instructions. */
2956 put_condition_code (GET_CODE (x), MODE_INT, file);
2959 /* like above, but reverse condition */
2961 put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
2965 put_condition_code (GET_CODE (x), MODE_FLOAT, file);
2968 /* like above, but reverse condition */
2970 put_condition_code (reverse_condition (GET_CODE (x)),
2978 sprintf (str, "invalid operand code `%c'", code);
2979 output_operand_lossage (str);
2983 if (GET_CODE (x) == REG)
2985 PRINT_REG (x, code, file);
2987 else if (GET_CODE (x) == MEM)
2989 PRINT_PTR (x, file);
2990 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2993 output_pic_addr_const (file, XEXP (x, 0), code);
2995 output_addr_const (file, XEXP (x, 0));
2998 output_address (XEXP (x, 0));
3000 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3002 REAL_VALUE_TYPE r; long l;
3003 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3004 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3005 PRINT_IMMED_PREFIX (file);
3006 fprintf (file, "0x%x", l);
3008 /* These float cases don't actually occur as immediate operands. */
3009 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3011 REAL_VALUE_TYPE r; char dstr[30];
3012 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3013 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3014 fprintf (file, "%s", dstr);
3016 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3018 REAL_VALUE_TYPE r; char dstr[30];
3019 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3020 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3021 fprintf (file, "%s", dstr);
3027 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3028 PRINT_IMMED_PREFIX (file);
3029 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3030 || GET_CODE (x) == LABEL_REF)
3031 PRINT_OFFSET_PREFIX (file);
3034 output_pic_addr_const (file, x, code);
3036 output_addr_const (file, x);
3040 /* Print a memory operand whose address is ADDR. */
3043 print_operand_address (file, addr)
3047 register rtx reg1, reg2, breg, ireg;
3050 switch (GET_CODE (addr))
3054 fprintf (file, "%se", RP);
3055 fputs (hi_reg_name[REGNO (addr)], file);
3065 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3067 offset = XEXP (addr, 0);
3068 addr = XEXP (addr, 1);
3070 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3072 offset = XEXP (addr, 1);
3073 addr = XEXP (addr, 0);
3075 if (GET_CODE (addr) != PLUS) ;
3076 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3078 reg1 = XEXP (addr, 0);
3079 addr = XEXP (addr, 1);
3081 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3083 reg1 = XEXP (addr, 1);
3084 addr = XEXP (addr, 0);
3086 else if (GET_CODE (XEXP (addr, 0)) == REG)
3088 reg1 = XEXP (addr, 0);
3089 addr = XEXP (addr, 1);
3091 else if (GET_CODE (XEXP (addr, 1)) == REG)
3093 reg1 = XEXP (addr, 1);
3094 addr = XEXP (addr, 0);
3096 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3098 if (reg1 == 0) reg1 = addr;
3104 if (addr != 0) abort ();
3107 if ((reg1 && GET_CODE (reg1) == MULT)
3108 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3113 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3119 if (ireg != 0 || breg != 0)
3126 output_pic_addr_const (file, addr, 0);
3128 else if (GET_CODE (addr) == LABEL_REF)
3129 output_asm_label (addr);
3132 output_addr_const (file, addr);
3135 if (ireg != 0 && GET_CODE (ireg) == MULT)
3137 scale = INTVAL (XEXP (ireg, 1));
3138 ireg = XEXP (ireg, 0);
3141 /* The stack pointer can only appear as a base register,
3142 never an index register, so exchange the regs if it is wrong. */
3144 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3153 /* output breg+ireg*scale */
3154 PRINT_B_I_S (breg, ireg, scale, file);
3161 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3163 scale = INTVAL (XEXP (addr, 0));
3164 ireg = XEXP (addr, 1);
3168 scale = INTVAL (XEXP (addr, 1));
3169 ireg = XEXP (addr, 0);
3171 output_addr_const (file, const0_rtx);
3172 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
3177 if (GET_CODE (addr) == CONST_INT
3178 && INTVAL (addr) < 0x8000
3179 && INTVAL (addr) >= -0x8000)
3180 fprintf (file, "%d", INTVAL (addr));
3184 output_pic_addr_const (file, addr, 0);
3186 output_addr_const (file, addr);
3191 /* Set the cc_status for the results of an insn whose pattern is EXP.
3192 On the 80386, we assume that only test and compare insns, as well
3193 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3194 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3195 Also, we assume that jumps, moves and sCOND don't affect the condition
3196 codes. All else clobbers the condition codes, by assumption.
3198 We assume that ALL integer add, minus, etc. instructions affect the
3199 condition codes. This MUST be consistent with i386.md.
3201 We don't record any float test or compare - the redundant test &
3202 compare check in final.c does not handle stack-like regs correctly. */
3205 notice_update_cc (exp)
3208 if (GET_CODE (exp) == SET)
3210 /* Jumps do not alter the cc's. */
3211 if (SET_DEST (exp) == pc_rtx)
3213 #ifdef IS_STACK_MODE
3214 /* Moving into a memory of stack_mode may have been moved
3215 in between the use and set of cc0 by loop_spl(). So
3216 old value of cc.status must be retained */
3217 if(GET_CODE(SET_DEST(exp))==MEM
3218 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3223 /* Moving register or memory into a register:
3224 it doesn't alter the cc's, but it might invalidate
3225 the RTX's which we remember the cc's came from.
3226 (Note that moving a constant 0 or 1 MAY set the cc's). */
3227 if (REG_P (SET_DEST (exp))
3228 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3229 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3231 if (cc_status.value1
3232 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3233 cc_status.value1 = 0;
3234 if (cc_status.value2
3235 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3236 cc_status.value2 = 0;
3239 /* Moving register into memory doesn't alter the cc's.
3240 It may invalidate the RTX's which we remember the cc's came from. */
3241 if (GET_CODE (SET_DEST (exp)) == MEM
3242 && (REG_P (SET_SRC (exp))
3243 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3245 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3246 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3247 cc_status.value1 = 0;
3248 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3249 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3250 cc_status.value2 = 0;
3253 /* Function calls clobber the cc's. */
3254 else if (GET_CODE (SET_SRC (exp)) == CALL)
3259 /* Tests and compares set the cc's in predictable ways. */
3260 else if (SET_DEST (exp) == cc0_rtx)
3263 cc_status.value1 = SET_SRC (exp);
3266 /* Certain instructions effect the condition codes. */
3267 else if (GET_MODE (SET_SRC (exp)) == SImode
3268 || GET_MODE (SET_SRC (exp)) == HImode
3269 || GET_MODE (SET_SRC (exp)) == QImode)
3270 switch (GET_CODE (SET_SRC (exp)))
3272 case ASHIFTRT: case LSHIFTRT:
3274 /* Shifts on the 386 don't set the condition codes if the
3275 shift count is zero. */
3276 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3281 /* We assume that the CONST_INT is non-zero (this rtx would
3282 have been deleted if it were zero. */
3284 case PLUS: case MINUS: case NEG:
3285 case AND: case IOR: case XOR:
3286 cc_status.flags = CC_NO_OVERFLOW;
3287 cc_status.value1 = SET_SRC (exp);
3288 cc_status.value2 = SET_DEST (exp);
3299 else if (GET_CODE (exp) == PARALLEL
3300 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3302 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3304 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3307 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3308 cc_status.flags |= CC_IN_80387;
3310 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3321 /* Split one or more DImode RTL references into pairs of SImode
3322 references. The RTL can be REG, offsettable MEM, integer constant, or
3323 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3324 split and "num" is its length. lo_half and hi_half are output arrays
3325 that parallel "operands". */
/* NOTE(review): elided listing -- the return type, the declarations of
   OPERANDS and NUM, the braces, the loop over NUM, and the trailing
   abort() for unhandled operands are missing from this excerpt. */
3328 split_di (operands, num, lo_half, hi_half)
3331 rtx lo_half[], hi_half[];
/* A DImode register splits into the pair (REGNO, REGNO + 1). */
3335 if (GET_CODE (operands[num]) == REG)
3337 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3338 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
/* Constants are split by the generic split_double helper. */
3340 else if (CONSTANT_P (operands[num]))
3342 split_double (operands[num], &lo_half[num], &hi_half[num]);
/* An offsettable MEM: the MEM itself is the low word and the same
   address + 4 is the high word (x86 is little-endian). */
3344 else if (offsettable_memref_p (operands[num]))
3346 lo_half[num] = operands[num];
3347 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3354 /* Return 1 if this is a valid binary operation on a 387.
3355 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): elided listing -- the return type, OP's declaration,
   the braces, the `return 0' for a mode mismatch, and the case labels
   of the switch (presumably PLUS/MINUS/MULT/DIV -- TODO confirm) are
   missing from this excerpt. */
3358 binary_387_op (op, mode)
3360 enum machine_mode mode;
/* Reject a mode mismatch; VOIDmode acts as a wildcard. */
3362 if (mode != VOIDmode && mode != GET_MODE (op))
3365 switch (GET_CODE (op))
/* Only operations producing a floating-point mode are 387 ops. */
3371 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3379 /* Return 1 if this is a valid shift or rotate operation on a 386.
3380 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): elided listing -- the line carrying this predicate's
   name, OP's declaration, the braces, and the `return 0' statements
   after the two guards are missing from this excerpt. */
3385 enum machine_mode mode;
3387 rtx operand = XEXP (op, 0);
/* Reject a mode mismatch; VOIDmode acts as a wildcard. */
3389 if (mode != VOIDmode && mode != GET_MODE (op))
/* The shifted operand must share OP's mode, and OP must be integer. */
3392 if (GET_MODE (operand) != GET_MODE (op)
3393 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
/* Accept every shift and rotate rtx code the 386 can encode. */
3396 return (GET_CODE (op) == ASHIFT
3397 || GET_CODE (op) == ASHIFTRT
3398 || GET_CODE (op) == LSHIFTRT
3399 || GET_CODE (op) == ROTATE
3400 || GET_CODE (op) == ROTATERT);
3403 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3404 MODE is not used. */
/* NOTE(review): elided listing -- the return type, OP's declaration
   and the braces are missing from this excerpt. */
3407 VOIDmode_compare_op (op, mode)
3409 enum machine_mode mode;
3411 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3414 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3415 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3416 is the expression of the binary operation. The output may either be
3417 emitted here, or returned to the caller, like all output_* functions.
3419 There is no guarantee that the operands are the same mode, as they
3420 might be within FLOAT or FLOAT_EXTEND expressions. */
3423 output_387_binary_op (insn, operands)
3429 static char buf[100];
3431 switch (GET_CODE (operands[3]))
3434 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3435 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3442 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3443 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3450 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3451 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3458 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3459 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3469 strcpy (buf, base_op);
3471 switch (GET_CODE (operands[3]))
3475 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3478 operands[2] = operands[1];
3482 if (GET_CODE (operands[2]) == MEM)
3483 return strcat (buf, AS1 (%z2,%2));
3485 if (NON_STACK_REG_P (operands[1]))
3487 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3490 else if (NON_STACK_REG_P (operands[2]))
3492 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3496 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3497 return strcat (buf, AS2 (p,%2,%0));
3499 if (STACK_TOP_P (operands[0]))
3500 return strcat (buf, AS2C (%y2,%0));
3502 return strcat (buf, AS2C (%2,%0));
3506 if (GET_CODE (operands[1]) == MEM)
3507 return strcat (buf, AS1 (r%z1,%1));
3509 if (GET_CODE (operands[2]) == MEM)
3510 return strcat (buf, AS1 (%z2,%2));
3512 if (NON_STACK_REG_P (operands[1]))
3514 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3517 else if (NON_STACK_REG_P (operands[2]))
3519 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3523 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3526 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3527 return strcat (buf, AS2 (rp,%2,%0));
3529 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3530 return strcat (buf, AS2 (p,%1,%0));
3532 if (STACK_TOP_P (operands[0]))
3534 if (STACK_TOP_P (operands[1]))
3535 return strcat (buf, AS2C (%y2,%0));
3537 return strcat (buf, AS2 (r,%y1,%0));
3539 else if (STACK_TOP_P (operands[1]))
3540 return strcat (buf, AS2C (%1,%0));
3542 return strcat (buf, AS2 (r,%2,%0));
3549 /* Output code for INSN to convert a float to a signed int. OPERANDS
3550 are the insn operands. The input operand may be SFmode or DFmode and
3551 the output may be SImode or DImode. As a special case, make sure
3552 that the 387 stack top dies if the output mode is DImode, because the
3553 hardware requires this. */
/* NOTE(review): the header comment above originally had "input" and
   "output" swapped; the code below reads the float from the 387 stack
   top (operands[1]) and stores the integer through operands[0].
   Also, this is an elided listing -- the return type, operand
   declarations, braces and abort/else lines are missing here. */
3556 output_fix_trunc (insn, operands)
/* Nonzero if the value on top of the 387 stack dies in this insn. */
3560 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
/* The float input must be on the 387 stack top, and a DImode result
   requires the popping fistp, i.e. the stack top must die. */
3563 if (! STACK_TOP_P (operands[1]) ||
3564 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
/* Switch the 387 to truncating ("chop") rounding: fnstcw saves the
   control word, 12 (0x0C) is written into its high byte to set the
   rounding-control bits, and fldcw loads the modified word. */
3567 xops[0] = GEN_INT (12);
3568 xops[1] = operands[4];
3570 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3571 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3572 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3573 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3574 output_asm_insn (AS1 (fldc%W3,%3), operands);
/* Emit the integer store: through a CPU register, or directly to
   memory with fistp (pops the 387 stack) / fist (leaves it). */
3576 if (NON_STACK_REG_P (operands[0]))
3577 output_to_reg (operands[0], stack_top_dies);
3578 else if (GET_CODE (operands[0]) == MEM)
3581 output_asm_insn (AS1 (fistp%z0,%0), operands);
3583 output_asm_insn (AS1 (fist%z0,%0), operands);
/* Restore the caller's saved control word. */
3588 return AS1 (fldc%W2,%2);
3591 /* Output code for INSN to compare OPERANDS. The two operands might
3592 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3593 expression. If the compare is in mode CCFPEQmode, use an opcode that
3594 will not fault if a qNaN is present. */
3597 output_float_compare (insn, operands)
3602 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3603 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3606 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3608 cc_status.flags |= CC_FCOMI;
3609 cc_prev_status.flags &= ~CC_TEST_AX;
3612 if (! STACK_TOP_P (operands[0]))
3615 operands[0] = operands[1];
3617 cc_status.flags |= CC_REVERSED;
3620 if (! STACK_TOP_P (operands[0]))
3623 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3625 if (STACK_REG_P (operands[1])
3627 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3628 && REGNO (operands[1]) != FIRST_STACK_REG)
3630 /* If both the top of the 387 stack dies, and the other operand
3631 is also a stack register that dies, then this must be a
3632 `fcompp' float compare */
3634 if (unordered_compare)
3635 output_asm_insn ("fucompp", operands);
3637 output_asm_insn ("fcompp", operands);
3641 static char buf[100];
3643 /* Decide if this is the integer or float compare opcode, or the
3644 unordered float compare. */
3646 if (unordered_compare)
3647 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
3648 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3649 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
3651 strcpy (buf, "ficom");
3653 /* Modify the opcode if the 387 stack is to be popped. */
3658 if (NON_STACK_REG_P (operands[1]))
3659 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3660 else if (cc_status.flags & CC_FCOMI)
3664 xops[0] = operands[0];
3665 xops[1] = operands[1];
3666 xops[2] = operands[0];
3668 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%2)), xops);
3672 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3675 /* Now retrieve the condition code. */
3677 return output_fp_cc0_set (insn);
3680 /* Output opcodes to transfer the results of FP compare or test INSN
3681 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3682 result of the compare or test is unordered, no comparison operator
3683 succeeds except NE. Return an output template, if any. */
3686 output_fp_cc0_set (insn)
3690 rtx unordered_label;
3694 xops[0] = gen_rtx (REG, HImode, 0);
3695 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3697 if (! TARGET_IEEE_FP)
3699 if (!(cc_status.flags & CC_REVERSED))
3701 next = next_cc0_user (insn);
3703 if (GET_CODE (next) == JUMP_INSN
3704 && GET_CODE (PATTERN (next)) == SET
3705 && SET_DEST (PATTERN (next)) == pc_rtx
3706 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3708 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3710 else if (GET_CODE (PATTERN (next)) == SET)
3712 code = GET_CODE (SET_SRC (PATTERN (next)));
3718 if (code == GT || code == LT || code == EQ || code == NE
3719 || code == LE || code == GE)
3720 { /* We will test eax directly */
3721 cc_status.flags |= CC_TEST_AX;
3728 next = next_cc0_user (insn);
3729 if (next == NULL_RTX)
3732 if (GET_CODE (next) == JUMP_INSN
3733 && GET_CODE (PATTERN (next)) == SET
3734 && SET_DEST (PATTERN (next)) == pc_rtx
3735 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3737 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3739 else if (GET_CODE (PATTERN (next)) == SET)
3741 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3742 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3743 else code = GET_CODE (SET_SRC (PATTERN (next)));
3745 else if (GET_CODE (PATTERN (next)) == PARALLEL
3746 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
3748 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
3749 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
3750 else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
3755 xops[0] = gen_rtx (REG, QImode, 0);
3760 xops[1] = GEN_INT (0x45);
3761 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3766 xops[1] = GEN_INT (0x45);
3767 xops[2] = GEN_INT (0x01);
3768 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3769 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3774 xops[1] = GEN_INT (0x05);
3775 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3780 xops[1] = GEN_INT (0x45);
3781 xops[2] = GEN_INT (0x40);
3782 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3783 output_asm_insn (AS1 (dec%B0,%h0), xops);
3784 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3789 xops[1] = GEN_INT (0x45);
3790 xops[2] = GEN_INT (0x40);
3791 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3792 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3797 xops[1] = GEN_INT (0x44);
3798 xops[2] = GEN_INT (0x40);
3799 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3800 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function cache of stack slots, indexed by machine mode and slot
   number; used by assign_386_stack_local below. */
3814 #define MAX_386_STACK_LOCALS 2
3816 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3818 /* Define the structure for the machine field in struct function. */
/* NOTE(review): elided listing -- the struct's braces and the function
   headers' return types and parameter declarations are missing here. */
3819 struct machine_function
3821 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3824 /* Functions to save and restore i386_stack_locals.
3825 These will be called, via pointer variables,
3826 from push_function_context and pop_function_context. */
/* Save: allocate a machine_function record on P and copy the global
   slot cache into it. */
3829 save_386_machine_status (p)
3832 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3833 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3834 sizeof i386_stack_locals);
/* Restore: copy the saved slot cache back into the global array. */
3838 restore_386_machine_status (p)
3841 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3842 sizeof i386_stack_locals);
3846 /* Clear stack slot assignments remembered from previous functions.
3847 This is called from INIT_EXPANDERS once before RTL is emitted for each
/* NOTE(review): elided listing -- the rest of the sentence above, the
   return type, the braces and the declaration of N are missing here. */
3851 clear_386_stack_locals ()
3853 enum machine_mode mode;
/* Walk every (mode, slot) pair and drop the cached slot rtx. */
3856 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3857 mode = (enum machine_mode) ((int) mode + 1))
3858 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3859 i386_stack_locals[(int) mode][n] = NULL_RTX;
3861 /* Arrange to save and restore i386_stack_locals around nested functions. */
3862 save_machine_status = save_386_machine_status;
3863 restore_machine_status = restore_386_machine_status;
3866 /* Return a MEM corresponding to a stack slot with mode MODE.
3867 Allocate a new slot if necessary.
3869 The RTL for a function can have several slots available: N is
3870 which slot to use. */
/* NOTE(review): elided listing -- the return type, N's declaration,
   the braces and the abort() on a bad N are missing from this
   excerpt. */
3873 assign_386_stack_local (mode, n)
3874 enum machine_mode mode;
/* Reject out-of-range slot numbers (the elided line presumably
   aborts -- TODO confirm). */
3877 if (n < 0 || n >= MAX_386_STACK_LOCALS)
/* Lazily allocate the slot on first use, then return the cached rtx. */
3880 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3881 i386_stack_locals[(int) mode][n]
3882 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3884 return i386_stack_locals[(int) mode][n];
/* NOTE(review): two predicate fragments whose name lines and braces
   were elided from this listing.  The first returns nonzero when OP
   is a MULT rtx, the second when OP is a DIV rtx; MODE appears
   unused in both, matching the other predicates in this file. */
3890 enum machine_mode mode;
3892 return (GET_CODE (op) == MULT);
3897 enum machine_mode mode;
3899 return (GET_CODE (op) == DIV);
3904 /* Create a new copy of an rtx.
3905 Recursively copies the operands of the rtx,
3906 except for those few rtx codes that are sharable.
3907 Doesn't share CONST */
3915 register RTX_CODE code;
3916 register char *format_ptr;
3918 code = GET_CODE (orig);
3931 /* SCRATCH must be shared because they represent distinct values. */
3936 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3937 a LABEL_REF, it isn't sharable. */
3938 if (GET_CODE (XEXP (orig, 0)) == PLUS
3939 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3940 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3944 /* A MEM with a constant address is not sharable. The problem is that
3945 the constant address may need to be reloaded. If the mem is shared,
3946 then reloading one copy of this mem will cause all copies to appear
3947 to have been reloaded. */
3950 copy = rtx_alloc (code);
3951 PUT_MODE (copy, GET_MODE (orig));
3952 copy->in_struct = orig->in_struct;
3953 copy->volatil = orig->volatil;
3954 copy->unchanging = orig->unchanging;
3955 copy->integrated = orig->integrated;
3957 copy->is_spill_rtx = orig->is_spill_rtx;
3959 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3961 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3963 switch (*format_ptr++)
3966 XEXP (copy, i) = XEXP (orig, i);
3967 if (XEXP (orig, i) != NULL)
3968 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3973 XEXP (copy, i) = XEXP (orig, i);
3978 XVEC (copy, i) = XVEC (orig, i);
3979 if (XVEC (orig, i) != NULL)
3981 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3982 for (j = 0; j < XVECLEN (copy, i); j++)
3983 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
3988 XWINT (copy, i) = XWINT (orig, i);
3992 XINT (copy, i) = XINT (orig, i);
3997 XSTR (copy, i) = XSTR (orig, i);
4008 /* try to rewrite a memory address to make it valid */
4010 rewrite_address (mem_rtx)
4013 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4015 int offset_adjust = 0;
4016 int was_only_offset = 0;
4017 rtx mem_addr = XEXP (mem_rtx, 0);
4018 char *storage = (char *) oballoc (0);
4020 int is_spill_rtx = 0;
4022 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4023 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
4025 if (GET_CODE (mem_addr) == PLUS &&
4026 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
4027 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4028 { /* this part is utilized by the combiner */
4030 gen_rtx (PLUS, GET_MODE (mem_addr),
4031 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4033 XEXP (XEXP (mem_addr, 1), 0)),
4034 XEXP (XEXP (mem_addr, 1), 1));
4035 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4037 XEXP (mem_rtx, 0) = ret_rtx;
4038 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4044 /* this part is utilized by loop.c */
4045 /* If the address contains PLUS (reg,const) and this pattern is invalid
4046 in this case - try to rewrite the address to make it valid intel1
4048 storage = (char *) oballoc (0);
4049 index_rtx = base_rtx = offset_rtx = NULL;
4050 /* find the base index and offset elements of the memory address */
4051 if (GET_CODE (mem_addr) == PLUS)
4053 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4055 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4057 base_rtx = XEXP (mem_addr, 1);
4058 index_rtx = XEXP (mem_addr, 0);
4062 base_rtx = XEXP (mem_addr, 0);
4063 offset_rtx = XEXP (mem_addr, 1);
4066 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4068 index_rtx = XEXP (mem_addr, 0);
4069 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4071 base_rtx = XEXP (mem_addr, 1);
4075 offset_rtx = XEXP (mem_addr, 1);
4078 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4081 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4082 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4083 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4084 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4085 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4086 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4087 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4089 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4090 offset_rtx = XEXP (mem_addr, 1);
4091 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4092 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4096 offset_rtx = XEXP (mem_addr, 1);
4097 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4098 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4101 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4103 was_only_offset = 1;
4106 offset_rtx = XEXP (mem_addr, 1);
4107 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4108 if (offset_adjust == 0)
4110 XEXP (mem_rtx, 0) = offset_rtx;
4111 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4121 else if (GET_CODE (mem_addr) == MULT)
4123 index_rtx = mem_addr;
4130 if (index_rtx && GET_CODE (index_rtx) == MULT)
4132 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4137 scale_rtx = XEXP (index_rtx, 1);
4138 scale = INTVAL (scale_rtx);
4139 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4141 /* now find which of the elements are invalid and try to fix them */
4142 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4144 offset_adjust = INTVAL (index_rtx) * scale;
4145 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4146 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4148 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4149 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4151 offset_rtx = copy_all_rtx (offset_rtx);
4152 XEXP (XEXP (offset_rtx, 0), 1) =
4153 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4154 if (!CONSTANT_P (offset_rtx))
4161 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4164 gen_rtx (CONST, GET_MODE (offset_rtx),
4165 gen_rtx (PLUS, GET_MODE (offset_rtx),
4167 gen_rtx (CONST_INT, 0, offset_adjust)));
4168 if (!CONSTANT_P (offset_rtx))
4174 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4176 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4178 else if (!offset_rtx)
4180 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4182 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4183 XEXP (mem_rtx, 0) = offset_rtx;
4186 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4187 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4188 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4190 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4191 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4193 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4195 offset_adjust += INTVAL (base_rtx);
4198 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4199 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4200 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4202 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4203 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4207 if (!LEGITIMATE_INDEX_P (index_rtx)
4208 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4216 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4222 if (offset_adjust != 0)
4226 if (GET_CODE (offset_rtx) == CONST &&
4227 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4229 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4230 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4232 offset_rtx = copy_all_rtx (offset_rtx);
4233 XEXP (XEXP (offset_rtx, 0), 1) =
4234 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4235 if (!CONSTANT_P (offset_rtx))
4242 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4245 gen_rtx (CONST, GET_MODE (offset_rtx),
4246 gen_rtx (PLUS, GET_MODE (offset_rtx),
4248 gen_rtx (CONST_INT, 0, offset_adjust)));
4249 if (!CONSTANT_P (offset_rtx))
4255 else if (GET_CODE (offset_rtx) == CONST_INT)
4257 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4267 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
4275 if (GET_CODE (offset_rtx) == CONST_INT &&
4276 INTVAL (offset_rtx) == 0)
4278 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4279 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4285 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4286 gen_rtx (PLUS, GET_MODE (base_rtx),
4287 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4295 if (GET_CODE (offset_rtx) == CONST_INT &&
4296 INTVAL (offset_rtx) == 0)
4298 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4302 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4303 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4313 if (GET_CODE (offset_rtx) == CONST_INT &&
4314 INTVAL (offset_rtx) == 0)
4316 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4321 gen_rtx (PLUS, GET_MODE (offset_rtx),
4322 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4329 if (GET_CODE (offset_rtx) == CONST_INT &&
4330 INTVAL (offset_rtx) == 0)
4332 ret_rtx = index_rtx;
4336 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4345 if (GET_CODE (offset_rtx) == CONST_INT &&
4346 INTVAL (offset_rtx) == 0)
4352 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4355 else if (was_only_offset)
4357 ret_rtx = offset_rtx;
4365 XEXP (mem_rtx, 0) = ret_rtx;
4366 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4378 /* return 1 if the first insn to set cc before insn also sets the register
4379 reg_rtx - otherwise return 0 */
/* NOTE(review): this numbered listing is sampled -- the scan loop's braces,
   the K&R parameter declarations and the return statements are elided here.
   The visible logic walks backwards from INSN via PREV_INSN, skipping NOTEs,
   and examines the first real INSN whose pattern is a single SET.  */
4381 last_to_set_cc (reg_rtx, insn)
4384 rtx prev_insn = PREV_INSN (insn);
/* NOTEs carry no machine semantics; they are skipped (branch body elided).  */
4388 if (GET_CODE (prev_insn) == NOTE)
4391 else if (GET_CODE (prev_insn) == INSN)
/* Patterns other than a plain SET are not analyzed (action elided).  */
4393 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* The scanned insn writes REG_RTX itself: the answer is whether its
   source expression also sets the condition code.  */
4396 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4398 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* A different destination: if this source may clobber cc, then the
   first cc-setter is not the insn setting REG_RTX (return elided).  */
4404 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
/* Step further back and keep scanning.  */
4411 prev_insn = PREV_INSN (prev_insn);
/* Classifier over an rtx expression code, used by last_to_set_cc above.
   NOTE(review): the switch cases and return statements are elided in this
   sampled listing -- presumably returns nonzero for rtx codes that leave
   the i386 condition code untouched (the name says so); confirm against
   the full source.  */
4419 doesnt_set_condition_code (pat)
4422 switch (GET_CODE (pat))
/* Companion classifier to doesnt_set_condition_code, also used by
   last_to_set_cc.  NOTE(review): switch cases and returns are elided in
   this sampled listing -- presumably returns nonzero for rtx codes that
   are known to set the condition code; confirm against the full source.  */
4436 sets_condition_code (pat)
4439 switch (GET_CODE (pat))
/* Operand predicate (standard OP/MODE signature; MODE is unused in the
   visible lines).  NOTE(review): the return statements are elided in this
   sampled listing; the visible test accepts a CONST_INT in the inclusive
   range [0, 32].  */
4463 str_immediate_operand (op, mode)
4465 enum machine_mode mode;
4467 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* NOTE(review): the function header is elided in this sampled listing --
   this condition tests whether INSN is a single SET whose destination has
   a floating mode (DFmode, SFmode or XFmode); presumably the body of an
   "is this a floating-point insn" predicate.  Confirm the enclosing
   function name against the full source.  */
4479 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4480 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4481 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4482 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4491 Return 1 if the mode of the SET_DEST of insn is floating point
4492 and it is not an fld or a move from memory to memory.
4493 Otherwise return 0 */
/* NOTE(review): the function header and return statements are elided in
   this sampled listing.  Visible test: a single SET with a floating-mode
   destination that is a hard FP-stack register.  */
4498 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4499 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4500 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4501 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4502 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4503 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* BUG(review): every other accessor in this condition goes through
   PATTERN (insn), but the next line applies SET_SRC directly to INSN.
   It almost certainly should read SET_SRC (PATTERN (insn)); as written
   it inspects the wrong rtx field.  Cannot be patched here because the
   surrounding function body is elided from this listing.  */
4504 && GET_CODE (SET_SRC (insn)) != MEM)
4513 Return 1 if the mode of the SET_DEST is floating point, the SET_DEST
4514 is memory, and the source is a register.
/* NOTE(review): function header and returns elided in this sampled
   listing.  Visible test: a single SET storing an FP register (DF/SF/XF
   mode) into memory -- i.e. an FP store.  Here SET_SRC correctly goes
   through PATTERN (insn), unlike the sibling predicate above.  */
4520 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4521 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4522 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4523 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4524 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4525 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4535 Return 1 if dep_insn sets a register which insn uses as a base
4536 or index to reference memory.
4537 otherwise return 0 */
/* NOTE(review): "agi" presumably stands for address generation interlock
   (a Pentium pipeline stall) -- the name suggests it; confirm against the
   scheduler hooks that call this.  Braces and the final return are elided
   in this sampled listing.  */
4540 agi_dependent (insn, dep_insn)
/* Case 1: DEP_INSN writes a register -- check whether INSN uses that
   register inside a memory address.  */
4543 if (GET_CODE (dep_insn) == INSN
4544 && GET_CODE (PATTERN (dep_insn)) == SET
4545 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4547 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* Case 2: DEP_INSN is a push -- it implicitly modifies the stack
   pointer, so check whether INSN addresses memory through %esp.  */
4550 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4551 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4552 && push_operand (SET_DEST (PATTERN (dep_insn)),
4553 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4555 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4563 Return 1 if reg is used in rtl as a base or index for a memory ref
4564 otherwise return 0. */
/* NOTE(review): declarations of i, j, fmt, the early-out cases for leaf
   codes, and the fmt[i] == 'E' test guarding the vector loop are elided
   in this sampled listing.  The visible structure is the standard
   recursive rtx walk: on reaching a MEM that mentions REG, succeed;
   otherwise recurse into every sub-expression.  */
4567 reg_mentioned_in_mem (reg, rtl)
4572 register enum rtx_code code;
4577 code = GET_CODE (rtl);
/* Found a memory reference containing REG -- REG participates in the
   address (return elided).  */
4595 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Generic walk over this code's operand format string.  */
4598 fmt = GET_RTX_FORMAT (code);
4599 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Vector operand ('E', test elided): recurse into each element.  */
4604 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4606 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
/* Expression operand ('e'): recurse into the sub-expression.  */
4611 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4618 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4620 operands[0] = result, initialized with the startaddress
4621 operands[1] = alignment of the address.
4622 operands[2] = scratch register, initialized with the startaddress when
4623 not aligned, otherwise undefined
4625 This is just the body. It needs the initialisations mentioned above and
4626 some address computing at the end. These things are done in i386.md. */
4629 output_strlen_unroll (operands)
4634 xops[0] = operands[0]; /* Result */
4635 /* operands[1]; * Alignment */
4636 xops[1] = operands[2]; /* Scratch */
4637 xops[2] = GEN_INT (0);
4638 xops[3] = GEN_INT (2);
4639 xops[4] = GEN_INT (3);
4640 xops[5] = GEN_INT (4);
4641 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4642 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4643 xops[8] = gen_label_rtx (); /* label of main loop */
4644 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4645 xops[9] = gen_label_rtx (); /* pentium optimisation */
4646 xops[10] = gen_label_rtx (); /* end label 2 */
4647 xops[11] = gen_label_rtx (); /* end label 1 */
4648 xops[12] = gen_label_rtx (); /* end label */
4649 /* xops[13] * Temporary used */
4650 xops[14] = GEN_INT (0xff);
4651 xops[15] = GEN_INT (0xff00);
4652 xops[16] = GEN_INT (0xff0000);
4653 xops[17] = GEN_INT (0xff000000);
4655 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4657 /* is there a known alignment and is it less then 4 */
4658 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4660 /* is there a known alignment and is it not 2 */
4661 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4663 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4664 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4666 /* leave just the 3 lower bits */
4667 /* if this is a q-register, then the high part is used later */
4668 /* therefore use andl rather than andb */
4669 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4670 /* is aligned to 4-byte address when zero */
4671 output_asm_insn (AS1 (je,%l8), xops);
4672 /* side-effect even Parity when %eax == 3 */
4673 output_asm_insn (AS1 (jp,%6), xops);
4675 /* is it aligned to 2 bytes ? */
4676 if (QI_REG_P (xops[1]))
4677 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4679 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4680 output_asm_insn (AS1 (je,%7), xops);
4684 /* since the alignment is 2, we have to check 2 or 0 bytes */
4686 /* check if is aligned to 4 - byte */
4687 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4688 /* is aligned to 4-byte address when zero */
4689 output_asm_insn (AS1 (je,%l8), xops);
4692 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4693 /* now, compare the bytes */
4694 /* compare with the high part of a q-reg gives shorter code */
4695 if (QI_REG_P (xops[1]))
4697 /* compare the first n unaligned byte on a byte per byte basis */
4698 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4699 /* when zero we reached the end */
4700 output_asm_insn (AS1 (je,%l12), xops);
4701 /* increment the address */
4702 output_asm_insn (AS1 (inc%L0,%0), xops);
4704 /* not needed with an alignment of 2 */
4705 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4707 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4708 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4709 output_asm_insn (AS1 (je,%l12), xops);
4710 output_asm_insn (AS1 (inc%L0,%0), xops);
4712 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4714 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4718 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4719 output_asm_insn (AS1 (je,%l12), xops);
4720 output_asm_insn (AS1 (inc%L0,%0), xops);
4722 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4723 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4724 output_asm_insn (AS1 (je,%l12), xops);
4725 output_asm_insn (AS1 (inc%L0,%0), xops);
4727 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4728 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4730 output_asm_insn (AS1 (je,%l12), xops);
4731 output_asm_insn (AS1 (inc%L0,%0), xops);
4734 /* Generate loop to check 4 bytes at a time */
4735 /* IMHO it is not a good idea to align this loop. It gives only */
4736 /* huge programs, but does not help to speed up */
4737 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4738 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
4740 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4741 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4743 if (QI_REG_P (xops[1]))
4745 /* On i586 it is faster to combine the hi- and lo- part as
4746 a kind of lookahead. If anding both yields zero, then one
4747 of both *could* be zero, otherwise none of both is zero;
4748 this saves one instruction, on i486 this is slower
4749 tested with P-90, i486DX2-66, AMD486DX2-66 */
4752 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4753 output_asm_insn (AS1 (jne,%l9), xops);
4756 /* check first byte */
4757 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4758 output_asm_insn (AS1 (je,%l12), xops);
4760 /* check second byte */
4761 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4762 output_asm_insn (AS1 (je,%l11), xops);
4765 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4769 /* check first byte */
4770 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4771 output_asm_insn (AS1 (je,%l12), xops);
4773 /* check second byte */
4774 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4775 output_asm_insn (AS1 (je,%l11), xops);
4778 /* check third byte */
4779 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4780 output_asm_insn (AS1 (je,%l10), xops);
4782 /* check fourth byte and increment address */
4783 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4784 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4785 output_asm_insn (AS1 (jne,%l8), xops);
4787 /* now generate fixups when the compare stops within a 4-byte word */
4788 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4790 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4791 output_asm_insn (AS1 (inc%L0,%0), xops);
4793 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4794 output_asm_insn (AS1 (inc%L0,%0), xops);
4796 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));