1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
/* Compile-time guard: the i386 backend reserves constraint letter 'S'
   for class SIREG, so a target defining EXTRA_CONSTRAINT would clash.
   NOTE(review): the closing #endif lines for both conditionals fall in
   a gap of this sampled file view.  */
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
/* -1 here presumably means "no stack-limit checking" unless the target
   configuration defines CHECK_STACK_LIMIT itself -- confirm in i386.h.  */
48 #ifndef CHECK_STACK_LIMIT
49 #define CHECK_STACK_LIMIT -1
/* NOTE(review): the body of this enum (content lines 53-57) is missing
   from this view of the file.  */
52 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
59 /* Processor costs (relative to an add) */
/* Per-CPU cost tables consulted via the ix86_cost pointer below; the
   unit is "one add instruction".  The closing "};" of each initializer
   is in a gap of this sampled view.  */
60 struct processor_costs i386_cost = { /* 386 specific costs */
61 1, /* cost of an add instruction (2 cycles) */
62 1, /* cost of a lea instruction */
63 3, /* variable shift costs */
64 2, /* constant shift costs */
65 6, /* cost of starting a multiply */
66 1, /* cost of multiply per each bit set */
67 23 /* cost of a divide/mod */
70 struct processor_costs i486_cost = { /* 486 specific costs */
71 1, /* cost of an add instruction */
72 1, /* cost of a lea instruction */
73 3, /* variable shift costs */
74 2, /* constant shift costs */
75 12, /* cost of starting a multiply */
76 1, /* cost of multiply per each bit set */
77 40 /* cost of a divide/mod */
80 struct processor_costs pentium_cost = {
81 1, /* cost of an add instruction */
82 1, /* cost of a lea instruction */
83 3, /* variable shift costs */
84 1, /* constant shift costs */
85 12, /* cost of starting a multiply */
86 1, /* cost of multiply per each bit set */
87 25 /* cost of a divide/mod */
/* Active cost table; defaults to Pentium and is presumably reselected
   from processor_target_table during option handling -- confirm.  */
90 struct processor_costs *ix86_cost = &pentium_cost;
/* Build a MEM rtx addressing through the frame pointer in MODE.  */
92 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
94 extern FILE *asm_out_file;
95 extern char *strcat ();
/* K&R-style forward declarations for assembler-template helpers
   defined later in this file.  */
97 char *singlemove_string ();
98 char *output_move_const_single ();
99 char *output_fp_cc0_set ();
/* Register-name spellings for word, byte-low, and byte-high access;
   the macros are supplied by the target headers.  */
101 char *hi_reg_name[] = HI_REGISTER_NAMES;
102 char *qi_reg_name[] = QI_REGISTER_NAMES;
103 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
105 /* Array of the smallest class containing reg number REGNO, indexed by
106 REGNO. Used by REGNO_REG_CLASS in i386.h. */
/* NOTE(review): several initializer rows (and the opening/closing
   braces) fall in gaps of this sampled view; the visible rows cover the
   general registers, SI/DI, and the 387 floating stack.  */
108 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
111 AREG, DREG, CREG, BREG,
113 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
115 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
116 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
121 /* Test and compare insns in i386.md store the information needed to
122 generate branch and scc insns here. */
124 struct rtx_def *i386_compare_op0 = NULL_RTX;
125 struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator functions for the pending compare: general form and the
   equality-only form.  */
126 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
128 /* which cpu are we scheduling for */
129 enum processor_type ix86_cpu;
131 /* which instruction set architecture to use. */
134 /* Strings to hold which cpu and instruction set architecture to use. */
135 char *ix86_cpu_string; /* for -mcpu=<xxx> */
136 char *ix86_arch_string; /* for -march=<xxx> */
/* Raw -mreg-alloc= string; parsed by override_options and
   order_regs_for_local_alloc below.  */
138 /* Register allocation order */
139 char *i386_reg_alloc_order;
140 static char regs_allocated[FIRST_PSEUDO_REGISTER];
142 /* # of registers to use to pass arguments. */
143 char *i386_regparm_string; /* # registers to use to pass args */
144 int i386_regparm; /* i386_regparm_string as a number */
/* Each -malign-*/-mbranch-cost option keeps the raw string alongside
   its parsed integer form, validated in override_options.  */
146 /* Alignment to use for loops and jumps */
147 char *i386_align_loops_string; /* power of two alignment for loops */
148 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
149 char *i386_align_funcs_string; /* power of two alignment for functions */
150 char *i386_branch_cost_string; /* values 1-5: see jump.c */
152 int i386_align_loops; /* power of two alignment for loops */
153 int i386_align_jumps; /* power of two alignment for non-loop jumps */
154 int i386_align_funcs; /* power of two alignment for functions */
155 int i386_branch_cost; /* values 1-5: see jump.c */
157 /* Sometimes certain combinations of command options do not make
158 sense on a particular target machine. You can define a macro
159 `OVERRIDE_OPTIONS' to take account of this. This macro, if
160 defined, is executed once just after all the command options have
163 Don't use this macro to turn on various extra optimizations for
164 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* Maps -march=/-mcpu= names onto processor enums, cost tables, and
   target-flag adjustments.  NOTE(review): the "struct ptt {" opening of
   this definition falls in a gap of this sampled view.  */
175 char *name; /* Canonical processor name. */
176 enum processor_type processor; /* Processor type enum value. */
177 struct processor_costs *cost; /* Processor costs */
178 int target_enable; /* Target flags to enable. */
179 int target_disable; /* Target flags to disable. */
180 } processor_target_table[]
181 = {{PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
182 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
183 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
184 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
/* NOTE(review): PentiumPro rows reuse &pentium_cost -- no dedicated
   cost table exists at this point in the file.  */
185 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
186 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
188 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
/* Body fragment of the OVERRIDE_OPTIONS hook (presumably
   override_options -- its header lines fall in a gap of this sampled
   view).  Validates -mreg-alloc=, -march=, -mcpu=, -mregparm=,
   -malign-*=, and -mbranch-cost=, then adjusts target_flags.  */
190 #ifdef SUBTARGET_OVERRIDE_OPTIONS
191 SUBTARGET_OVERRIDE_OPTIONS;
194 /* Validate registers in register allocation order */
195 if (i386_reg_alloc_order)
197 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
/* Letter -> hard register number; 'B' is the frame pointer (regno 6).  */
201 case 'a': regno = 0; break;
202 case 'd': regno = 1; break;
203 case 'c': regno = 2; break;
204 case 'b': regno = 3; break;
205 case 'S': regno = 4; break;
206 case 'D': regno = 5; break;
207 case 'B': regno = 6; break;
209 default: fatal ("Register '%c' is unknown", ch);
/* Reject duplicate letters in the user-supplied order.  */
212 if (regs_allocated[regno])
213 fatal ("Register '%c' was already specified in the allocation order", ch);
215 regs_allocated[regno] = 1;
219 if (ix86_arch_string == (char *)0)
221 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
222 if (ix86_cpu_string == (char *)0)
223 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Resolve -march= against the processor table; on a miss fall back to
   the Pentium default after reporting the bad value.  */
226 for (i = 0; i < ptt_size; i++)
227 if (! strcmp (ix86_arch_string, processor_target_table[i].name))
229 ix86_arch = processor_target_table[i].processor;
230 if (ix86_cpu_string == (char *)0)
231 ix86_cpu_string = processor_target_table[i].name;
237 error ("bad value (%s) for -march= switch", ix86_arch_string);
238 ix86_arch_string = PROCESSOR_PENTIUM_STRING;
239 ix86_arch = PROCESSOR_DEFAULT;
242 if (ix86_cpu_string == (char *)0)
243 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
/* Resolve -mcpu=; an -mcpu older than a PentiumPro -march is rejected
   (i > j compares table positions, which are ordered by generation).  */
245 for (j = 0; j < ptt_size; j++)
246 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
248 ix86_cpu = processor_target_table[j].processor;
249 if (i > j && (int)ix86_arch >= (int)PROCESSOR_PENTIUMPRO)
250 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_arch_string);
252 target_flags |= processor_target_table[j].target_enable;
253 target_flags &= ~processor_target_table[j].target_disable;
259 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
260 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
261 ix86_cpu = PROCESSOR_DEFAULT;
263 /* Validate -mregparm= value */
264 if (i386_regparm_string)
266 i386_regparm = atoi (i386_regparm_string);
267 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
268 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
/* Default code alignment: 2^2 on a plain 386, 2^4 otherwise.  */
271 def_align = (TARGET_386) ? 2 : 4;
273 /* Validate -malign-loops= value, or provide default */
274 if (i386_align_loops_string)
276 i386_align_loops = atoi (i386_align_loops_string);
277 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
278 fatal ("-malign-loops=%d is not between 0 and %d",
279 i386_align_loops, MAX_CODE_ALIGN);
282 i386_align_loops = 2;
284 /* Validate -malign-jumps= value, or provide default */
285 if (i386_align_jumps_string)
287 i386_align_jumps = atoi (i386_align_jumps_string);
288 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
289 fatal ("-malign-jumps=%d is not between 0 and %d",
290 i386_align_jumps, MAX_CODE_ALIGN);
293 i386_align_jumps = def_align;
295 /* Validate -malign-functions= value, or provide default */
296 if (i386_align_funcs_string)
298 i386_align_funcs = atoi (i386_align_funcs_string);
299 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
300 fatal ("-malign-functions=%d is not between 0 and %d",
301 i386_align_funcs, MAX_CODE_ALIGN);
304 i386_align_funcs = def_align;
306 /* Validate -mbranch-cost= value, or provide default */
307 if (i386_branch_cost_string)
309 i386_branch_cost = atoi (i386_branch_cost_string);
310 if (i386_branch_cost < 0 || i386_branch_cost > 5)
311 fatal ("-mbranch-cost=%d is not between 0 and 5",
315 i386_branch_cost = 1;
317 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
318 flag_omit_frame_pointer = 1;
320 /* pic references don't explicitly mention pic_offset_table_rtx */
321 /* code threaded into the prologue may conflict with profiling */
322 if (flag_pic || profile_flag || profile_block_flag)
323 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
326 /* A C statement (sans semicolon) to choose the order in which to
327 allocate hard registers for pseudo-registers local to a basic
330 Store the desired register order in the array `reg_alloc_order'.
331 Element 0 should be the register to allocate first; element 1, the
332 next register; and so on.
334 The macro body should not assume anything about the contents of
335 `reg_alloc_order' before execution of the macro.
337 On most machines, it is not necessary to define this macro. */
/* Fill reg_alloc_order: user letters from -mreg-alloc= first (mapped to
   hard regnos by the same a/d/c/b/S/D/B scheme used in
   override_options), then all remaining registers in natural order.
   NOTE(review): the switch header and several closing braces fall in
   gaps of this sampled view.  */
340 order_regs_for_local_alloc ()
342 int i, ch, order, regno;
344 /* User specified the register allocation order */
345 if (i386_reg_alloc_order)
347 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
351 case 'a': regno = 0; break;
352 case 'd': regno = 1; break;
353 case 'c': regno = 2; break;
354 case 'b': regno = 3; break;
355 case 'S': regno = 4; break;
356 case 'D': regno = 5; break;
357 case 'B': regno = 6; break;
360 reg_alloc_order[order++] = regno;
/* Append every register not named by the user, keeping natural order.  */
363 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
365 if (!regs_allocated[i])
366 reg_alloc_order[order++] = i;
370 /* If users did not specify a register allocation order, use natural order */
373 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
374 reg_alloc_order[i] = i;
/* OPTIMIZATION_OPTIONS hook fragment: LEVEL is the -O level.
   NOTE(review): the return type, parameter declaration, and the level
   test guarding the assignment fall in gaps of this sampled view.  */
380 optimization_options (level)
383 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
384 make the problem with not enough registers even worse */
385 #ifdef INSN_SCHEDULING
387 flag_schedule_insns = 0;
391 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
392 attribute for DECL. The attributes in ATTRIBUTES have previously been
/* NOTE(review): the body of this function falls entirely in a gap of
   this sampled view.  */
396 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
405 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
406 attribute for TYPE. The attributes in ATTRIBUTES have previously been
/* Recognizes "stdcall" and "cdecl" (no arguments allowed) and
   "regparm" (one INTEGER_CST argument in [0, REGPARM_MAX]).  */
410 i386_valid_type_attribute_p (type, attributes, identifier, args)
/* Attributes only apply to function-ish types.  */
416 if (TREE_CODE (type) != FUNCTION_TYPE
417 && TREE_CODE (type) != FIELD_DECL
418 && TREE_CODE (type) != TYPE_DECL)
421 /* Stdcall attribute says callee is responsible for popping arguments
422 if they are not variable. */
423 if (is_attribute_p ("stdcall", identifier))
424 return (args == NULL_TREE);
426 /* Cdecl attribute says the callee is a normal C declaration */
427 if (is_attribute_p ("cdecl", identifier))
428 return (args == NULL_TREE);
430 /* Regparm attribute specifies how many integer arguments are to be
431 passed in registers */
432 if (is_attribute_p ("regparm", identifier))
/* Exactly one argument, present and a constant.  */
436 if (!args || TREE_CODE (args) != TREE_LIST
437 || TREE_CHAIN (args) != NULL_TREE
438 || TREE_VALUE (args) == NULL_TREE)
441 cst = TREE_VALUE (args);
442 if (TREE_CODE (cst) != INTEGER_CST)
/* Range check: the count must fit in the low word and lie within
   [0, REGPARM_MAX].  */
445 if (TREE_INT_CST_HIGH (cst) != 0
446 || TREE_INT_CST_LOW (cst) < 0
447 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
456 /* Return 0 if the attributes for two types are incompatible, 1 if they
457 are compatible, and 2 if they are nearly compatible (which causes a
458 warning to be generated). */
/* NOTE(review): the body of this function falls entirely in a gap of
   this sampled view.  */
461 i386_comp_type_attributes (type1, type2)
469 /* Value is the number of bytes of arguments automatically
470 popped when returning from a subroutine call.
471 FUNDECL is the declaration node of the function (as a tree),
472 FUNTYPE is the data type of the function (as a tree),
473 or for a library call it is an identifier node for the subroutine name.
474 SIZE is the number of bytes of arguments passed on the stack.
476 On the 80386, the RTD insn may be used to pop them if the number
477 of args is fixed, but if the number is variable then the caller
478 must pop them all. RTD can't be used for library calls now
479 because the library is compiled with the Unix compiler.
480 Use of RTD is a selectable option, since it is incompatible with
481 standard Unix calling sequences. If the option is not selected,
482 the caller must always pop the args.
484 The attribute stdcall is equivalent to RTD on a per module basis. */
487 i386_return_pops_args (fundecl, funtype, size)
/* -mrtd applies only to real declarations, never to library calls
   (identifier nodes).  */
492 int rtd = TARGET_RTD && TREE_CODE (fundecl) != IDENTIFIER_NODE;
494 /* Cdecl functions override -mrtd, and never pop the stack */
495 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
497 /* Stdcall functions will pop the stack if not variable args */
498 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* Callee pops only when the arg list is fixed (ends in void) or is
   entirely unprototyped.  NOTE(review): the condition's opening lines
   fall in a gap of this sampled view.  */
502 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
503 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
507 /* Lose any fake structure return argument */
508 if (aggregate_value_p (TREE_TYPE (funtype)))
509 return GET_MODE_SIZE (Pmode);
515 /* Argument support functions. */
517 /* Initialize a variable CUM of type CUMULATIVE_ARGS
518 for a call to a function whose data type is FNTYPE.
519 For a library call, FNTYPE is 0. */
522 init_cumulative_args (cum, fntype, libname)
523 CUMULATIVE_ARGS *cum; /* argument info to initialize */
524 tree fntype; /* tree ptr for function decl */
525 rtx libname; /* SYMBOL_REF of library name or 0 */
527 static CUMULATIVE_ARGS zero_cum;
528 tree param, next_param;
/* Optional tracing of every initialization under -mdebug-arg.  */
530 if (TARGET_DEBUG_ARG)
532 fprintf (stderr, "\ninit_cumulative_args (");
535 tree ret_type = TREE_TYPE (fntype);
536 fprintf (stderr, "fntype code = %s, ret code = %s",
537 tree_code_name[ (int)TREE_CODE (fntype) ],
538 tree_code_name[ (int)TREE_CODE (ret_type) ]);
541 fprintf (stderr, "no fntype");
544 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
549 /* Set up the number of registers to use for passing arguments. */
/* Global -mregparm= default, overridden per function by a regparm
   attribute on the function type.  */
550 cum->nregs = i386_regparm;
553 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
555 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
558 /* Determine if this function has variable arguments. This is
559 indicated by the last argument being 'void_type_mode' if there
560 are no variable arguments. If there are variable arguments, then
561 we won't pass anything in registers */
/* Walk the parameter type list; a last entry that is not void means
   varargs.  NOTE(review): the loop's other clauses and the action taken
   on detecting varargs fall in gaps of this sampled view.  */
565 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
569 next_param = TREE_CHAIN (param);
570 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
575 if (TARGET_DEBUG_ARG)
576 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
581 /* Update the data in CUM to advance over an argument
582 of mode MODE and data type TYPE.
583 (TYPE is null for libcalls where that information may not be available.) */
586 function_arg_advance (cum, mode, type, named)
587 CUMULATIVE_ARGS *cum; /* current arg information */
588 enum machine_mode mode; /* current arg mode */
589 tree type; /* type of the argument or 0 if lib support */
590 int named; /* whether or not the argument was named */
/* Size of this argument in bytes and in word-sized slots.  */
592 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
593 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* NOTE(review): the code that actually advances cum->words / cum->nregs
   falls in a gap of this sampled view; only the debug trace remains.  */
595 if (TARGET_DEBUG_ARG)
597 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
598 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
613 /* Define where to put the arguments to a function.
614 Value is zero to push the argument on the stack,
615 or a hard register in which to store the argument.
617 MODE is the argument's machine mode.
618 TYPE is the data type of the argument (as a tree).
619 This is null for libcalls where that information may
621 CUM is a variable of type CUMULATIVE_ARGS which gives info about
622 the preceding args and about the function being called.
623 NAMED is nonzero if this argument is a named parameter
624 (otherwise it is an extra parameter matching an ellipsis). */
627 function_arg (cum, mode, type, named)
628 CUMULATIVE_ARGS *cum; /* current arg information */
629 enum machine_mode mode; /* current arg mode */
630 tree type; /* type of the argument or 0 if lib support */
631 int named; /* != 0 for normal args, == 0 for ... args */
/* Argument size in bytes and word slots, as in function_arg_advance.  */
634 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
635 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
/* NOTE(review): the switch-on-mode header and the integer-mode cases
   fall in gaps of this sampled view.  */
639 default: /* for now, pass fp/complex values on the stack */
/* A register is used only if the whole argument fits in the remaining
   register budget.  */
647 if (words <= cum->nregs)
648 ret = gen_rtx (REG, mode, cum->regno);
652 if (TARGET_DEBUG_ARG)
655 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
656 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
659 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
661 fprintf (stderr, ", stack");
663 fprintf (stderr, " )\n");
669 /* For an arg passed partly in registers and partly in memory,
670 this is the number of registers used.
671 For args passed entirely in registers or entirely in memory, zero. */
/* NOTE(review): the body of this function falls entirely in a gap of
   this sampled view.  */
674 function_arg_partial_nregs (cum, mode, type, named)
675 CUMULATIVE_ARGS *cum; /* current arg information */
676 enum machine_mode mode; /* current arg mode */
677 tree type; /* type of the argument or 0 if lib support */
678 int named; /* != 0 for normal args, == 0 for ... args */
684 /* Output an insn whose source is a 386 integer register. SRC is the
685 rtx for the register, and TEMPLATE is the op-code template. SRC may
686 be either SImode or DImode.
688 The template will be output with operands[0] as SRC, and operands[1]
689 as a pointer to the top of the 386 stack. So a call from floatsidf2
690 would look like this:
692 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
694 where %z0 corresponds to the caller's operands[1], and is used to
695 emit the proper size suffix.
697 ??? Extend this to handle HImode - a 387 can load and store HImode
701 output_op_from_reg (src, template)
706 int size = GET_MODE_SIZE (GET_MODE (src));
/* xops: [1] = (mem (sp)), [2] = size, [3] = sp; used by the template
   and by the stack-restoring add below.  */
709 xops[1] = AT_SP (Pmode);
710 xops[2] = GEN_INT (size);
711 xops[3] = stack_pointer_rtx;
/* Push the value word by word, highest word first so the low word ends
   up at the stack pointer for the template to consume.  */
713 if (size > UNITS_PER_WORD)
716 if (size > 2 * UNITS_PER_WORD)
718 high = gen_rtx (REG, SImode, REGNO (src) + 2);
719 output_asm_insn (AS1 (push%L0,%0), &high);
721 high = gen_rtx (REG, SImode, REGNO (src) + 1);
722 output_asm_insn (AS1 (push%L0,%0), &high);
724 output_asm_insn (AS1 (push%L0,%0), &src);
726 output_asm_insn (template, xops);
/* Pop the temporary back off by bumping the stack pointer.  */
728 output_asm_insn (AS2 (add%L3,%2,%3), xops);
731 /* Output an insn to pop an value from the 387 top-of-stack to 386
732 register DEST. The 387 register stack is popped if DIES is true. If
733 the mode of DEST is an integer mode, a `fist' integer store is done,
734 otherwise a `fst' float store is done. */
737 output_to_reg (dest, dies)
742 int size = GET_MODE_SIZE (GET_MODE (dest));
/* Scratch space is carved out below the stack pointer; xops[0]
   addresses it, xops[2] is its size.  */
744 xops[0] = AT_SP (Pmode);
745 xops[1] = stack_pointer_rtx;
746 xops[2] = GEN_INT (size);
749 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
/* Integer destination: fistp pops the 387 stack, fist does not.  */
751 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
754 output_asm_insn (AS1 (fistp%z3,%y0), xops);
756 output_asm_insn (AS1 (fist%z3,%y0), xops);
758 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
761 output_asm_insn (AS1 (fstp%z3,%y0), xops);
/* XFmode has no non-popping fst form: store-pop, then reload to keep
   the value on the 387 stack when DIES is false.  */
764 if (GET_MODE (dest) == XFmode)
766 output_asm_insn (AS1 (fstp%z3,%y0), xops);
767 output_asm_insn (AS1 (fld%z3,%y0), xops);
770 output_asm_insn (AS1 (fst%z3,%y0), xops);
/* Pop the stored words into DEST, one SImode register at a time.  */
776 output_asm_insn (AS1 (pop%L0,%0), &dest);
778 if (size > UNITS_PER_WORD)
780 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
781 output_asm_insn (AS1 (pop%L0,%0), &dest);
782 if (size > 2 * UNITS_PER_WORD)
784 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
785 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* Return the assembler template for a single-word move between
   OPERANDS[0] (dest) and OPERANDS[1] (src).  A push through a
   pre-decrement of the stack pointer gets a push insn; otherwise a
   plain mov.  NOTE(review): the function header comment, local
   declarations, and several return statements fall in gaps of this
   sampled view.  */
791 singlemove_string (operands)
795 if (GET_CODE (operands[0]) == MEM
796 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
/* Only sp-relative pre-decrement is expected here.  */
798 if (XEXP (x, 0) != stack_pointer_rtx)
802 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
804 return output_move_const_single (operands);
806 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
807 return AS2 (mov%L0,%1,%0);
808 else if (CONSTANT_P (operands[1]))
809 return AS2 (mov%L0,%1,%0);
812 output_asm_insn ("push%L1 %1", operands);
817 /* Return a REG that occurs in ADDR with coefficient 1.
818 ADDR can be effectively incremented by incrementing REG. */
/* Descend through nested PLUS terms, preferring a bare REG operand and
   otherwise skipping the constant side.  NOTE(review): the function
   header and the failure path fall in gaps of this sampled view.  */
824 while (GET_CODE (addr) == PLUS)
826 if (GET_CODE (XEXP (addr, 0)) == REG)
827 addr = XEXP (addr, 0);
828 else if (GET_CODE (XEXP (addr, 1)) == REG)
829 addr = XEXP (addr, 1);
830 else if (CONSTANT_P (XEXP (addr, 0)))
831 addr = XEXP (addr, 1);
832 else if (CONSTANT_P (XEXP (addr, 1)))
833 addr = XEXP (addr, 0);
837 if (GET_CODE (addr) == REG)
843 /* Output an insn to add the constant N to the register X. */
/* Body fragment: dec/inc for +/-1, sub for negative N, add otherwise.
   NOTE(review): the function header, xops setup, and the guarding
   conditionals fall in gaps of this sampled view.  */
854 output_asm_insn (AS1 (dec%L0,%0), xops);
856 output_asm_insn (AS1 (inc%L0,%0), xops);
859 xops[1] = GEN_INT (-n);
860 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
864 xops[1] = GEN_INT (n);
865 output_asm_insn (AS2 (add%L0,%1,%0), xops);
870 /* Output assembler code to perform a doubleword move insn
871 with operands OPERANDS. */
/* Handles 8-byte (DImode/DFmode) and 12-byte (XFmode) moves as a
   sequence of word moves, ordering the halves so no half clobbers data
   the later halves still need.  NOTE(review): this is a sampled
   fragment -- the return type, middlehalf/latehalf/xops declarations,
   many braces, and several abort() calls fall in gaps of this view, so
   the code is documented in place rather than rewritten.  */
874 output_move_double (operands)
877 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
881 rtx addreg0 = 0, addreg1 = 0;
882 int dest_overlapped_low = 0;
883 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
888 /* First classify both operands. */
890 if (REG_P (operands[0]))
892 else if (offsettable_memref_p (operands[0]))
894 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
896 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
898 else if (GET_CODE (operands[0]) == MEM)
903 if (REG_P (operands[1]))
905 else if (CONSTANT_P (operands[1]))
907 else if (offsettable_memref_p (operands[1]))
909 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
911 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
913 else if (GET_CODE (operands[1]) == MEM)
918 /* Check for the cases that the operand constraints are not
919 supposed to allow to happen. Abort if we get one,
920 because generating code for these cases is painful. */
922 if (optype0 == RNDOP || optype1 == RNDOP)
925 /* If one operand is decrementing and one is incrementing
926 decrement the former register explicitly
927 and change that operand into ordinary indexing. */
929 if (optype0 == PUSHOP && optype1 == POPOP)
931 /* ??? Can this ever happen on i386? */
932 operands[0] = XEXP (XEXP (operands[0], 0), 0);
933 asm_add (-size, operands[0]);
934 if (GET_MODE (operands[1]) == XFmode)
935 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
936 else if (GET_MODE (operands[0]) == DFmode)
937 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
939 operands[0] = gen_rtx (MEM, DImode, operands[0]);
943 if (optype0 == POPOP && optype1 == PUSHOP)
945 /* ??? Can this ever happen on i386? */
946 operands[1] = XEXP (XEXP (operands[1], 0), 0);
947 asm_add (-size, operands[1]);
948 if (GET_MODE (operands[1]) == XFmode)
949 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
950 else if (GET_MODE (operands[1]) == DFmode)
951 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
953 operands[1] = gen_rtx (MEM, DImode, operands[1]);
957 /* If an operand is an unoffsettable memory ref, find a register
958 we can increment temporarily to make it refer to the second word. */
960 if (optype0 == MEMOP)
961 addreg0 = find_addr_reg (XEXP (operands[0], 0));
963 if (optype1 == MEMOP)
964 addreg1 = find_addr_reg (XEXP (operands[1], 0));
966 /* Ok, we can do one word at a time.
967 Normally we do the low-numbered word first,
968 but if either operand is autodecrementing then we
969 do the high-numbered word first.
971 In either case, set up in LATEHALF the operands to use
972 for the high-numbered word and in some cases alter the
973 operands in OPERANDS to be suitable for the low-numbered word. */
/* 12-byte (XFmode) case: build middlehalf and latehalf for all operand
   classes.  NOTE(review): the size == 12 test itself is in a gap.  */
977 if (optype0 == REGOP)
979 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
980 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
982 else if (optype0 == OFFSOP)
984 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
985 latehalf[0] = adj_offsettable_operand (operands[0], 8);
989 middlehalf[0] = operands[0];
990 latehalf[0] = operands[0];
993 if (optype1 == REGOP)
995 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
996 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
998 else if (optype1 == OFFSOP)
1000 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
1001 latehalf[1] = adj_offsettable_operand (operands[1], 8);
1003 else if (optype1 == CNSTOP)
1005 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1007 REAL_VALUE_TYPE r; long l[3];
/* Split an XFmode constant into three 32-bit target words.  */
1009 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1010 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
1011 operands[1] = GEN_INT (l[0]);
1012 middlehalf[1] = GEN_INT (l[1]);
1013 latehalf[1] = GEN_INT (l[2]);
1015 else if (CONSTANT_P (operands[1]))
1016 /* No non-CONST_DOUBLE constant should ever appear here. */
1021 middlehalf[1] = operands[1];
1022 latehalf[1] = operands[1];
1025 else /* size is not 12: */
/* 8-byte case: only a latehalf is needed.  */
1027 if (optype0 == REGOP)
1028 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1029 else if (optype0 == OFFSOP)
1030 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1032 latehalf[0] = operands[0];
1034 if (optype1 == REGOP)
1035 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1036 else if (optype1 == OFFSOP)
1037 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1038 else if (optype1 == CNSTOP)
1039 split_double (operands[1], &operands[1], &latehalf[1]);
1041 latehalf[1] = operands[1];
1044 /* If insn is effectively movd N (sp),-(sp) then we will do the
1045 high word first. We should use the adjusted operand 1
1046 (which is N+4 (sp) or N+8 (sp))
1047 for the low word and middle word as well,
1048 to compensate for the first decrement of sp. */
1049 if (optype0 == PUSHOP
1050 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1051 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1052 middlehalf[1] = operands[1] = latehalf[1];
1054 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1055 if the upper part of reg N does not appear in the MEM, arrange to
1056 emit the move late-half first. Otherwise, compute the MEM address
1057 into the upper part of N and use that as a pointer to the memory
1059 if (optype0 == REGOP
1060 && (optype1 == OFFSOP || optype1 == MEMOP))
1062 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1063 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1065 /* If both halves of dest are used in the src memory address,
1066 compute the address into latehalf of dest. */
1068 xops[0] = latehalf[0];
1069 xops[1] = XEXP (operands[1], 0);
1070 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1071 if( GET_MODE (operands[1]) == XFmode )
/* Re-point the source at the computed address for all three words.  */
1074 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1075 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1076 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1080 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1081 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
/* NOTE(review): the else-if that this condition continues falls in a
   gap of this sampled view.  */
1085 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1087 /* Check for two regs used by both source and dest. */
1088 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1089 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1092 /* JRV says this can't happen: */
1093 if (addreg0 || addreg1)
1096 /* Only the middle reg conflicts; simply put it last. */
1097 output_asm_insn (singlemove_string (operands), operands);
1098 output_asm_insn (singlemove_string (latehalf), latehalf);
1099 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1102 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1103 /* If the low half of dest is mentioned in the source memory
1104 address, the arrange to emit the move late half first. */
1105 dest_overlapped_low = 1;
1108 /* If one or both operands autodecrementing,
1109 do the two words, high-numbered first. */
1111 /* Likewise, the first move would clobber the source of the second one,
1112 do them in the other order. This happens only for registers;
1113 such overlap can't happen in memory unless the user explicitly
1114 sets it up, and that is an undefined circumstance. */
/* NOTE(review): two variants of the same overlap test appear here; the
   conditional or #if structure selecting between them (the second adds
   the middlehalf check for size 12) falls in a gap of this view.  */
1117 if (optype0 == PUSHOP || optype1 == PUSHOP
1118 || (optype0 == REGOP && optype1 == REGOP
1119 && REGNO (operands[0]) == REGNO (latehalf[1]))
1120 || dest_overlapped_low)
1122 if (optype0 == PUSHOP || optype1 == PUSHOP
1123 || (optype0 == REGOP && optype1 == REGOP
1124 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1125 || REGNO (operands[0]) == REGNO (latehalf[1])))
1126 || dest_overlapped_low)
1128 /* Make any unoffsettable addresses point at high-numbered word. */
1130 asm_add (size-4, addreg0);
1132 asm_add (size-4, addreg1);
/* High word, then (for size 12) middle word, stepping the temporary
   address registers back down as we go.  */
1135 output_asm_insn (singlemove_string (latehalf), latehalf);
1137 /* Undo the adds we just did. */
1139 asm_add (-4, addreg0);
1141 asm_add (-4, addreg1);
1145 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1147 asm_add (-4, addreg0);
1149 asm_add (-4, addreg1);
1152 /* Do low-numbered word. */
1153 return singlemove_string (operands);
1156 /* Normal case: do the two words, low-numbered first. */
1158 output_asm_insn (singlemove_string (operands), operands);
1160 /* Do the middle one of the three words for long double */
1164 asm_add (4, addreg0);
1166 asm_add (4, addreg1);
1168 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1171 /* Make any unoffsettable addresses point at high-numbered word. */
1173 asm_add (4, addreg0);
1175 asm_add (4, addreg1);
1178 output_asm_insn (singlemove_string (latehalf), latehalf);
1180 /* Undo the adds we just did. */
1182 asm_add (4-size, addreg0);
1184 asm_add (4-size, addreg1);
1190 #define MAX_TMPS 2 /* max temporary registers used */
1192 /* Output the appropriate code to move push memory on the stack */
/* Pushes LENGTH bytes from the memory source OPERANDS[1], either with
   direct pushes or by staging words through scratch registers found in
   OPERANDS[TMP_START..N_OPERANDS-1].  NOTE(review): the return type,
   local declarations, the tmp_info struct definition, and the branch
   choosing direct-push vs. staged falls in gaps of this sampled view.  */
1195 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1207 } tmp_info[MAX_TMPS];
1209 rtx src = operands[1];
/* If the source address involves sp, each push moves sp and the source
   offsets must be compensated via stack_offset.  */
1212 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1213 int stack_offset = 0;
1217 if (!offsettable_memref_p (src))
1218 fatal_insn ("Source is not offsettable", insn);
1220 if ((length & 3) != 0)
1221 fatal_insn ("Pushing non-word aligned size", insn);
1223 /* Figure out which temporary registers we have available */
1224 for (i = tmp_start; i < n_operands; i++)
1226 if (GET_CODE (operands[i]) == REG)
/* A scratch that overlaps the source cannot be used.  */
1228 if (reg_overlap_mentioned_p (operands[i], src))
1231 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1232 if (max_tmps == MAX_TMPS)
/* Direct path: push each word, highest offset first.  */
1238 for (offset = length - 4; offset >= 0; offset -= 4)
1240 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1241 output_asm_insn (AS1(push%L0,%0), xops);
/* Staged path: batch up to MAX_TMPS loads, then push them, so loads
   happen before sp moves.  */
1247 for (offset = length - 4; offset >= 0; )
1249 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1251 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1252 tmp_info[num_tmps].push = AS1(push%L0,%1);
1253 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1257 for (i = 0; i < num_tmps; i++)
1258 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1260 for (i = 0; i < num_tmps; i++)
1261 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1264 stack_offset += 4*num_tmps;
1272 /* Output the appropriate code to move data between two memory locations */
/* Emit asm for a LENGTH-byte memory-to-memory copy from operands[1] to
   operands[0], using scratch registers from operands[tmp_start..n_operands-1].
   A push onto the stack (dest == (mem (pre_inc sp))) is delegated to
   output_move_pushmem.  NOTE(review): listing is elided between numbered
   lines.  */
1275 output_move_memory (operands, insn, length, tmp_start, n_operands)
1286 } tmp_info[MAX_TMPS];
1288 rtx dest = operands[0];
1289 rtx src = operands[1];
/* Byte-capable scratch (needed only when LENGTH is odd).  */
1290 rtx qi_tmp = NULL_RTX;
1296 if (GET_CODE (dest) == MEM
1297 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1298 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1299 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1301 if (!offsettable_memref_p (src))
1302 fatal_insn ("Source is not offsettable", insn);
1304 if (!offsettable_memref_p (dest))
1305 fatal_insn ("Destination is not offsettable", insn);
1307 /* Figure out which temporary registers we have available */
1308 for (i = tmp_start; i < n_operands; i++)
1310 if (GET_CODE (operands[i]) == REG)
/* Remember the first QImode-capable register for the odd trailing byte.  */
1312 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1313 qi_tmp = operands[i];
/* A scratch overlapping either memory operand would corrupt the copy.  */
1315 if (reg_overlap_mentioned_p (operands[i], dest))
1316 fatal_insn ("Temporary register overlaps the destination", insn);
1318 if (reg_overlap_mentioned_p (operands[i], src))
1319 fatal_insn ("Temporary register overlaps the source", insn);
1321 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1322 if (max_tmps == MAX_TMPS)
1328 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1330 if ((length & 1) != 0)
1333 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Batch loads then stores: word (%L) moves first, halfword (%W) for a
   2-byte remainder.  */
1338 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1342 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1343 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1344 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1345 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1349 else if (length >= 2)
1351 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1352 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1353 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1354 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1362 for (i = 0; i < num_tmps; i++)
1363 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1365 for (i = 0; i < num_tmps; i++)
1366 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Final odd byte goes through the QImode scratch (mov%B).  */
1371 xops[0] = adj_offsettable_operand (dest, offset);
1372 xops[1] = adj_offsettable_operand (src, offset);
1374 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1375 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Test whether CONST_DOUBLE X is one of the constants the 80387 can load
   with a single instruction (0.0 via fldz, 1.0 via fld1).  Uses a float
   handler so that a trapping conversion is caught rather than aborting.
   NOTE(review): the return statements are elided in this listing;
   presumably nonzero encodes which constant was matched -- confirm in the
   full source.  */
1383 standard_80387_constant_p (x)
1386 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* If the REAL_VALUE code longjmps here, give up on matching.  */
1391 if (setjmp (handler))
1394 set_float_handler (handler);
1395 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* -0.0 must not match fldz, hence the explicit minus-zero exclusion.  */
1396 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1397 is1 = REAL_VALUES_EQUAL (d, dconst1);
1398 set_float_handler (NULL_PTR);
1406 /* Note that on the 80387, other constants, such as pi,
1407 are much slower to load as standard constants
1408 than to load from doubles in memory! */
/* Return the asm template to move the single-precision constant operands[1]
   into operands[0].  FP-register destinations first try the one-insn 387
   constants (see standard_80387_constant_p); otherwise the CONST_DOUBLE is
   converted to its target bit pattern and moved as an integer.  */
1415 output_move_const_single (operands)
1418 if (FP_REG_P (operands[0]))
1420 int conval = standard_80387_constant_p (operands[1]);
1428 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1430 REAL_VALUE_TYPE r; long l;
/* XFmode (80-bit) constants cannot be handled this way -- elided branch,
   presumably aborts; TODO confirm.  */
1432 if (GET_MODE (operands[1]) == XFmode)
1435 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
/* Reinterpret the float as its 32-bit target image so an integer move
   template can be used.  */
1436 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1437 operands[1] = GEN_INT (l);
1439 return singlemove_string (operands);
1442 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1443 reference and a constant. */
/* Predicate used by the machine description; MODE is ignored (standard
   predicate signature).  Elided cases presumably accept SYMBOL_REF and
   LABEL_REF directly and unwrap CONST.  */
1446 symbolic_operand (op, mode)
1448 enum machine_mode mode;
1450 switch (GET_CODE (op))
/* (const (plus (symbol_ref|label_ref) (const_int)))  */
1457 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1458 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1459 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1465 /* Test for a valid operand for a call instruction.
1466 Don't allow the arg pointer register or virtual regs
1467 since they may change into reg + const, which the patterns
1468 can't handle yet. */
1471 call_insn_operand (op, mode)
1473 enum machine_mode mode;
/* Accept (mem addr) where addr is either a constant address that is also a
   general_operand (the extra check matters for PIC), or a hard/pseudo reg
   that is neither the arg pointer nor a virtual register.  */
1475 if (GET_CODE (op) == MEM
1476 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1477 /* This makes a difference for PIC. */
1478 && general_operand (XEXP (op, 0), Pmode))
1479 || (GET_CODE (XEXP (op, 0)) == REG
1480 && XEXP (op, 0) != arg_pointer_rtx
1481 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1482 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1487 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Used by the call expanders: identical to call_insn_operand except it skips
   the general_operand restriction on constant addresses, so a plain
   (mem (symbol_ref ...)) is accepted even when PIC would later reject it.  */
1491 expander_call_insn_operand (op, mode)
1493 enum machine_mode mode;
1495 if (GET_CODE (op) == MEM
1496 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1497 || (GET_CODE (XEXP (op, 0)) == REG
1498 && XEXP (op, 0) != arg_pointer_rtx
1499 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1500 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1505 /* Return 1 if OP is a comparison operator that can use the condition code
1506 generated by an arithmetic operation. */
1509 arithmetic_comparison_operator (op, mode)
1511 enum machine_mode mode;
/* Reject a mode mismatch (unless the caller passed VOIDmode).  */
1515 if (mode != VOIDmode && mode != GET_MODE (op))
1517 code = GET_CODE (op);
1518 if (GET_RTX_CLASS (code) != '<')
/* GT and LE need flags a plain arithmetic insn does not set usefully;
   all other comparison codes are acceptable.  */
1521 return (code != GT && code != LE);
1524 /* Returns 1 if OP contains a symbol reference */
/* Recursive walk over the RTL: true if any sub-expression is a SYMBOL_REF
   or LABEL_REF, scanning both 'E' (vector) and 'e' (expression) operands.  */
1527 symbolic_reference_mentioned_p (op)
1533 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1536 fmt = GET_RTX_FORMAT (GET_CODE (op));
1537 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1543 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1544 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1547 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1554 /* Attempt to expand a binary operator. Make the expansion closer to the
1555 actual machine, then just general_operand, which will allow 3 separate
1556 memory references (one output, two input) in a single insn. Return
1557 whether the insn fails, or succeeds. */
1560 ix86_expand_binary_operator (code, mode, operands)
1562 enum machine_mode mode;
1569 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
/* Swap operands 1 and 2 so the destination-matching or immediate operand
   lands where the two-address x86 patterns want it.  */
1570 if (GET_RTX_CLASS (code) == 'c'
1571 && (rtx_equal_p (operands[0], operands[2])
1572 || immediate_operand (operands[1], mode)))
1574 rtx temp = operands[1];
1575 operands[1] = operands[2];
1579 /* If optimizing, copy to regs to improve CSE */
/* Only before/outside reload: forcing operands into pseudos is illegal once
   reload has started.  */
1580 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1582 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1583 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1585 if (GET_CODE (operands[2]) == MEM)
1586 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* const - reg cannot be expressed directly; load the constant first.  */
1588 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1590 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1591 emit_move_insn (temp, operands[1]);
1597 if (!ix86_binary_operator_ok (code, mode, operands))
1599 /* If not optimizing, try to make a valid insn (optimize code previously did
1600 this above to improve chances of CSE) */
1602 if ((!TARGET_PSEUDO || !optimize)
1603 && ((reload_in_progress | reload_completed) == 0)
1604 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
1607 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1609 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1613 if (GET_CODE (operands[2]) == MEM)
1615 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1619 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1621 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1622 emit_move_insn (temp, operands[1]);
/* Re-validate only if something was changed by the fix-up pass above.  */
1627 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1637 /* Return TRUE or FALSE depending on whether the binary operator meets the
1638 appropriate constraints. */
1641 ix86_binary_operator_ok (code, mode, operands)
1643 enum machine_mode mode;
/* Reject two memory inputs (no x86 insn has two memory sources) and a
   constant first operand for non-commutative codes (can't be swapped into
   the two-address form).  */
1646 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1647 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1650 /* Attempt to expand a unary operator. Make the expansion closer to the
1651 actual machine, then just general_operand, which will allow 2 separate
1652 memory references (one output, one input) in a single insn. Return
1653 whether the insn fails, or succeeds. */
1656 ix86_expand_unary_operator (code, mode, operands)
1658 enum machine_mode mode;
1663 /* If optimizing, copy to regs to improve CSE */
/* As in the binary case: only legal before reload begins.  */
1666 && ((reload_in_progress | reload_completed) == 0)
1667 && GET_CODE (operands[1]) == MEM)
1669 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1672 if (!ix86_unary_operator_ok (code, mode, operands))
/* Non-optimizing fallback: force the memory input into a register and
   retry validation.  */
1674 if ((!TARGET_PSEUDO || !optimize)
1675 && ((reload_in_progress | reload_completed) == 0)
1676 && GET_CODE (operands[1]) == MEM)
1678 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1679 if (!ix86_unary_operator_ok (code, mode, operands))
1689 /* Return TRUE or FALSE depending on whether the unary operator meets the
1690 appropriate constraints. */
/* Body elided in this listing; presumably unconditionally TRUE -- confirm
   in the full source.  */
1693 ix86_unary_operator_ok (code, mode, operands)
1695 enum machine_mode mode;
/* Shared PIC helper label: emitted once per translation unit and called
   from prologues to fetch the return address (see also ix86_expand_prologue).  */
1703 static rtx pic_label_rtx;
1704 static char pic_label_name [256];
1705 static int pic_label_no = 0;
1707 /* This function generates code for -fpic that loads %ebx with
1708 with the return address of the caller and then returns. */
1710 asm_output_function_prefix (file, name)
1715 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1716 || current_function_uses_const_pool);
1717 xops[0] = pic_offset_table_rtx;
1718 xops[1] = stack_pointer_rtx;
1720 /* deep branch prediction favors having a return for every call */
1721 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Create the thunk label lazily; the same label is reused by later
   functions in this file.  */
1725 if (pic_label_rtx == 0)
1727 pic_label_rtx = (rtx) gen_label_rtx ();
1728 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1729 LABEL_NAME (pic_label_rtx) = pic_label_name;
/* A dummy FUNCTION_DECL so ASM_DECLARE_FUNCTION_NAME can emit the thunk
   as a proper function symbol.  */
1731 prologue_node = make_node (FUNCTION_DECL);
1732 DECL_RESULT (prologue_node) = 0;
1733 #ifdef ASM_DECLARE_FUNCTION_NAME
1734 ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
/* Thunk body: load the return address (at (%esp)) into the PIC reg, return.  */
1736 output_asm_insn ("movl (%1),%0", xops);
1737 output_asm_insn ("ret", xops);
1741 /* Set up the stack and frame (if desired) for the function. */
/* Text-emitting prologue, used only when TARGET_SCHEDULE_PROLOGUE is off
   (the RTL version is ix86_expand_prologue below); the two must stay in
   sync.  */
1744 function_prologue (file, size)
1751 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1752 || current_function_uses_const_pool);
1753 long tsize = get_frame_size ();
1755 /* pic references don't explicitly mention pic_offset_table_rtx */
/* When the prologue is emitted as RTL instead, there is nothing to print
   here -- elided early return, presumably.  */
1756 if (TARGET_SCHEDULE_PROLOGUE)
1762 xops[0] = stack_pointer_rtx;
1763 xops[1] = frame_pointer_rtx;
1764 xops[2] = GEN_INT (tsize);
1766 if (frame_pointer_needed)
/* Standard frame: push %ebp; movl %esp,%ebp.  */
1768 output_asm_insn ("push%L1 %1", xops);
1769 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* Small frames: a single sub; large frames with -mstack-probe go through
   _alloca so each page is touched.  */
1774 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1775 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1778 xops[3] = gen_rtx (REG, SImode, 0);
1779 output_asm_insn (AS2 (mov%L0,%2,%3), xops);
1781 xops[3] = gen_rtx (SYMBOL_REF, Pmode, "_alloca");
1782 output_asm_insn (AS1 (call,%P3), xops);
1785 /* Note If use enter it is NOT reversed args.
1786 This one is not reversed from intel!!
1787 I think enter is slower. Also sdb doesn't like it.
1788 But if you want it the code is:
1790 xops[3] = const0_rtx;
1791 output_asm_insn ("enter %2,%3", xops);
/* Save call-saved registers that are live, plus the PIC reg if needed.  */
1794 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1795 for (regno = limit - 1; regno >= 0; regno--)
1796 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1797 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1799 xops[0] = gen_rtx (REG, SImode, regno);
1800 output_asm_insn ("push%L0 %0", xops);
1803 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Call the shared thunk (see asm_output_function_prefix) to get the pc,
   then add the GOT displacement.  */
1805 xops[0] = pic_offset_table_rtx;
1806 xops[1] = gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx));
1808 output_asm_insn (AS1 (call,%P1), xops);
1809 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
1812 else if (pic_reg_used)
/* Classic call/pop sequence to materialize the pc in the PIC register.  */
1814 xops[0] = pic_offset_table_rtx;
1815 xops[1] = (rtx) gen_label_rtx ();
1817 output_asm_insn (AS1 (call,%P1), xops);
1818 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1819 output_asm_insn (AS1 (pop%L0,%0), xops);
1820 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1824 /* This function generates the assembly code for function entry.
1825 FILE is an stdio stream to output the code to.
1826 SIZE is an int: how many units of temporary storage to allocate. */
/* RTL counterpart of function_prologue, used when TARGET_SCHEDULE_PROLOGUE
   is on so the scheduler can see the prologue insns.  Saved-register pushes
   and stack adjustments are marked RTX_FRAME_RELATED_P for unwind info.  */
1829 ix86_expand_prologue ()
1834 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1835 || current_function_uses_const_pool);
1836 long tsize = get_frame_size ();
1839 if (!TARGET_SCHEDULE_PROLOGUE)
1842 xops[0] = stack_pointer_rtx;
1843 xops[1] = frame_pointer_rtx;
1844 xops[2] = GEN_INT (tsize);
1845 if (frame_pointer_needed)
/* push %ebp (as a pre-decrement store) then movl %esp,%ebp.  */
1849 gen_rtx (MEM, SImode,
1850 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1851 frame_pointer_rtx));
1852 RTX_FRAME_RELATED_P (insn) = 1;
1853 insn = emit_move_insn (xops[1], xops[0]);
1854 RTX_FRAME_RELATED_P (insn) = 1;
1859 else if (! TARGET_STACK_PROBE || tsize < CHECK_STACK_LIMIT)
1861 insn = emit_insn (gen_subsi3 (xops[0], xops[0], xops[2]));
1862 RTX_FRAME_RELATED_P (insn) = 1;
/* Stack probing: pass the size in %eax and call _alloca.  */
1866 xops[3] = gen_rtx (REG, SImode, 0);
1867 emit_move_insn (xops[3], xops[2]);
1868 xops[3] = gen_rtx (MEM, FUNCTION_MODE,
1869 gen_rtx (SYMBOL_REF, Pmode, "_alloca"));
1870 emit_call_insn (gen_rtx (CALL, VOIDmode,
1871 xops[3], const0_rtx));
1874 /* Note If use enter it is NOT reversed args.
1875 This one is not reversed from intel!!
1876 I think enter is slower. Also sdb doesn't like it.
1877 But if you want it the code is:
1879 xops[3] = const0_rtx;
1880 output_asm_insn ("enter %2,%3", xops);
/* Push each live call-saved register (and the PIC reg if used).  */
1883 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1884 for (regno = limit - 1; regno >= 0; regno--)
1885 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1886 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1888 xops[0] = gen_rtx (REG, SImode, regno);
1891 gen_rtx (MEM, SImode,
1892 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1895 RTX_FRAME_RELATED_P (insn) = 1;
1898 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* PIC setup via the shared thunk label (kept in sync with
   asm_output_function_prefix).  */
1900 xops[0] = pic_offset_table_rtx;
1901 if (pic_label_rtx == 0)
1903 pic_label_rtx = (rtx) gen_label_rtx ();
1904 sprintf (pic_label_name, "LPR%d", pic_label_no++);
1905 LABEL_NAME (pic_label_rtx) = pic_label_name;
1907 xops[1] = gen_rtx (MEM, QImode, gen_rtx (SYMBOL_REF, Pmode, LABEL_NAME (pic_label_rtx)));
1909 emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
1910 emit_insn (gen_prologue_set_got (xops[0],
1911 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1912 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1914 else if (pic_reg_used)
/* call/pop sequence expressed as RTL.  */
1916 xops[0] = pic_offset_table_rtx;
1917 xops[1] = (rtx) gen_label_rtx ();
1919 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1920 emit_insn (gen_pop (xops[0]));
1921 emit_insn (gen_prologue_set_got (xops[0],
1922 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1923 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
1927 /* Restore function stack, frame, and registers. */
/* Body elided in this listing; presumably empty since the real work is done
   by ix86_expand_epilogue below -- confirm in the full source.  */
1930 function_epilogue (file, size)
1936 /* Return 1 if it is appropriate to emit `ret' instructions in the
1937 body of a function. Do this only if the epilogue is simple, needing a
1938 couple of insns. Prior to reloading, we can't tell how many registers
1939 must be saved, so return 0 then. Return 0 if there is no frame
1940 marker to de-allocate.
1942 If NON_SAVING_SETJMP is defined and true, then it is not possible
1943 for the epilogue to be simple, so return 0. This is a special case
1944 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1945 until final, but jump_optimize may need to know sooner if a
1949 ix86_can_use_return_insn_p ()
1953 int reglimit = (frame_pointer_needed
1954 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1955 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1956 || current_function_uses_const_pool);
1958 #ifdef NON_SAVING_SETJMP
1959 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
1963 if (! reload_completed)
/* Count saved registers using the same condition the prologue uses.  */
1966 for (regno = reglimit - 1; regno >= 0; regno--)
1967 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1968 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
/* A bare `ret' works only if there is nothing to pop and no frame pointer
   to tear down.  */
1971 return nregs == 0 || ! frame_pointer_needed;
1975 /* This function generates the assembly code for function exit.
1976 FILE is an stdio stream to output the code to.
1977 SIZE is an int: how many units of temporary storage to deallocate. */
/* RTL epilogue: restore saved registers, tear down the frame, release the
   stack, and emit the return (popping caller args if required).  The
   commented-out output_asm_insn calls show the asm each RTL insn stands
   for.  */
1980 ix86_expand_epilogue ()
1983 register int nregs, limit;
1986 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1987 || current_function_uses_const_pool);
1988 long tsize = get_frame_size ();
1990 /* Compute the number of registers to pop */
1992 limit = (frame_pointer_needed
1993 ? FRAME_POINTER_REGNUM
1994 : STACK_POINTER_REGNUM);
/* Same liveness condition as the prologue's save loop.  */
1998 for (regno = limit - 1; regno >= 0; regno--)
1999 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2000 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2003 /* sp is often unreliable so we must go off the frame pointer,
2006 /* In reality, we may not care if sp is unreliable, because we can
2007 restore the register relative to the frame pointer. In theory,
2008 since each move is the same speed as a pop, and we don't need the
2009 leal, this is faster. For now restore multiple registers the old
2012 offset = -tsize - (nregs * UNITS_PER_WORD);
2014 xops[2] = stack_pointer_rtx;
2016 if (nregs > 1 || ! frame_pointer_needed)
/* Point sp at the saved-register area (lea off the frame pointer), then
   pop the registers in ascending regno order (reverse of the pushes).  */
2018 if (frame_pointer_needed)
2020 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
2021 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
2022 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
2025 for (regno = 0; regno < limit; regno++)
2026 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2027 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2029 xops[0] = gen_rtx (REG, SImode, regno);
2030 emit_insn (gen_pop (xops[0]));
2031 /* output_asm_insn ("pop%L0 %0", xops);*/
/* Alternative path: restore each register with a frame-pointer-relative
   move instead of pops.  */
2035 for (regno = 0; regno < limit; regno++)
2036 if ((regs_ever_live[regno] && ! call_used_regs[regno])
2037 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
2039 xops[0] = gen_rtx (REG, SImode, regno);
2040 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
2041 emit_move_insn (xops[0], xops[1]);
2042 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
2046 if (frame_pointer_needed)
2048 /* If not an i386, mov & pop is faster than "leave". */
2050 if (TARGET_USE_LEAVE)
2051 emit_insn (gen_leave());
2052 /* output_asm_insn ("leave", xops);*/
2055 xops[0] = frame_pointer_rtx;
2056 xops[1] = stack_pointer_rtx;
2057 emit_insn (gen_epilogue_set_stack_ptr());
2058 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
2059 emit_insn (gen_pop (xops[0]));
2060 /* output_asm_insn ("pop%L0 %0", xops);*/
2065 /* If there is no frame pointer, we must still release the frame. */
2067 xops[0] = GEN_INT (tsize);
2068 emit_insn (gen_rtx (SET, SImode,
2070 gen_rtx (PLUS, SImode,
2073 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2076 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2077 if (profile_block_flag == 2)
2079 FUNCTION_BLOCK_PROFILER_EXIT(file);
2083 if (current_function_pops_args && current_function_args_size)
2085 xops[1] = GEN_INT (current_function_pops_args);
2087 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2088 asked to pop more, pop return address, do explicit add, and jump
2089 indirectly to the caller. */
2091 if (current_function_pops_args >= 32768)
2093 /* ??? Which register to use here? */
2094 xops[0] = gen_rtx (REG, SImode, 2)
2095 emit_insn (gen_pop (xops[0]));
2096 /* output_asm_insn ("pop%L0 %0", xops);*/
2097 emit_insn (gen_rtx (SET, SImode,
2099 gen_rtx (PLUS, SImode,
2102 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2103 emit_jump_insn (xops[0]);
2104 /* output_asm_insn ("jmp %*%0", xops);*/
2107 emit_jump_insn (gen_return_pop_internal (xops[1]));
2108 /* output_asm_insn ("ret %1", xops);*/
2111 /* output_asm_insn ("ret", xops);*/
2112 emit_jump_insn (gen_return_internal ());
2116 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2117 that is a valid memory address for an instruction.
2118 The MODE argument is the machine mode for the MEM expression
2119 that wants to use this address.
2121 On x86, legitimate addresses are:
2122 base movl (base),reg
2123 displacement movl disp,reg
2124 base + displacement movl disp(base),reg
2125 index + base movl (base,index),reg
2126 (index + base) + displacement movl disp(base,index),reg
2127 index*scale movl (,index,scale),reg
2128 index*scale + disp movl disp(,index,scale),reg
2129 index*scale + base movl (base,index,scale),reg
2130 (index*scale + base) + disp movl disp(base,index,scale),reg
2132 In each case, scale can be 1, 2, 4, 8. */
2134 /* This is exactly the same as print_operand_addr, except that
2135 it recognizes addresses instead of printing them.
2137 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2138 convert common non-canonical forms to canonical form so that they will
/* Debug-only rejection reporter: prints MSG (and, in elided lines,
   presumably the offending rtx) when -mdebug-addr is on.  */
2141 #define ADDR_INVALID(msg,insn) \
2143 if (TARGET_DEBUG_ADDR) \
2145 fprintf (stderr, msg); \
/* Decompose ADDR into base, index, scale, and displacement, then validate
   each part.  STRICT selects hard-reg checking (after reload).  */
2151 legitimate_address_p (mode, addr, strict)
2152 enum machine_mode mode;
2156 rtx base = NULL_RTX;
2157 rtx indx = NULL_RTX;
2158 rtx scale = NULL_RTX;
2159 rtx disp = NULL_RTX;
2161 if (TARGET_DEBUG_ADDR)
2164 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2165 GET_MODE_NAME (mode), strict);
2170 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2171 base = addr; /* base reg */
2173 else if (GET_CODE (addr) == PLUS)
2175 rtx op0 = XEXP (addr, 0);
2176 rtx op1 = XEXP (addr, 1);
2177 enum rtx_code code0 = GET_CODE (op0);
2178 enum rtx_code code1 = GET_CODE (op1);
2180 if (code0 == REG || code0 == SUBREG)
2182 if (code1 == REG || code1 == SUBREG)
2184 indx = op0; /* index + base */
2190 base = op0; /* base + displacement */
2195 else if (code0 == MULT)
2197 indx = XEXP (op0, 0);
2198 scale = XEXP (op0, 1);
2200 if (code1 == REG || code1 == SUBREG)
2201 base = op1; /* index*scale + base */
2204 disp = op1; /* index*scale + disp */
2207 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2209 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2210 scale = XEXP (XEXP (op0, 0), 1);
2211 base = XEXP (op0, 1);
2215 else if (code0 == PLUS)
2217 indx = XEXP (op0, 0); /* index + base + disp */
2218 base = XEXP (op0, 1);
2224 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2229 else if (GET_CODE (addr) == MULT)
2231 indx = XEXP (addr, 0); /* index*scale */
2232 scale = XEXP (addr, 1);
2236 disp = addr; /* displacement */
2238 /* Allow arg pointer and stack pointer as index if there is not scaling */
/* sp/argp cannot be a hardware index register, so swap them into the base
   position (elided swap, presumably) when no scale is present.  */
2239 if (base && indx && !scale
2240 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2247 /* Validate base register */
2248 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2249 is one word out of a two word structure, which is represented internally
2253 if (GET_CODE (base) != REG)
2255 ADDR_INVALID ("Base is not a register.\n", base);
2259 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2260 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2262 ADDR_INVALID ("Base is not valid.\n", base);
2267 /* Validate index register */
2268 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2269 is one word out of a two word structure, which is represented internally
2273 if (GET_CODE (indx) != REG)
2275 ADDR_INVALID ("Index is not a register.\n", indx);
2279 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2280 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2282 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale without an index is not constructible above; internal error.  */
2287 abort (); /* scale w/o index invalid */
2289 /* Validate scale factor */
2292 HOST_WIDE_INT value;
2294 if (GET_CODE (scale) != CONST_INT)
2296 ADDR_INVALID ("Scale is not valid.\n", scale);
2300 value = INTVAL (scale);
2301 if (value != 1 && value != 2 && value != 4 && value != 8)
2303 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2308 /* Validate displacement
2309 Constant pool addresses must be handled special. They are
2310 considered legitimate addresses, but only if not used with regs.
2311 When printed, the output routines know to print the reference with the
2312 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2315 if (GET_CODE (disp) == SYMBOL_REF
2316 && CONSTANT_POOL_ADDRESS_P (disp)
2321 else if (!CONSTANT_ADDRESS_P (disp))
2323 ADDR_INVALID ("Displacement is not valid.\n", disp);
2327 else if (GET_CODE (disp) == CONST_DOUBLE)
2329 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under -fpic a symbolic displacement is only valid relative to the PIC
   register (as base, or as unscaled index).  */
2333 else if (flag_pic && SYMBOLIC_CONST (disp)
2334 && base != pic_offset_table_rtx
2335 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2337 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2341 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2342 && (base != NULL_RTX || indx != NULL_RTX))
2344 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2349 if (TARGET_DEBUG_ADDR)
2350 fprintf (stderr, "Address is valid.\n");
2352 /* Everything looks valid, return true */
2357 /* Return a legitimate reference for ORIG (an address) using the
2358 register REG. If REG is 0, a new pseudo is generated.
2360 There are three types of references that must be handled:
2362 1. Global data references must load the address from the GOT, via
2363 the PIC reg. An insn is emitted to do this load, and the reg is
2366 2. Static data references must compute the address as an offset
2367 from the GOT, whose base is in the PIC reg. An insn is emitted to
2368 compute the address into a reg, and the reg is returned. Static
2369 data objects have SYMBOL_REF_FLAG set to differentiate them from
2370 global data objects.
2372 3. Constant pool addresses must be handled special. They are
2373 considered legitimate addresses, but only if not used with regs.
2374 When printed, the output routines know to print the reference with the
2375 PIC reg, even though the PIC reg doesn't appear in the RTL.
2377 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2378 reg also appears in the address (except for constant pool references,
2381 "switch" statements also require special handling when generating
2382 PIC code. See comments by the `casesi' insn in i386.md for details. */
2385 legitimize_pic_address (orig, reg)
2392 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant pool entries are legitimate as-is (case 3 above); elided early
   return, presumably.  */
2394 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2399 reg = gen_reg_rtx (Pmode);
/* Static data / labels: pic_reg + offset.  Global data: load the address
   from the GOT slot at pic_reg + orig.  */
2401 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2402 || GET_CODE (addr) == LABEL_REF)
2403 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2405 new = gen_rtx (MEM, Pmode,
2406 gen_rtx (PLUS, Pmode,
2407 pic_offset_table_rtx, orig));
2409 emit_move_insn (reg, new);
2411 current_function_uses_pic_offset_table = 1;
2414 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2418 if (GET_CODE (addr) == CONST)
2420 addr = XEXP (addr, 0);
2421 if (GET_CODE (addr) != PLUS)
/* Already based on the PIC register: nothing to do (elided, presumably
   returns ORIG).  */
2425 if (XEXP (addr, 0) == pic_offset_table_rtx)
2429 reg = gen_reg_rtx (Pmode);
/* Legitimize the two halves of the sum independently, then recombine.  */
2431 base = legitimize_pic_address (XEXP (addr, 0), reg);
2432 addr = legitimize_pic_address (XEXP (addr, 1),
2433 base == reg ? NULL_RTX : reg);
2435 if (GET_CODE (addr) == CONST_INT)
2436 return plus_constant (base, INTVAL (addr));
/* Keep the constant outermost: (base + x) + const.  */
2438 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2440 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2441 addr = XEXP (addr, 1);
2443 return gen_rtx (PLUS, Pmode, base, addr);
2449 /* Emit insns to move operands[1] into operands[0]. */
/* PIC-aware move helper: rewrites a symbolic source through
   legitimize_pic_address, using operands[0] itself as the scratch during
   reload (no new pseudos may be created then).  */
2452 emit_pic_move (operands, mode)
2454 enum machine_mode mode;
2456 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic constant cannot be stored straight into memory; force it into
   a register first.  */
2458 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2459 operands[1] = (rtx) force_reg (SImode, operands[1]);
2461 operands[1] = legitimize_pic_address (operands[1], temp);
2465 /* Try machine-dependent ways of modifying an illegitimate address
2466 to be legitimate. If we find one, return the new, valid address.
2467 This macro is used in only one place: `memory_address' in explow.c.
2469 OLDX is the address as it was before break_out_memory_refs was called.
2470 In some cases it is useful to look at this to decide what needs to be done.
2472 MODE and WIN are passed so that this macro can use
2473 GO_IF_LEGITIMATE_ADDRESS.
2475 It is always safe for this macro to do nothing. It exists to recognize
2476 opportunities to optimize the output.
2478 For the 80386, we handle X+REG by loading X into a register R and
2479 using R+REG. R will go in a general reg and indexing will be used.
2480 However, if REG is a broken-out memory address or multiplication,
2481 nothing needs to be done because REG can certainly go in a general reg.
2483 When -fpic is used, special handling is needed for symbolic references.
2484 See comments by legitimize_pic_address in i386.c for details. */
2487 legitimize_address (x, oldx, mode)
2490 enum machine_mode mode;
2495 if (TARGET_DEBUG_ADDR)
2497 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2501 if (flag_pic && SYMBOLIC_CONST (x))
2502 return legitimize_pic_address (x, 0);
2504 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
/* exact_log2 returns -1 for non-powers-of-two; the unsigned cast makes that
   huge, so "< 4" accepts only shift counts whose 1<<log is 1,2,4,8.  */
2505 if (GET_CODE (x) == ASHIFT
2506 && GET_CODE (XEXP (x, 1)) == CONST_INT
2507 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2510 x = gen_rtx (MULT, Pmode,
2511 force_reg (Pmode, XEXP (x, 0)),
2512 GEN_INT (1 << log));
2515 if (GET_CODE (x) == PLUS)
2517 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2518 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2519 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2520 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2523 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2524 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2525 GEN_INT (1 << log));
2528 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2529 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2530 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2533 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2534 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2535 GEN_INT (1 << log));
2538 /* Put multiply first if it isn't already */
2539 if (GET_CODE (XEXP (x, 1)) == MULT)
2541 rtx tmp = XEXP (x, 0);
2542 XEXP (x, 0) = XEXP (x, 1);
2547 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2548 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2549 created by virtual register instantiation, register elimination, and
2550 similar optimizations. */
2551 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2554 x = gen_rtx (PLUS, Pmode,
2555 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2556 XEXP (XEXP (x, 1), 1));
2559 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2560 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2561 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2562 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2563 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2564 && CONSTANT_P (XEXP (x, 1)))
2566 rtx constant, other;
/* One of the two constants must be a CONST_INT for plus_constant below;
   pick whichever is, and keep the other symbolic part in OTHER.  */
2568 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2570 constant = XEXP (x, 1);
2571 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2573 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2575 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2576 other = XEXP (x, 1);
2584 x = gen_rtx (PLUS, Pmode,
2585 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2586 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2587 plus_constant (other, INTVAL (constant)));
/* Stop as soon as canonicalization has produced a legitimate address.  */
2591 if (changed && legitimate_address_p (mode, x, FALSE))
2594 if (GET_CODE (XEXP (x, 0)) == MULT)
2597 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2600 if (GET_CODE (XEXP (x, 1)) == MULT)
2603 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2607 && GET_CODE (XEXP (x, 1)) == REG
2608 && GET_CODE (XEXP (x, 0)) == REG)
2611 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2614 x = legitimize_pic_address (x, 0);
2617 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: force the non-register half into a fresh register so the
   result is the indexable form reg+reg.  */
2620 if (GET_CODE (XEXP (x, 0)) == REG)
2622 register rtx temp = gen_reg_rtx (Pmode);
2623 register rtx val = force_operand (XEXP (x, 1), temp);
2625 emit_move_insn (temp, val);
2631 else if (GET_CODE (XEXP (x, 1)) == REG)
2633 register rtx temp = gen_reg_rtx (Pmode);
2634 register rtx val = force_operand (XEXP (x, 0), temp);
2636 emit_move_insn (temp, val);
2647 /* Print an integer constant expression in assembler syntax. Addition
2648 and subtraction are the only arithmetic that may appear in these
2649 expressions. FILE is the stdio stream to write to, X is the rtx, and
2650 CODE is the operand print code from the output string. */
/* Emit a PIC-decorated constant expression (symbol, label, const int,
   const double, or +/- combinations thereof) in assembler syntax.
   FILE is the output stream, X the rtx, CODE the %-operand print code.
   NOTE(review): this listing elides source lines (gaps in the original
   numbering) -- case labels, braces and some statements are not visible.  */
2653 output_pic_addr_const (file, x, code)
2660 switch (GET_CODE (x))
2671 if (GET_CODE (x) == SYMBOL_REF)
2672 assemble_name (file, XSTR (x, 0));
2675 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2676 CODE_LABEL_NUMBER (XEXP (x, 0)));
2677 assemble_name (asm_out_file, buf);
/* Choose the PIC relocation suffix: constant-pool entries and labels are
   addressed @GOTOFF (GOT-relative, here via %ebx); code 'P' forces @PLT
   (function call through the PLT); otherwise @GOT for symbols without
   SYMBOL_REF_FLAG set -- presumably "not known local"; confirm the flag's
   meaning against the rest of the backend.  */
2680 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2681 fprintf (file, "@GOTOFF(%%ebx)");
2682 else if (code == 'P')
2683 fprintf (file, "@PLT");
2684 else if (GET_CODE (x) == LABEL_REF)
2685 fprintf (file, "@GOTOFF");
2686 else if (! SYMBOL_REF_FLAG (x))
2687 fprintf (file, "@GOT");
2689 fprintf (file, "@GOTOFF");
2694 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2695 assemble_name (asm_out_file, buf);
/* NOTE(review): "%d" with INTVAL may truncate if HOST_WIDE_INT is wider
   than int on the host -- worth confirming on 64-bit hosts.  */
2699 fprintf (file, "%d", INTVAL (x));
2703 /* This used to output parentheses around the expression,
2704 but that does not work on the 386 (either ATT or BSD assembler). */
2705 output_pic_addr_const (file, XEXP (x, 0), code);
2709 if (GET_MODE (x) == VOIDmode)
2711 /* We can use %d if the number is <32 bits and positive. */
2712 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2713 fprintf (file, "0x%x%08x",
2714 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2716 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2719 /* We can't handle floating point constants;
2720 PRINT_OPERAND must handle them. */
2721 output_operand_lossage ("floating constant misused");
2725 /* Some assemblers need integer constants to appear last (eg masm). */
2726 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2728 output_pic_addr_const (file, XEXP (x, 1), code);
/* Only emit '+' for non-negative constants; negative ones print their
   own leading '-'.  */
2729 if (INTVAL (XEXP (x, 0)) >= 0)
2730 fprintf (file, "+");
2731 output_pic_addr_const (file, XEXP (x, 0), code);
2735 output_pic_addr_const (file, XEXP (x, 0), code);
2736 if (INTVAL (XEXP (x, 1)) >= 0)
2737 fprintf (file, "+");
2738 output_pic_addr_const (file, XEXP (x, 1), code);
2743 output_pic_addr_const (file, XEXP (x, 0), code);
2744 fprintf (file, "-");
2745 output_pic_addr_const (file, XEXP (x, 1), code);
2749 output_operand_lossage ("invalid expression as operand");
2753 /* Append the correct conditional move suffix which corresponds to CODE */
/* Write the x86 condition suffix (e, ne, g, le, a, b, ...) for rtx
   comparison CODE to FILE.  MODE selects the integer table (MODE_INT,
   signed suffixes g/l plus unsigned a/b) or the FP table (MODE_FLOAT,
   which only uses unsigned-style suffixes since 387 results arrive via
   the flags from sahf/fcomi).
   NOTE(review): this listing elides lines -- the case labels preceding
   each fputs, and the handling under CC_Z_IN_NOT_C, are not visible.  */
2756 put_condition_code (code, mode, file)
2758 enum mode_class mode;
2761 if (mode == MODE_INT)
2765 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2771 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2777 fputs ("ge", file); return;
2779 fputs ("g", file); return;
2781 fputs ("le", file); return;
2783 fputs ("l", file); return;
2785 fputs ("ae", file); return;
2787 fputs ("a", file); return;
2789 fputs ("be", file); return;
2791 fputs ("b", file); return;
2792 default: output_operand_lossage ("Invalid %%C operand");
2794 else if (mode == MODE_FLOAT)
2798 fputs ("ne", file); return;
2800 fputs ("e", file); return;
2802 fputs ("nb", file); return;
2804 fputs ("nbe", file); return;
2806 fputs ("be", file); return;
2808 fputs ("b", file); return;
2810 fputs ("nb", file); return;
2812 fputs ("nbe", file); return;
2814 fputs ("be", file); return;
2816 fputs ("b", file); return;
2817 default: output_operand_lossage ("Invalid %%C operand");
2822 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2823 C -- print opcode suffix for set/cmov insn.
2824 c -- like C, but print reversed condition
2825 F -- print opcode suffix for fcmov insn.
2826 f -- like C, but print reversed condition
2827 R -- print the prefix for register names.
2828 z -- print the opcode suffix for the size of the current operand.
2829 * -- print a star (in certain assembler syntax)
2830 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2831 c -- don't print special prefixes before constant operands.
2832 J -- print the appropriate jump operand.
2833 s -- print a shift double count, followed by the assemblers argument
2835 b -- print the QImode name of the register for the indicated operand.
2836 %b0 would print %al if operands[0] is reg 0.
2837 w -- likewise, print the HImode name of the register.
2838 k -- likewise, print the SImode name of the register.
2839 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2840 y -- print "st(0)" instead of "st" as a register.
2841 P -- print as a PIC constant
/* Print operand X to FILE under %-code CODE (see the table in the
   comment block above: size suffixes L/W/B/Q/S/T, condition suffixes
   C/c/F/f, jump mnemonics J, register-name forms b/w/k/h/y, etc.).
   NOTE(review): this listing elides lines -- most case labels, braces
   and some statements of the big switch are not visible here.  */
2845 print_operand (file, x, code)
/* Explicit size-suffix codes: each PUT_OP_SIZE pairs the %-code with
   the AT&T suffix character.  */
2860 PUT_OP_SIZE (code, 'l', file);
2864 PUT_OP_SIZE (code, 'w', file);
2868 PUT_OP_SIZE (code, 'b', file);
2872 PUT_OP_SIZE (code, 'l', file);
2876 PUT_OP_SIZE (code, 's', file);
2880 PUT_OP_SIZE (code, 't', file);
2884 /* 387 opcodes don't get size suffixes if the operands are
2887 if (STACK_REG_P (x))
2890 /* this is the size of op from size of operand */
2891 switch (GET_MODE_SIZE (GET_MODE (x)))
2894 PUT_OP_SIZE ('B', 'b', file);
2898 PUT_OP_SIZE ('W', 'w', file);
/* 4-byte operands: 's' for SFmode, 'l' otherwise.  */
2902 if (GET_MODE (x) == SFmode)
2904 PUT_OP_SIZE ('S', 's', file);
2908 PUT_OP_SIZE ('L', 'l', file);
2912 PUT_OP_SIZE ('T', 't', file);
/* 8-byte integer: gas accepts 'q', other assemblers get 'l'.  */
2916 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
2918 #ifdef GAS_MNEMONICS
2919 PUT_OP_SIZE ('Q', 'q', file);
2922 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2926 PUT_OP_SIZE ('Q', 'l', file);
2939 switch (GET_CODE (x))
2941 /* These conditions are appropriate for testing the result
2942 of an arithmetic operation, not for a compare operation.
2943 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2944 CC_Z_IN_NOT_C false and not floating point. */
2945 case NE: fputs ("jne", file); return;
2946 case EQ: fputs ("je", file); return;
2947 case GE: fputs ("jns", file); return;
2948 case LT: fputs ("js", file); return;
2949 case GEU: fputs ("jmp", file); return;
2950 case GTU: fputs ("jne", file); return;
2951 case LEU: fputs ("je", file); return;
2952 case LTU: fputs ("#branch never", file); return;
2954 /* no matching branches for GT nor LE */
2959 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2961 PRINT_OPERAND (file, x, 0);
2962 fputs (AS2C (,) + 1, file);
2966 /* This is used by the conditional move instructions. */
2968 put_condition_code (GET_CODE (x), MODE_INT, file);
2971 /* like above, but reverse condition */
2973 put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
2977 put_condition_code (GET_CODE (x), MODE_FLOAT, file);
2980 /* like above, but reverse condition */
2982 put_condition_code (reverse_condition (GET_CODE (x)),
2990 sprintf (str, "invalid operand code `%c'", code);
2991 output_operand_lossage (str);
/* No (or consumed) %-code: print the operand itself by kind.  */
2995 if (GET_CODE (x) == REG)
2997 PRINT_REG (x, code, file);
2999 else if (GET_CODE (x) == MEM)
3001 PRINT_PTR (x, file);
3002 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
3005 output_pic_addr_const (file, XEXP (x, 0), code);
3007 output_addr_const (file, XEXP (x, 0));
3010 output_address (XEXP (x, 0));
/* SFmode constant: emit its IEEE single bit pattern as an immediate.  */
3012 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
3014 REAL_VALUE_TYPE r; long l;
3015 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3016 REAL_VALUE_TO_TARGET_SINGLE (r, l);
3017 PRINT_IMMED_PREFIX (file);
3018 fprintf (file, "0x%x", l);
3020 /* These float cases don't actually occur as immediate operands. */
3021 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
3023 REAL_VALUE_TYPE r; char dstr[30];
3024 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3025 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3026 fprintf (file, "%s", dstr);
3028 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
3030 REAL_VALUE_TYPE r; char dstr[30];
3031 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3032 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
3033 fprintf (file, "%s", dstr);
/* Constants: '$' prefix for immediates, OFFSET prefix for addresses.  */
3039 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3040 PRINT_IMMED_PREFIX (file);
3041 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
3042 || GET_CODE (x) == LABEL_REF)
3043 PRINT_OFFSET_PREFIX (file);
3046 output_pic_addr_const (file, x, code);
3048 output_addr_const (file, x);
3052 /* Print a memory operand whose address is ADDR. */
/* Print memory address ADDR to FILE in assembler syntax, decomposing a
   PLUS/MULT tree into displacement + base + index*scale.
   NOTE(review): this listing elides lines -- case labels, braces and
   several statements are not visible here.  */
3055 print_operand_address (file, addr)
3059 register rtx reg1, reg2, breg, ireg;
3062 switch (GET_CODE (addr))
/* Bare register: print with the register-name prefix.  */
3066 fprintf (file, "%se", RP);
3067 fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: peel off a constant displacement first, then classify the
   remaining operands as MULT (index*scale) or REG.  */
3077 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3079 offset = XEXP (addr, 0);
3080 addr = XEXP (addr, 1);
3082 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3084 offset = XEXP (addr, 1);
3085 addr = XEXP (addr, 0);
3087 if (GET_CODE (addr) != PLUS) ;
3088 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3090 reg1 = XEXP (addr, 0);
3091 addr = XEXP (addr, 1);
3093 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3095 reg1 = XEXP (addr, 1);
3096 addr = XEXP (addr, 0);
3098 else if (GET_CODE (XEXP (addr, 0)) == REG)
3100 reg1 = XEXP (addr, 0);
3101 addr = XEXP (addr, 1);
3103 else if (GET_CODE (XEXP (addr, 1)) == REG)
3105 reg1 = XEXP (addr, 1);
3106 addr = XEXP (addr, 0);
3108 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3110 if (reg1 == 0) reg1 = addr;
3116 if (addr != 0) abort ();
/* Decide which register is index (ireg) and which is base (breg).  */
3119 if ((reg1 && GET_CODE (reg1) == MULT)
3120 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3125 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3131 if (ireg != 0 || breg != 0)
3138 output_pic_addr_const (file, addr, 0);
3140 else if (GET_CODE (addr) == LABEL_REF)
3141 output_asm_label (addr);
3144 output_addr_const (file, addr);
3147 if (ireg != 0 && GET_CODE (ireg) == MULT)
3149 scale = INTVAL (XEXP (ireg, 1));
3150 ireg = XEXP (ireg, 0);
3153 /* The stack pointer can only appear as a base register,
3154 never an index register, so exchange the regs if it is wrong. */
3156 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3165 /* output breg+ireg*scale */
3166 PRINT_B_I_S (breg, ireg, scale, file);
/* Bare MULT: scaled index with no base; emit a 0 displacement.  */
3173 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3175 scale = INTVAL (XEXP (addr, 0));
3176 ireg = XEXP (addr, 1);
3180 scale = INTVAL (XEXP (addr, 1));
3181 ireg = XEXP (addr, 0);
3183 output_addr_const (file, const0_rtx);
3184 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
/* Default: absolute address.  Small literal ints print directly.  */
3189 if (GET_CODE (addr) == CONST_INT
3190 && INTVAL (addr) < 0x8000
3191 && INTVAL (addr) >= -0x8000)
3192 fprintf (file, "%d", INTVAL (addr));
3196 output_pic_addr_const (file, addr, 0);
3198 output_addr_const (file, addr);
3203 /* Set the cc_status for the results of an insn whose pattern is EXP.
3204 On the 80386, we assume that only test and compare insns, as well
3205 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3206 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3207 Also, we assume that jumps, moves and sCOND don't affect the condition
3208 codes. All else clobbers the condition codes, by assumption.
3210 We assume that ALL integer add, minus, etc. instructions effect the
3211 condition codes. This MUST be consistent with i386.md.
3213 We don't record any float test or compare - the redundant test &
3214 compare check in final.c does not handle stack-like regs correctly. */
/* Update the global cc_status to reflect how insn pattern EXP affects
   the condition codes (see the policy comment above this function).
   NOTE(review): this listing elides lines -- CC_STATUS_INIT calls,
   returns and braces between the visible branches are missing here.  */
3217 notice_update_cc (exp)
3220 if (GET_CODE (exp) == SET)
3222 /* Jumps do not alter the cc's. */
3223 if (SET_DEST (exp) == pc_rtx)
3225 #ifdef IS_STACK_MODE
3226 /* Moving into a memory of stack_mode may have been moved
3227 in between the use and set of cc0 by loop_spl(). So
3228 old value of cc.status must be retained */
3229 if(GET_CODE(SET_DEST(exp))==MEM
3230 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3235 /* Moving register or memory into a register:
3236 it doesn't alter the cc's, but it might invalidate
3237 the RTX's which we remember the cc's came from.
3238 (Note that moving a constant 0 or 1 MAY set the cc's). */
3239 if (REG_P (SET_DEST (exp))
3240 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3241 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3243 if (cc_status.value1
3244 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3245 cc_status.value1 = 0;
3246 if (cc_status.value2
3247 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3248 cc_status.value2 = 0;
3251 /* Moving register into memory doesn't alter the cc's.
3252 It may invalidate the RTX's which we remember the cc's came from. */
/* NOTE(review): `a && b || c` parses as `(a && b) || c`, so when
   cc_status.value1 is 0 the reg_mentioned_p call still runs with a
   null rtx.  Likely the intent was `a && (b || c)` -- confirm before
   relying on this; same pattern two lines below for value2.  */
3253 if (GET_CODE (SET_DEST (exp)) == MEM
3254 && (REG_P (SET_SRC (exp))
3255 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3257 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3258 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3259 cc_status.value1 = 0;
3260 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3261 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3262 cc_status.value2 = 0;
3265 /* Function calls clobber the cc's. */
3266 else if (GET_CODE (SET_SRC (exp)) == CALL)
3271 /* Tests and compares set the cc's in predictable ways. */
3272 else if (SET_DEST (exp) == cc0_rtx)
3275 cc_status.value1 = SET_SRC (exp);
3278 /* Certain instructions effect the condition codes. */
3279 else if (GET_MODE (SET_SRC (exp)) == SImode
3280 || GET_MODE (SET_SRC (exp)) == HImode
3281 || GET_MODE (SET_SRC (exp)) == QImode)
3282 switch (GET_CODE (SET_SRC (exp)))
3284 case ASHIFTRT: case LSHIFTRT:
3286 /* Shifts on the 386 don't set the condition codes if the
3287 shift count is zero. */
3288 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3293 /* We assume that the CONST_INT is non-zero (this rtx would
3294 have been deleted if it were zero. */
3296 case PLUS: case MINUS: case NEG:
3297 case AND: case IOR: case XOR:
3298 cc_status.flags = CC_NO_OVERFLOW;
3299 cc_status.value1 = SET_SRC (exp);
3300 cc_status.value2 = SET_DEST (exp);
/* PARALLEL whose first element is a SET: treat like the SET cases.  */
3311 else if (GET_CODE (exp) == PARALLEL
3312 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3314 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3316 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3319 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3320 cc_status.flags |= CC_IN_80387;
3322 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3333 /* Split one or more DImode RTL references into pairs of SImode
3334 references. The RTL can be REG, offsettable MEM, integer constant, or
3335 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3336 split and "num" is its length. lo_half and hi_half are output arrays
3337 that parallel "operands". */
/* Split each of the NUM DImode rtxes in OPERANDS into SImode low/high
   halves, written to LO_HALF[i]/HI_HALF[i].  Handles hard REG pairs
   (regno, regno+1), constants (via split_double), and offsettable MEMs
   (low word at offset 0, high word at offset 4 -- little endian).
   NOTE(review): lines are elided from this listing (loop header, braces
   and the presumable abort for other operand kinds are not visible).  */
3340 split_di (operands, num, lo_half, hi_half)
3343 rtx lo_half[], hi_half[];
3347 if (GET_CODE (operands[num]) == REG)
3349 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3350 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3352 else if (CONSTANT_P (operands[num]))
3354 split_double (operands[num], &lo_half[num], &hi_half[num]);
3356 else if (offsettable_memref_p (operands[num]))
3358 lo_half[num] = operands[num];
3359 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3366 /* Return 1 if this is a valid binary operation on a 387.
3367 OP is the expression matched, and MODE is its mode. */
/* Predicate: nonzero iff OP is a float-mode binary operation the 387
   can do directly (the switch's case labels are elided in this listing,
   but per the comment above: PLUS/MINUS/MULT/DIV).  MODE, if not
   VOIDmode, must match OP's mode.  */
3370 binary_387_op (op, mode)
3372 enum machine_mode mode;
3374 if (mode != VOIDmode && mode != GET_MODE (op))
3377 switch (GET_CODE (op))
3383 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3391 /* Return 1 if this is a valid shift or rotate operation on a 386.
3392 OP is the expression matched, and MODE is its mode. */
3397 enum machine_mode mode;
3399 rtx operand = XEXP (op, 0);
3401 if (mode != VOIDmode && mode != GET_MODE (op))
3404 if (GET_MODE (operand) != GET_MODE (op)
3405 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3408 return (GET_CODE (op) == ASHIFT
3409 || GET_CODE (op) == ASHIFTRT
3410 || GET_CODE (op) == LSHIFTRT
3411 || GET_CODE (op) == ROTATE
3412 || GET_CODE (op) == ROTATERT);
3415 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3416 MODE is not used. */
/* Predicate: nonzero iff OP is a COMPARE rtx whose mode is VOIDmode.
   The MODE parameter is present only for the predicate calling
   convention and is ignored.  */
3419 VOIDmode_compare_op (op, mode)
3421 enum machine_mode mode;
3423 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3426 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3427 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3428 is the expression of the binary operation. The output may either be
3429 emitted here, or returned to the caller, like all output_* functions.
3431 There is no guarantee that the operands are the same mode, as they
3432 might be within FLOAT or FLOAT_EXTEND expressions. */
/* Build (in static BUF) and return the assembler template for the 387
   binary op in operands[3] of INSN: picks fadd/fiadd, fsub/fisub, etc.
   depending on whether an operand is integer-mode, then appends the
   operand/suffix forms ('p' pop variants when a stack reg dies, 'r'
   reversed forms for non-commutative ops).
   NOTE(review): lines are elided from this listing -- the base_op
   assignments, case labels and several braces are not visible.  */
3435 output_387_binary_op (insn, operands)
3441 static char buf[100];
/* Select integer vs. float opcode family per operation.  */
3443 switch (GET_CODE (operands[3]))
3446 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3447 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3454 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3455 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3462 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3463 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3470 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3471 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3481 strcpy (buf, base_op);
3483 switch (GET_CODE (operands[3]))
/* Commutative ops (presumably PLUS/MULT): canonicalize so the operand
   equal to the destination is operands[1].  */
3487 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3490 operands[2] = operands[1];
3494 if (GET_CODE (operands[2]) == MEM)
3495 return strcat (buf, AS1 (%z2,%2));
3497 if (NON_STACK_REG_P (operands[1]))
3499 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3502 else if (NON_STACK_REG_P (operands[2]))
3504 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* Pop variant when operands[2]'s stack register dies in this insn.  */
3508 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3509 return strcat (buf, AS2 (p,%2,%0));
3511 if (STACK_TOP_P (operands[0]))
3512 return strcat (buf, AS2C (%y2,%0));
3514 return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (presumably MINUS/DIV): 'r' reverses operand
   order when the memory/integer operand is operands[1].  */
3518 if (GET_CODE (operands[1]) == MEM)
3519 return strcat (buf, AS1 (r%z1,%1));
3521 if (GET_CODE (operands[2]) == MEM)
3522 return strcat (buf, AS1 (%z2,%2));
3524 if (NON_STACK_REG_P (operands[1]))
3526 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3529 else if (NON_STACK_REG_P (operands[2]))
3531 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3535 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3538 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3539 return strcat (buf, AS2 (rp,%2,%0));
3541 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3542 return strcat (buf, AS2 (p,%1,%0));
3544 if (STACK_TOP_P (operands[0]))
3546 if (STACK_TOP_P (operands[1]))
3547 return strcat (buf, AS2C (%y2,%0));
3549 return strcat (buf, AS2 (r,%y1,%0));
3551 else if (STACK_TOP_P (operands[1]))
3552 return strcat (buf, AS2C (%1,%0));
3554 return strcat (buf, AS2 (r,%2,%0));
3561 /* Output code for INSN to convert a float to a signed int. OPERANDS
3562 are the insn operands. The output may be SFmode or DFmode and the
3563 input operand may be SImode or DImode. As a special case, make sure
3564 that the 387 stack top dies if the output mode is DImode, because the
3565 hardware requires this. */
/* Emit code for INSN converting a 387 value to a signed integer:
   save the FP control word, set rounding to truncate (the 0x0c bits
   via the GEN_INT(12) mask written into the high byte), fist/fistp the
   value, then restore the control word (returned template).  For
   DImode output the stack top must die (hardware requires fistp).
   NOTE(review): lines are elided from this listing -- aborts and
   braces between the visible statements are not shown.  */
3568 output_fix_trunc (insn, operands)
3572 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3575 if (! STACK_TOP_P (operands[1]) ||
3576 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
3579 xops[0] = GEN_INT (12);
3580 xops[1] = operands[4];
/* Store control word, copy it, force round-toward-zero, reload it.  */
3582 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3583 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3584 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3585 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3586 output_asm_insn (AS1 (fldc%W3,%3), operands);
3588 if (NON_STACK_REG_P (operands[0]))
3589 output_to_reg (operands[0], stack_top_dies);
3590 else if (GET_CODE (operands[0]) == MEM)
/* fistp pops the 387 stack; plain fist leaves the value on it.  */
3593 output_asm_insn (AS1 (fistp%z0,%0), operands);
3595 output_asm_insn (AS1 (fist%z0,%0), operands);
3600 return AS1 (fldc%W2,%2);
3603 /* Output code for INSN to compare OPERANDS. The two operands might
3604 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3605 expression. If the compare is in mode CCFPEQmode, use an opcode that
3606 will not fault if a qNaN is present. */
/* Emit the 387 float compare for INSN: fcomi family when TARGET_CMOVE
   and both operands are stack regs, fcompp when both stack operands
   die, otherwise fcom/fucom/ficom variants; then hand off to
   output_fp_cc0_set to move the result into the CPU flags.
   CCFPEQmode selects the unordered (qNaN-safe) fucom forms.
   NOTE(review): lines are elided from this listing -- operand swaps,
   the pop-suffix logic and several braces are not visible.  */
3609 output_float_compare (insn, operands)
3614 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3615 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
3618 if (TARGET_CMOVE && STACK_REG_P (operands[1]))
3620 cc_status.flags |= CC_FCOMI;
3621 cc_prev_status.flags &= ~CC_TEST_AX;
/* Compare requires the top of stack as first operand; swap and mark
   CC_REVERSED when operands[0] is not %st(0).  */
3624 if (! STACK_TOP_P (operands[0]))
3627 operands[0] = operands[1];
3629 cc_status.flags |= CC_REVERSED;
3632 if (! STACK_TOP_P (operands[0]))
3635 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3637 if (STACK_REG_P (operands[1])
3639 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3640 && REGNO (operands[1]) != FIRST_STACK_REG)
3642 /* If both the top of the 387 stack dies, and the other operand
3643 is also a stack register that dies, then this must be a
3644 `fcompp' float compare */
3646 if (unordered_compare)
3647 output_asm_insn ("fucompp", operands);
3649 output_asm_insn ("fcompp", operands);
3653 static char buf[100];
3655 /* Decide if this is the integer or float compare opcode, or the
3656 unordered float compare. */
3658 if (unordered_compare)
3659 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fucomi" : "fucom");
3660 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3661 strcpy (buf, (cc_status.flags & CC_FCOMI) ? "fcomi" : "fcom");
3663 strcpy (buf, "ficom");
3665 /* Modify the opcode if the 387 stack is to be popped. */
3670 if (NON_STACK_REG_P (operands[1]))
3671 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3672 else if (cc_status.flags & CC_FCOMI)
3676 xops[0] = operands[0];
3677 xops[1] = operands[1];
3678 xops[2] = operands[0];
3680 output_asm_insn (strcat (buf, AS2 (%z1,%y1,%2)), xops);
3684 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3687 /* Now retrieve the condition code. */
3689 return output_fp_cc0_set (insn);
3692 /* Output opcodes to transfer the results of FP compare or test INSN
3693 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3694 result of the compare or test is unordered, no comparison operator
3695 succeeds except NE. Return an output template, if any. */
/* Move the 387 compare/test result from the FPU status word into the
   CPU flags: fnstsw into %ax, then (for TARGET_IEEE_FP) mask/compare
   the C0/C2/C3 bits (0x01/0x04/0x40 -> masks 0x45, 0x44, 0x05, 0x40)
   per the comparison code of the next cc0 user so unordered results
   fail every comparison except NE.  Returns the final template.
   NOTE(review): lines are elided from this listing -- case labels,
   sahf/test emission and several returns are not visible.  */
3698 output_fp_cc0_set (insn)
3702 rtx unordered_label;
3706 xops[0] = gen_rtx (REG, HImode, 0);
3707 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3709 if (! TARGET_IEEE_FP)
3711 if (!(cc_status.flags & CC_REVERSED))
/* Peek at the next cc0 user to learn which comparison is wanted.  */
3713 next = next_cc0_user (insn);
3715 if (GET_CODE (next) == JUMP_INSN
3716 && GET_CODE (PATTERN (next)) == SET
3717 && SET_DEST (PATTERN (next)) == pc_rtx
3718 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3720 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3722 else if (GET_CODE (PATTERN (next)) == SET)
3724 code = GET_CODE (SET_SRC (PATTERN (next)));
3730 if (code == GT || code == LT || code == EQ || code == NE
3731 || code == LE || code == GE)
3732 { /* We will test eax directly */
3733 cc_status.flags |= CC_TEST_AX;
/* IEEE path: re-derive the comparison code, also handling cmov-style
   SETs and PARALLELs.  */
3740 next = next_cc0_user (insn);
3741 if (next == NULL_RTX)
3744 if (GET_CODE (next) == JUMP_INSN
3745 && GET_CODE (PATTERN (next)) == SET
3746 && SET_DEST (PATTERN (next)) == pc_rtx
3747 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3749 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3751 else if (GET_CODE (PATTERN (next)) == SET)
3753 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3754 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3755 else code = GET_CODE (SET_SRC (PATTERN (next)));
3757 else if (GET_CODE (PATTERN (next)) == PARALLEL
3758 && GET_CODE (XVECEXP (PATTERN (next), 0, 0)) == SET)
3760 if (GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0))) == IF_THEN_ELSE)
3761 code = GET_CODE (XEXP (SET_SRC (XVECEXP (PATTERN (next), 0, 0)), 0));
3762 else code = GET_CODE (SET_SRC (XVECEXP (PATTERN (next), 0, 0)));
/* Per-comparison masking of the status bits in %ah (xops[0] = %al,
   %h0 = %ah).  The elided case labels presumably map GT/LT/GE/LE/EQ/NE
   to these mask/compare sequences -- confirm against full source.  */
3767 xops[0] = gen_rtx (REG, QImode, 0);
3772 xops[1] = GEN_INT (0x45);
3773 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3778 xops[1] = GEN_INT (0x45);
3779 xops[2] = GEN_INT (0x01);
3780 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3781 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3786 xops[1] = GEN_INT (0x05);
3787 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3792 xops[1] = GEN_INT (0x45);
3793 xops[2] = GEN_INT (0x40);
3794 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3795 output_asm_insn (AS1 (dec%B0,%h0), xops);
3796 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3801 xops[1] = GEN_INT (0x45);
3802 xops[2] = GEN_INT (0x40);
3803 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3804 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3809 xops[1] = GEN_INT (0x44);
3810 xops[2] = GEN_INT (0x40);
3811 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3812 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
3826 #define MAX_386_STACK_LOCALS 2
3828 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3830 /* Define the structure for the machine field in struct function. */
3831 struct machine_function
3833 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3836 /* Functions to save and restore i386_stack_locals.
3837 These will be called, via pointer variables,
3838 from push_function_context and pop_function_context. */
/* push_function_context hook: allocate P->machine and snapshot the
   global i386_stack_locals array into it, so a nested function can
   reuse the globals.  NOTE(review): xmalloc result is assumed non-NULL
   (xmalloc conventionally aborts on failure).  */
3841 save_386_machine_status (p)
3844 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3845 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3846 sizeof i386_stack_locals);
/* pop_function_context hook: copy the saved i386_stack_locals snapshot
   from P->machine back into the global array.  NOTE(review): any
   free(p->machine) is not visible in this elided listing.  */
3850 restore_386_machine_status (p)
3853 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3854 sizeof i386_stack_locals);
3858 /* Clear stack slot assignments remembered from previous functions.
3859 This is called from INIT_EXPANDERS once before RTL is emitted for each
/* INIT_EXPANDERS hook: forget all per-function stack-slot rtxes by
   nulling every i386_stack_locals entry, and install the save/restore
   hooks used around nested functions.  */
3863 clear_386_stack_locals ()
3865 enum machine_mode mode;
3868 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3869 mode = (enum machine_mode) ((int) mode + 1))
3870 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3871 i386_stack_locals[(int) mode][n] = NULL_RTX;
3873 /* Arrange to save and restore i386_stack_locals around nested functions. */
3874 save_machine_status = save_386_machine_status;
3875 restore_machine_status = restore_386_machine_status;
3878 /* Return a MEM corresponding to a stack slot with mode MODE.
3879 Allocate a new slot if necessary.
3881 The RTL for a function can have several slots available: N is
3882 which slot to use. */
/* Return the cached MEM for stack slot N of mode MODE, allocating a
   new stack local on first use.  N must be in [0, MAX_386_STACK_LOCALS);
   the abort for out-of-range N is elided from this listing.  */
3885 assign_386_stack_local (mode, n)
3886 enum machine_mode mode;
3889 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3892 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3893 i386_stack_locals[(int) mode][n]
3894 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3896 return i386_stack_locals[(int) mode][n];
3902 enum machine_mode mode;
3904 return (GET_CODE (op) == MULT);
3909 enum machine_mode mode;
3911 return (GET_CODE (op) == DIV);
3916 /* Create a new copy of an rtx.
3917 Recursively copies the operands of the rtx,
3918 except for those few rtx codes that are sharable.
3919 Doesn't share CONST */
3927 register RTX_CODE code;
3928 register char *format_ptr;
3930 code = GET_CODE (orig);
3943 /* SCRATCH must be shared because they represent distinct values. */
3948 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3949 a LABEL_REF, it isn't sharable. */
3950 if (GET_CODE (XEXP (orig, 0)) == PLUS
3951 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3952 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3956 /* A MEM with a constant address is not sharable. The problem is that
3957 the constant address may need to be reloaded. If the mem is shared,
3958 then reloading one copy of this mem will cause all copies to appear
3959 to have been reloaded. */
3962 copy = rtx_alloc (code);
3963 PUT_MODE (copy, GET_MODE (orig));
3964 copy->in_struct = orig->in_struct;
3965 copy->volatil = orig->volatil;
3966 copy->unchanging = orig->unchanging;
3967 copy->integrated = orig->integrated;
3969 copy->is_spill_rtx = orig->is_spill_rtx;
3971 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3973 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3975 switch (*format_ptr++)
3978 XEXP (copy, i) = XEXP (orig, i);
3979 if (XEXP (orig, i) != NULL)
3980 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3985 XEXP (copy, i) = XEXP (orig, i);
3990 XVEC (copy, i) = XVEC (orig, i);
3991 if (XVEC (orig, i) != NULL)
3993 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3994 for (j = 0; j < XVECLEN (copy, i); j++)
3995 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
4000 XWINT (copy, i) = XWINT (orig, i);
4004 XINT (copy, i) = XINT (orig, i);
4009 XSTR (copy, i) = XSTR (orig, i);
4020 /* try to rewrite a memory address to make it valid */
4022 rewrite_address (mem_rtx)
4025 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
4027 int offset_adjust = 0;
4028 int was_only_offset = 0;
4029 rtx mem_addr = XEXP (mem_rtx, 0);
4030 char *storage = (char *) oballoc (0);
4032 int is_spill_rtx = 0;
4034 in_struct = MEM_IN_STRUCT_P (mem_rtx);
4035 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
4037 if (GET_CODE (mem_addr) == PLUS &&
4038 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
4039 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
4040 { /* this part is utilized by the combiner */
4042 gen_rtx (PLUS, GET_MODE (mem_addr),
4043 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
4045 XEXP (XEXP (mem_addr, 1), 0)),
4046 XEXP (XEXP (mem_addr, 1), 1));
4047 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
4049 XEXP (mem_rtx, 0) = ret_rtx;
4050 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
4056 /* this part is utilized by loop.c */
4057 /* If the address contains PLUS (reg,const) and this pattern is invalid
4058 in this case - try to rewrite the address to make it valid intel1
4060 storage = (char *) oballoc (0);
4061 index_rtx = base_rtx = offset_rtx = NULL;
4062 /* find the base index and offset elements of the memory address */
4063 if (GET_CODE (mem_addr) == PLUS)
4065 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
4067 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4069 base_rtx = XEXP (mem_addr, 1);
4070 index_rtx = XEXP (mem_addr, 0);
4074 base_rtx = XEXP (mem_addr, 0);
4075 offset_rtx = XEXP (mem_addr, 1);
4078 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
4080 index_rtx = XEXP (mem_addr, 0);
4081 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
4083 base_rtx = XEXP (mem_addr, 1);
4087 offset_rtx = XEXP (mem_addr, 1);
4090 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4093 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4094 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4095 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4096 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4097 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4098 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4099 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4101 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4102 offset_rtx = XEXP (mem_addr, 1);
4103 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4104 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4108 offset_rtx = XEXP (mem_addr, 1);
4109 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4110 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4113 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4115 was_only_offset = 1;
4118 offset_rtx = XEXP (mem_addr, 1);
4119 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4120 if (offset_adjust == 0)
4122 XEXP (mem_rtx, 0) = offset_rtx;
4123 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4133 else if (GET_CODE (mem_addr) == MULT)
4135 index_rtx = mem_addr;
4142 if (index_rtx && GET_CODE (index_rtx) == MULT)
4144 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4149 scale_rtx = XEXP (index_rtx, 1);
4150 scale = INTVAL (scale_rtx);
4151 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4153 /* now find which of the elements are invalid and try to fix them */
4154 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4156 offset_adjust = INTVAL (index_rtx) * scale;
4157 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4158 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4160 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4161 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4163 offset_rtx = copy_all_rtx (offset_rtx);
4164 XEXP (XEXP (offset_rtx, 0), 1) =
4165 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4166 if (!CONSTANT_P (offset_rtx))
4173 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4176 gen_rtx (CONST, GET_MODE (offset_rtx),
4177 gen_rtx (PLUS, GET_MODE (offset_rtx),
4179 gen_rtx (CONST_INT, 0, offset_adjust)));
4180 if (!CONSTANT_P (offset_rtx))
4186 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4188 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4190 else if (!offset_rtx)
4192 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4194 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4195 XEXP (mem_rtx, 0) = offset_rtx;
4198 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4199 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4200 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4202 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4203 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4205 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4207 offset_adjust += INTVAL (base_rtx);
4210 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4211 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4212 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4214 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4215 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4219 if (!LEGITIMATE_INDEX_P (index_rtx)
4220 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4228 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
4234 if (offset_adjust != 0)
4238 if (GET_CODE (offset_rtx) == CONST &&
4239 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4241 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4242 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4244 offset_rtx = copy_all_rtx (offset_rtx);
4245 XEXP (XEXP (offset_rtx, 0), 1) =
4246 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4247 if (!CONSTANT_P (offset_rtx))
4254 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4257 gen_rtx (CONST, GET_MODE (offset_rtx),
4258 gen_rtx (PLUS, GET_MODE (offset_rtx),
4260 gen_rtx (CONST_INT, 0, offset_adjust)));
4261 if (!CONSTANT_P (offset_rtx))
4267 else if (GET_CODE (offset_rtx) == CONST_INT)
4269 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4279 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
4287 if (GET_CODE (offset_rtx) == CONST_INT &&
4288 INTVAL (offset_rtx) == 0)
4290 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4291 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4297 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4298 gen_rtx (PLUS, GET_MODE (base_rtx),
4299 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4307 if (GET_CODE (offset_rtx) == CONST_INT &&
4308 INTVAL (offset_rtx) == 0)
4310 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4314 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4315 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4325 if (GET_CODE (offset_rtx) == CONST_INT &&
4326 INTVAL (offset_rtx) == 0)
4328 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4333 gen_rtx (PLUS, GET_MODE (offset_rtx),
4334 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4341 if (GET_CODE (offset_rtx) == CONST_INT &&
4342 INTVAL (offset_rtx) == 0)
4344 ret_rtx = index_rtx;
4348 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4357 if (GET_CODE (offset_rtx) == CONST_INT &&
4358 INTVAL (offset_rtx) == 0)
4364 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4367 else if (was_only_offset)
4369 ret_rtx = offset_rtx;
4377 XEXP (mem_rtx, 0) = ret_rtx;
4378 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4390 /* return 1 if the first insn to set cc before insn also sets the register
4391 reg_rtx - otherwise return 0 */
/* NOTE(review): this extract elides the K&R parameter declarations and
   the structural lines (braces, return statements) of this function.  */
4393 last_to_set_cc (reg_rtx, insn)
/* Scan backwards from INSN, one insn at a time.  */
4396 rtx prev_insn = PREV_INSN (insn);
/* Notes carry no executable pattern; presumably just skipped here.  */
4400 if (GET_CODE (prev_insn) == NOTE)
4403 else if (GET_CODE (prev_insn) == INSN)
/* Only plain single-SET insns are analyzable by this scan.  */
4405 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* First insn found that writes REG_RTX: succeed iff its source
   expression also sets the condition code.  */
4408 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4410 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* An unrelated intervening insn that cannot be proven to leave cc
   alone ends the search (failure branch elided in this extract).  */
4416 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
/* Keep walking backwards.  */
4423 prev_insn = PREV_INSN (prev_insn);
/* Return nonzero if evaluating rtx PAT is known to leave the condition
   code unchanged.  Dispatches on the rtx code of PAT; the case arms are
   elided in this extract.  */
4431 doesnt_set_condition_code (pat)
4434 switch (GET_CODE (pat))
/* Return nonzero if evaluating rtx PAT sets the condition code as a
   side effect.  Dispatches on the rtx code of PAT; the case arms are
   elided in this extract.  */
4448 sets_condition_code (pat)
4451 switch (GET_CODE (pat))
/* Operand predicate: accept OP only when it is a CONST_INT in the
   inclusive range 0..32.  MODE is not examined by the visible test.  */
4475 str_immediate_operand (op, mode)
4477 enum machine_mode mode;
4479 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* NOTE(review): the function header is elided in this extract; from the
   test below this is the body of a floating-point-insn predicate
   (presumably is_fp_insn -- TODO confirm): true when INSN is a single
   SET whose destination has mode DFmode, SFmode or XFmode.  */
4491 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4492 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4493 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4494 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4503 Return 1 if the mode of the SET_DEST of insn is floating point
4504 and it is not an fld or a move from memory to memory.
4505 Otherwise return 0 */
/* NOTE(review): the function header (presumably is_fp_dest) and the
   surrounding braces/returns are elided in this extract.  The visible
   condition: INSN is a single SET whose destination is a floating-point
   hard register and whose source is not a memory load (an fld).  */
4510 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4511 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4512 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4513 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4514 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4515 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* BUG FIX: SET_SRC must be applied to the SET pattern, not to the insn
   itself -- SET_SRC is XEXP (x, 1) of a SET rtx, so SET_SRC (insn)
   reads an unrelated field of the INSN.  Every sibling access in this
   same condition correctly goes through PATTERN (insn).  */
4516 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4525 Return 1 if the mode of the SET_DEST floating point and is memory
4526 and the source is a register.
/* NOTE(review): the function header (presumably is_fp_store) and the
   braces/returns are elided in this extract.  Visible condition: INSN
   is a single SET that stores a register into a memory location whose
   mode is DFmode, SFmode or XFmode (i.e. an fst of some kind).  */
4532 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4533 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4534 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4535 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4536 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4537 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4547 Return 1 if dep_insn sets a register which insn uses as a base
4548 or index to reference memory.
4549 otherwise return 0 */
/* NOTE(review): K&R parameter declarations and braces are elided in
   this extract.  */
4552 agi_dependent (insn, dep_insn)
/* Case 1: DEP_INSN writes a register directly -- check whether INSN
   uses that register inside a memory address.  */
4555 if (GET_CODE (dep_insn) == INSN
4556 && GET_CODE (PATTERN (dep_insn)) == SET
4557 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4559 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* Case 2: DEP_INSN is a push, which implicitly modifies the stack
   pointer -- an address-generation dependence exists if INSN addresses
   memory through the stack pointer.  */
4562 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4563 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4564 && push_operand (SET_DEST (PATTERN (dep_insn)),
4565 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4567 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4575 Return 1 if reg is used in rtl as a base or index for a memory ref
4576 otherwise return 0. */
/* NOTE(review): declarations, braces and the leaf-rtx early-exit cases
   are elided in this extract.  */
4579 reg_mentioned_in_mem (reg, rtl)
4584 register enum rtx_code code;
4589 code = GET_CODE (rtl);
/* A MEM whose address mentions REG anywhere counts as a use.  */
4607 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over every sub-expression ('e') and every element
   of every rtx vector ('E') of RTL.  */
4610 fmt = GET_RTX_FORMAT (code);
4611 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4616 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4618 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4623 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4630 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4632 operands[0] = result, initialized with the startaddress
4633 operands[1] = alignment of the address.
4634 operands[2] = scratch register, initialized with the startaddress when
4635 not aligned, otherwise undefined
4637 This is just the body. It needs the initialisations mentioned above and
4638 some address computing at the end. These things are done in i386.md. */
/* NOTE(review): K&R parameter declarations, braces and some structural
   lines are elided in this extract; the function's closing lines fall
   past the end of the visible chunk.  */
4641 output_strlen_unroll (operands)
/* Operand table for output_asm_insn; %N in the templates below refers
   to xops[N].  */
4646 xops[0] = operands[0]; /* Result */
4647 /* operands[1]; * Alignment */
4648 xops[1] = operands[2]; /* Scratch */
4649 xops[2] = GEN_INT (0);
4650 xops[3] = GEN_INT (2);
4651 xops[4] = GEN_INT (3);
4652 xops[5] = GEN_INT (4);
4653 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4654 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4655 xops[8] = gen_label_rtx (); /* label of main loop */
4656 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4657 xops[9] = gen_label_rtx (); /* pentium optimisation */
4658 xops[10] = gen_label_rtx (); /* end label 2 */
4659 xops[11] = gen_label_rtx (); /* end label 1 */
4660 xops[12] = gen_label_rtx (); /* end label */
4661 /* xops[13] * Temporary used */
/* Byte-lane masks for testing one byte of a 32-bit word at a time.  */
4662 xops[14] = GEN_INT (0xff);
4663 xops[15] = GEN_INT (0xff00);
4664 xops[16] = GEN_INT (0xff0000);
4665 xops[17] = GEN_INT (0xff000000);
4667 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4669 /* is there a known alignment and is it less than 4 */
4670 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4672 /* is there a known alignment and is it not 2 */
4673 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4675 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4676 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4678 /* leave just the 3 lower bits */
4679 /* if this is a q-register, then the high part is used later */
4680 /* therefore use andl rather than andb */
4681 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4682 /* is aligned to 4-byte address when zero */
4683 output_asm_insn (AS1 (je,%l8), xops);
4684 /* side-effect even Parity when %eax == 3 */
4685 output_asm_insn (AS1 (jp,%6), xops);
4687 /* is it aligned to 2 bytes ? */
4688 if (QI_REG_P (xops[1]))
4689 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4691 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4692 output_asm_insn (AS1 (je,%7), xops);
4696 /* since the alignment is 2, we have to check 2 or 0 bytes */
4698 /* check if is aligned to 4 - byte */
4699 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4700 /* is aligned to 4-byte address when zero */
4701 output_asm_insn (AS1 (je,%l8), xops);
/* QImode reference through the result pointer for the byte compares.  */
4704 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4705 /* now, compare the bytes */
4706 /* compare with the high part of a q-reg gives shorter code */
4707 if (QI_REG_P (xops[1]))
4709 /* compare the first n unaligned byte on a byte per byte basis */
4710 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4711 /* when zero we reached the end */
4712 output_asm_insn (AS1 (je,%l12), xops);
4713 /* increment the address */
4714 output_asm_insn (AS1 (inc%L0,%0), xops);
4716 /* not needed with an alignment of 2 */
4717 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4719 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4720 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4721 output_asm_insn (AS1 (je,%l12), xops);
4722 output_asm_insn (AS1 (inc%L0,%0), xops);
4724 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4726 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Non-q-register variant: compare each leading byte against the
   immediate 0 instead of against the scratch's high byte.  */
4730 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4731 output_asm_insn (AS1 (je,%l12), xops);
4732 output_asm_insn (AS1 (inc%L0,%0), xops);
4734 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4735 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4736 output_asm_insn (AS1 (je,%l12), xops);
4737 output_asm_insn (AS1 (inc%L0,%0), xops);
4739 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4740 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4742 output_asm_insn (AS1 (je,%l12), xops);
4743 output_asm_insn (AS1 (inc%L0,%0), xops);
4746 /* Generate loop to check 4 bytes at a time */
4747 /* IMHO it is not a good idea to align this loop. It gives only */
4748 /* huge programs, but does not help to speed up */
4749 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4750 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Load the next aligned 32-bit word into the scratch register.  */
4752 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4753 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4755 if (QI_REG_P (xops[1]))
4757 /* On i586 it is faster to combine the hi- and lo- part as
4758 a kind of lookahead. If anding both yields zero, then one
4759 of both *could* be zero, otherwise none of both is zero;
4760 this saves one instruction, on i486 this is slower
4761 tested with P-90, i486DX2-66, AMD486DX2-66 */
4764 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4765 output_asm_insn (AS1 (jne,%l9), xops);
4768 /* check first byte */
4769 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4770 output_asm_insn (AS1 (je,%l12), xops);
4772 /* check second byte */
4773 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4774 output_asm_insn (AS1 (je,%l11), xops);
4777 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4781 /* check first byte */
4782 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4783 output_asm_insn (AS1 (je,%l12), xops);
4785 /* check second byte */
4786 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4787 output_asm_insn (AS1 (je,%l11), xops);
4790 /* check third byte */
4791 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4792 output_asm_insn (AS1 (je,%l10), xops);
4794 /* check fourth byte and increment address */
4795 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4796 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4797 output_asm_insn (AS1 (jne,%l8), xops);
4799 /* now generate fixups when the compare stops within a 4-byte word */
4800 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4802 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4803 output_asm_insn (AS1 (inc%L0,%0), xops);
4805 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4806 output_asm_insn (AS1 (inc%L0,%0), xops);
4808 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));