/* Subroutines used for code generation on the DEC Alpha.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "integrate.h"
#include "target-def.h"
#include "langhooks.h"

/* Specify which cpu to schedule for.  */

enum processor_type alpha_cpu;

static const char * const alpha_cpu_name[] =
{
  "ev4", "ev5", "ev6"
};

/* Specify how accurate floating-point traps need to be.  */

enum alpha_trap_precision alpha_tp;

/* Specify the floating-point rounding mode.  */

enum alpha_fp_rounding_mode alpha_fprm;

/* Specify which things cause traps.  */

enum alpha_fp_trap_mode alpha_fptm;

/* Specify bit size of immediate TLS offsets.  */

int alpha_tls_size = 32;

/* Strings decoded into the above options.  */

const char *alpha_cpu_string;	/* -mcpu= */
const char *alpha_tune_string;	/* -mtune= */
const char *alpha_tp_string;	/* -mtrap-precision=[p|s|i] */
const char *alpha_fprm_string;	/* -mfp-rounding-mode=[n|m|c|d] */
const char *alpha_fptm_string;	/* -mfp-trap-mode=[n|u|su|sui] */
const char *alpha_mlat_string;	/* -mmemory-latency= */
const char *alpha_tls_size_string; /* -mtls-size=[16|32|64] */
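
/* For example (added illustration; not in the original source): a command
   line such as "-mfp-rounding-mode=d -mtrap-precision=i" is decoded by the
   option parsing below into alpha_fprm = ALPHA_FPRM_DYN and
   alpha_tp = ALPHA_TP_INSN.  */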
/* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */

struct alpha_compare alpha_compare;

/* Non-zero if inside of a function, because the Alpha asm can't
   handle .files inside of functions.  */

static int inside_function = FALSE;

/* The number of cycles of latency we should assume on memory reads.  */

int alpha_memory_latency = 3;

/* Whether the function needs the GP.  */

static int alpha_function_needs_gp;

/* The alias set for prologue/epilogue register save/restore.  */

static int alpha_sr_alias_set;

/* The assembler name of the current function.  */

static const char *alpha_fnname;

/* The next explicit relocation sequence number.  */
int alpha_next_sequence_number = 1;

/* The literal and gpdisp sequence numbers for this insn, as printed
   by %# and %* respectively.  */
int alpha_this_literal_sequence_number;
int alpha_this_gpdisp_sequence_number;

/* Declarations of static functions.  */
static int tls_symbolic_operand_1
  PARAMS ((rtx, enum machine_mode, int, int));
static enum tls_model tls_symbolic_operand_type
  PARAMS ((rtx));
static bool decl_in_text_section
  PARAMS ((tree));
static bool alpha_in_small_data_p
  PARAMS ((tree));
static void alpha_encode_section_info
  PARAMS ((tree, int));
static const char *alpha_strip_name_encoding
  PARAMS ((const char *));
static int some_small_symbolic_operand_1
  PARAMS ((rtx *, void *));
static int split_small_symbolic_operand_1
  PARAMS ((rtx *, void *));
static void alpha_set_memflags_1
  PARAMS ((rtx, int, int, int));
static rtx alpha_emit_set_const_1
  PARAMS ((rtx, enum machine_mode, HOST_WIDE_INT, int));
static void alpha_expand_unaligned_load_words
  PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
static void alpha_expand_unaligned_store_words
  PARAMS ((rtx *out_regs, rtx smem, HOST_WIDE_INT words, HOST_WIDE_INT ofs));
static void alpha_init_builtins
  PARAMS ((void));
static rtx alpha_expand_builtin
  PARAMS ((tree, rtx, rtx, enum machine_mode, int));
static void alpha_sa_mask
  PARAMS ((unsigned long *imaskP, unsigned long *fmaskP));
static int find_lo_sum
  PARAMS ((rtx *, void *));
static int alpha_does_function_need_gp
  PARAMS ((void));
static int alpha_ra_ever_killed
  PARAMS ((void));
static const char *get_trap_mode_suffix
  PARAMS ((void));
static const char *get_round_mode_suffix
  PARAMS ((void));
static const char *get_some_local_dynamic_name
  PARAMS ((void));
static int get_some_local_dynamic_name_1
  PARAMS ((rtx *, void *));
static rtx set_frame_related_p
  PARAMS ((void));
static const char *alpha_lookup_xfloating_lib_func
  PARAMS ((enum rtx_code));
static int alpha_compute_xfloating_mode_arg
  PARAMS ((enum rtx_code, enum alpha_fp_rounding_mode));
static void alpha_emit_xfloating_libcall
  PARAMS ((const char *, rtx, rtx[], int, rtx));
static rtx alpha_emit_xfloating_compare
  PARAMS ((enum rtx_code, rtx, rtx));
static void alpha_output_function_end_prologue
  PARAMS ((FILE *));
static int alpha_adjust_cost
  PARAMS ((rtx, rtx, rtx, int));
static int alpha_issue_rate
  PARAMS ((void));
static int alpha_use_dfa_pipeline_interface
  PARAMS ((void));
static int alpha_multipass_dfa_lookahead
  PARAMS ((void));

#ifdef OBJECT_FORMAT_ELF
static void alpha_elf_select_rtx_section
  PARAMS ((enum machine_mode, rtx, unsigned HOST_WIDE_INT));
#endif

#if TARGET_ABI_OPEN_VMS
static bool alpha_linkage_symbol_p
  PARAMS ((const char *symname));
static void alpha_write_linkage
  PARAMS ((FILE *, const char *, tree));
#endif

static struct machine_function * alpha_init_machine_status
  PARAMS ((void));

static void unicosmk_output_deferred_case_vectors PARAMS ((FILE *));
static void unicosmk_gen_dsib PARAMS ((unsigned long *imaskP));
static void unicosmk_output_ssib PARAMS ((FILE *, const char *));
static int unicosmk_need_dex PARAMS ((rtx));

/* Get the number of args of a function in one of two ways.  */
#if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK
#define NUM_ARGS current_function_args_info.num_args
#else
#define NUM_ARGS current_function_args_info
#endif

/* Initialize the GCC target structure.  */
#if TARGET_ABI_OPEN_VMS
const struct attribute_spec vms_attribute_table[];
static unsigned int vms_section_type_flags PARAMS ((tree, const char *, int));
static void vms_asm_named_section PARAMS ((const char *, unsigned int));
static void vms_asm_out_constructor PARAMS ((rtx, int));
static void vms_asm_out_destructor PARAMS ((rtx, int));
# undef TARGET_ATTRIBUTE_TABLE
# define TARGET_ATTRIBUTE_TABLE vms_attribute_table
# undef TARGET_SECTION_TYPE_FLAGS
# define TARGET_SECTION_TYPE_FLAGS vms_section_type_flags
#endif

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p
#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO alpha_encode_section_info
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING alpha_strip_name_encoding

#if TARGET_ABI_UNICOSMK
static void unicosmk_asm_named_section PARAMS ((const char *, unsigned int));
static void unicosmk_insert_attributes PARAMS ((tree, tree *));
static unsigned int unicosmk_section_type_flags PARAMS ((tree, const char *,
							 int));
static void unicosmk_unique_section PARAMS ((tree, int));
# undef TARGET_INSERT_ATTRIBUTES
# define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
# undef TARGET_SECTION_TYPE_FLAGS
# define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
# undef TARGET_ASM_UNIQUE_SECTION
# define TARGET_ASM_UNIQUE_SECTION unicosmk_unique_section
#endif

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"

/* Default unaligned ops are provided for ELF systems.  To get unaligned
   data for non-ELF systems, we have to turn off auto alignment.  */
#ifndef OBJECT_FORMAT_ELF
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
#endif

#ifdef OBJECT_FORMAT_ELF
#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section
#endif

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
#undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE \
  alpha_use_dfa_pipeline_interface
#undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD \
  alpha_multipass_dfa_lookahead

#undef TARGET_HAVE_TLS
#define TARGET_HAVE_TLS HAVE_AS_TLS

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS alpha_init_builtins
#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN alpha_expand_builtin

struct gcc_target targetm = TARGET_INITIALIZER;

/* Parse target option strings.  */

static const struct cpu_table {
  const char *const name;
  const enum processor_type processor;
  const int flags;
} cpu_table[] = {
#define EV5_MASK (MASK_CPU_EV5)
#define EV6_MASK (MASK_CPU_EV6|MASK_BWX|MASK_MAX|MASK_FIX)
  { "ev4",	PROCESSOR_EV4, 0 },
  { "ev45",	PROCESSOR_EV4, 0 },
  { "21064",	PROCESSOR_EV4, 0 },
  { "ev5",	PROCESSOR_EV5, EV5_MASK },
  { "21164",	PROCESSOR_EV5, EV5_MASK },
  { "ev56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
  { "21164a",	PROCESSOR_EV5, EV5_MASK|MASK_BWX },
  { "pca56",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
  { "21164PC",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
  { "21164pc",	PROCESSOR_EV5, EV5_MASK|MASK_BWX|MASK_MAX },
  { "ev6",	PROCESSOR_EV6, EV6_MASK },
  { "21264",	PROCESSOR_EV6, EV6_MASK },
  { "ev67",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
  { "21264a",	PROCESSOR_EV6, EV6_MASK|MASK_CIX },
};
  /* Unicos/Mk doesn't have shared libraries.  */
  if (TARGET_ABI_UNICOSMK && flag_pic)
    warning ("-f%s ignored for Unicos/Mk (not supported)",
	     (flag_pic > 1) ? "PIC" : "pic");

  /* On Unicos/Mk, the native compiler consistently generates /d suffixes
     for floating-point instructions.  Make that the default for this
     target.  */
  if (TARGET_ABI_UNICOSMK)
    alpha_fprm = ALPHA_FPRM_DYN;
  else
    alpha_fprm = ALPHA_FPRM_NORM;

  alpha_tp = ALPHA_TP_PROG;
  alpha_fptm = ALPHA_FPTM_N;

  /* We cannot use su and sui qualifiers for conversion instructions on
     Unicos/Mk.  I'm not sure if this is due to assembler or hardware
     limitations.  Right now, we issue a warning if -mieee is specified
     and then ignore it; eventually, we should either get it right or
     disable the option altogether.  */
  if (TARGET_IEEE)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  if (TARGET_IEEE_WITH_INEXACT)
    {
      if (TARGET_ABI_UNICOSMK)
	warning ("-mieee-with-inexact not supported on Unicos/Mk");
      else
	{
	  alpha_tp = ALPHA_TP_INSN;
	  alpha_fptm = ALPHA_FPTM_SUI;
	}
    }

  if (alpha_tp_string)
    {
      if (! strcmp (alpha_tp_string, "p"))
	alpha_tp = ALPHA_TP_PROG;
      else if (! strcmp (alpha_tp_string, "f"))
	alpha_tp = ALPHA_TP_FUNC;
      else if (! strcmp (alpha_tp_string, "i"))
	alpha_tp = ALPHA_TP_INSN;
      else
	error ("bad value `%s' for -mtrap-precision switch", alpha_tp_string);
    }

  if (alpha_fprm_string)
    {
      if (! strcmp (alpha_fprm_string, "n"))
	alpha_fprm = ALPHA_FPRM_NORM;
      else if (! strcmp (alpha_fprm_string, "m"))
	alpha_fprm = ALPHA_FPRM_MINF;
      else if (! strcmp (alpha_fprm_string, "c"))
	alpha_fprm = ALPHA_FPRM_CHOP;
      else if (! strcmp (alpha_fprm_string, "d"))
	alpha_fprm = ALPHA_FPRM_DYN;
      else
	error ("bad value `%s' for -mfp-rounding-mode switch",
	       alpha_fprm_string);
    }

  if (alpha_fptm_string)
    {
      if (strcmp (alpha_fptm_string, "n") == 0)
	alpha_fptm = ALPHA_FPTM_N;
      else if (strcmp (alpha_fptm_string, "u") == 0)
	alpha_fptm = ALPHA_FPTM_U;
      else if (strcmp (alpha_fptm_string, "su") == 0)
	alpha_fptm = ALPHA_FPTM_SU;
      else if (strcmp (alpha_fptm_string, "sui") == 0)
	alpha_fptm = ALPHA_FPTM_SUI;
      else
	error ("bad value `%s' for -mfp-trap-mode switch", alpha_fptm_string);
    }

  if (alpha_tls_size_string)
    {
      if (strcmp (alpha_tls_size_string, "16") == 0)
	alpha_tls_size = 16;
      else if (strcmp (alpha_tls_size_string, "32") == 0)
	alpha_tls_size = 32;
      else if (strcmp (alpha_tls_size_string, "64") == 0)
	alpha_tls_size = 64;
      else
	error ("bad value `%s' for -mtls-size switch", alpha_tls_size_string);
    }

  alpha_cpu
    = TARGET_CPU_DEFAULT & MASK_CPU_EV6 ? PROCESSOR_EV6
      : (TARGET_CPU_DEFAULT & MASK_CPU_EV5 ? PROCESSOR_EV5 : PROCESSOR_EV4);

  if (alpha_cpu_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_cpu_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX
			       | MASK_CPU_EV5 | MASK_CPU_EV6);
	    target_flags |= cpu_table [i].flags;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_cpu_string);
    }

  if (alpha_tune_string)
    {
      for (i = 0; cpu_table [i].name; i++)
	if (! strcmp (alpha_tune_string, cpu_table [i].name))
	  {
	    alpha_cpu = cpu_table [i].processor;
	    break;
	  }
      if (! cpu_table [i].name)
	error ("bad value `%s' for -mcpu switch", alpha_tune_string);
    }

  /* Do some sanity checks on the above options.  */

  if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
    {
      warning ("trap mode not supported on Unicos/Mk");
      alpha_fptm = ALPHA_FPTM_N;
    }

  if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
      && alpha_tp != ALPHA_TP_INSN && ! TARGET_CPU_EV6)
    {
      warning ("fp software completion requires -mtrap-precision=i");
      alpha_tp = ALPHA_TP_INSN;
    }

  if (TARGET_CPU_EV6)
    {
      /* Except for EV6 pass 1 (not released), we always have precise
	 arithmetic traps, which means we can do software completion
	 without minding trap shadows.  */
      alpha_tp = ALPHA_TP_PROG;
    }

  if (TARGET_FLOAT_VAX)
    {
      if (alpha_fprm == ALPHA_FPRM_MINF || alpha_fprm == ALPHA_FPRM_DYN)
	{
	  warning ("rounding mode not supported for VAX floats");
	  alpha_fprm = ALPHA_FPRM_NORM;
	}
      if (alpha_fptm == ALPHA_FPTM_SUI)
	{
	  warning ("trap mode not supported for VAX floats");
	  alpha_fptm = ALPHA_FPTM_SU;
	}
    }

  {
    char *end;
    int lat;

    if (!alpha_mlat_string)
      alpha_mlat_string = "L1";

    if (ISDIGIT ((unsigned char)alpha_mlat_string[0])
	&& (lat = strtol (alpha_mlat_string, &end, 10), *end == '\0'))
      ;
    else if ((alpha_mlat_string[0] == 'L' || alpha_mlat_string[0] == 'l')
	     && ISDIGIT ((unsigned char)alpha_mlat_string[1])
	     && alpha_mlat_string[2] == '\0')
      {
	static int const cache_latency[][4] =
	{
	  { 3, 30, -1 },	/* ev4 -- Bcache is a guess */
	  { 2, 12, 38 },	/* ev5 -- Bcache from PC164 LMbench numbers */
	  { 3, 12, 30 },	/* ev6 -- Bcache from DS20 LMbench.  */
	};

	lat = alpha_mlat_string[1] - '0';
	if (lat <= 0 || lat > 3 || cache_latency[alpha_cpu][lat-1] == -1)
	  {
	    warning ("L%d cache latency unknown for %s",
		     lat, alpha_cpu_name[alpha_cpu]);
	    lat = 3;
	  }
	else
	  lat = cache_latency[alpha_cpu][lat-1];
      }
    else if (! strcmp (alpha_mlat_string, "main"))
      {
	/* Most current memories have about 370ns latency.  This is
	   a reasonable guess for a fast cpu.  */
	lat = 150;
      }
    else
      {
	warning ("bad value `%s' for -mmemory-latency", alpha_mlat_string);
	lat = 3;
      }

    alpha_memory_latency = lat;
  }
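
  /* Example (added illustration; not in the original source):
     "-mmemory-latency=L2" on an ev5 yields lat = cache_latency[alpha_cpu][1]
     = 12 cycles, while "-mmemory-latency=main" assumes main-memory
     latency.  */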
  /* Default the definition of "small data" to 8 bytes.  */
  if (!g_switch_set)
    g_switch_value = 8;

  /* Infer TARGET_SMALL_DATA from -fpic/-fPIC.  */
  if (flag_pic == 1)
    target_flags |= MASK_SMALL_DATA;
  else if (flag_pic == 2)
    target_flags &= ~MASK_SMALL_DATA;

  /* Align labels and loops for optimal branching.  */
  /* ??? Kludge these by not doing anything if we don't optimize and also if
     we are writing ECOFF symbols to work around a bug in DEC's assembler.  */
  if (optimize > 0 && write_symbols != SDB_DEBUG)
    {
      if (align_loops <= 0)
	align_loops = 16;
      if (align_jumps <= 0)
	align_jumps = 16;
    }
  if (align_functions <= 0)
    align_functions = 16;

  /* Acquire a unique set number for our register saves and restores.  */
  alpha_sr_alias_set = new_alias_set ();

  /* Register variables and functions with the garbage collector.  */

  /* Set up function hooks.  */
  init_machine_status = alpha_init_machine_status;

/* Returns 1 if VALUE is a mask that contains full bytes of zero or ones.  */

  for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
       i++, value >>= 8)
    if ((value & 0xff) != 0 && (value & 0xff) != 0xff)
      return 0;

/* Returns 1 if OP is either the constant zero or a register.  If a
   register, it must be in the proper mode unless MODE is VOIDmode.  */

reg_or_0_operand (op, mode)
     enum machine_mode mode;
  return op == CONST0_RTX (mode) || register_operand (op, mode);

/* Return 1 if OP is a constant in the range of 0-63 (for a shift) or
   any register.  */

reg_or_6bit_operand (op, mode)
     enum machine_mode mode;
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 64)
	  || register_operand (op, mode));

/* Return 1 if OP is an 8-bit constant or any register.  */

reg_or_8bit_operand (op, mode)
     enum machine_mode mode;
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100)
	  || register_operand (op, mode));

/* Return 1 if OP is a constant or any register.  */

reg_or_const_int_operand (op, mode)
     enum machine_mode mode;
  return GET_CODE (op) == CONST_INT || register_operand (op, mode);

/* Return 1 if OP is an 8-bit constant.  */

cint8_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return ((GET_CODE (op) == CONST_INT
	   && (unsigned HOST_WIDE_INT) INTVAL (op) < 0x100));

/* Return 1 if the operand is a valid second operand to an add insn.  */

add_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) == CONST_INT)
    /* Constraints I, J, O and P are covered by K.  */
    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));

  return register_operand (op, mode);

/* Return 1 if the operand is a valid second operand to a sign-extending
   add insn.  */

sext_add_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) == CONST_INT)
    return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
	    || CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'));

  return reg_not_elim_operand (op, mode);

/* Return 1 if OP is the constant 4 or 8.  */

const48_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) == 4 || INTVAL (op) == 8));

/* Return 1 if OP is a valid first operand to an AND insn.  */

and_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
    return (zap_mask (CONST_DOUBLE_LOW (op))
	    && zap_mask (CONST_DOUBLE_HIGH (op)));

  if (GET_CODE (op) == CONST_INT)
    return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
	    || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100
	    || zap_mask (INTVAL (op)));

  return register_operand (op, mode);

/* Return 1 if OP is a valid first operand to an IOR or XOR insn.  */

or_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) == CONST_INT)
    return ((unsigned HOST_WIDE_INT) INTVAL (op) < 0x100
	    || (unsigned HOST_WIDE_INT) ~ INTVAL (op) < 0x100);

  return register_operand (op, mode);

/* Return 1 if OP is a constant that is the width, in bits, of an integral
   mode smaller than DImode.  */

mode_width_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) == 8 || INTVAL (op) == 16
	      || INTVAL (op) == 32 || INTVAL (op) == 64));

/* Return 1 if OP is a constant that is the width of an integral machine mode
   smaller than an integer.  */

mode_mask_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  if (GET_CODE (op) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (op);

      if (value == 0xffffffff)
	return 1;
    }
  else if (HOST_BITS_PER_WIDE_INT == 32 && GET_CODE (op) == CONST_DOUBLE)
    {
      if (CONST_DOUBLE_LOW (op) == 0xffffffff && CONST_DOUBLE_HIGH (op) == 0)
	return 1;
    }

  return 0;

/* Return 1 if OP is a multiple of 8 less than 64.  */

mul8_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return (GET_CODE (op) == CONST_INT
	  && (unsigned HOST_WIDE_INT) INTVAL (op) < 64
	  && (INTVAL (op) & 7) == 0);

/* Return 1 if OP is the zero constant for MODE.  */

const0_operand (op, mode)
     enum machine_mode mode;
  return op == CONST0_RTX (mode);

/* Return 1 if OP is a hard floating-point register.  */

hard_fp_register_operand (op, mode)
     enum machine_mode mode;
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == FLOAT_REGS;

/* Return 1 if OP is a hard general register.  */

hard_int_register_operand (op, mode)
     enum machine_mode mode;
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return GET_CODE (op) == REG && REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS;

/* Return 1 if OP is a register or a constant integer.  */

reg_or_cint_operand (op, mode)
     enum machine_mode mode;
  return (GET_CODE (op) == CONST_INT
	  || register_operand (op, mode));

/* Return 1 if OP is something that can be reloaded into a register;
   if it is a MEM, it need not be valid.  */

some_operand (op, mode)
     enum machine_mode mode;
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  switch (GET_CODE (op))
    {
    case REG:
    case MEM:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case HIGH:
      return 1;

    case SUBREG:
      return some_operand (SUBREG_REG (op), VOIDmode);

    default:
      break;
    }

  return 0;

/* Likewise, but don't accept constants.  */

some_ni_operand (op, mode)
     enum machine_mode mode;
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  return (GET_CODE (op) == REG || GET_CODE (op) == MEM);

/* Return 1 if OP is a valid operand for the source of a move insn.  */

input_operand (op, mode)
     enum machine_mode mode;
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_MODE (op) != mode)
    return 0;

  switch (GET_CODE (op))
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
      if (TARGET_EXPLICIT_RELOCS)
	{
	  /* We don't split symbolic operands into something unintelligible
	     until after reload, but we do not wish non-small, non-global
	     symbolic operands to be reconstructed from their high/lo_sum
	     form.  */
	  return (small_symbolic_operand (op, mode)
		  || global_symbolic_operand (op, mode)
		  || gotdtp_symbolic_operand (op, mode)
		  || gottp_symbolic_operand (op, mode));
	}

      /* This handles both the Windows/NT and OSF cases.  */
      return mode == ptr_mode || mode == DImode;

    case HIGH:
      return (TARGET_EXPLICIT_RELOCS
	      && local_symbolic_operand (XEXP (op, 0), mode));

    case REG:
    case ADDRESSOF:
      return 1;

    case SUBREG:
      if (register_operand (op, mode))
	return 1;
      /* ... fall through ...  */
    case MEM:
      return ((TARGET_BWX || (mode != HImode && mode != QImode))
	      && general_operand (op, mode));

    case CONST_DOUBLE:
    case CONST_VECTOR:
      return op == CONST0_RTX (mode);

    case CONST_INT:
      return mode == QImode || mode == HImode || add_operand (op, mode);

    default:
      break;
    }

  return 0;

/* Return 1 if OP is a SYMBOL_REF for a function known to be in this
   file, and in the same section as the current function.  */

current_file_function_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  /* Easy test for recursion.  */
  if (op == XEXP (DECL_RTL (current_function_decl), 0))
    return 1;

  /* Otherwise, we need the DECL for the SYMBOL_REF, which we can't get.
     So SYMBOL_REF_FLAG has been declared to imply that the function is
     in the default text section.  So we must also check that the current
     function is also in the text section.  */
  if (SYMBOL_REF_FLAG (op) && decl_in_text_section (current_function_decl))
    return 1;

  return 0;

/* Return 1 if OP is a SYMBOL_REF for which we can make a call via bsr.  */

direct_call_operand (op, mode)
     enum machine_mode mode;
  /* Must be defined in this file.  */
  if (! current_file_function_operand (op, mode))
    return 0;

  /* If profiling is implemented via linker tricks, we can't jump
     to the nogp alternate entry point.  */
  /* ??? TARGET_PROFILING_NEEDS_GP isn't really the right test,
     but is approximately correct for the OSF ABIs.  Don't know
     what to do for VMS, NT, or UMK.  */
  if (! TARGET_PROFILING_NEEDS_GP
      && ! current_function_profile)
    return 1;

  return 0;

/* Return true if OP is a LABEL_REF, or SYMBOL_REF or CONST referencing
   a (non-tls) variable known to be defined in this file.  */

local_symbolic_operand (op, mode)
     enum machine_mode mode;
  const char *str;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  /* Easy pickings.  */
  if (CONSTANT_POOL_ADDRESS_P (op) || STRING_POOL_ADDRESS_P (op))
    return 1;

  /* ??? SYMBOL_REF_FLAG is set for local function symbols, but we
     run into problems with the rtl inliner in that the symbol was
     once external, but is local after inlining, which results in
     unrecognizable insns.  */

  str = XSTR (op, 0);

  /* If @[LS], then alpha_encode_section_info sez it's local.  */
  if (str[0] == '@' && (str[1] == 'L' || str[1] == 'S'))
    return 1;

  /* If *$, then ASM_GENERATE_INTERNAL_LABEL sez it's local.  */
  if (str[0] == '*' && str[1] == '$')
    return 1;

  return 0;

/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
   known to be defined in this file in the small data area.  */

small_symbolic_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  const char *str;

  if (! TARGET_SMALL_DATA)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (CONSTANT_POOL_ADDRESS_P (op))
    return GET_MODE_SIZE (get_pool_mode (op)) <= (unsigned) g_switch_value;

  str = XSTR (op, 0);
  return str[0] == '@' && str[1] == 'S';

/* Return true if OP is a SYMBOL_REF or CONST referencing a variable
   not known (or known not) to be defined in this file.  */

global_symbolic_operand (op, mode)
     enum machine_mode mode;
  const char *str;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (local_symbolic_operand (op, mode))
    return 0;

  /* Also verify that it's not a TLS symbol.  */
  str = XSTR (op, 0);
  return str[0] != '%' && str[0] != '@';

/* Return 1 if OP is a valid operand for the MEM of a CALL insn.  */

call_operand (op, mode)
     enum machine_mode mode;
  if (mode != Pmode)
    return 0;

  if (GET_CODE (op) == REG)
    {
      if (TARGET_ABI_OSF)
	{
	  /* Disallow virtual registers to cope with pathological test cases
	     such as compile/930117-1.c in which the virtual reg decomposes
	     to the frame pointer.  Which is a hard reg that is not $27.  */
	  return (REGNO (op) == 27 || REGNO (op) > LAST_VIRTUAL_REGISTER);
	}
      else
	return 1;
    }
  if (TARGET_ABI_UNICOSMK)
    return 0;
  if (GET_CODE (op) == SYMBOL_REF)
    return 1;

  return 0;

/* Returns 1 if OP is a symbolic operand, i.e. a symbol_ref or a label_ref,
   possibly with an offset.  */

symbolic_operand (op, mode)
     enum machine_mode mode;
  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;
  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op,0)) == PLUS
      && GET_CODE (XEXP (XEXP (op,0), 0)) == SYMBOL_REF
      && GET_CODE (XEXP (XEXP (op,0), 1)) == CONST_INT)
    return 1;
  return 0;

/* Return true if OP is valid for a particular TLS relocation.  */

static int
tls_symbolic_operand_1 (op, mode, size, unspec)
     rtx op;
     enum machine_mode mode;
     int size, unspec;
{
  const char *str;
  int letter;

  if (mode != VOIDmode && GET_MODE (op) != VOIDmode && mode != GET_MODE (op))
    return 0;

  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  if (GET_CODE (op) != UNSPEC || XINT (op, 1) != unspec)
    return 0;
  op = XVECEXP (op, 0, 0);

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  str = XSTR (op, 0);

  if (str[0] == '%')
    {
      if (size != 64)
	return 0;
    }
  else if (str[0] == '@')
    {
      if (alpha_tls_size > size)
	return 0;
    }
  else
    return 0;

  letter = (unspec == UNSPEC_DTPREL ? 'D' : 'T');

  return str[1] == letter;
}

/* Return true if OP is valid for 16-bit DTP relative relocations.  */

dtp16_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_DTPREL);

/* Return true if OP is valid for 32-bit DTP relative relocations.  */

dtp32_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_DTPREL);

/* Return true if OP is valid for 64-bit DTP relative relocations.  */

gotdtp_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_DTPREL);

/* Return true if OP is valid for 16-bit TP relative relocations.  */

tp16_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 16, UNSPEC_TPREL);

/* Return true if OP is valid for 32-bit TP relative relocations.  */

tp32_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 32, UNSPEC_TPREL);

/* Return true if OP is valid for 64-bit TP relative relocations.  */

gottp_symbolic_operand (op, mode)
     enum machine_mode mode;
  return tls_symbolic_operand_1 (op, mode, 64, UNSPEC_TPREL);

/* Return 1 if OP is a valid Alpha comparison operator.  Here we know which
   comparisons are valid in which insn.  */

alpha_comparison_operator (op, mode)
     enum machine_mode mode;
  enum rtx_code code = GET_CODE (op);

  if (mode != GET_MODE (op) && mode != VOIDmode)
    return 0;

  return (code == EQ || code == LE || code == LT
	  || code == LEU || code == LTU);

/* Return 1 if OP is a valid Alpha comparison operator against zero.
   Here we know which comparisons are valid in which insn.  */

alpha_zero_comparison_operator (op, mode)
     enum machine_mode mode;
  enum rtx_code code = GET_CODE (op);

  if (mode != GET_MODE (op) && mode != VOIDmode)
    return 0;

  return (code == EQ || code == NE || code == LE || code == LT
	  || code == LEU || code == LTU);

/* Return 1 if OP is a valid Alpha swapped comparison operator.  */

alpha_swapped_comparison_operator (op, mode)
     enum machine_mode mode;
  enum rtx_code code = GET_CODE (op);

  if ((mode != GET_MODE (op) && mode != VOIDmode)
      || GET_RTX_CLASS (code) != '<')
    return 0;

  code = swap_condition (code);
  return (code == EQ || code == LE || code == LT
	  || code == LEU || code == LTU);

/* Return 1 if OP is a signed comparison operation.  */

signed_comparison_operator (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  enum rtx_code code = GET_CODE (op);

  if (mode != GET_MODE (op) && mode != VOIDmode)
    return 0;

  return (code == EQ || code == NE
	  || code == LE || code == LT
	  || code == GE || code == GT);

/* Return 1 if OP is a valid Alpha floating point comparison operator.
   Here we know which comparisons are valid in which insn.  */

alpha_fp_comparison_operator (op, mode)
     enum machine_mode mode;
  enum rtx_code code = GET_CODE (op);

  if (mode != GET_MODE (op) && mode != VOIDmode)
    return 0;

  return (code == EQ || code == LE || code == LT || code == UNORDERED);

/* Return 1 if this is a divide or modulus operator.  */

divmod_operator (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  switch (GET_CODE (op))
    {
    case DIV:  case MOD:  case UDIV:  case UMOD:
      return 1;

    default:
      break;
    }

  return 0;

/* Return 1 if this memory address is a known aligned register plus
   a constant.  It must be a valid address.  This means that we can do
   this as an aligned reference plus some offset.

   Take into account what reload will do.  */

aligned_memory_operand (op, mode)
     enum machine_mode mode;
  rtx base;

  if (reload_in_progress)
    {
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) >= 32);

/* Similar, but return 1 if OP is a MEM which is not alignable.  */

unaligned_memory_operand (op, mode)
     enum machine_mode mode;
  rtx base;

  if (reload_in_progress)
    {
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];
	  if (op == 0)
	    return 0;
	}
    }

  if (GET_CODE (op) != MEM
      || GET_MODE (op) != mode)
    return 0;
  op = XEXP (op, 0);

  /* LEGITIMIZE_RELOAD_ADDRESS creates (plus (plus reg const_hi) const_lo)
     sorts of constructs.  Dig for the real base register.  */
  if (reload_in_progress
      && GET_CODE (op) == PLUS
      && GET_CODE (XEXP (op, 0)) == PLUS)
    base = XEXP (XEXP (op, 0), 0);
  else
    {
      if (! memory_address_p (mode, op))
	return 0;
      base = (GET_CODE (op) == PLUS ? XEXP (op, 0) : op);
    }

  return (GET_CODE (base) == REG && REGNO_POINTER_ALIGN (REGNO (base)) < 32);

/* Return 1 if OP is either a register or an unaligned memory location.  */

reg_or_unaligned_mem_operand (op, mode)
     enum machine_mode mode;
  return register_operand (op, mode) || unaligned_memory_operand (op, mode);

/* Return 1 if OP is any memory location.  During reload a pseudo matches.  */

any_memory_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  return (GET_CODE (op) == MEM
	  || (GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == REG)
	  || (reload_in_progress && GET_CODE (op) == REG
	      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
	  || (reload_in_progress && GET_CODE (op) == SUBREG
	      && GET_CODE (SUBREG_REG (op)) == REG
	      && REGNO (SUBREG_REG (op)) >= FIRST_PSEUDO_REGISTER));

/* Returns 1 if OP is not an eliminable register.

   This exists to cure a pathological abort in the s8addq (et al) patterns,

	long foo () { long t; bar(); return (long) &t * 26107; }

   which run afoul of a hack in reload to cure a (presumably) similar
   problem with lea-type instructions on other targets.  But there is
   one of us and many of them, so work around the problem by selectively
   preventing combine from making the optimization.  */

reg_not_elim_operand (op, mode)
     enum machine_mode mode;
  rtx inner = op;
  if (GET_CODE (op) == SUBREG)
    inner = SUBREG_REG (op);
  if (inner == frame_pointer_rtx || inner == arg_pointer_rtx)
    return 0;

  return register_operand (op, mode);

/* Return 1 if OP is a memory location that is not a reference (using
   an AND) to an unaligned location.  Take into account what reload
   will do.  */

normal_memory_operand (op, mode)
     enum machine_mode mode ATTRIBUTE_UNUSED;
  if (reload_in_progress)
    {
      rtx tmp = op;
      if (GET_CODE (tmp) == SUBREG)
	tmp = SUBREG_REG (tmp);
      if (GET_CODE (tmp) == REG
	  && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
	{
	  op = reg_equiv_memory_loc[REGNO (tmp)];

	  /* This may not have been assigned an equivalent address if it will
	     be eliminated.  In that case, it doesn't matter what we do.  */
	  if (op == 0)
	    return 1;
	}
    }

  return GET_CODE (op) == MEM && GET_CODE (XEXP (op, 0)) != AND;

/* Accept a register, but not a subreg of any kind.  This allows us to
   avoid pathological cases in reload wrt data movement common in
   int->fp conversion.  */

reg_no_subreg_operand (op, mode)
     enum machine_mode mode;
  if (GET_CODE (op) != REG)
    return 0;
  return register_operand (op, mode);

/* Recognize an addition operation that includes a constant.  Used to
   convince reload to canonicalize (plus (plus reg c1) c2) during register
   elimination.  */

addition_operation (op, mode)
     enum machine_mode mode;
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;
  if (GET_CODE (op) == PLUS
      && register_operand (XEXP (op, 0), mode)
      && GET_CODE (XEXP (op, 1)) == CONST_INT
      && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op, 1)), 'K'))
    return 1;
  return 0;

/* Implements CONST_OK_FOR_LETTER_P.  Return true if the value matches
   the range defined for C in [I-P].  */

bool
alpha_const_ok_for_letter_p (value, c)
     HOST_WIDE_INT value;
     int c;
{
  switch (c)
    {
    case 'I':
      /* An unsigned 8 bit constant.  */
      return (unsigned HOST_WIDE_INT) value < 0x100;
    case 'J':
      /* The constant zero.  */
      return value == 0;
    case 'K':
      /* A signed 16 bit constant.  */
      return (unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000;
    case 'L':
      /* A shifted signed 16 bit constant appropriate for LDAH.  */
      return ((value & 0xffff) == 0
	      && ((value) >> 31 == -1 || value >> 31 == 0));
    case 'M':
      /* A constant that can be AND'ed with using a ZAP insn.  */
      return zap_mask (value);
    case 'N':
      /* A complemented unsigned 8 bit constant.  */
      return (unsigned HOST_WIDE_INT) (~ value) < 0x100;
    case 'O':
      /* A negated unsigned 8 bit constant.  */
      return (unsigned HOST_WIDE_INT) (- value) < 0x100;
    case 'P':
      /* The constant 1, 2 or 3.  */
      return value == 1 || value == 2 || value == 3;

    default:
      return false;
    }
}
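
/* Examples (added illustration; not in the original source):
   CONST_OK_FOR_LETTER_P (0x7fff, 'K') is true (fits in signed 16 bits),
   CONST_OK_FOR_LETTER_P (0x8000, 'K') is false, and
   CONST_OK_FOR_LETTER_P (0x12340000, 'L') is true, since its low 16 bits
   are zero and it fits a sign-extended 32-bit LDAH operand.  */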

/* Implements CONST_DOUBLE_OK_FOR_LETTER_P.  Return true if VALUE
   matches for C in [GH].  */

bool
alpha_const_double_ok_for_letter_p (value, c)
     rtx value;
     int c;
{
  switch (c)
    {
    case 'G':
      /* The floating point zero constant.  */
      return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
	      && value == CONST0_RTX (GET_MODE (value)));

    case 'H':
      /* A valid operand of a ZAP insn.  */
      return (GET_MODE (value) == VOIDmode
	      && zap_mask (CONST_DOUBLE_LOW (value))
	      && zap_mask (CONST_DOUBLE_HIGH (value)));

    default:
      return false;
    }
}

/* Implements EXTRA_CONSTRAINT.  Return true if VALUE matches for C.  */

bool
alpha_extra_constraint (value, c)
     rtx value;
     int c;
{
  switch (c)
    {
    case 'Q':
      return normal_memory_operand (value, VOIDmode);
    case 'R':
      return direct_call_operand (value, Pmode);
    case 'S':
      return (GET_CODE (value) == CONST_INT
	      && (unsigned HOST_WIDE_INT) INTVAL (value) < 64);
    case 'T':
      return GET_CODE (value) == HIGH;
    case 'U':
      return TARGET_ABI_UNICOSMK && symbolic_operand (value, VOIDmode);
    case 'W':
      return (GET_CODE (value) == CONST_VECTOR
	      && value == CONST0_RTX (GET_MODE (value)));

    default:
      return false;
    }
}

/* Return 1 if this function can directly return via $26.  */

int
direct_return ()
{
  return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK
	  && reload_completed
	  && alpha_sa_size () == 0
	  && get_frame_size () == 0
	  && current_function_outgoing_args_size == 0
	  && current_function_pretend_args_size == 0);
}

/* Return the ADDR_VEC associated with a tablejump insn.  */

rtx
alpha_tablejump_addr_vec (insn)
     rtx insn;
{
  rtx tmp;

  tmp = JUMP_LABEL (insn);
  if (! tmp)
    return NULL_RTX;
  tmp = NEXT_INSN (tmp);
  if (! tmp)
    return NULL_RTX;
  if (GET_CODE (tmp) == JUMP_INSN
      && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
    return PATTERN (tmp);
  return NULL_RTX;
}

/* Return the label of the predicted edge, or CONST0_RTX if we don't know.  */

rtx
alpha_tablejump_best_label (insn)
     rtx insn;
{
  rtx jump_table = alpha_tablejump_addr_vec (insn);
  rtx best_label = NULL_RTX;

  /* ??? Once the CFG doesn't keep getting completely rebuilt, look
     there for edge frequency counts from profile data.  */

  if (jump_table)
    {
      int n_labels = XVECLEN (jump_table, 1);
      int best_count = -1;
      int i, j;

      for (i = 0; i < n_labels; i++)
	{
	  int count = 1;

	  for (j = i + 1; j < n_labels; j++)
	    if (XEXP (XVECEXP (jump_table, 1, i), 0)
		== XEXP (XVECEXP (jump_table, 1, j), 0))
	      count++;

	  if (count > best_count)
	    best_count = count, best_label = XVECEXP (jump_table, 1, i);
	}
    }

  return best_label ? best_label : const0_rtx;
}

/* Return the TLS model to use for SYMBOL.  */

static enum tls_model
tls_symbolic_operand_type (symbol)
     rtx symbol;
{
  const char *str;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return 0;
  str = XSTR (symbol, 0);

  if (str[0] == '%')
    {
      /* ??? Be prepared for -ftls-model=local-dynamic.  Perhaps we shouldn't
	 have separately encoded local-ness.  Oh well, maybe the user will use
	 attribute visibility next time.  At least we don't crash...  */
      if (str[1] == 'G' || str[1] == 'D')
	return TLS_MODEL_GLOBAL_DYNAMIC;
      return TLS_MODEL_INITIAL_EXEC;
    }
  else if (str[0] == '@')
    {
      if (str[1] == 'D')
	{
	  /* Local dynamic is a waste if we're not going to combine
	     the __tls_get_addr calls.  So avoid it if not optimizing.  */
	  if (optimize)
	    return TLS_MODEL_LOCAL_DYNAMIC;
	  return TLS_MODEL_GLOBAL_DYNAMIC;
	}

      /* 64-bit local exec is the same as initial exec except without
	 the dynamic relocation.  In either case we use a got entry.  */
      if (alpha_tls_size == 64)
	return TLS_MODEL_INITIAL_EXEC;
      return TLS_MODEL_LOCAL_EXEC;
    }

  return 0;
}
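
/* Example (added illustration; not in the original source): with
   -mtls-size=32, a local TP-relative symbol encoded "@T..." maps to
   TLS_MODEL_LOCAL_EXEC; with -mtls-size=64 the same symbol maps to
   TLS_MODEL_INITIAL_EXEC, per the comment above.  */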

/* Return true if the function DECL will be placed in the default text
   section.  */
/* ??? Ideally we'd be able to always move from a SYMBOL_REF back to the
   decl, as that would allow us to determine if two functions are in the
   same section, which is what we really want to know.  */

static bool
decl_in_text_section (decl)
     tree decl;
{
  return (DECL_SECTION_NAME (decl) == NULL_TREE
	  && ! (flag_function_sections
		|| (targetm.have_named_sections
		    && DECL_ONE_ONLY (decl))));
}

/* Return true if EXP should be placed in the small data section.  */

static bool
alpha_in_small_data_p (exp)
     tree exp;
{
  /* We want to merge strings, so we never consider them small data.  */
  if (TREE_CODE (exp) == STRING_CST)
    return false;

  if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
    {
      const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
      if (strcmp (section, ".sdata") == 0
	  || strcmp (section, ".sbss") == 0)
	return true;
    }
  else
    {
      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));

      /* If this is an incomplete type with size 0, then we can't put it
	 in sdata because it might be too big when completed.  */
      if (size > 0 && size <= g_switch_value)
	return true;
    }

  return false;
}

/* If we are referencing a function that is static, make the SYMBOL_REF
   special.  We use this to indicate that we can branch to this function
   without setting PV or restoring GP.

   If this is a variable that is known to be defined locally, add "@L"
   to the name.  If in addition the variable is to go in .sdata/.sbss,
   then add "@S" instead.  */

static void
alpha_encode_section_info (decl, first)
     tree decl;
     int first ATTRIBUTE_UNUSED;
{
  const char *symbol_str;
  bool is_local;
  char encoding = 0;
  rtx rtl, symbol;

  rtl = DECL_P (decl) ? DECL_RTL (decl) : TREE_CST_RTL (decl);

  /* Careful not to prod global register variables.  */
  if (GET_CODE (rtl) != MEM)
    return;
  symbol = XEXP (rtl, 0);
  if (GET_CODE (symbol) != SYMBOL_REF)
    return;

  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      /* We mark public functions once they are emitted; otherwise we
	 don't know that they exist in this unit of translation.  */
      if (TREE_PUBLIC (decl))
	return;
      /* Do not mark functions that are not in .text; otherwise we
	 don't know that they are near enough for a direct branch.  */
      if (! decl_in_text_section (decl))
	return;

      SYMBOL_REF_FLAG (symbol) = 1;
      return;
    }

  /* Early out if we're not going to do anything with this data.  */
  if (! TARGET_EXPLICIT_RELOCS)
    return;

  symbol_str = XSTR (symbol, 0);

  /* A variable is considered "local" if it is defined in this module.  */
  is_local = (*targetm.binds_local_p) (decl);

  /* Care for TLS variables.  */
  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
    {
      enum tls_model kind;
      if (!flag_pic)
	{
	  if (is_local)
	    kind = TLS_MODEL_LOCAL_EXEC;
	  else
	    kind = TLS_MODEL_INITIAL_EXEC;
	}
      else if (is_local)
	kind = TLS_MODEL_LOCAL_DYNAMIC;
      else
	kind = TLS_MODEL_GLOBAL_DYNAMIC;
      if (kind < flag_tls_default)
	kind = flag_tls_default;

      switch (kind)
	{
	case TLS_MODEL_GLOBAL_DYNAMIC:
	  encoding = 'G';
	  break;
	case TLS_MODEL_LOCAL_DYNAMIC:
	  encoding = 'D';
	  break;
	case TLS_MODEL_INITIAL_EXEC:
	case TLS_MODEL_LOCAL_EXEC:
	  encoding = 'T';
	  break;
	}
    }
  else if (is_local)
    {
      /* Determine if DECL will wind up in .sdata/.sbss.  */
      if (alpha_in_small_data_p (decl))
	encoding = 'S';
      else
	encoding = 'L';
    }

  /* Finally, encode this into the symbol string.  */
  if (encoding)
    {
      char *newstr;
      size_t len;

      if (symbol_str[0] == (is_local ? '@' : '%'))
	{
	  if (symbol_str[1] == encoding)
	    return;
	  symbol_str += 2;
	}

      len = strlen (symbol_str) + 1;
      newstr = alloca (len + 2);

      newstr[0] = (is_local ? '@' : '%');
      newstr[1] = encoding;
      memcpy (newstr + 2, symbol_str, len);

      XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 2 - 1);
    }
}
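
/* Summary of the name encoding (added illustration, derived from the code
   above and its consumers): the first character is '@' for local symbols
   and '%' for global ones; the second names the kind -- 'L' local, 'S'
   small data, 'G'/'D' dynamic TLS, 'T' TP-relative TLS.  The function
   below removes this two-character prefix.  */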

/* Undo the effects of the above.  */

static const char *
alpha_strip_name_encoding (str)
     const char *str;
{
  if (str[0] == '@' || str[0] == '%')
    str += 2;
  if (str[0] == '*')
    str++;
  return str;
}

#if TARGET_ABI_OPEN_VMS
static bool
alpha_linkage_symbol_p (symname)
     const char *symname;
{
  int symlen = strlen (symname);

  if (symlen > 4)
    return strcmp (&symname [symlen - 4], "..lk") == 0;

  return false;
}

#define LINKAGE_SYMBOL_REF_P(X) \
  ((GET_CODE (X) == SYMBOL_REF		\
    && alpha_linkage_symbol_p (XSTR (X, 0))) \
   || (GET_CODE (X) == CONST		\
       && GET_CODE (XEXP (X, 0)) == PLUS \
       && GET_CODE (XEXP (XEXP (X, 0), 0)) == SYMBOL_REF \
       && alpha_linkage_symbol_p (XSTR (XEXP (XEXP (X, 0), 0), 0))))
#endif

/* legitimate_address_p recognizes an RTL expression that is a valid
   memory address for an instruction.  The MODE argument is the
   machine mode for the MEM expression that wants to use this address.

   For Alpha, we have either a constant address or the sum of a
   register and a constant address, or just a register.  For DImode,
   any of those forms can be surrounded with an AND that clears the
   low-order three bits; this is an "unaligned" access.  */

bool
alpha_legitimate_address_p (mode, x, strict)
     enum machine_mode mode;
     rtx x;
     int strict;
{
  /* If this is an ldq_u type address, discard the outer AND.  */
  if (mode == DImode
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -8)
    x = XEXP (x, 0);

  /* Discard non-paradoxical subregs.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    x = SUBREG_REG (x);

  /* Unadorned general registers are valid.  */
  if (REG_P (x)
      && (strict
	  ? STRICT_REG_OK_FOR_BASE_P (x)
	  : NONSTRICT_REG_OK_FOR_BASE_P (x)))
    return true;

  /* Constant addresses (i.e. +/- 32k) are valid.  */
  if (CONSTANT_ADDRESS_P (x))
    return true;

#if TARGET_ABI_OPEN_VMS
  if (LINKAGE_SYMBOL_REF_P (x))
    return true;
#endif

  /* Register plus a small constant offset is valid.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx ofs = XEXP (x, 1);
      x = XEXP (x, 0);

      /* Discard non-paradoxical subregs.  */
      if (GET_CODE (x) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	x = SUBREG_REG (x);

      if (REG_P (x))
	{
	  if (! strict
	      && NONSTRICT_REG_OK_FP_BASE_P (x)
	      && GET_CODE (ofs) == CONST_INT)
	    return true;
	  if ((strict
	       ? STRICT_REG_OK_FOR_BASE_P (x)
	       : NONSTRICT_REG_OK_FOR_BASE_P (x))
	      && CONSTANT_ADDRESS_P (ofs))
	    return true;
	}
      else if (GET_CODE (x) == ADDRESSOF
	       && GET_CODE (ofs) == CONST_INT)
	return true;
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as
     are small data symbols.  */
  else if (TARGET_EXPLICIT_RELOCS)
    {
      if (small_symbolic_operand (x, Pmode))
	return true;

      if (GET_CODE (x) == LO_SUM)
	{
	  rtx ofs = XEXP (x, 1);
	  x = XEXP (x, 0);

	  /* Discard non-paradoxical subregs.  */
	  if (GET_CODE (x) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (x))
		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	    x = SUBREG_REG (x);

	  /* Must have a valid base register.  */
	  if (! (REG_P (x)
		 && (strict
		     ? STRICT_REG_OK_FOR_BASE_P (x)
		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
	    return false;

	  /* The symbol must be local.  */
	  if (local_symbolic_operand (ofs, Pmode)
	      || dtp32_symbolic_operand (ofs, Pmode)
	      || tp32_symbolic_operand (ofs, Pmode))
	    return true;
	}
    }

  return false;
}

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.  */

rtx
alpha_legitimize_address (x, scratch, mode)
     rtx x;
     rtx scratch;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (!no_new_pseudos
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (!no_new_pseudos
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
			       XEXP (XEXP (XEXP (x, 1), 0), 0),
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.  */
  if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
    {
      rtx r0, r16, eqv, tga, tp, insn, dest, seq;

      switch (tls_symbolic_operand_type (x))
	{
	case TLS_MODEL_GLOBAL_DYNAMIC:
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
	  dest = gen_reg_rtx (Pmode);
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
	  insn = gen_call_value_osf_tlsgd (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  emit_libcall_block (insn, dest, r0, x);
	  return dest;

	case TLS_MODEL_LOCAL_DYNAMIC:
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
	  scratch = gen_reg_rtx (Pmode);
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
	  insn = gen_call_value_osf_tlsldm (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLDM_CALL);
	  emit_libcall_block (insn, scratch, r0, eqv);

	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);

	  if (alpha_tls_size == 64)
	    {
	      dest = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
	      emit_insn (gen_adddi3 (dest, dest, scratch));
	      return dest;
	    }
	  if (alpha_tls_size == 32)
	    {
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, scratch, insn);
	      scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, scratch, eqv);

	case TLS_MODEL_INITIAL_EXEC:
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);
	  scratch = gen_reg_rtx (Pmode);
	  dest = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
	  emit_insn (gen_adddi3 (dest, tp, scratch));
	  return dest;

	case TLS_MODEL_LOCAL_EXEC:
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  if (alpha_tls_size == 32)
	    {
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, tp, insn);
	      tp = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, tp, eqv);
	}

      if (local_symbolic_operand (x, Pmode))
	{
	  if (small_symbolic_operand (x, Pmode))
	    return x;

	  if (!no_new_pseudos)
	    scratch = gen_reg_rtx (Pmode);
	  emit_insn (gen_rtx_SET (VOIDmode, scratch,
				  gen_rtx_HIGH (Pmode, x)));
	  return gen_rtx_LO_SUM (Pmode, scratch, x);
	}
    }

  return NULL;

 split_addend:
  {
    HOST_WIDE_INT low, high;

    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}
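
/* Worked example (added illustration; not in the original source):
   legitimizing (plus reg 0x1234ffff) computes low = -1 and
   high = 0x12350000 via the sign-extension identities above, so the
   high part is added into a register and the address ends up as
   reg' + (-1).  */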

/* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
   small symbolic operand until after reload, at which point we need
   to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
   so that sched2 has the proper dependency information.  */

int
some_small_symbolic_operand (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
}

static int
some_small_symbolic_operand_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  return small_symbolic_operand (x, Pmode) != 0;
}

rtx
split_small_symbolic_operand (x)
     rtx x;
{
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}

static int
split_small_symbolic_operand_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  if (small_symbolic_operand (x, Pmode))
    {
      x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
      *px = x;
      return -1;
    }

  return 0;
}

/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts the number of extra insns needed from 3 to 1.  */
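
  /* Example (added illustration; not in the original source): for
     reg+0x9000, low becomes -0x7000 and high becomes 0x10000, so the
     address is rewritten as (plus (plus reg 0x10000) -0x7000); the inner
     sum is reloaded via ldah and -0x7000 fits the mem displacement.  */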
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  return NULL_RTX;
}

/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  */

void
get_aligned_mem (ref, paligned_mem, pbitnum)
     rtx ref;
     rtx *paligned_mem, *pbitnum;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
	abort ();
    }
  else
    base = XEXP (ref, 0);

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  *paligned_mem
    = widen_memory_access (ref, SImode, (offset & ~3) - offset);

  if (WORDS_BIG_ENDIAN)
    *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
			      + (offset & 3) * 8));
  else
    *pbitnum = GEN_INT ((offset & 3) * 8);
}

/* Similar, but just get the address.  Handle the two reload cases.
   Add EXTRA_OFFSET to the address we return.  */

rtx
get_unaligned_address (ref, extra_offset)
     rtx ref;
     int extra_offset;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
	abort ();
    }
  else
    base = XEXP (ref, 0);

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  return plus_constant (base, offset + extra_offset);
}

2460 /* On the Alpha, all (non-symbolic) constants except zero go into
2461 a floating-point register via memory. Note that we cannot
2462 return anything that is not a subset of CLASS, and that some
2463 symbolic constants cannot be dropped to memory. */
2466 alpha_preferred_reload_class (x, class)
2468 enum reg_class class;
2470 /* Zero is present in any register class. */
2471 if (x == CONST0_RTX (GET_MODE (x)))
2474 /* These sorts of constants we can easily drop to memory. */
2475 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2477 if (class == FLOAT_REGS)
2479 if (class == ALL_REGS)
2480 return GENERAL_REGS;
2484 /* All other kinds of constants should not (and in the case of HIGH
2485 cannot) be dropped to memory -- instead we use a GENERAL_REGS
2486 secondary reload. */
2488 return (class == ALL_REGS ? GENERAL_REGS : class);
2493 /* Loading and storing HImode or QImode values to and from memory
2494 usually requires a scratch register. The exceptions are loading
2495 QImode and HImode from an aligned address to a general register,
2496 and any access at all when byte instructions are permitted.
2498 We also cannot load an unaligned address or a paradoxical SUBREG
2499 into an FP register.
2501 We also cannot do integral arithmetic into FP regs, as might result
2502 from register elimination into a DImode fp register. */
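/* For instance (illustrative): without BWX, spilling a QImode value to
   an unaligned slot expands to a ldq_u/insbl/mskbl/stq_u sequence (see
   alpha_expand_mov_nobwx below), which needs integer scratch registers;
   hence the GENERAL_REGS secondary reload.  */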
2505 secondary_reload_class (class, mode, x, in)
2506 enum reg_class class;
2507 enum machine_mode mode;
2511 if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
2513 if (GET_CODE (x) == MEM
2514 || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
2515 || (GET_CODE (x) == SUBREG
2516 && (GET_CODE (SUBREG_REG (x)) == MEM
2517 || (GET_CODE (SUBREG_REG (x)) == REG
2518 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
2520 if (!in || !aligned_memory_operand (x, mode))
2521 return GENERAL_REGS;
2525 if (class == FLOAT_REGS)
2527 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
2528 return GENERAL_REGS;
2530 if (GET_CODE (x) == SUBREG
2531 && (GET_MODE_SIZE (GET_MODE (x))
2532 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2533 return GENERAL_REGS;
2535 if (in && INTEGRAL_MODE_P (mode)
2536 && ! (memory_operand (x, mode) || x == const0_rtx))
2537 return GENERAL_REGS;
2543 /* Subfunction of the following function. Update the flags of any MEM
2544 found in part of X. */
2547 alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
2549 int in_struct_p, volatile_p, unchanging_p;
2553 switch (GET_CODE (x))
2559 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
2560 alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
2565 alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
2570 alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
2572 alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
2577 MEM_IN_STRUCT_P (x) = in_struct_p;
2578 MEM_VOLATILE_P (x) = volatile_p;
2579 RTX_UNCHANGING_P (x) = unchanging_p;
2580 /* Sadly, we cannot use alias sets because the extra aliasing
2581 produced by the AND interferes. Given that two-byte quantities
2582 are the only thing we would be able to differentiate anyway,
2583 there does not seem to be any point in convoluting the early
2584 out of the alias check. */
2592 /* Given INSN, which is an INSN list or the PATTERN of a single insn
2593 generated to perform a memory operation, look for any MEMs in either
2594 a SET_DEST or a SET_SRC and copy the in-struct, unchanging, and
2595 volatile flags from REF into each of the MEMs found. If REF is not
2596 a MEM, don't do anything. */
2599 alpha_set_memflags (insn, ref)
2603 int in_struct_p, volatile_p, unchanging_p;
2605 if (GET_CODE (ref) != MEM)
2608 in_struct_p = MEM_IN_STRUCT_P (ref);
2609 volatile_p = MEM_VOLATILE_P (ref);
2610 unchanging_p = RTX_UNCHANGING_P (ref);
2612 /* This is only called from alpha.md, after having had something
2613 generated from one of the insn patterns. So if everything is
2614 zero, the pattern is already up-to-date. */
2615 if (! in_struct_p && ! volatile_p && ! unchanging_p)
2618 alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
2621 /* Try to output insns to set TARGET equal to the constant C if it can be
2622 done in less than N insns. Do all computations in MODE. Returns the place
2623 where the output has been placed if it can be done and the insns have been
2624 emitted. If it would take more than N insns, zero is returned and no
2625 insns are emitted. */
2628 alpha_emit_set_const (target, mode, c, n)
2630 enum machine_mode mode;
2635 rtx orig_target = target;
2638 /* If we can't make any pseudos, TARGET is an SImode hard register, we
2639 can't load this constant in one insn, do this in DImode. */
2640 if (no_new_pseudos && mode == SImode
2641 && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER
2642 && (result = alpha_emit_set_const_1 (target, mode, c, 1)) == 0)
2644 target = gen_lowpart (DImode, target);
2648 /* Try 1 insn, then 2, then up to N. */
2649 for (i = 1; i <= n; i++)
2651 result = alpha_emit_set_const_1 (target, mode, c, i);
2654 rtx insn = get_last_insn ();
2655 rtx set = single_set (insn);
2656 if (! CONSTANT_P (SET_SRC (set)))
2657 set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
2662 /* Allow for the case where we changed the mode of TARGET. */
2663 if (result == target)
2664 result = orig_target;
2669 /* Internal routine for the above to check for N or fewer insns. */
2672 alpha_emit_set_const_1 (target, mode, c, n)
2674 enum machine_mode mode;
2680 /* Use a pseudo if highly optimizing and still generating RTL. */
2682 = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
2685 /* If this is a sign-extended 32-bit constant, we can do this in at most
2686 three insns, so do it if we have enough insns left. We always have
2687 a sign-extended 32-bit constant when compiling on a narrow machine. */
2689 if (HOST_BITS_PER_WIDE_INT != 64
2690 || c >> 31 == -1 || c >> 31 == 0)
2692 HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
2693 HOST_WIDE_INT tmp1 = c - low;
2694 HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
2695 HOST_WIDE_INT extra = 0;
2697 /* If HIGH will be interpreted as negative but the constant is
2698 positive, we must adjust it to do two ldah insns. */
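/* Worked example (illustrative): c = 0x7fff8000 gives low = -0x8000
   and an initial high of -0x8000, a negative ldah for a positive
   constant. The fixup uses extra = 0x4000 (see the EXTRA handling
   below), so that
   (0x4000 << 16) + (0x4000 << 16) + (-0x8000) == 0x7fff8000.  */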
2700 if ((high & 0x8000) != 0 && c >= 0)
2704 high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
2707 if (c == low || (low == 0 && extra == 0))
2709 /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
2710 but that meant that we can't handle INT_MIN on 32-bit machines
2711 (like NT/Alpha), because we recurse indefinitely through
2712 emit_move_insn to gen_movdi. So instead, since we know exactly
2713 what we want, create it explicitly. */
2716 target = gen_reg_rtx (mode);
2717 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
2720 else if (n >= 2 + (extra != 0))
2722 temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);
2724 /* As of 2002-02-23, addsi3 is only available when not optimizing.
2725 This means that if we go through expand_binop, we'll try to
2726 generate extensions, etc, which will require new pseudos, which
2727 will fail during some split phases. The SImode add patterns
2728 still exist, but are not named. So build the insns by hand. */
2733 subtarget = gen_reg_rtx (mode);
2734 insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
2735 insn = gen_rtx_SET (VOIDmode, subtarget, insn);
2741 target = gen_reg_rtx (mode);
2742 insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
2743 insn = gen_rtx_SET (VOIDmode, target, insn);
2749 /* If we couldn't do it that way, try some other methods. But if we have
2750 no instructions left, don't bother. Likewise, if this is SImode and
2751 we can't make pseudos, we can't do anything since the expand_binop
2752 and expand_unop calls will widen and try to make pseudos. */
2754 if (n == 1 || (mode == SImode && no_new_pseudos))
2757 /* Next, see if we can load a related constant and then shift and possibly
2758 negate it to get the constant we want. Try this once for each
2759 increasing number of insns. */
2761 for (i = 1; i < n; i++)
2763 /* First, see if, minus some low bits, we've an easy load of
2764 high bits. */
2766 new = ((c & 0xffff) ^ 0x8000) - 0x8000;
2768 && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
2769 return expand_binop (mode, add_optab, temp, GEN_INT (new),
2770 target, 0, OPTAB_WIDEN);
2772 /* Next try complementing. */
2773 if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
2774 return expand_unop (mode, one_cmpl_optab, temp, target, 0);
2776 /* Next try to form a constant and do a left shift. We can do this
2777 if some low-order bits are zero; the exact_log2 call below tells
2778 us that information. The bits we are shifting out could be any
2779 value, but here we'll just try the 0- and sign-extended forms of
2780 the constant. To try to increase the chance of having the same
2781 constant in more than one insn, start at the highest number of
2782 bits to shift, but try all possibilities in case a ZAPNOT will
2783 be useful. */
2785 if ((bits = exact_log2 (c & - c)) > 0)
2786 for (; bits > 0; bits--)
2787 if ((temp = (alpha_emit_set_const
2788 (subtarget, mode, c >> bits, i))) != 0
2789 || ((temp = (alpha_emit_set_const
2791 ((unsigned HOST_WIDE_INT) c) >> bits, i)))
2793 return expand_binop (mode, ashl_optab, temp, GEN_INT (bits),
2794 target, 0, OPTAB_WIDEN);
2796 /* Now try high-order zero bits. Here we try the shifted-in bits as
2797 all zero and all ones. Be careful to avoid shifting outside the
2798 mode and to avoid shifting outside the host wide int size. */
2799 /* On narrow hosts, don't shift a 1 into the high bit, since we'll
2800 confuse the recursive call and set all of the high 32 bits. */
2802 if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
2803 - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64))) > 0)
2804 for (; bits > 0; bits--)
2805 if ((temp = alpha_emit_set_const (subtarget, mode,
2807 || ((temp = (alpha_emit_set_const
2809 ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2812 return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
2813 target, 1, OPTAB_WIDEN);
2815 /* Now try high-order 1 bits. We get that with a sign-extension.
2816 But one bit isn't enough here. Be careful to avoid shifting outside
2817 the mode and to avoid shifting outside the host wide int size. */
2819 if ((bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
2820 - floor_log2 (~ c) - 2)) > 0)
2821 for (; bits > 0; bits--)
2822 if ((temp = alpha_emit_set_const (subtarget, mode,
2824 || ((temp = (alpha_emit_set_const
2826 ((c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1)),
2829 return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
2830 target, 0, OPTAB_WIDEN);
2833 #if HOST_BITS_PER_WIDE_INT == 64
2834 /* Finally, see if we can load a value into the target that is the same as the
2835 constant except that all bytes that are 0 are changed to be 0xff. If we
2836 can, then we can do a ZAPNOT to obtain the desired constant. */
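/* Worked example (illustrative): for c = 0x0000001200000034 the loop
   below forms new = 0xffffff12ffffff34. If NEW is loadable in fewer
   insns, the AND with (c | ~new) == 0x000000ff000000ff -- a
   zapnot-style byte mask -- clears exactly the bytes that were zero
   in C, recovering the constant.  */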
2839 for (i = 0; i < 64; i += 8)
2840 if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
2841 new |= (HOST_WIDE_INT) 0xff << i;
2843 /* We are only called for SImode and DImode. If this is SImode, ensure that
2844 the value is sign-extended to a full word. */
2847 new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;
2849 if (new != c && new != -1
2850 && (temp = alpha_emit_set_const (subtarget, mode, new, n - 1)) != 0)
2851 return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
2852 target, 0, OPTAB_WIDEN);
2858 /* Having failed to find a 3 insn sequence in alpha_emit_set_const,
2859 fall back to a straightforward decomposition. We do this to avoid
2860 exponential run times encountered when looking for longer sequences
2861 with alpha_emit_set_const. */
2864 alpha_emit_set_long_const (target, c1, c2)
2866 HOST_WIDE_INT c1, c2;
2868 HOST_WIDE_INT d1, d2, d3, d4;
2870 /* Decompose the entire word. */
2871 #if HOST_BITS_PER_WIDE_INT >= 64
2872 if (c2 != -(c1 < 0))
2874 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2876 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2877 c1 = (c1 - d2) >> 32;
2878 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2880 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2884 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2886 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2890 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
2892 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
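/* Illustrative summary: the decomposition above satisfies
   C == ((d4 + d3) << 32) + d2 + d1, where d1 and d3 are lda-sized
   (signed 16-bit) and d2 and d4 are ldah-sized (signed 16-bit << 16),
   so the construction below needs at most five insns.  */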
2897 /* Construct the high word. */
2900 emit_move_insn (target, GEN_INT (d4));
2902 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
2905 emit_move_insn (target, GEN_INT (d3));
2907 /* Shift it into place. */
2908 emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));
2910 /* Add in the low bits. */
2912 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
2914 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));
2919 /* Expand a move instruction; return true if all work is done.
2920 We don't handle non-bwx subword loads here. */
2923 alpha_expand_mov (mode, operands)
2924 enum machine_mode mode;
2927 /* If the output is not a register, the input must be. */
2928 if (GET_CODE (operands[0]) == MEM
2929 && ! reg_or_0_operand (operands[1], mode))
2930 operands[1] = force_reg (mode, operands[1]);
2932 /* Allow legitimize_address to perform some simplifications. */
2933 if (mode == Pmode && symbolic_operand (operands[1], mode))
2937 /* With RTL inlining, at -O3, rtl is generated, stored, then actually
2938 compiled at the end of compilation. In the meantime, someone can
2939 re-encode-section-info on some symbol changing it e.g. from global
2940 to local-not-small. If this happens, we'd have emitted a plain
2941 load rather than a high+losum load and not recognize the insn.
2943 So if rtl inlining is in effect, we delay the global/not-global
2944 decision until rest_of_compilation by wrapping it in an
2945 UNSPEC_SYMBOL. */
2946 if (TARGET_EXPLICIT_RELOCS && flag_inline_functions
2947 && rtx_equal_function_value_matters
2948 && global_symbolic_operand (operands[1], mode))
2950 emit_insn (gen_movdi_er_maybe_g (operands[0], operands[1]));
2954 tmp = alpha_legitimize_address (operands[1], operands[0], mode);
2957 if (tmp == operands[0])
2964 /* Early out for non-constants and valid constants. */
2965 if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
2968 /* Split large integers. */
2969 if (GET_CODE (operands[1]) == CONST_INT
2970 || GET_CODE (operands[1]) == CONST_DOUBLE)
2972 HOST_WIDE_INT i0, i1;
2973 rtx temp = NULL_RTX;
2975 if (GET_CODE (operands[1]) == CONST_INT)
2977 i0 = INTVAL (operands[1]);
2980 else if (HOST_BITS_PER_WIDE_INT >= 64)
2982 i0 = CONST_DOUBLE_LOW (operands[1]);
2987 i0 = CONST_DOUBLE_LOW (operands[1]);
2988 i1 = CONST_DOUBLE_HIGH (operands[1]);
2991 if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
2992 temp = alpha_emit_set_const (operands[0], mode, i0, 3);
2994 if (!temp && TARGET_BUILD_CONSTANTS)
2995 temp = alpha_emit_set_long_const (operands[0], i0, i1);
2999 if (rtx_equal_p (operands[0], temp))
3006 /* Otherwise we've nothing left but to drop the thing to memory. */
3007 operands[1] = force_const_mem (DImode, operands[1]);
3008 if (reload_in_progress)
3010 emit_move_insn (operands[0], XEXP (operands[1], 0));
3011 operands[1] = copy_rtx (operands[1]);
3012 XEXP (operands[1], 0) = operands[0];
3015 operands[1] = validize_mem (operands[1]);
3019 /* Expand a non-bwx QImode or HImode move instruction;
3020 return true if all work is done. */
3023 alpha_expand_mov_nobwx (mode, operands)
3024 enum machine_mode mode;
3027 /* If the output is not a register, the input must be. */
3028 if (GET_CODE (operands[0]) == MEM)
3029 operands[1] = force_reg (mode, operands[1]);
3031 /* Handle four memory cases, unaligned and aligned for either the input
3032 or the output. The only case where we can be called during reload is
3033 for aligned loads; all other cases require temporaries. */
3035 if (GET_CODE (operands[1]) == MEM
3036 || (GET_CODE (operands[1]) == SUBREG
3037 && GET_CODE (SUBREG_REG (operands[1])) == MEM)
3038 || (reload_in_progress && GET_CODE (operands[1]) == REG
3039 && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
3040 || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
3041 && GET_CODE (SUBREG_REG (operands[1])) == REG
3042 && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
3044 if (aligned_memory_operand (operands[1], mode))
3046 if (reload_in_progress)
3048 emit_insn ((mode == QImode
3049 ? gen_reload_inqi_help
3050 : gen_reload_inhi_help)
3051 (operands[0], operands[1],
3052 gen_rtx_REG (SImode, REGNO (operands[0]))));
3056 rtx aligned_mem, bitnum;
3057 rtx scratch = gen_reg_rtx (SImode);
3059 get_aligned_mem (operands[1], &aligned_mem, &bitnum);
3061 emit_insn ((mode == QImode
3062 ? gen_aligned_loadqi
3063 : gen_aligned_loadhi)
3064 (operands[0], aligned_mem, bitnum, scratch));
3069 /* Don't pass these as parameters since that makes the generated
3070 code depend on parameter evaluation order, which will cause
3071 bootstrap failures. */
3073 rtx temp1 = gen_reg_rtx (DImode);
3074 rtx temp2 = gen_reg_rtx (DImode);
3075 rtx seq = ((mode == QImode
3076 ? gen_unaligned_loadqi
3077 : gen_unaligned_loadhi)
3078 (operands[0], get_unaligned_address (operands[1], 0),
3081 alpha_set_memflags (seq, operands[1]);
3087 if (GET_CODE (operands[0]) == MEM
3088 || (GET_CODE (operands[0]) == SUBREG
3089 && GET_CODE (SUBREG_REG (operands[0])) == MEM)
3090 || (reload_in_progress && GET_CODE (operands[0]) == REG
3091 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
3092 || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
3093 && GET_CODE (SUBREG_REG (operands[0])) == REG
3094 && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER))
3096 if (aligned_memory_operand (operands[0], mode))
3098 rtx aligned_mem, bitnum;
3099 rtx temp1 = gen_reg_rtx (SImode);
3100 rtx temp2 = gen_reg_rtx (SImode);
3102 get_aligned_mem (operands[0], &aligned_mem, &bitnum);
3104 emit_insn (gen_aligned_store (aligned_mem, operands[1], bitnum,
3109 rtx temp1 = gen_reg_rtx (DImode);
3110 rtx temp2 = gen_reg_rtx (DImode);
3111 rtx temp3 = gen_reg_rtx (DImode);
3112 rtx seq = ((mode == QImode
3113 ? gen_unaligned_storeqi
3114 : gen_unaligned_storehi)
3115 (get_unaligned_address (operands[0], 0),
3116 operands[1], temp1, temp2, temp3));
3118 alpha_set_memflags (seq, operands[0]);
3127 /* Generate an unsigned DImode to FP conversion. This is the same code
3128 optabs would emit if we didn't have TFmode patterns.
3130 For SFmode, this is the only construction I've found that can pass
3131 gcc.c-torture/execute/ieee/rbug.c. No scenario that uses DFmode
3132 intermediates will work, because you'll get intermediate rounding
3133 that ruins the end result. Some of this could be fixed by turning
3134 on round-to-positive-infinity, but that requires diddling the fpsr,
3135 which kills performance. I tried turning this around and converting
3136 to a negative number, so that I could turn on /m, but either I did
3137 it wrong or there's something else, because I wound up with the exact
3138 same single-bit error. There is a branch-less form of this same code:
3149 fcmoveq $f10,$f11,$f0
3151 I'm not using it because it's the same number of instructions as
3152 this branch-full form, and it has more serialized long latency
3153 instructions on the critical path.
3155 For DFmode, we can avoid rounding errors by breaking up the word
3156 into two pieces, converting them separately, and adding them back:
3158 LC0: .long 0,0x5f800000
3163 cpyse $f11,$f31,$f10
3164 cpyse $f31,$f11,$f11
3172 This doesn't seem to be a clear-cut win over the optabs form.
3173 It probably all depends on the distribution of numbers being
3174 converted -- in the optabs form, all but high-bit-set has a
3175 much lower minimum execution time. */
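/* Illustrative only: in C terms the expansion below is roughly

       double floatuns (unsigned long x)
       {
         if ((long) x >= 0)
           return (double) x;
         long h = (x >> 1) | (x & 1);
         return (double) h + (double) h;
       }

   where the IOR keeps the discarded low bit in the sticky position so
   that the final rounding comes out correctly.  */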
3178 alpha_emit_floatuns (operands)
3181 rtx neglab, donelab, i0, i1, f0, in, out;
3182 enum machine_mode mode;
3185 in = force_reg (DImode, operands[1]);
3186 mode = GET_MODE (out);
3187 neglab = gen_label_rtx ();
3188 donelab = gen_label_rtx ();
3189 i0 = gen_reg_rtx (DImode);
3190 i1 = gen_reg_rtx (DImode);
3191 f0 = gen_reg_rtx (mode);
3193 emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);
3195 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in)));
3196 emit_jump_insn (gen_jump (donelab));
3199 emit_label (neglab);
3201 emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
3202 emit_insn (gen_anddi3 (i1, in, const1_rtx));
3203 emit_insn (gen_iordi3 (i0, i0, i1));
3204 emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0)));
3205 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0)));
3207 emit_label (donelab);
3210 /* Generate the comparison for a conditional branch. */
3213 alpha_emit_conditional_branch (code)
3216 enum rtx_code cmp_code, branch_code;
3217 enum machine_mode cmp_mode, branch_mode = VOIDmode;
3218 rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
3221 if (alpha_compare.fp_p && GET_MODE (op0) == TFmode)
3223 if (! TARGET_HAS_XFLOATING_LIBS)
3226 /* X_floating library comparison functions return
3230 Convert the compare against the raw return value. */
3252 op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
3254 alpha_compare.fp_p = 0;
3257 /* The general case: fold the comparison code to the types of compares
3258 that we have, choosing the branch as necessary. */
3261 case EQ: case LE: case LT: case LEU: case LTU:
3263 /* We have these compares: */
3264 cmp_code = code, branch_code = NE;
3269 /* These must be reversed. */
3270 cmp_code = reverse_condition (code), branch_code = EQ;
3273 case GE: case GT: case GEU: case GTU:
3274 /* For FP, we swap them, for INT, we reverse them. */
3275 if (alpha_compare.fp_p)
3277 cmp_code = swap_condition (code);
3279 tem = op0, op0 = op1, op1 = tem;
3283 cmp_code = reverse_condition (code);
3292 if (alpha_compare.fp_p)
3295 if (flag_unsafe_math_optimizations)
3297 /* When we are not as concerned about non-finite values, and we
3298 are comparing against zero, we can branch directly. */
3299 if (op1 == CONST0_RTX (DFmode))
3300 cmp_code = NIL, branch_code = code;
3301 else if (op0 == CONST0_RTX (DFmode))
3303 /* Undo the swap we probably did just above. */
3304 tem = op0, op0 = op1, op1 = tem;
3305 branch_code = swap_condition (cmp_code);
3311 /* ??? We mark the branch mode to be CCmode to prevent the
3312 compare and branch from being combined, since the compare
3313 insn follows IEEE rules that the branch does not. */
3314 branch_mode = CCmode;
3321 /* The following optimizations are only for signed compares. */
3322 if (code != LEU && code != LTU && code != GEU && code != GTU)
3324 /* Whee. Compare and branch against 0 directly. */
3325 if (op1 == const0_rtx)
3326 cmp_code = NIL, branch_code = code;
3328 /* We want to use cmpcc/bcc when we can, since there is a zero delay
3329 bypass between logicals and br/cmov on EV5. But we don't want to
3330 force valid immediate constants into registers needlessly. */
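/* Example (illustrative): comparing against 0x10000 does not fit the
   8-bit operate literal, but -0x10000 is an ldah constant, so the code
   below emits "ldah t,-1(x)" and branches on T against zero instead of
   materializing 0x10000 in a register.  */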
3331 else if (GET_CODE (op1) == CONST_INT)
3333 HOST_WIDE_INT v = INTVAL (op1), n = -v;
3335 if (! CONST_OK_FOR_LETTER_P (v, 'I')
3336 && (CONST_OK_FOR_LETTER_P (n, 'K')
3337 || CONST_OK_FOR_LETTER_P (n, 'L')))
3339 cmp_code = PLUS, branch_code = code;
3345 if (!reg_or_0_operand (op0, DImode))
3346 op0 = force_reg (DImode, op0);
3347 if (cmp_code != PLUS && !reg_or_8bit_operand (op1, DImode))
3348 op1 = force_reg (DImode, op1);
3351 /* Emit an initial compare instruction, if necessary. */
3353 if (cmp_code != NIL)
3355 tem = gen_reg_rtx (cmp_mode);
3356 emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
3359 /* Zero the operands. */
3360 memset (&alpha_compare, 0, sizeof (alpha_compare));
3362 /* Return the branch comparison. */
3363 return gen_rtx_fmt_ee (branch_code, branch_mode, tem, CONST0_RTX (cmp_mode));
3366 /* Certain simplifications can be done to make invalid setcc operations
3367 valid. Return the final comparison, or NULL if nothing can be done. */
3370 alpha_emit_setcc (code)
3373 enum rtx_code cmp_code;
3374 rtx op0 = alpha_compare.op0, op1 = alpha_compare.op1;
3375 int fp_p = alpha_compare.fp_p;
3378 /* Zero the operands. */
3379 memset (&alpha_compare, 0, sizeof (alpha_compare));
3381 if (fp_p && GET_MODE (op0) == TFmode)
3383 if (! TARGET_HAS_XFLOATING_LIBS)
3386 /* X_floating library comparison functions return
3390 Convert the compare against the raw return value. */
3392 if (code == UNORDERED || code == ORDERED)
3397 op0 = alpha_emit_xfloating_compare (cmp_code, op0, op1);
3401 if (code == UNORDERED)
3403 else if (code == ORDERED)
3409 if (fp_p && !TARGET_FIX)
3412 /* The general case: fold the comparison code to the types of compares
3413 that we have, choosing the branch as necessary. */
3418 case EQ: case LE: case LT: case LEU: case LTU:
3420 /* We have these compares. */
3422 cmp_code = code, code = NE;
3426 if (!fp_p && op1 == const0_rtx)
3431 cmp_code = reverse_condition (code);
3435 case GE: case GT: case GEU: case GTU:
3436 /* These normally need swapping, but for integer zero we have
3437 special patterns that recognize swapped operands. */
3438 if (!fp_p && op1 == const0_rtx)
3440 code = swap_condition (code);
3442 cmp_code = code, code = NE;
3443 tmp = op0, op0 = op1, op1 = tmp;
3452 if (!register_operand (op0, DImode))
3453 op0 = force_reg (DImode, op0);
3454 if (!reg_or_8bit_operand (op1, DImode))
3455 op1 = force_reg (DImode, op1);
3458 /* Emit an initial compare instruction, if necessary. */
3459 if (cmp_code != NIL)
3461 enum machine_mode mode = fp_p ? DFmode : DImode;
3463 tmp = gen_reg_rtx (mode);
3464 emit_insn (gen_rtx_SET (VOIDmode, tmp,
3465 gen_rtx_fmt_ee (cmp_code, mode, op0, op1)));
3467 op0 = fp_p ? gen_lowpart (DImode, tmp) : tmp;
3471 /* Return the setcc comparison. */
3472 return gen_rtx_fmt_ee (code, DImode, op0, op1);
3476 /* Rewrite a comparison against zero CMP of the form
3477 (CODE (cc0) (const_int 0)) so it can be written validly in
3478 a conditional move (if_then_else CMP ...).
3479 If both of the operands that set cc0 are non-zero, we must emit
3480 an insn to perform the compare (it can't be done within
3481 the conditional move). */
3483 alpha_emit_conditional_move (cmp, mode)
3485 enum machine_mode mode;
3487 enum rtx_code code = GET_CODE (cmp);
3488 enum rtx_code cmov_code = NE;
3489 rtx op0 = alpha_compare.op0;
3490 rtx op1 = alpha_compare.op1;
3491 int fp_p = alpha_compare.fp_p;
3492 enum machine_mode cmp_mode
3493 = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
3494 enum machine_mode cmp_op_mode = fp_p ? DFmode : DImode;
3495 enum machine_mode cmov_mode = VOIDmode;
3496 int local_fast_math = flag_unsafe_math_optimizations;
3499 /* Zero the operands. */
3500 memset (&alpha_compare, 0, sizeof (alpha_compare));
3502 if (fp_p != FLOAT_MODE_P (mode))
3504 enum rtx_code cmp_code;
3509 /* If we have fp<->int register move instructions, do a cmov by
3510 performing the comparison in fp registers, and move the
3511 zero/non-zero value to integer registers, where we can then
3512 use a normal cmov, or vice-versa. */
3516 case EQ: case LE: case LT: case LEU: case LTU:
3517 /* We have these compares. */
3518 cmp_code = code, code = NE;
3522 /* This must be reversed. */
3523 cmp_code = EQ, code = EQ;
3526 case GE: case GT: case GEU: case GTU:
3527 /* These normally need swapping, but for integer zero we have
3528 special patterns that recognize swapped operands. */
3529 if (!fp_p && op1 == const0_rtx)
3530 cmp_code = code, code = NE;
3533 cmp_code = swap_condition (code);
3535 tem = op0, op0 = op1, op1 = tem;
3543 tem = gen_reg_rtx (cmp_op_mode);
3544 emit_insn (gen_rtx_SET (VOIDmode, tem,
3545 gen_rtx_fmt_ee (cmp_code, cmp_op_mode,
3548 cmp_mode = cmp_op_mode = fp_p ? DImode : DFmode;
3549 op0 = gen_lowpart (cmp_op_mode, tem);
3550 op1 = CONST0_RTX (cmp_op_mode);
3552 local_fast_math = 1;
3555 /* We may be able to use a conditional move directly.
3556 This avoids emitting spurious compares. */
3557 if (signed_comparison_operator (cmp, VOIDmode)
3558 && (!fp_p || local_fast_math)
3559 && (op0 == CONST0_RTX (cmp_mode) || op1 == CONST0_RTX (cmp_mode)))
3560 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
3562 /* We can't put the comparison inside the conditional move;
3563 emit a compare instruction and put that inside the
3564 conditional move. Make sure we emit only comparisons we have;
3565 swap or reverse as necessary. */
3572 case EQ: case LE: case LT: case LEU: case LTU:
3573 /* We have these compares: */
3577 /* This must be reversed. */
3578 code = reverse_condition (code);
3582 case GE: case GT: case GEU: case GTU:
3583 /* These must be swapped. */
3584 if (op1 != CONST0_RTX (cmp_mode))
3586 code = swap_condition (code);
3587 tem = op0, op0 = op1, op1 = tem;
3597 if (!reg_or_0_operand (op0, DImode))
3598 op0 = force_reg (DImode, op0);
3599 if (!reg_or_8bit_operand (op1, DImode))
3600 op1 = force_reg (DImode, op1);
3603 /* ??? We mark the branch mode to be CCmode to prevent the compare
3604 and cmov from being combined, since the compare insn follows IEEE
3605 rules that the cmov does not. */
3606 if (fp_p && !local_fast_math)
3609 tem = gen_reg_rtx (cmp_op_mode);
3610 emit_move_insn (tem, gen_rtx_fmt_ee (code, cmp_op_mode, op0, op1));
3611 return gen_rtx_fmt_ee (cmov_code, cmov_mode, tem, CONST0_RTX (cmp_op_mode));
3614 /* Simplify a conditional move of two constants into a setcc with
3615 arithmetic. This is done with a splitter since combine would
3616 just undo the work if done during code generation. It also catches
3617 cases we wouldn't have before cse. */
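/* Examples (illustrative): (COND ? 8 : 0) becomes a setcc followed by
   "sll t,3,dest"; (COND ? 5 : 1) has diff == 4 and becomes a setcc
   followed by "s4addq t,1,dest" -- arithmetic on the 0/1 comparison
   result replaces the cmov.  */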
3620 alpha_split_conditional_move (code, dest, cond, t_rtx, f_rtx)
3622 rtx dest, cond, t_rtx, f_rtx;
3624 HOST_WIDE_INT t, f, diff;
3625 enum machine_mode mode;
3626 rtx target, subtarget, tmp;
3628 mode = GET_MODE (dest);
3633 if (((code == NE || code == EQ) && diff < 0)
3634 || (code == GE || code == GT))
3636 code = reverse_condition (code);
3637 diff = t, t = f, f = diff;
3641 subtarget = target = dest;
3644 target = gen_lowpart (DImode, dest);
3645 if (! no_new_pseudos)
3646 subtarget = gen_reg_rtx (DImode);
3650 /* Below, we must be careful to use copy_rtx on target and subtarget
3651 in intermediate insns, as they may be a subreg rtx, which may not
3652 be safe to share. */
3654 if (f == 0 && exact_log2 (diff) > 0
3655 /* On EV6, we've got enough shifters to make non-arithmetic shifts
3656 viable over a longer latency cmove. On EV5, the E0 slot is a
3657 scarce resource, and on EV4 shift has the same latency as a cmove. */
3658 && (diff <= 8 || alpha_cpu == PROCESSOR_EV6))
3660 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3661 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3663 tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
3664 GEN_INT (exact_log2 (t)));
3665 emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3667 else if (f == 0 && t == -1)
3669 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3670 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3672 emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
3674 else if (diff == 1 || diff == 4 || diff == 8)
3678 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
3679 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp));
3682 emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
3685 add_op = GEN_INT (f);
3686 if (sext_add_operand (add_op, mode))
3688 tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
3690 tmp = gen_rtx_PLUS (DImode, tmp, add_op);
3691 emit_insn (gen_rtx_SET (VOIDmode, target, tmp));
3703 /* Look up the function X_floating library function name for the
3707 alpha_lookup_xfloating_lib_func (code)
3712 const enum rtx_code code;
3713 const char *const func;
3716 static const struct xfloating_op vms_xfloating_ops[] =
3718 { PLUS, "OTS$ADD_X" },
3719 { MINUS, "OTS$SUB_X" },
3720 { MULT, "OTS$MUL_X" },
3721 { DIV, "OTS$DIV_X" },
3722 { EQ, "OTS$EQL_X" },
3723 { NE, "OTS$NEQ_X" },
3724 { LT, "OTS$LSS_X" },
3725 { LE, "OTS$LEQ_X" },
3726 { GT, "OTS$GTR_X" },
3727 { GE, "OTS$GEQ_X" },
3728 { FIX, "OTS$CVTXQ" },
3729 { FLOAT, "OTS$CVTQX" },
3730 { UNSIGNED_FLOAT, "OTS$CVTQUX" },
3731 { FLOAT_EXTEND, "OTS$CVT_FLOAT_T_X" },
3732 { FLOAT_TRUNCATE, "OTS$CVT_FLOAT_X_T" },
3735 static const struct xfloating_op osf_xfloating_ops[] =
3737 { PLUS, "_OtsAddX" },
3738 { MINUS, "_OtsSubX" },
3739 { MULT, "_OtsMulX" },
3740 { DIV, "_OtsDivX" },
3747 { FIX, "_OtsCvtXQ" },
3748 { FLOAT, "_OtsCvtQX" },
3749 { UNSIGNED_FLOAT, "_OtsCvtQUX" },
3750 { FLOAT_EXTEND, "_OtsConvertFloatTX" },
3751 { FLOAT_TRUNCATE, "_OtsConvertFloatXT" },
3754 const struct xfloating_op *ops;
3755 const long n = ARRAY_SIZE (osf_xfloating_ops);
3758 /* How irritating. Nothing to key off for the table. Hardcode
3759 knowledge of the G_floating routines. */
3760 if (TARGET_FLOAT_VAX)
3762 if (TARGET_ABI_OPEN_VMS)
3764 if (code == FLOAT_EXTEND)
3765 return "OTS$CVT_FLOAT_G_X";
3766 if (code == FLOAT_TRUNCATE)
3767 return "OTS$CVT_FLOAT_X_G";
3771 if (code == FLOAT_EXTEND)
3772 return "_OtsConvertFloatGX";
3773 if (code == FLOAT_TRUNCATE)
3774 return "_OtsConvertFloatXG";
3778 if (TARGET_ABI_OPEN_VMS)
3779 ops = vms_xfloating_ops;
3781 ops = osf_xfloating_ops;
3783 for (i = 0; i < n; ++i)
3784 if (ops[i].code == code)
3790 /* Most X_floating operations take the rounding mode as an argument.
3791 Compute that here. */
3794 alpha_compute_xfloating_mode_arg (code, round)
3796 enum alpha_fp_rounding_mode round;
3802 case ALPHA_FPRM_NORM:
3805 case ALPHA_FPRM_MINF:
3808 case ALPHA_FPRM_CHOP:
3811 case ALPHA_FPRM_DYN:
3817 /* XXX For reference, round to +inf is mode = 3. */
3820 if (code == FLOAT_TRUNCATE && alpha_fptm == ALPHA_FPTM_N)
3826 /* Emit an X_floating library function call.
3828 Note that these functions do not follow normal calling conventions:
3829 TFmode arguments are passed in two integer registers (as opposed to
3830 indirect); TFmode return values appear in R16+R17.
3832 FUNC is the function name to call.
3833 TARGET is where the output belongs.
3834 OPERANDS are the inputs.
3835 NOPERANDS is the count of inputs.
3836 EQUIV is the expression equivalent for the function.
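/* Layout sketch (illustrative): for _OtsAddX, the two TFmode operands
   occupy $16-$17 and $18-$19, the DImode rounding-mode argument goes
   in $20, and the TFmode result comes back in $16-$17.  */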
3840 alpha_emit_xfloating_libcall (func, target, operands, noperands, equiv)
3847 rtx usage = NULL_RTX, tmp, reg;
3852 for (i = 0; i < noperands; ++i)
3854 switch (GET_MODE (operands[i]))
3857 reg = gen_rtx_REG (TFmode, regno);
3862 reg = gen_rtx_REG (DFmode, regno + 32);
3867 if (GET_CODE (operands[i]) != CONST_INT)
3871 reg = gen_rtx_REG (DImode, regno);
3879 emit_move_insn (reg, operands[i]);
3880 usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage);
3883 switch (GET_MODE (target))
3886 reg = gen_rtx_REG (TFmode, 16);
3889 reg = gen_rtx_REG (DFmode, 32);
3892 reg = gen_rtx_REG (DImode, 0);
3898 tmp = gen_rtx_MEM (QImode, gen_rtx_SYMBOL_REF (Pmode, (char *) func));
3899 tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx,
3900 const0_rtx, const0_rtx));
3901 CALL_INSN_FUNCTION_USAGE (tmp) = usage;
3906 emit_libcall_block (tmp, target, reg, equiv);
3909 /* Emit an X_floating library function call for arithmetic (+,-,*,/). */
3912 alpha_emit_xfloating_arith (code, operands)
3918 rtx out_operands[3];
3920 func = alpha_lookup_xfloating_lib_func (code);
3921 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3923 out_operands[0] = operands[1];
3924 out_operands[1] = operands[2];
3925 out_operands[2] = GEN_INT (mode);
3926 alpha_emit_xfloating_libcall (func, operands[0], out_operands, 3,
3927 gen_rtx_fmt_ee (code, TFmode, operands[1],
3931 /* Emit an X_floating library function call for a comparison. */
3934 alpha_emit_xfloating_compare (code, op0, op1)
3939 rtx out, operands[2];
3941 func = alpha_lookup_xfloating_lib_func (code);
3945 out = gen_reg_rtx (DImode);
3947 /* ??? Strange mode for equiv because what's actually returned
3948 is -1, 0, or 1, not a proper boolean value. */
3949 alpha_emit_xfloating_libcall (func, out, operands, 2,
3950 gen_rtx_fmt_ee (code, CCmode, op0, op1));
3955 /* Emit an X_floating library function call for a conversion. */
3958 alpha_emit_xfloating_cvt (code, operands)
3962 int noperands = 1, mode;
3963 rtx out_operands[2];
3966 func = alpha_lookup_xfloating_lib_func (code);
3968 out_operands[0] = operands[1];
3973 mode = alpha_compute_xfloating_mode_arg (code, ALPHA_FPRM_CHOP);
3974 out_operands[1] = GEN_INT (mode);
3977 case FLOAT_TRUNCATE:
3978 mode = alpha_compute_xfloating_mode_arg (code, alpha_fprm);
3979 out_operands[1] = GEN_INT (mode);
3986 alpha_emit_xfloating_libcall (func, operands[0], out_operands, noperands,
3987 gen_rtx_fmt_e (code, GET_MODE (operands[0]),
3991 /* Split a TFmode OP[1] into DImode OP[2,3] and likewise for
3992 OP[0] into OP[0,1]. Naturally, output operand ordering is
3993 little-endian. */
3996 alpha_split_tfmode_pair (operands)
3999 if (GET_CODE (operands[1]) == REG)
4001 operands[3] = gen_rtx_REG (DImode, REGNO (operands[1]) + 1);
4002 operands[2] = gen_rtx_REG (DImode, REGNO (operands[1]));
4004 else if (GET_CODE (operands[1]) == MEM)
4006 operands[3] = adjust_address (operands[1], DImode, 8);
4007 operands[2] = adjust_address (operands[1], DImode, 0);
4009 else if (operands[1] == CONST0_RTX (TFmode))
4010 operands[2] = operands[3] = const0_rtx;
4014 if (GET_CODE (operands[0]) == REG)
4016 operands[1] = gen_rtx_REG (DImode, REGNO (operands[0]) + 1);
4017 operands[0] = gen_rtx_REG (DImode, REGNO (operands[0]));
4019 else if (GET_CODE (operands[0]) == MEM)
4021 operands[1] = adjust_address (operands[0], DImode, 8);
4022 operands[0] = adjust_address (operands[0], DImode, 0);
4028 /* Implement negtf2 or abstf2. Op0 is destination, op1 is source,
4029 op2 is a register containing the sign bit, operation is the
4030 logical operation to be performed. */
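/* Concretely (illustrative): the TFmode sign bit is bit 63 of the high
   DImode word, so negtf2 flips that bit (XOR with OP2) and abstf2
   clears it (AND with the complement of OP2); the low word is copied
   through unchanged.  */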
4033 alpha_split_tfmode_frobsign (operands, operation)
4035 rtx (*operation) PARAMS ((rtx, rtx, rtx));
4037 rtx high_bit = operands[2];
4041 alpha_split_tfmode_pair (operands);
4043 /* Detect three flavours of operand overlap. */
4045 if (rtx_equal_p (operands[0], operands[2]))
4047 else if (rtx_equal_p (operands[1], operands[2]))
4049 if (rtx_equal_p (operands[0], high_bit))
4056 emit_move_insn (operands[0], operands[2]);
4058 /* ??? If the destination overlaps both source tf and high_bit, then
4059 assume source tf is dead in its entirety and use the other half
4060 for a scratch register. Otherwise "scratch" is just the proper
4061 destination register. */
4062 scratch = operands[move < 2 ? 1 : 3];
4064 emit_insn ((*operation) (scratch, high_bit, operands[3]));
4068 emit_move_insn (operands[0], operands[2]);
4070 emit_move_insn (operands[1], scratch);
4074 /* Use ext[wlq][lh] as the Architecture Handbook describes for extracting
4075 unaligned data:
4077 unsigned: signed:
4078 word: ldq_u r1,X(r11) ldq_u r1,X(r11)
4079 ldq_u r2,X+1(r11) ldq_u r2,X+1(r11)
4080 lda r3,X(r11) lda r3,X+2(r11)
4081 extwl r1,r3,r1 extql r1,r3,r1
4082 extwh r2,r3,r2 extqh r2,r3,r2
4083 or r1,r2,r1 or r1,r2,r1
4086 long: ldq_u r1,X(r11) ldq_u r1,X(r11)
4087 ldq_u r2,X+3(r11) ldq_u r2,X+3(r11)
4088 lda r3,X(r11) lda r3,X(r11)
4089 extll r1,r3,r1 extll r1,r3,r1
4090 extlh r2,r3,r2 extlh r2,r3,r2
4091 or r1,r2,r1 addl r1,r2,r1
4093 quad: ldq_u r1,X(r11)
4102 alpha_expand_unaligned_load (tgt, mem, size, ofs, sign)
4104 HOST_WIDE_INT size, ofs;
4107 rtx meml, memh, addr, extl, exth, tmp, mema;
4108 enum machine_mode mode;
4110 meml = gen_reg_rtx (DImode);
4111 memh = gen_reg_rtx (DImode);
4112 addr = gen_reg_rtx (DImode);
4113 extl = gen_reg_rtx (DImode);
4114 exth = gen_reg_rtx (DImode);
4116 mema = XEXP (mem, 0);
4117 if (GET_CODE (mema) == LO_SUM)
4118 mema = force_reg (Pmode, mema);
4120 /* AND addresses cannot be in any alias set, since they may implicitly
4121 alias surrounding code. Ideally we'd have some alias set that
4122 covered all types except those with alignment 8 or higher. */
4124 tmp = change_address (mem, DImode,
4125 gen_rtx_AND (DImode,
4126 plus_constant (mema, ofs),
4128 set_mem_alias_set (tmp, 0);
4129 emit_move_insn (meml, tmp);
4131 tmp = change_address (mem, DImode,
4132 gen_rtx_AND (DImode,
4133 plus_constant (mema, ofs + size - 1),
4135 set_mem_alias_set (tmp, 0);
4136 emit_move_insn (memh, tmp);
4138 if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4))
4140 emit_move_insn (addr, plus_constant (mema, -1));
4142 emit_insn (gen_extqh_be (extl, meml, addr));
4143 emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr));
4145 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
4146 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
4147 addr, 1, OPTAB_WIDEN);
4149 else if (sign && size == 2)
4151 emit_move_insn (addr, plus_constant (mema, ofs+2));
4153 emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
4154 emit_insn (gen_extqh_le (exth, memh, addr));
4156 /* We must use tgt here for the target. Alpha-vms port fails if we use
4157 addr for the target, because addr is marked as a pointer and combine
4158 knows that pointers are always sign-extended 32-bit values. */
4159 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
4160 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
4161 addr, 1, OPTAB_WIDEN);
4165 if (WORDS_BIG_ENDIAN)
4167 emit_move_insn (addr, plus_constant (mema, ofs+size-1));
4171 emit_insn (gen_extwh_be (extl, meml, addr));
4176 emit_insn (gen_extlh_be (extl, meml, addr));
4181 emit_insn (gen_extqh_be (extl, meml, addr));
4188 emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
4192 emit_move_insn (addr, plus_constant (mema, ofs));
4193 emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
4197 emit_insn (gen_extwh_le (exth, memh, addr));
4202 emit_insn (gen_extlh_le (exth, memh, addr));
4207 emit_insn (gen_extqh_le (exth, memh, addr));
4216 addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
4217 gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
4222 emit_move_insn (tgt, gen_lowpart(GET_MODE (tgt), addr));
4225 /* Similarly, use ins and msk instructions to perform unaligned stores. */
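/* For example (illustrative, after the Architecture Handbook style), a
   little-endian unaligned quadword store of r4 at X(r11) is:

       lda    r3,X(r11)
       ldq_u  r2,X+7(r11)
       ldq_u  r1,X(r11)
       insqh  r4,r3,r6
       insql  r4,r3,r5
       mskqh  r2,r3,r2
       mskql  r1,r3,r1
       or     r2,r6,r2
       or     r1,r5,r1
       stq_u  r2,X+7(r11)
       stq_u  r1,X(r11)

   storing the high word first so the aligned (degenerate) case comes
   out right.  */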
4228 alpha_expand_unaligned_store (dst, src, size, ofs)
4230 HOST_WIDE_INT size, ofs;
4232 rtx dstl, dsth, addr, insl, insh, meml, memh, dsta;
4234 dstl = gen_reg_rtx (DImode);
4235 dsth = gen_reg_rtx (DImode);
4236 insl = gen_reg_rtx (DImode);
4237 insh = gen_reg_rtx (DImode);
4239 dsta = XEXP (dst, 0);
4240 if (GET_CODE (dsta) == LO_SUM)
4241 dsta = force_reg (Pmode, dsta);
4243 /* AND addresses cannot be in any alias set, since they may implicitly
4244 alias surrounding code. Ideally we'd have some alias set that
4245 covered all types except those with alignment 8 or higher. */
4247 meml = change_address (dst, DImode,
4248 gen_rtx_AND (DImode,
4249 plus_constant (dsta, ofs),
4251 set_mem_alias_set (meml, 0);
4253 memh = change_address (dst, DImode,
4254 gen_rtx_AND (DImode,
4255 plus_constant (dsta, ofs + size - 1),
4257 set_mem_alias_set (memh, 0);
4259 emit_move_insn (dsth, memh);
4260 emit_move_insn (dstl, meml);
4261 if (WORDS_BIG_ENDIAN)
4263 addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1));
4265 if (src != const0_rtx)
4270 emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
4273 emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
4276 emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
4279 emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
4280 GEN_INT (size*8), addr));
4286 emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr));
4290 rtx msk = immed_double_const (0xffffffff, 0, DImode);
4291 emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
4295 emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr));
4299 emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr));
4303 addr = copy_addr_to_reg (plus_constant (dsta, ofs));
4305 if (src != const0_rtx)
4307 emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
4308 GEN_INT (size*8), addr));
4313 emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr));
4316 emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr));
4319 emit_insn (gen_insql_le (insl, src, addr));
4324 emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
4329 emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
4333 rtx msk = immed_double_const (0xffffffff, 0, DImode);
4334 emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
4338 emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
4343 if (src != const0_rtx)
4345 dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
4346 dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
4349 if (WORDS_BIG_ENDIAN)
4351 emit_move_insn (meml, dstl);
4352 emit_move_insn (memh, dsth);
4356 /* Must store high before low for the degenerate aligned case. */
4357 emit_move_insn (memh, dsth);
4358 emit_move_insn (meml, dstl);
4362 /* The block move code tries to maximize speed by separating loads and
4363 stores at the expense of register pressure: we load all of the data
4364 before we store it back out. There are two secondary effects worth
4365 mentioning: this speeds copying to/from aligned and unaligned
4366 buffers, and it makes the code significantly easier to write. */
4368 #define MAX_MOVE_WORDS 8
4370 /* Load an integral number of consecutive unaligned quadwords. */
4373 alpha_expand_unaligned_load_words (out_regs, smem, words, ofs)
4376 HOST_WIDE_INT words, ofs;
4378 rtx const im8 = GEN_INT (-8);
4379 rtx const i64 = GEN_INT (64);
4380 rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
4381 rtx sreg, areg, tmp, smema;
4384 smema = XEXP (smem, 0);
4385 if (GET_CODE (smema) == LO_SUM)
4386 smema = force_reg (Pmode, smema);
4388 /* Generate all the tmp registers we need. */
4389 for (i = 0; i < words; ++i)
4391 data_regs[i] = out_regs[i];
4392 ext_tmps[i] = gen_reg_rtx (DImode);
4394 data_regs[words] = gen_reg_rtx (DImode);
4397 smem = adjust_address (smem, GET_MODE (smem), ofs);
4399 /* Load up all of the source data. */
4400 for (i = 0; i < words; ++i)
4402 tmp = change_address (smem, DImode,
4403 gen_rtx_AND (DImode,
4404 plus_constant (smema, 8*i),
4406 set_mem_alias_set (tmp, 0);
4407 emit_move_insn (data_regs[i], tmp);
4410 tmp = change_address (smem, DImode,
4411 gen_rtx_AND (DImode,
4412 plus_constant (smema, 8*words - 1),
4414 set_mem_alias_set (tmp, 0);
4415 emit_move_insn (data_regs[words], tmp);
4417 /* Extract the half-word fragments. Unfortunately DEC decided to make
4418 extxh with offset zero a noop instead of zeroing the register, so
4419 we must take care of that edge condition ourselves with cmov. */
4421 sreg = copy_addr_to_reg (smema);
4422 areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
4424 if (WORDS_BIG_ENDIAN)
4425 emit_move_insn (sreg, plus_constant (sreg, 7));
4426 for (i = 0; i < words; ++i)
4428 if (WORDS_BIG_ENDIAN)
4430 emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg));
4431 emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
4435 emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
4436 emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
4438 emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
4439 gen_rtx_IF_THEN_ELSE (DImode,
4440 gen_rtx_EQ (DImode, areg,
4442 const0_rtx, ext_tmps[i])));
4445 /* Merge the half-words into whole words. */
4446 for (i = 0; i < words; ++i)
4448 out_regs[i] = expand_binop (DImode, ior_optab, data_regs[i],
4449 ext_tmps[i], data_regs[i], 1, OPTAB_WIDEN);
4453 /* Store an integral number of consecutive unaligned quadwords. DATA_REGS
4454 may be NULL to store zeros. */
4457 alpha_expand_unaligned_store_words (data_regs, dmem, words, ofs)
4460 HOST_WIDE_INT words, ofs;
4462 rtx const im8 = GEN_INT (-8);
4463 rtx const i64 = GEN_INT (64);
4464 rtx ins_tmps[MAX_MOVE_WORDS];
4465 rtx st_tmp_1, st_tmp_2, dreg;
4466 rtx st_addr_1, st_addr_2, dmema;
4469 dmema = XEXP (dmem, 0);
4470 if (GET_CODE (dmema) == LO_SUM)
4471 dmema = force_reg (Pmode, dmema);
4473 /* Generate all the tmp registers we need. */
4474 if (data_regs != NULL)
4475 for (i = 0; i < words; ++i)
4476 ins_tmps[i] = gen_reg_rtx (DImode);
4477 st_tmp_1 = gen_reg_rtx (DImode);
4478 st_tmp_2 = gen_reg_rtx (DImode);
4481 dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
4483 st_addr_2 = change_address (dmem, DImode,
4484 gen_rtx_AND (DImode,
4485 plus_constant (dmema, words*8 - 1),
4487 set_mem_alias_set (st_addr_2, 0);
4489 st_addr_1 = change_address (dmem, DImode,
4490 gen_rtx_AND (DImode, dmema, im8));
4491 set_mem_alias_set (st_addr_1, 0);
4493 /* Load up the destination end bits. */
4494 emit_move_insn (st_tmp_2, st_addr_2);
4495 emit_move_insn (st_tmp_1, st_addr_1);
4497 /* Shift the input data into place. */
4498 dreg = copy_addr_to_reg (dmema);
4499 if (WORDS_BIG_ENDIAN)
4500 emit_move_insn (dreg, plus_constant (dreg, 7));
4501 if (data_regs != NULL)
4503 for (i = words-1; i >= 0; --i)
4505 if (WORDS_BIG_ENDIAN)
4507 emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
4508 emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
4512 emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
4513 emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
4516 for (i = words-1; i > 0; --i)
4518 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
4519 ins_tmps[i-1], ins_tmps[i-1], 1,
4524 /* Split and merge the ends with the destination data. */
4525 if (WORDS_BIG_ENDIAN)
4527 emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
4528 emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
4532 emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
4533 emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
4536 if (data_regs != NULL)
4538 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
4539 st_tmp_2, 1, OPTAB_WIDEN);
4540 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
4541 st_tmp_1, 1, OPTAB_WIDEN);
4545 if (WORDS_BIG_ENDIAN)
4546 emit_move_insn (st_addr_1, st_tmp_1);
4548 emit_move_insn (st_addr_2, st_tmp_2);
4549 for (i = words-1; i > 0; --i)
4551 rtx tmp = change_address (dmem, DImode,
4552 gen_rtx_AND (DImode,
4553 plus_constant(dmema,
4554 WORDS_BIG_ENDIAN ? i*8-1 : i*8),
4556 set_mem_alias_set (tmp, 0);
4557 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
4559 if (WORDS_BIG_ENDIAN)
4560 emit_move_insn (st_addr_2, st_tmp_2);
4562 emit_move_insn (st_addr_1, st_tmp_1);
4566 /* Expand string/block move operations.
4568 operands[0] is the pointer to the destination.
4569 operands[1] is the pointer to the source.
4570 operands[2] is the number of bytes to move.
4571 operands[3] is the alignment. */
4574 alpha_expand_block_move (operands)
4577 rtx bytes_rtx = operands[2];
4578 rtx align_rtx = operands[3];
4579 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
4580 HOST_WIDE_INT bytes = orig_bytes;
4581 HOST_WIDE_INT src_align = INTVAL (align_rtx) * BITS_PER_UNIT;
4582 HOST_WIDE_INT dst_align = src_align;
4583 rtx orig_src = operands[1];
4584 rtx orig_dst = operands[0];
4585 rtx data_regs[2 * MAX_MOVE_WORDS + 16];
4587 unsigned int i, words, ofs, nregs = 0;
4589 if (orig_bytes <= 0)
4591 else if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
4594 /* Look for additional alignment information from recorded register info. */
4596 tmp = XEXP (orig_src, 0);
4597 if (GET_CODE (tmp) == REG)
4598 src_align = MAX (src_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4599 else if (GET_CODE (tmp) == PLUS
4600 && GET_CODE (XEXP (tmp, 0)) == REG
4601 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4603 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4604 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4608 if (a >= 64 && c % 8 == 0)
4610 else if (a >= 32 && c % 4 == 0)
4612 else if (a >= 16 && c % 2 == 0)
4617 tmp = XEXP (orig_dst, 0);
4618 if (GET_CODE (tmp) == REG)
4619 dst_align = MAX (dst_align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4620 else if (GET_CODE (tmp) == PLUS
4621 && GET_CODE (XEXP (tmp, 0)) == REG
4622 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4624 unsigned HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4625 unsigned int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4629 if (a >= 64 && c % 8 == 0)
4631 else if (a >= 32 && c % 4 == 0)
4633 else if (a >= 16 && c % 2 == 0)
4638 /* Load the entire block into registers. */
4639 if (GET_CODE (XEXP (orig_src, 0)) == ADDRESSOF)
4641 enum machine_mode mode;
4643 tmp = XEXP (XEXP (orig_src, 0), 0);
4645 /* Don't use the existing register if we're reading more than
4646 is held in the register. Nor if there is not a mode that
4647 handles the exact size. */
4648 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4650 && GET_MODE_SIZE (GET_MODE (tmp)) >= bytes)
4654 data_regs[nregs] = gen_lowpart (DImode, tmp);
4655 data_regs[nregs + 1] = gen_highpart (DImode, tmp);
4659 data_regs[nregs++] = gen_lowpart (mode, tmp);
4664 /* No appropriate mode; fall back on memory. */
4665 orig_src = replace_equiv_address (orig_src,
4666 copy_addr_to_reg (XEXP (orig_src, 0)));
4667 src_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4671 if (src_align >= 64 && bytes >= 8)
4675 for (i = 0; i < words; ++i)
4676 data_regs[nregs + i] = gen_reg_rtx (DImode);
4678 for (i = 0; i < words; ++i)
4679 emit_move_insn (data_regs[nregs + i],
4680 adjust_address (orig_src, DImode, ofs + i * 8));
4687 if (src_align >= 32 && bytes >= 4)
4691 for (i = 0; i < words; ++i)
4692 data_regs[nregs + i] = gen_reg_rtx (SImode);
4694 for (i = 0; i < words; ++i)
4695 emit_move_insn (data_regs[nregs + i],
4696 adjust_address (orig_src, SImode, ofs + i * 4));
4707 for (i = 0; i < words+1; ++i)
4708 data_regs[nregs + i] = gen_reg_rtx (DImode);
4710 alpha_expand_unaligned_load_words (data_regs + nregs, orig_src,
4718 if (! TARGET_BWX && bytes >= 4)
4720 data_regs[nregs++] = tmp = gen_reg_rtx (SImode);
4721 alpha_expand_unaligned_load (tmp, orig_src, 4, ofs, 0);
4728 if (src_align >= 16)
4731 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
4732 emit_move_insn (tmp, adjust_address (orig_src, HImode, ofs));
4735 } while (bytes >= 2);
4737 else if (! TARGET_BWX)
4739 data_regs[nregs++] = tmp = gen_reg_rtx (HImode);
4740 alpha_expand_unaligned_load (tmp, orig_src, 2, ofs, 0);
4748 data_regs[nregs++] = tmp = gen_reg_rtx (QImode);
4749 emit_move_insn (tmp, adjust_address (orig_src, QImode, ofs));
4756 if (nregs > ARRAY_SIZE (data_regs))
4759 /* Now save it back out again. */
4763 if (GET_CODE (XEXP (orig_dst, 0)) == ADDRESSOF)
4765 enum machine_mode mode;
4766 tmp = XEXP (XEXP (orig_dst, 0), 0);
4768 mode = mode_for_size (orig_bytes * BITS_PER_UNIT, MODE_INT, 1);
4769 if (GET_MODE (tmp) == mode)
4773 emit_move_insn (tmp, data_regs[0]);
4778 else if (nregs == 2 && mode == TImode)
4780 /* Undo the subregging done above when copying between
4781 two TImode registers. */
4782 if (GET_CODE (data_regs[0]) == SUBREG
4783 && GET_MODE (SUBREG_REG (data_regs[0])) == TImode)
4784 emit_move_insn (tmp, SUBREG_REG (data_regs[0]));
4790 emit_move_insn (gen_lowpart (DImode, tmp), data_regs[0]);
4791 emit_move_insn (gen_highpart (DImode, tmp), data_regs[1]);
4795 emit_no_conflict_block (seq, tmp, data_regs[0],
4796 data_regs[1], NULL_RTX);
4804 /* ??? If nregs > 1, consider reconstructing the word in regs. */
4805 /* ??? Optimize mode < dst_mode with strict_low_part. */
4807 /* No appropriate mode; fall back on memory. We can speed things
4808 up by recognizing extra alignment information. */
4809 orig_dst = replace_equiv_address (orig_dst,
4810 copy_addr_to_reg (XEXP (orig_dst, 0)));
4811 dst_align = GET_MODE_BITSIZE (GET_MODE (tmp));
4814 /* Write out the data in whatever chunks reading the source allowed. */
4815 if (dst_align >= 64)
4817 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4819 emit_move_insn (adjust_address (orig_dst, DImode, ofs),
4826 if (dst_align >= 32)
4828 /* If the source has remaining DImode regs, write them out in
4829 two pieces. */
4830 while (i < nregs && GET_MODE (data_regs[i]) == DImode)
4832 tmp = expand_binop (DImode, lshr_optab, data_regs[i], GEN_INT (32),
4833 NULL_RTX, 1, OPTAB_WIDEN);
4835 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4836 gen_lowpart (SImode, data_regs[i]));
4837 emit_move_insn (adjust_address (orig_dst, SImode, ofs + 4),
4838 gen_lowpart (SImode, tmp));
4843 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4845 emit_move_insn (adjust_address (orig_dst, SImode, ofs),
4852 if (i < nregs && GET_MODE (data_regs[i]) == DImode)
4854 /* Write out a remaining block of words using unaligned methods. */
4856 for (words = 1; i + words < nregs; words++)
4857 if (GET_MODE (data_regs[i + words]) != DImode)
4861 alpha_expand_unaligned_store (orig_dst, data_regs[i], 8, ofs);
4863 alpha_expand_unaligned_store_words (data_regs + i, orig_dst,
4870 /* Due to the above, this won't be aligned. */
4871 /* ??? If we have more than one of these, consider constructing full
4872 words in registers and using alpha_expand_unaligned_store_words. */
4873 while (i < nregs && GET_MODE (data_regs[i]) == SImode)
4875 alpha_expand_unaligned_store (orig_dst, data_regs[i], 4, ofs);
4880 if (dst_align >= 16)
4881 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4883 emit_move_insn (adjust_address (orig_dst, HImode, ofs), data_regs[i]);
4888 while (i < nregs && GET_MODE (data_regs[i]) == HImode)
4890 alpha_expand_unaligned_store (orig_dst, data_regs[i], 2, ofs);
4895 while (i < nregs && GET_MODE (data_regs[i]) == QImode)
4897 emit_move_insn (adjust_address (orig_dst, QImode, ofs), data_regs[i]);
4911 alpha_expand_block_clear (operands)
4914 rtx bytes_rtx = operands[1];
4915 rtx align_rtx = operands[2];
4916 HOST_WIDE_INT orig_bytes = INTVAL (bytes_rtx);
4917 HOST_WIDE_INT bytes = orig_bytes;
4918 HOST_WIDE_INT align = INTVAL (align_rtx) * BITS_PER_UNIT;
4919 HOST_WIDE_INT alignofs = 0;
4920 rtx orig_dst = operands[0];
4922 int i, words, ofs = 0;
4924 if (orig_bytes <= 0)
4926 if (orig_bytes > MAX_MOVE_WORDS * UNITS_PER_WORD)
4929 /* Look for stricter alignment. */
4930 tmp = XEXP (orig_dst, 0);
4931 if (GET_CODE (tmp) == REG)
4932 align = MAX (align, REGNO_POINTER_ALIGN (REGNO (tmp)));
4933 else if (GET_CODE (tmp) == PLUS
4934 && GET_CODE (XEXP (tmp, 0)) == REG
4935 && GET_CODE (XEXP (tmp, 1)) == CONST_INT)
4937 HOST_WIDE_INT c = INTVAL (XEXP (tmp, 1));
4938 int a = REGNO_POINTER_ALIGN (REGNO (XEXP (tmp, 0)));
4943 align = a, alignofs = 8 - c % 8;
4945 align = a, alignofs = 4 - c % 4;
4947 align = a, alignofs = 2 - c % 2;
4950 else if (GET_CODE (tmp) == ADDRESSOF)
4952 enum machine_mode mode;
4954 mode = mode_for_size (bytes * BITS_PER_UNIT, MODE_INT, 1);
4955 if (GET_MODE (XEXP (tmp, 0)) == mode)
4957 emit_move_insn (XEXP (tmp, 0), const0_rtx);
4961 /* No appropriate mode; fall back on memory. */
4962 orig_dst = replace_equiv_address (orig_dst, copy_addr_to_reg (tmp));
4963 align = GET_MODE_BITSIZE (GET_MODE (XEXP (tmp, 0)));
4966 /* Handle an unaligned prefix first. */
4970 #if HOST_BITS_PER_WIDE_INT >= 64
4971 /* Given that alignofs is bounded by align, the only time BWX could
4972 generate three stores is for a 7-byte fill. Prefer two individual
4973 stores over a load/mask/store sequence. */
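/* For instance (illustrative, not in the original comment): alignofs == 3
   needs only stb+stw and alignofs == 6 needs stw+stl, so two BWX stores
   suffice; only alignofs == 7 would need stb+stw+stl, at which point the
   load/mask/store sequence below is no worse.  */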
4974 if ((!TARGET_BWX || alignofs == 7)
4976 && !(alignofs == 4 && bytes >= 4))
4978 enum machine_mode mode = (align >= 64 ? DImode : SImode);
4979 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
4983 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
4984 set_mem_alias_set (mem, 0);
4986 mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8));
4987 if (bytes < alignofs)
4989 mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8);
5000 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
5001 NULL_RTX, 1, OPTAB_WIDEN);
5003 emit_move_insn (mem, tmp);
5007 if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
5009 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
5014 if (TARGET_BWX && align >= 16 && (alignofs & 3) == 2 && bytes >= 2)
5016 emit_move_insn (adjust_address (orig_dst, HImode, ofs), const0_rtx);
5021 if (alignofs == 4 && bytes >= 4)
5023 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
5029 /* If we've not used the extra lead alignment information by now,
5030 we won't be able to. Downgrade align to match what's left over. */
5033 alignofs = alignofs & -alignofs;
5034 align = MIN (align, alignofs * BITS_PER_UNIT);
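/* Example (illustrative): alignofs == 6 (binary 110) gives
   alignofs & -alignofs == 2, so a leftover 2-byte-aligned prefix
   downgrades ALIGN to 16 bits.  */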
5038 /* Handle a block of contiguous long-words. */
5040 if (align >= 64 && bytes >= 8)
5044 for (i = 0; i < words; ++i)
5045 emit_move_insn (adjust_address (orig_dst, DImode, ofs + i * 8),
5052 /* If the block is large and appropriately aligned, emit a single
5053 store followed by a sequence of stq_u insns. */
5055 if (align >= 32 && bytes > 16)
5059 emit_move_insn (adjust_address (orig_dst, SImode, ofs), const0_rtx);
5063 orig_dsta = XEXP (orig_dst, 0);
5064 if (GET_CODE (orig_dsta) == LO_SUM)
5065 orig_dsta = force_reg (Pmode, orig_dsta);
5068 for (i = 0; i < words; ++i)
5071 = change_address (orig_dst, DImode,
5072 gen_rtx_AND (DImode,
5073 plus_constant (orig_dsta, ofs + i*8),
5075 set_mem_alias_set (mem, 0);
5076 emit_move_insn (mem, const0_rtx);
5079 /* Depending on the alignment, the first stq_u may have overlapped
5080 with the initial stl, which means that the last stq_u didn't
5081 write as much as it would appear. Leave those questionable bytes
5082 unaccounted for. */
5083 bytes -= words * 8 - 4;
5084 ofs += words * 8 - 4;
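/* Worked example (illustrative): if the destination starts 0 mod 8, the
   stl above fills bytes [0,4) and ofs becomes 4; each stq_u then clears
   the quad [i*8, i*8+8), so the first quad overlaps the stl and the run
   ends at byte words*8, only words*8 - 4 bytes past the new ofs.  */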
5087 /* Handle a smaller block of aligned words. */
5089 if ((align >= 64 && bytes == 4)
5090 || (align == 32 && bytes >= 4))
5094 for (i = 0; i < words; ++i)
5095 emit_move_insn (adjust_address (orig_dst, SImode, ofs + i * 4),
5102 /* An unaligned block uses stq_u stores for as many as possible. */
5108 alpha_expand_unaligned_store_words (NULL, orig_dst, words, ofs);
5114 /* Next clean up any trailing pieces. */
5116 #if HOST_BITS_PER_WIDE_INT >= 64
5117 /* Count the number of bits in BYTES for which aligned stores could
5118 be emitted. */
5120 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
5124 /* If we have appropriate alignment (and it wouldn't take too many
5125 instructions otherwise), mask out the bytes we need. */
5126 if (TARGET_BWX ? words > 2 : bytes > 0)
5133 mem = adjust_address (orig_dst, DImode, ofs);
5134 set_mem_alias_set (mem, 0);
5136 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
5138 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
5139 NULL_RTX, 1, OPTAB_WIDEN);
5141 emit_move_insn (mem, tmp);
5144 else if (align >= 32 && bytes < 4)
5149 mem = adjust_address (orig_dst, SImode, ofs);
5150 set_mem_alias_set (mem, 0);
5152 mask = ~(HOST_WIDE_INT)0 << (bytes * 8);
5154 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
5155 NULL_RTX, 1, OPTAB_WIDEN);
5157 emit_move_insn (mem, tmp);
5163 if (!TARGET_BWX && bytes >= 4)
5165 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
5175 emit_move_insn (adjust_address (orig_dst, HImode, ofs),
5179 } while (bytes >= 2);
5181 else if (! TARGET_BWX)
5183 alpha_expand_unaligned_store (orig_dst, const0_rtx, 2, ofs);
5191 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
5199 /* Returns a mask so that zap(x, value) == x & mask. */
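/* Example (illustrative): value == 0x0f zeroes bytes 0-3, so the mask
   returned is 0xffffffff00000000 and zap (x, 0x0f) == x & that mask.  */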
5202 alpha_expand_zap_mask (value)
5203 HOST_WIDE_INT value;
5208 if (HOST_BITS_PER_WIDE_INT >= 64)
5210 HOST_WIDE_INT mask = 0;
5212 for (i = 7; i >= 0; --i)
5215 if (!((value >> i) & 1))
5219 result = gen_int_mode (mask, DImode);
5221 else if (HOST_BITS_PER_WIDE_INT == 32)
5223 HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
5225 for (i = 7; i >= 4; --i)
5228 if (!((value >> i) & 1))
5232 for (i = 3; i >= 0; --i)
5235 if (!((value >> i) & 1))
5239 result = immed_double_const (mask_lo, mask_hi, DImode);
5248 alpha_expand_builtin_vector_binop (gen, mode, op0, op1, op2)
5249 rtx (*gen) PARAMS ((rtx, rtx, rtx));
5250 enum machine_mode mode;
5253 op0 = gen_lowpart (mode, op0);
5255 if (op1 == const0_rtx)
5256 op1 = CONST0_RTX (mode);
5258 op1 = gen_lowpart (mode, op1);
5260 if (op2 == const0_rtx)
5261 op2 = CONST0_RTX (mode);
5263 op2 = gen_lowpart (mode, op2);
5265 emit_insn ((*gen) (op0, op1, op2));
5268 /* Adjust the cost of a scheduling dependency. Return the new cost of
5269 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
5272 alpha_adjust_cost (insn, link, dep_insn, cost)
5278 enum attr_type insn_type, dep_insn_type;
5280 /* If the dependence is an anti-dependence, there is no cost. For an
5281 output dependence, there is sometimes a cost, but it doesn't seem
5282 worth handling those few cases. */
5283 if (REG_NOTE_KIND (link) != 0)
5286 /* If we can't recognize the insns, we can't really do anything. */
5287 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
5290 insn_type = get_attr_type (insn);
5291 dep_insn_type = get_attr_type (dep_insn);
5293 /* Bring in the user-defined memory latency. */
5294 if (dep_insn_type == TYPE_ILD
5295 || dep_insn_type == TYPE_FLD
5296 || dep_insn_type == TYPE_LDSYM)
5297 cost += alpha_memory_latency - 1;
5299 /* Everything else handled in DFA bypasses now. */
5304 /* The number of instructions that can be issued per cycle. */
5309 return (alpha_cpu == PROCESSOR_EV4 ? 2 : 4);
5313 alpha_use_dfa_pipeline_interface ()
5318 /* How many alternative schedules to try. This should be as wide as the
5319 scheduling freedom in the DFA, but no wider. Making this value too
5320 large results in extra work for the scheduler.
5322 For EV4, loads can be issued to either IB0 or IB1, thus we have 2
5323 alternative schedules. For EV5, we can choose between E0/E1 and
5324 FA/FM. For EV6, an arithmetic insn can be issued to U0/U1/L0/L1. */
5327 alpha_multipass_dfa_lookahead ()
5329 return (alpha_cpu == PROCESSOR_EV6 ? 4 : 2);
5332 /* Machine-specific function data. */
5334 struct machine_function GTY(())
5337 /* List of call information words for calls from this function. */
5338 struct rtx_def *first_ciw;
5339 struct rtx_def *last_ciw;
5342 /* List of deferred case vectors. */
5343 struct rtx_def *addr_list;
5346 const char *some_ld_name;
5349 /* How to allocate a 'struct machine_function'. */
5351 static struct machine_function *
5352 alpha_init_machine_status ()
5354 return ((struct machine_function *)
5355 ggc_alloc_cleared (sizeof (struct machine_function)));
5358 /* Functions to save and restore alpha_return_addr_rtx. */
5360 /* Start the ball rolling with RETURN_ADDR_RTX. */
5363 alpha_return_addr (count, frame)
5365 rtx frame ATTRIBUTE_UNUSED;
5370 return get_hard_reg_initial_val (Pmode, REG_RA);
5373 /* Return or create a pseudo containing the gp value for the current
5374 function. Needed only if TARGET_LD_BUGGY_LDGP. */
5377 alpha_gp_save_rtx ()
5379 rtx r = get_hard_reg_initial_val (DImode, 29);
5380 if (GET_CODE (r) != MEM)
5381 r = gen_mem_addressof (r, NULL_TREE);
5386 alpha_ra_ever_killed ()
5390 if (!has_hard_reg_initial_val (Pmode, REG_RA))
5391 return regs_ever_live[REG_RA];
5393 push_topmost_sequence ();
5395 pop_topmost_sequence ();
5397 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX);
5401 /* Return the trap mode suffix applicable to the current
5402 instruction, or NULL. */
5405 get_trap_mode_suffix ()
5407 enum attr_trap_suffix s = get_attr_trap_suffix (current_output_insn);
5411 case TRAP_SUFFIX_NONE:
5414 case TRAP_SUFFIX_SU:
5415 if (alpha_fptm >= ALPHA_FPTM_SU)
5419 case TRAP_SUFFIX_SUI:
5420 if (alpha_fptm >= ALPHA_FPTM_SUI)
5424 case TRAP_SUFFIX_V_SV:
5432 case ALPHA_FPTM_SUI:
5437 case TRAP_SUFFIX_V_SV_SVI:
5446 case ALPHA_FPTM_SUI:
5451 case TRAP_SUFFIX_U_SU_SUI:
5460 case ALPHA_FPTM_SUI:
5468 /* Return the rounding mode suffix applicable to the current
5469 instruction, or NULL. */
5472 get_round_mode_suffix ()
5474 enum attr_round_suffix s = get_attr_round_suffix (current_output_insn);
5478 case ROUND_SUFFIX_NONE:
5480 case ROUND_SUFFIX_NORMAL:
5483 case ALPHA_FPRM_NORM:
5485 case ALPHA_FPRM_MINF:
5487 case ALPHA_FPRM_CHOP:
5489 case ALPHA_FPRM_DYN:
5494 case ROUND_SUFFIX_C:
5500 /* Locate some local-dynamic symbol still in use by this function
5501 so that we can print its name in some movdi_er_tlsldm pattern. */
5504 get_some_local_dynamic_name ()
5508 if (cfun->machine->some_ld_name)
5509 return cfun->machine->some_ld_name;
5511 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
5513 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
5514 return cfun->machine->some_ld_name;
5520 get_some_local_dynamic_name_1 (px, data)
5522 void *data ATTRIBUTE_UNUSED;
5526 if (GET_CODE (x) == SYMBOL_REF)
5528 const char *str = XSTR (x, 0);
5529 if (str[0] == '@' && str[1] == 'D')
5531 cfun->machine->some_ld_name = str;
5539 /* Print an operand. Recognize special options, documented below. */
5542 print_operand (file, x, code)
5552 /* Print the assembler name of the current function. */
5553 assemble_name (file, alpha_fnname);
5557 assemble_name (file, get_some_local_dynamic_name ());
5562 const char *trap = get_trap_mode_suffix ();
5563 const char *round = get_round_mode_suffix ();
5566 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"),
5567 (trap ? trap : ""), (round ? round : ""));
5572 /* Generates single precision instruction suffix. */
5573 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
5577 /* Generates double precision instruction suffix. */
5578 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
5582 if (alpha_this_literal_sequence_number == 0)
5583 alpha_this_literal_sequence_number = alpha_next_sequence_number++;
5584 fprintf (file, "%d", alpha_this_literal_sequence_number);
5588 if (alpha_this_gpdisp_sequence_number == 0)
5589 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5590 fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5594 if (GET_CODE (x) == HIGH)
5595 output_addr_const (file, XEXP (x, 0));
5597 output_operand_lossage ("invalid %%H value");
5604 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD_CALL)
5606 x = XVECEXP (x, 0, 0);
5607 lituse = "lituse_tlsgd";
5609 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM_CALL)
5611 x = XVECEXP (x, 0, 0);
5612 lituse = "lituse_tlsldm";
5614 else if (GET_CODE (x) == CONST_INT)
5615 lituse = "lituse_jsr";
5618 output_operand_lossage ("invalid %%J value");
5622 if (x != const0_rtx)
5623 fprintf (file, "\t\t!%s!%d", lituse, (int) INTVAL (x));
5628 /* If this operand is the constant zero, write it as "$31". */
5629 if (GET_CODE (x) == REG)
5630 fprintf (file, "%s", reg_names[REGNO (x)]);
5631 else if (x == CONST0_RTX (GET_MODE (x)))
5632 fprintf (file, "$31");
5634 output_operand_lossage ("invalid %%r value");
5638 /* Similar, but for floating-point. */
5639 if (GET_CODE (x) == REG)
5640 fprintf (file, "%s", reg_names[REGNO (x)]);
5641 else if (x == CONST0_RTX (GET_MODE (x)))
5642 fprintf (file, "$f31");
5644 output_operand_lossage ("invalid %%R value");
5648 /* Write the 1's complement of a constant. */
5649 if (GET_CODE (x) != CONST_INT)
5650 output_operand_lossage ("invalid %%N value");
5652 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
5656 /* Write 1 << C, for a constant C. */
5657 if (GET_CODE (x) != CONST_INT)
5658 output_operand_lossage ("invalid %%P value");
5660 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x));
5664 /* Write the high-order 16 bits of a constant, sign-extended. */
5665 if (GET_CODE (x) != CONST_INT)
5666 output_operand_lossage ("invalid %%h value");
5668 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) >> 16);
5672 /* Write the low-order 16 bits of a constant, sign-extended. */
5673 if (GET_CODE (x) != CONST_INT)
5674 output_operand_lossage ("invalid %%L value");
5676 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5677 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
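/* E.g. (illustrative) INTVAL == 0x8765 prints 0x8765 - 2*0x8000 == -30875,
   the low 16 bits read as a signed lda offset.  */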
5681 /* Write mask for ZAP insn. */
5682 if (GET_CODE (x) == CONST_DOUBLE)
5684 HOST_WIDE_INT mask = 0;
5685 HOST_WIDE_INT value;
5687 value = CONST_DOUBLE_LOW (x);
5688 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5693 value = CONST_DOUBLE_HIGH (x);
5694 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5697 mask |= (1 << (i + sizeof (int)));
5699 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
5702 else if (GET_CODE (x) == CONST_INT)
5704 HOST_WIDE_INT mask = 0, value = INTVAL (x);
5706 for (i = 0; i < 8; i++, value >>= 8)
5710 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask);
5713 output_operand_lossage ("invalid %%m value");
5717 /* 'b', 'w', 'l', or 'q' as the value of the constant. */
5718 if (GET_CODE (x) != CONST_INT
5719 || (INTVAL (x) != 8 && INTVAL (x) != 16
5720 && INTVAL (x) != 32 && INTVAL (x) != 64))
5721 output_operand_lossage ("invalid %%M value");
5723 fprintf (file, "%s",
5724 (INTVAL (x) == 8 ? "b"
5725 : INTVAL (x) == 16 ? "w"
5726 : INTVAL (x) == 32 ? "l"
5731 /* Similar, except do it from the mask. */
5732 if (GET_CODE (x) == CONST_INT)
5734 HOST_WIDE_INT value = INTVAL (x);
5741 if (value == 0xffff)
5746 if (value == 0xffffffff)
5757 else if (HOST_BITS_PER_WIDE_INT == 32
5758 && GET_CODE (x) == CONST_DOUBLE
5759 && CONST_DOUBLE_LOW (x) == 0xffffffff
5760 && CONST_DOUBLE_HIGH (x) == 0)
5765 output_operand_lossage ("invalid %%U value");
5769 /* Write the constant value divided by 8 for little-endian mode or
5770 (56 - value) / 8 for big-endian mode. */
5772 if (GET_CODE (x) != CONST_INT
5773 || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN
5776 || (INTVAL (x) & 7) != 0)
5777 output_operand_lossage ("invalid %%s value");
5779 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5781 ? (56 - INTVAL (x)) / 8
5786 /* Same, except compute (64 - c) / 8. */
5788 if (GET_CODE (x) != CONST_INT
5789 || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5790 || (INTVAL (x) & 7) != 0)
5791 output_operand_lossage ("invalid %%s value");
5793 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
5798 /* On Unicos/Mk systems: use a DEX expression if the symbol
5799 clashes with a register name. */
5800 int dex = unicosmk_need_dex (x);
5802 fprintf (file, "DEX(%d)", dex);
5804 output_addr_const (file, x);
5808 case 'C': case 'D': case 'c': case 'd':
5809 /* Write out comparison name. */
5811 enum rtx_code c = GET_CODE (x);
5813 if (GET_RTX_CLASS (c) != '<')
5814 output_operand_lossage ("invalid %%C value");
5816 else if (code == 'D')
5817 c = reverse_condition (c);
5818 else if (code == 'c')
5819 c = swap_condition (c);
5820 else if (code == 'd')
5821 c = swap_condition (reverse_condition (c));
5824 fprintf (file, "ule");
5826 fprintf (file, "ult");
5827 else if (c == UNORDERED)
5828 fprintf (file, "un");
5830 fprintf (file, "%s", GET_RTX_NAME (c));
5835 /* Write the divide or modulus operator. */
5836 switch (GET_CODE (x))
5839 fprintf (file, "div%s", GET_MODE (x) == SImode ? "l" : "q");
5842 fprintf (file, "div%su", GET_MODE (x) == SImode ? "l" : "q");
5845 fprintf (file, "rem%s", GET_MODE (x) == SImode ? "l" : "q");
5848 fprintf (file, "rem%su", GET_MODE (x) == SImode ? "l" : "q");
5851 output_operand_lossage ("invalid %%E value");
5857 /* Write "_u" for unaligned access. */
5858 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
5859 fprintf (file, "_u");
5863 if (GET_CODE (x) == REG)
5864 fprintf (file, "%s", reg_names[REGNO (x)]);
5865 else if (GET_CODE (x) == MEM)
5866 output_address (XEXP (x, 0));
5867 else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
5869 switch (XINT (XEXP (x, 0), 1))
5873 output_addr_const (file, XVECEXP (XEXP (x, 0), 0, 0));
5876 output_operand_lossage ("unknown relocation unspec");
5881 output_addr_const (file, x);
5885 output_operand_lossage ("invalid %%xn code");
5890 print_operand_address (file, addr)
5895 HOST_WIDE_INT offset = 0;
5897 if (GET_CODE (addr) == AND)
5898 addr = XEXP (addr, 0);
5900 if (GET_CODE (addr) == PLUS
5901 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5903 offset = INTVAL (XEXP (addr, 1));
5904 addr = XEXP (addr, 0);
5907 if (GET_CODE (addr) == LO_SUM)
5909 const char *reloc16, *reloclo;
5910 rtx op1 = XEXP (addr, 1);
5912 if (GET_CODE (op1) == CONST && GET_CODE (XEXP (op1, 0)) == UNSPEC)
5914 op1 = XEXP (op1, 0);
5915 switch (XINT (op1, 1))
5919 reloclo = (alpha_tls_size == 16 ? "dtprel" : "dtprello");
5923 reloclo = (alpha_tls_size == 16 ? "tprel" : "tprello");
5926 output_operand_lossage ("unknown relocation unspec");
5930 output_addr_const (file, XVECEXP (op1, 0, 0));
5935 reloclo = "gprellow";
5936 output_addr_const (file, op1);
5942 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
5945 addr = XEXP (addr, 0);
5946 if (GET_CODE (addr) == REG)
5947 basereg = REGNO (addr);
5948 else if (GET_CODE (addr) == SUBREG
5949 && GET_CODE (SUBREG_REG (addr)) == REG)
5950 basereg = subreg_regno (addr);
5954 fprintf (file, "($%d)\t\t!%s", basereg,
5955 (basereg == 29 ? reloc16 : reloclo));
5959 if (GET_CODE (addr) == REG)
5960 basereg = REGNO (addr);
5961 else if (GET_CODE (addr) == SUBREG
5962 && GET_CODE (SUBREG_REG (addr)) == REG)
5963 basereg = subreg_regno (addr);
5964 else if (GET_CODE (addr) == CONST_INT)
5965 offset = INTVAL (addr);
5967 #if TARGET_ABI_OPEN_VMS
5968 else if (GET_CODE (addr) == SYMBOL_REF)
5970 fprintf (file, "%s", XSTR (addr, 0));
5973 else if (GET_CODE (addr) == CONST
5974 && GET_CODE (XEXP (addr, 0)) == PLUS
5975 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF)
5977 fprintf (file, "%s+%d",
5978 XSTR (XEXP (XEXP (addr, 0), 0), 0),
5979 INTVAL (XEXP (XEXP (addr, 0), 1)));
5987 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset);
5988 fprintf (file, "($%d)", basereg);
5991 /* Emit RTL insns to initialize the variable parts of a trampoline at
5992 TRAMP. FNADDR is an RTX for the address of the function's pure
5993 code. CXT is an RTX for the static chain value for the function.
5995 The three offset parameters are for the individual template's
5996 layout. A JMPOFS < 0 indicates that the trampoline does not
5997 contain instructions at all.
5999 We assume here that a function will be called many more times than
6000 its address is taken (e.g., it might be passed to qsort), so we
6001 take the trouble to initialize the "hint" field in the JMP insn.
6002 Note that the hint field is PC (new) + 4 * bits 13:0. */
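/* Sketch of the (currently disabled) hint computation below, illustrative:
   hint = ((fnaddr - (tramp + jmpofs + 4)) >> 2) & 0x3fff,
   i.e. the instruction-count displacement from the insn after the jmp,
   truncated to 14 bits.  */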
6005 alpha_initialize_trampoline (tramp, fnaddr, cxt, fnofs, cxtofs, jmpofs)
6006 rtx tramp, fnaddr, cxt;
6007 int fnofs, cxtofs, jmpofs;
6009 rtx temp, temp1, addr;
6010 /* VMS really uses DImode pointers in memory at this point. */
6011 enum machine_mode mode = TARGET_ABI_OPEN_VMS ? Pmode : ptr_mode;
6013 #ifdef POINTERS_EXTEND_UNSIGNED
6014 fnaddr = convert_memory_address (mode, fnaddr);
6015 cxt = convert_memory_address (mode, cxt);
6018 /* Store function address and CXT. */
6019 addr = memory_address (mode, plus_constant (tramp, fnofs));
6020 emit_move_insn (gen_rtx_MEM (mode, addr), fnaddr);
6021 addr = memory_address (mode, plus_constant (tramp, cxtofs));
6022 emit_move_insn (gen_rtx_MEM (mode, addr), cxt);
6024 /* This has been disabled since the hint only has a 32k range, and in
6025 no existing OS is the stack within 32k of the text segment. */
6026 if (0 && jmpofs >= 0)
6028 /* Compute hint value. */
6029 temp = force_operand (plus_constant (tramp, jmpofs+4), NULL_RTX);
6030 temp = expand_binop (DImode, sub_optab, fnaddr, temp, temp, 1,
6032 temp = expand_shift (RSHIFT_EXPR, Pmode, temp,
6033 build_int_2 (2, 0), NULL_RTX, 1);
6034 temp = expand_and (SImode, gen_lowpart (SImode, temp),
6035 GEN_INT (0x3fff), 0);
6037 /* Merge in the hint. */
6038 addr = memory_address (SImode, plus_constant (tramp, jmpofs));
6039 temp1 = force_reg (SImode, gen_rtx_MEM (SImode, addr));
6040 temp1 = expand_and (SImode, temp1, GEN_INT (0xffffc000), NULL_RTX);
6041 temp1 = expand_binop (SImode, ior_optab, temp1, temp, temp1, 1,
6043 emit_move_insn (gen_rtx_MEM (SImode, addr), temp1);
6046 #ifdef TRANSFER_FROM_TRAMPOLINE
6047 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
6048 0, VOIDmode, 1, addr, Pmode);
6052 emit_insn (gen_imb ());
6055 /* Determine where to put an argument to a function.
6056 Value is zero to push the argument on the stack,
6057 or a hard register in which to store the argument.
6059 MODE is the argument's machine mode.
6060 TYPE is the data type of the argument (as a tree).
6061 This is null for libcalls where that information may
6062 not be available.
6063 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6064 the preceding args and about the function being called.
6065 NAMED is nonzero if this argument is a named parameter
6066 (otherwise it is an extra parameter matching an ellipsis).
6068 On Alpha the first 6 words of args are normally in registers
6069 and the rest are pushed. */
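/* For example, on OSF/1 (illustrative): for f (long a, double b, long c),
   a arrives in $16, b in $f17 and c in $18 -- each argument consumes one
   slot across both register files -- and a seventh argument would go on
   the stack.  */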
6072 function_arg (cum, mode, type, named)
6073 CUMULATIVE_ARGS cum;
6074 enum machine_mode mode;
6076 int named ATTRIBUTE_UNUSED;
6081 /* Set up defaults for FP operands passed in FP registers, and
6082 integral operands passed in integer registers. */
6084 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
6085 || GET_MODE_CLASS (mode) == MODE_FLOAT))
6090 /* ??? Irritatingly, the definition of CUMULATIVE_ARGS is different for
6091 the three platforms, so we can't avoid conditional compilation. */
6092 #if TARGET_ABI_OPEN_VMS
6094 if (mode == VOIDmode)
6095 return alpha_arg_info_reg_val (cum);
6097 num_args = cum.num_args;
6098 if (num_args >= 6 || MUST_PASS_IN_STACK (mode, type))
6102 #if TARGET_ABI_UNICOSMK
6106 /* If this is the last argument, generate the call info word (CIW). */
6107 /* ??? We don't include the caller's line number in the CIW because
6108 I don't know how to determine it if debug info is turned off. */
6109 if (mode == VOIDmode)
6118 for (i = 0; i < cum.num_reg_words && i < 5; i++)
6119 if (cum.reg_args_type[i])
6120 lo |= (1 << (7 - i));
6122 if (cum.num_reg_words == 6 && cum.reg_args_type[5])
6125 lo |= cum.num_reg_words;
6127 #if HOST_BITS_PER_WIDE_INT == 32
6128 hi = (cum.num_args << 20) | cum.num_arg_words;
6130 lo = lo | ((HOST_WIDE_INT) cum.num_args << 52)
6131 | ((HOST_WIDE_INT) cum.num_arg_words << 32);
6134 ciw = immed_double_const (lo, hi, DImode);
6136 return gen_rtx_UNSPEC (DImode, gen_rtvec (1, ciw),
6137 UNSPEC_UMK_LOAD_CIW);
6140 size = ALPHA_ARG_SIZE (mode, type, named);
6141 num_args = cum.num_reg_words;
6142 if (MUST_PASS_IN_STACK (mode, type)
6143 || cum.num_reg_words + size > 6 || cum.force_stack)
6145 else if (type && TYPE_MODE (type) == BLKmode)
6149 reg1 = gen_rtx_REG (DImode, num_args + 16);
6150 reg1 = gen_rtx_EXPR_LIST (DImode, reg1, const0_rtx);
6152 /* The argument fits in two registers. Note that we still need to
6153 reserve a register for empty structures. */
6157 return gen_rtx_PARALLEL (mode, gen_rtvec (1, reg1));
6160 reg2 = gen_rtx_REG (DImode, num_args + 17);
6161 reg2 = gen_rtx_EXPR_LIST (DImode, reg2, GEN_INT (8));
6162 return gen_rtx_PARALLEL (mode, gen_rtvec (2, reg1, reg2));
6172 /* VOID is passed as a special flag for "last argument". */
6173 if (type == void_type_node)
6175 else if (MUST_PASS_IN_STACK (mode, type))
6177 else if (FUNCTION_ARG_PASS_BY_REFERENCE (cum, mode, type, named))
6180 #endif /* TARGET_ABI_UNICOSMK */
6181 #endif /* TARGET_ABI_OPEN_VMS */
6183 return gen_rtx_REG (mode, num_args + basereg);
6187 alpha_build_va_list ()
6189 tree base, ofs, record, type_decl;
6191 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
6192 return ptr_type_node;
6194 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
6195 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6196 TREE_CHAIN (record) = type_decl;
6197 TYPE_NAME (record) = type_decl;
6199 /* C++? SET_IS_AGGR_TYPE (record, 1); */
6201 ofs = build_decl (FIELD_DECL, get_identifier ("__offset"),
6203 DECL_FIELD_CONTEXT (ofs) = record;
6205 base = build_decl (FIELD_DECL, get_identifier ("__base"),
6207 DECL_FIELD_CONTEXT (base) = record;
6208 TREE_CHAIN (base) = ofs;
6210 TYPE_FIELDS (record) = base;
6211 layout_type (record);
6217 alpha_va_start (valist, nextarg)
6219 rtx nextarg ATTRIBUTE_UNUSED;
6221 HOST_WIDE_INT offset;
6222 tree t, offset_field, base_field;
6224 if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
6227 if (TARGET_ABI_UNICOSMK)
6228 std_expand_builtin_va_start (valist, nextarg);
6230 /* For Unix, SETUP_INCOMING_VARARGS moves the starting address base
6231 up by 48, storing fp arg registers in the first 48 bytes, and the
6232 integer arg registers in the next 48 bytes. This is only done,
6233 however, if any integer registers need to be stored.
6235 If no integer registers need be stored, then we must subtract 48
6236 in order to account for the integer arg registers which are counted
6237 in argsize above, but which are not actually stored on the stack. */
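/* Concretely (illustrative): with the register dump emitted on OSF/1,
   __base points 48 bytes past the incoming-args pointer ($16-$21 land at
   __base, $f16-$f21 just below it) and __offset starts at NUM_ARGS * 8,
   skipping the named arguments.  */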
6240 offset = TARGET_ABI_OPEN_VMS ? UNITS_PER_WORD : 6 * UNITS_PER_WORD;
6242 offset = -6 * UNITS_PER_WORD;
6244 if (TARGET_ABI_OPEN_VMS)
6246 nextarg = plus_constant (nextarg, offset);
6247 nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
6248 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
6249 make_tree (ptr_type_node, nextarg));
6250 TREE_SIDE_EFFECTS (t) = 1;
6252 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6256 base_field = TYPE_FIELDS (TREE_TYPE (valist));
6257 offset_field = TREE_CHAIN (base_field);
6259 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
6260 valist, base_field);
6261 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
6262 valist, offset_field);
6264 t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
6265 t = build (PLUS_EXPR, ptr_type_node, t, build_int_2 (offset, 0));
6266 t = build (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
6267 TREE_SIDE_EFFECTS (t) = 1;
6268 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6270 t = build_int_2 (NUM_ARGS * UNITS_PER_WORD, 0);
6271 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
6272 TREE_SIDE_EFFECTS (t) = 1;
6273 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6278 alpha_va_arg (valist, type)
6282 tree t, type_size, rounded_size;
6283 tree offset_field, base_field, addr_tree, addend;
6284 tree wide_type, wide_ofs;
6287 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK)
6288 return std_expand_builtin_va_arg (valist, type);
6290 if (type == error_mark_node
6291 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
6292 || TREE_OVERFLOW (type_size))
6293 rounded_size = size_zero_node;
6295 rounded_size = fold (build (MULT_EXPR, sizetype,
6296 fold (build (TRUNC_DIV_EXPR, sizetype,
6297 fold (build (PLUS_EXPR, sizetype,
6303 base_field = TYPE_FIELDS (TREE_TYPE (valist));
6304 offset_field = TREE_CHAIN (base_field);
6306 base_field = build (COMPONENT_REF, TREE_TYPE (base_field),
6307 valist, base_field);
6308 offset_field = build (COMPONENT_REF, TREE_TYPE (offset_field),
6309 valist, offset_field);
6311 /* If the type could not be passed in registers, skip the block
6312 reserved for the registers. */
6313 if (MUST_PASS_IN_STACK (TYPE_MODE (type), type))
6315 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
6316 build (MAX_EXPR, TREE_TYPE (offset_field),
6317 offset_field, build_int_2 (6*8, 0)));
6318 TREE_SIDE_EFFECTS (t) = 1;
6319 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6322 wide_type = make_signed_type (64);
6323 wide_ofs = save_expr (build1 (CONVERT_EXPR, wide_type, offset_field));
6327 if (TYPE_MODE (type) == TFmode || TYPE_MODE (type) == TCmode)
6330 rounded_size = size_int (UNITS_PER_WORD);
6332 else if (FLOAT_TYPE_P (type))
6334 tree fpaddend, cond;
6336 fpaddend = fold (build (PLUS_EXPR, TREE_TYPE (addend),
6337 addend, build_int_2 (-6*8, 0)));
6339 cond = fold (build (LT_EXPR, integer_type_node,
6340 wide_ofs, build_int_2 (6*8, 0)));
6342 addend = fold (build (COND_EXPR, TREE_TYPE (addend), cond,
6346 addr_tree = build (PLUS_EXPR, TREE_TYPE (base_field),
6347 base_field, addend);
6349 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
6350 addr = copy_to_reg (addr);
6352 t = build (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field,
6353 build (PLUS_EXPR, TREE_TYPE (offset_field),
6354 offset_field, rounded_size));
6355 TREE_SIDE_EFFECTS (t) = 1;
6356 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
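/* Net effect of the trees built above (illustrative summary, ignoring
   the indirect TFmode/TCmode case):
   addr = __base + __offset - (FLOAT_TYPE_P (type) && __offset < 48 ? 48 : 0);
   __offset += rounded_size;  */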
6360 addr = force_reg (Pmode, addr);
6361 addr = gen_rtx_MEM (Pmode, addr);
6371 ALPHA_BUILTIN_CMPBGE,
6372 ALPHA_BUILTIN_EXTBL,
6373 ALPHA_BUILTIN_EXTWL,
6374 ALPHA_BUILTIN_EXTLL,
6375 ALPHA_BUILTIN_EXTQL,
6376 ALPHA_BUILTIN_EXTWH,
6377 ALPHA_BUILTIN_EXTLH,
6378 ALPHA_BUILTIN_EXTQH,
6379 ALPHA_BUILTIN_INSBL,
6380 ALPHA_BUILTIN_INSWL,
6381 ALPHA_BUILTIN_INSLL,
6382 ALPHA_BUILTIN_INSQL,
6383 ALPHA_BUILTIN_INSWH,
6384 ALPHA_BUILTIN_INSLH,
6385 ALPHA_BUILTIN_INSQH,
6386 ALPHA_BUILTIN_MSKBL,
6387 ALPHA_BUILTIN_MSKWL,
6388 ALPHA_BUILTIN_MSKLL,
6389 ALPHA_BUILTIN_MSKQL,
6390 ALPHA_BUILTIN_MSKWH,
6391 ALPHA_BUILTIN_MSKLH,
6392 ALPHA_BUILTIN_MSKQH,
6393 ALPHA_BUILTIN_UMULH,
6395 ALPHA_BUILTIN_ZAPNOT,
6396 ALPHA_BUILTIN_AMASK,
6397 ALPHA_BUILTIN_IMPLVER,
6399 ALPHA_BUILTIN_THREAD_POINTER,
6400 ALPHA_BUILTIN_SET_THREAD_POINTER,
6403 ALPHA_BUILTIN_MINUB8,
6404 ALPHA_BUILTIN_MINSB8,
6405 ALPHA_BUILTIN_MINUW4,
6406 ALPHA_BUILTIN_MINSW4,
6407 ALPHA_BUILTIN_MAXUB8,
6408 ALPHA_BUILTIN_MAXSB8,
6409 ALPHA_BUILTIN_MAXUW4,
6410 ALPHA_BUILTIN_MAXSW4,
6414 ALPHA_BUILTIN_UNPKBL,
6415 ALPHA_BUILTIN_UNPKBW,
6420 ALPHA_BUILTIN_CTPOP,
6425 static unsigned int const code_for_builtin[ALPHA_BUILTIN_max] = {
6426 CODE_FOR_builtin_cmpbge,
6427 CODE_FOR_builtin_extbl,
6428 CODE_FOR_builtin_extwl,
6429 CODE_FOR_builtin_extll,
6430 CODE_FOR_builtin_extql,
6431 CODE_FOR_builtin_extwh,
6432 CODE_FOR_builtin_extlh,
6433 CODE_FOR_builtin_extqh,
6434 CODE_FOR_builtin_insbl,
6435 CODE_FOR_builtin_inswl,
6436 CODE_FOR_builtin_insll,
6437 CODE_FOR_builtin_insql,
6438 CODE_FOR_builtin_inswh,
6439 CODE_FOR_builtin_inslh,
6440 CODE_FOR_builtin_insqh,
6441 CODE_FOR_builtin_mskbl,
6442 CODE_FOR_builtin_mskwl,
6443 CODE_FOR_builtin_mskll,
6444 CODE_FOR_builtin_mskql,
6445 CODE_FOR_builtin_mskwh,
6446 CODE_FOR_builtin_msklh,
6447 CODE_FOR_builtin_mskqh,
6448 CODE_FOR_umuldi3_highpart,
6449 CODE_FOR_builtin_zap,
6450 CODE_FOR_builtin_zapnot,
6451 CODE_FOR_builtin_amask,
6452 CODE_FOR_builtin_implver,
6453 CODE_FOR_builtin_rpcc,
6458 CODE_FOR_builtin_minub8,
6459 CODE_FOR_builtin_minsb8,
6460 CODE_FOR_builtin_minuw4,
6461 CODE_FOR_builtin_minsw4,
6462 CODE_FOR_builtin_maxub8,
6463 CODE_FOR_builtin_maxsb8,
6464 CODE_FOR_builtin_maxuw4,
6465 CODE_FOR_builtin_maxsw4,
6466 CODE_FOR_builtin_perr,
6467 CODE_FOR_builtin_pklb,
6468 CODE_FOR_builtin_pkwb,
6469 CODE_FOR_builtin_unpkbl,
6470 CODE_FOR_builtin_unpkbw,
6473 CODE_FOR_builtin_cttz,
6474 CODE_FOR_builtin_ctlz,
6475 CODE_FOR_builtin_ctpop
6478 struct alpha_builtin_def
6481 enum alpha_builtin code;
6482 unsigned int target_mask;
6485 static struct alpha_builtin_def const zero_arg_builtins[] = {
6486 { "__builtin_alpha_implver", ALPHA_BUILTIN_IMPLVER, 0 },
6487 { "__builtin_alpha_rpcc", ALPHA_BUILTIN_RPCC, 0 }
6490 static struct alpha_builtin_def const one_arg_builtins[] = {
6491 { "__builtin_alpha_amask", ALPHA_BUILTIN_AMASK, 0 },
6492 { "__builtin_alpha_pklb", ALPHA_BUILTIN_PKLB, MASK_MAX },
6493 { "__builtin_alpha_pkwb", ALPHA_BUILTIN_PKWB, MASK_MAX },
6494 { "__builtin_alpha_unpkbl", ALPHA_BUILTIN_UNPKBL, MASK_MAX },
6495 { "__builtin_alpha_unpkbw", ALPHA_BUILTIN_UNPKBW, MASK_MAX },
6496 { "__builtin_alpha_cttz", ALPHA_BUILTIN_CTTZ, MASK_CIX },
6497 { "__builtin_alpha_ctlz", ALPHA_BUILTIN_CTLZ, MASK_CIX },
6498 { "__builtin_alpha_ctpop", ALPHA_BUILTIN_CTPOP, MASK_CIX }
6501 static struct alpha_builtin_def const two_arg_builtins[] = {
6502 { "__builtin_alpha_cmpbge", ALPHA_BUILTIN_CMPBGE, 0 },
6503 { "__builtin_alpha_extbl", ALPHA_BUILTIN_EXTBL, 0 },
6504 { "__builtin_alpha_extwl", ALPHA_BUILTIN_EXTWL, 0 },
6505 { "__builtin_alpha_extll", ALPHA_BUILTIN_EXTLL, 0 },
6506 { "__builtin_alpha_extql", ALPHA_BUILTIN_EXTQL, 0 },
6507 { "__builtin_alpha_extwh", ALPHA_BUILTIN_EXTWH, 0 },
6508 { "__builtin_alpha_extlh", ALPHA_BUILTIN_EXTLH, 0 },
6509 { "__builtin_alpha_extqh", ALPHA_BUILTIN_EXTQH, 0 },
6510 { "__builtin_alpha_insbl", ALPHA_BUILTIN_INSBL, 0 },
6511 { "__builtin_alpha_inswl", ALPHA_BUILTIN_INSWL, 0 },
6512 { "__builtin_alpha_insll", ALPHA_BUILTIN_INSLL, 0 },
6513 { "__builtin_alpha_insql", ALPHA_BUILTIN_INSQL, 0 },
6514 { "__builtin_alpha_inswh", ALPHA_BUILTIN_INSWH, 0 },
6515 { "__builtin_alpha_inslh", ALPHA_BUILTIN_INSLH, 0 },
6516 { "__builtin_alpha_insqh", ALPHA_BUILTIN_INSQH, 0 },
6517 { "__builtin_alpha_mskbl", ALPHA_BUILTIN_MSKBL, 0 },
6518 { "__builtin_alpha_mskwl", ALPHA_BUILTIN_MSKWL, 0 },
6519 { "__builtin_alpha_mskll", ALPHA_BUILTIN_MSKLL, 0 },
6520 { "__builtin_alpha_mskql", ALPHA_BUILTIN_MSKQL, 0 },
6521 { "__builtin_alpha_mskwh", ALPHA_BUILTIN_MSKWH, 0 },
6522 { "__builtin_alpha_msklh", ALPHA_BUILTIN_MSKLH, 0 },
6523 { "__builtin_alpha_mskqh", ALPHA_BUILTIN_MSKQH, 0 },
6524 { "__builtin_alpha_umulh", ALPHA_BUILTIN_UMULH, 0 },
6525 { "__builtin_alpha_zap", ALPHA_BUILTIN_ZAP, 0 },
6526 { "__builtin_alpha_zapnot", ALPHA_BUILTIN_ZAPNOT, 0 },
6527 { "__builtin_alpha_minub8", ALPHA_BUILTIN_MINUB8, MASK_MAX },
6528 { "__builtin_alpha_minsb8", ALPHA_BUILTIN_MINSB8, MASK_MAX },
6529 { "__builtin_alpha_minuw4", ALPHA_BUILTIN_MINUW4, MASK_MAX },
6530 { "__builtin_alpha_minsw4", ALPHA_BUILTIN_MINSW4, MASK_MAX },
6531 { "__builtin_alpha_maxub8", ALPHA_BUILTIN_MAXUB8, MASK_MAX },
6532 { "__builtin_alpha_maxsb8", ALPHA_BUILTIN_MAXSB8, MASK_MAX },
6533 { "__builtin_alpha_maxuw4", ALPHA_BUILTIN_MAXUW4, MASK_MAX },
6534 { "__builtin_alpha_maxsw4", ALPHA_BUILTIN_MAXSW4, MASK_MAX },
6535 { "__builtin_alpha_perr", ALPHA_BUILTIN_PERR, MASK_MAX }
6539 alpha_init_builtins ()
6541 const struct alpha_builtin_def *p;
6545 ftype = build_function_type (long_integer_type_node, void_list_node);
6547 p = zero_arg_builtins;
6548 for (i = 0; i < ARRAY_SIZE (zero_arg_builtins); ++i, ++p)
6549 if ((target_flags & p->target_mask) == p->target_mask)
6550 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6553 ftype = build_function_type_list (long_integer_type_node,
6554 long_integer_type_node, NULL_TREE);
6556 p = one_arg_builtins;
6557 for (i = 0; i < ARRAY_SIZE (one_arg_builtins); ++i, ++p)
6558 if ((target_flags & p->target_mask) == p->target_mask)
6559 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6562 ftype = build_function_type_list (long_integer_type_node,
6563 long_integer_type_node,
6564 long_integer_type_node, NULL_TREE);
6566 p = two_arg_builtins;
6567 for (i = 0; i < ARRAY_SIZE (two_arg_builtins); ++i, ++p)
6568 if ((target_flags & p->target_mask) == p->target_mask)
6569 builtin_function (p->name, ftype, p->code, BUILT_IN_MD,
6572 ftype = build_function_type (ptr_type_node, void_list_node);
6573 builtin_function ("__builtin_thread_pointer", ftype,
6574 ALPHA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6577 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6578 builtin_function ("__builtin_set_thread_pointer", ftype,
6579 ALPHA_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
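/* Usage sketch (illustrative, not from the original source):
   long keep_low = __builtin_alpha_zapnot (x, 0x0f);   -- keep bytes 0-3
   long cmp      = __builtin_alpha_cmpbge (x, y);      -- bytewise x >= y
   each expands directly to the single instruction named in
   code_for_builtin above.  */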
6583 /* Expand an expression EXP that calls a built-in function,
6584 with result going to TARGET if that's convenient
6585 (and in mode MODE if that's convenient).
6586 SUBTARGET may be used as the target for computing one of EXP's operands.
6587 IGNORE is nonzero if the value is to be ignored. */
6590 alpha_expand_builtin (exp, target, subtarget, mode, ignore)
6593 rtx subtarget ATTRIBUTE_UNUSED;
6594 enum machine_mode mode ATTRIBUTE_UNUSED;
6595 int ignore ATTRIBUTE_UNUSED;
6599 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6600 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6601 tree arglist = TREE_OPERAND (exp, 1);
6602 enum insn_code icode;
6603 rtx op[MAX_ARGS], pat;
6607 if (fcode >= ALPHA_BUILTIN_max)
6608 internal_error ("bad builtin fcode");
6609 icode = code_for_builtin[fcode];
6611 internal_error ("bad builtin fcode");
6613 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6615 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6617 arglist = TREE_CHAIN (arglist), arity++)
6619 const struct insn_operand_data *insn_op;
6621 tree arg = TREE_VALUE (arglist);
6622 if (arg == error_mark_node)
6624 if (arity >= MAX_ARGS)
6627 insn_op = &insn_data[icode].operand[arity + nonvoid];
6629 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6631 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6632 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6637 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6639 || GET_MODE (target) != tmode
6640 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6641 target = gen_reg_rtx (tmode);
6647 pat = GEN_FCN (icode) (target);
6651 pat = GEN_FCN (icode) (target, op[0]);
6653 pat = GEN_FCN (icode) (op[0]);
6656 pat = GEN_FCN (icode) (target, op[0], op[1]);
6671 /* This page contains routines that are used to determine what the function
6672 prologue and epilogue code will do and write them out. */
6674 /* Compute the size of the save area in the stack. */
6676 /* These variables are used for communication between the following functions.
6677 They indicate various things about the current function being compiled
6678 that are used to tell what kind of prologue, epilogue and procedure
6679 descriptor to generate. */
6681 /* The kind of procedure (null, register, or stack) we should generate. */
6682 enum alpha_procedure_types {PT_NULL = 0, PT_REGISTER = 1, PT_STACK = 2};
6683 static enum alpha_procedure_types alpha_procedure_type;
6685 /* Register number (either FP or SP) that is used to unwind the frame. */
6686 static int vms_unwind_regno;
6688 /* Register number used to save FP. We need not have one for RA since
6689 we don't modify it for register procedures. This is only defined
6690 for register frame procedures. */
6691 static int vms_save_fp_regno;
6693 /* Register number used to reference objects off our PV. */
6694 static int vms_base_regno;
6696 /* Compute register masks for saved registers. */
6699 alpha_sa_mask (imaskP, fmaskP)
6700 unsigned long *imaskP;
6701 unsigned long *fmaskP;
6703 unsigned long imask = 0;
6704 unsigned long fmask = 0;
6707 /* Irritatingly, there are two kinds of thunks -- those created with
6708 ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go through
6709 the regular part of the compiler. In the ASM_OUTPUT_MI_THUNK case
6710 we don't have valid register life info, but assemble_start_function
6711 wants to output .frame and .mask directives. */
6712 if (current_function_is_thunk && !no_new_pseudos)
6719 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
6720 imask |= (1L << HARD_FRAME_POINTER_REGNUM);
6722 /* One for every register we have to save. */
6723 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
6724 if (! fixed_regs[i] && ! call_used_regs[i]
6725 && regs_ever_live[i] && i != REG_RA
6726 && (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
6731 fmask |= (1L << (i - 32));
6734 /* We need to restore these for the handler. */
6735 if (current_function_calls_eh_return)
6738 unsigned regno = EH_RETURN_DATA_REGNO (i);
6739 if (regno == INVALID_REGNUM)
6741 imask |= 1L << regno;
6744 /* If any register spilled, then spill the return address also. */
6745 /* ??? This is required by the Digital stack unwind specification
6746 and isn't needed if we're doing Dwarf2 unwinding. */
6747 if (imask || fmask || alpha_ra_ever_killed ())
6748 imask |= (1L << REG_RA);
6757 unsigned long mask[2];
6761 alpha_sa_mask (&mask[0], &mask[1]);
6763 if (TARGET_ABI_UNICOSMK)
6765 if (mask[0] || mask[1])
6770 for (j = 0; j < 2; ++j)
6771 for (i = 0; i < 32; ++i)
6772 if ((mask[j] >> i) & 1)
6776 if (TARGET_ABI_UNICOSMK)
6778 /* We might not need to generate a frame if we don't make any calls
6779 (including calls to __T3E_MISMATCH if this is a vararg function),
6780 don't have any local variables which require stack slots, don't
6781 use alloca and have not determined that we need a frame for other
6782 reasons. */
6784 alpha_procedure_type
6785 = (sa_size || get_frame_size() != 0
6786 || current_function_outgoing_args_size
6787 || current_function_stdarg || current_function_calls_alloca
6788 || frame_pointer_needed)
6789 ? PT_STACK : PT_REGISTER;
6791 /* Always reserve space for saving callee-saved registers if we
6792 need a frame as required by the calling convention. */
6793 if (alpha_procedure_type == PT_STACK)
6796 else if (TARGET_ABI_OPEN_VMS)
6798 /* Start by assuming we can use a register procedure if we don't
6799 make any calls (REG_RA not used) or need to save any
6800 registers and a stack procedure if we do. */
6801 if ((mask[0] >> REG_RA) & 1)
6802 alpha_procedure_type = PT_STACK;
6803 else if (get_frame_size() != 0)
6804 alpha_procedure_type = PT_REGISTER;
6806 alpha_procedure_type = PT_NULL;
6808 /* Don't reserve space for saving FP & RA yet. Do that later after we've
6809 made the final decision on stack procedure vs register procedure. */
6810 if (alpha_procedure_type == PT_STACK)
6813 /* Decide whether to refer to objects off our PV via FP or PV.
6814 If we need FP for something else or if we receive a nonlocal
6815 goto (which expects PV to contain the value), we must use PV.
6816 Otherwise, start by assuming we can use FP. */
6819 = (frame_pointer_needed
6820 || current_function_has_nonlocal_label
6821 || alpha_procedure_type == PT_STACK
6822 || current_function_outgoing_args_size)
6823 ? REG_PV : HARD_FRAME_POINTER_REGNUM;
6825 /* If we want to copy PV into FP, we need to find some register
6826 in which to save FP. */
6828 vms_save_fp_regno = -1;
6829 if (vms_base_regno == HARD_FRAME_POINTER_REGNUM)
6830 for (i = 0; i < 32; i++)
6831 if (! fixed_regs[i] && call_used_regs[i] && ! regs_ever_live[i])
6832 vms_save_fp_regno = i;
6834 if (vms_save_fp_regno == -1 && alpha_procedure_type == PT_REGISTER)
6835 vms_base_regno = REG_PV, alpha_procedure_type = PT_STACK;
6836 else if (alpha_procedure_type == PT_NULL)
6837 vms_base_regno = REG_PV;
6839 /* Stack unwinding should be done via FP unless we use it for PV. */
6840 vms_unwind_regno = (vms_base_regno == REG_PV
6841 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
6843 /* If this is a stack procedure, allow space for saving FP and RA. */
6844 if (alpha_procedure_type == PT_STACK)
6849 /* Our size must be even (multiple of 16 bytes). */
6858 alpha_pv_save_size ()
6861 return alpha_procedure_type == PT_STACK ? 8 : 0;
6868 return vms_unwind_regno == HARD_FRAME_POINTER_REGNUM;
6871 #if TARGET_ABI_OPEN_VMS
6873 const struct attribute_spec vms_attribute_table[] =
6875 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
6876 { "overlaid", 0, 0, true, false, false, NULL },
6877 { "global", 0, 0, true, false, false, NULL },
6878 { "initialize", 0, 0, true, false, false, NULL },
6879 { NULL, 0, 0, false, false, false, NULL }
6885 find_lo_sum (px, data)
6887 void *data ATTRIBUTE_UNUSED;
6889 return GET_CODE (*px) == LO_SUM;
6893 alpha_does_function_need_gp ()
6897 /* The GP being variable is an OSF abi thing. */
6898 if (! TARGET_ABI_OSF)
6901 if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
6904 if (current_function_is_thunk)
6907 /* If we need a GP (we have a LDSYM insn or a CALL_INSN), load it first.
6908 Even if we are a static function, we still need to do this in case
6909 our address is taken and passed to something like qsort. */
6911 push_topmost_sequence ();
6912 insn = get_insns ();
6913 pop_topmost_sequence ();
6915 for (; insn; insn = NEXT_INSN (insn))
6917 && GET_CODE (PATTERN (insn)) != USE
6918 && GET_CODE (PATTERN (insn)) != CLOBBER)
6920 enum attr_type type = get_attr_type (insn);
6921 if (type == TYPE_LDSYM || type == TYPE_JSR)
6923 if (TARGET_EXPLICIT_RELOCS
6924 && for_each_rtx (&PATTERN (insn), find_lo_sum, NULL) > 0)
6931 /* Write a version stamp. Don't write anything if we are running as a
6932 cross-compiler. Otherwise, use the versions in /usr/include/stamp.h. */
6939 alpha_write_verstamp (file)
6940 FILE *file ATTRIBUTE_UNUSED;
6943 fprintf (file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
6947 /* Helper function to set RTX_FRAME_RELATED_P on instructions, including
6951 set_frame_related_p ()
6953 rtx seq = get_insns ();
6964 while (insn != NULL_RTX)
6966 RTX_FRAME_RELATED_P (insn) = 1;
6967 insn = NEXT_INSN (insn);
6969 seq = emit_insn (seq);
6973 seq = emit_insn (seq);
6974 RTX_FRAME_RELATED_P (seq) = 1;
6979 #define FRP(exp) (start_sequence (), exp, set_frame_related_p ())
6981 /* Write function prologue. */
6983 /* On vms we have two kinds of functions:
6985 - stack frame (PROC_STACK)
6986 these are 'normal' functions with local vars and which
6987 call other functions
6988 - register frame (PROC_REGISTER)
6989 keeps all data in registers, needs no stack
6991 We must pass this to the assembler so it can generate the
6992 proper pdsc (procedure descriptor).
6993 This is done with the '.pdesc' command.
6995 On non-VMS targets, we don't really differentiate between the two, as we can
6996 simply allocate stack without saving registers. */
6999 alpha_expand_prologue ()
7001 /* Registers to save. */
7002 unsigned long imask = 0;
7003 unsigned long fmask = 0;
7004 /* Stack space needed for pushing registers clobbered by us. */
7005 HOST_WIDE_INT sa_size;
7006 /* Complete stack size needed. */
7007 HOST_WIDE_INT frame_size;
7008 /* Offset from base reg to register save area. */
7009 HOST_WIDE_INT reg_offset;
7013 sa_size = alpha_sa_size ();
7015 frame_size = get_frame_size ();
7016 if (TARGET_ABI_OPEN_VMS)
7017 frame_size = ALPHA_ROUND (sa_size
7018 + (alpha_procedure_type == PT_STACK ? 8 : 0)
7020 + current_function_pretend_args_size);
7021 else if (TARGET_ABI_UNICOSMK)
7022 /* We have to allocate space for the DSIB if we generate a frame. */
7023 frame_size = ALPHA_ROUND (sa_size
7024 + (alpha_procedure_type == PT_STACK ? 48 : 0))
7025 + ALPHA_ROUND (frame_size
7026 + current_function_outgoing_args_size);
7028 frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7030 + ALPHA_ROUND (frame_size
7031 + current_function_pretend_args_size));
7033 if (TARGET_ABI_OPEN_VMS)
7036 reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7038 alpha_sa_mask (&imask, &fmask);
7040 /* Emit an insn to reload GP, if needed. */
7043 alpha_function_needs_gp = alpha_does_function_need_gp ();
7044 if (alpha_function_needs_gp)
7045 emit_insn (gen_prologue_ldgp ());
7048 /* TARGET_PROFILING_NEEDS_GP actually implies that we need to insert
7049 the call to mcount ourselves, rather than having the linker do it
7050 magically in response to -pg. Since _mcount has special linkage,
7051 don't represent the call as a call. */
7052 if (TARGET_PROFILING_NEEDS_GP && current_function_profile)
7053 emit_insn (gen_prologue_mcount ());
7055 if (TARGET_ABI_UNICOSMK)
7056 unicosmk_gen_dsib (&imask);
7058 /* Adjust the stack by the frame size. If the frame size is > 4096
7059 bytes, we need to be sure we probe somewhere in the first and last
7060 4096 bytes (we can probably get away without the latter test) and
7061 every 8192 bytes in between. If the frame size is > 32768, we
7062 do this in a loop. Otherwise, we generate the explicit probe
7065 Note that we are only allowed to adjust sp once in the prologue. */
7067 if (frame_size <= 32768)
7069 if (frame_size > 4096)
7074 emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK
7077 while ((probed += 8192) < frame_size);
7079 /* We only have to do this probe if we aren't saving registers. */
7080 if (sa_size == 0 && probed + 4096 < frame_size)
7081 emit_insn (gen_probe_stack (GEN_INT (-frame_size)));
7084 if (frame_size != 0)
7085 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7086 GEN_INT (TARGET_ABI_UNICOSMK
7092 /* Here we generate code to set R22 to SP + 4096 and set R23 to the
7093 number of 8192 byte blocks to probe. We then probe each block
7094 in the loop and then set SP to the proper location. If the
7095 amount remaining is > 4096, we have to do one more probe if we
7096 are not saving any registers. */
7098 HOST_WIDE_INT blocks = (frame_size + 4096) / 8192;
7099 HOST_WIDE_INT leftover = frame_size + 4096 - blocks * 8192;
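/* E.g. (illustrative) frame_size == 20000: blocks == 24096/8192 == 2 and
   leftover == 24096 - 2*8192 == 7712, so the loop probes two 8192-byte
   blocks and the final sp adjustment covers the remaining 7712 bytes.  */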
7100 rtx ptr = gen_rtx_REG (DImode, 22);
7101 rtx count = gen_rtx_REG (DImode, 23);
7104 emit_move_insn (count, GEN_INT (blocks));
7105 emit_insn (gen_adddi3 (ptr, stack_pointer_rtx,
7106 GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));
7108 /* Because of the difficulty in emitting a new basic block this
7109 late in the compilation, generate the loop as a single insn. */
7110 emit_insn (gen_prologue_stack_probe_loop (count, ptr));
7112 if (leftover > 4096 && sa_size == 0)
7114 rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover));
7115 MEM_VOLATILE_P (last) = 1;
7116 emit_move_insn (last, const0_rtx);
7119 if (TARGET_ABI_WINDOWS_NT)
7121 /* For NT stack unwind (done by 'reverse execution'), it's
7122 not OK to take the result of a loop, even though the value
7123 is already in ptr, so we reload it via a single operation
7124 and subtract it from sp.
7126 Yes, that's correct -- we have to reload the whole constant
7127 into a temporary via ldah+lda then subtract from sp. To
7128 ensure we get ldah+lda, we use a special pattern. */
7130 HOST_WIDE_INT lo, hi;
7131 lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7132 hi = frame_size - lo;
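/* E.g. (illustrative) frame_size == 0x18000: lo == -0x8000 and
   hi == 0x20000, so the ldah loads 0x20000 and the lda adds -32768,
   reconstructing 0x18000 in exactly two instructions.  */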
7134 emit_move_insn (ptr, GEN_INT (hi));
7135 emit_insn (gen_nt_lda (ptr, GEN_INT (lo)));
7136 seq = emit_insn (gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx,
7141 seq = emit_insn (gen_adddi3 (stack_pointer_rtx, ptr,
7142 GEN_INT (-leftover)));
7145 /* This alternative is special, because the DWARF code cannot
7146 possibly intuit through the loop above. So we invent this
7147 note for it to look at instead. */
7148 RTX_FRAME_RELATED_P (seq) = 1;
7150 = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
7151 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7152 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7153 GEN_INT (TARGET_ABI_UNICOSMK
7159 if (!TARGET_ABI_UNICOSMK)
7161 /* Cope with very large offsets to the register save area. */
7162 sa_reg = stack_pointer_rtx;
7163 if (reg_offset + sa_size > 0x8000)
7165 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7168 if (low + sa_size <= 0x8000)
7169 bias = reg_offset - low, reg_offset = low;
7171 bias = reg_offset, reg_offset = 0;
7173 sa_reg = gen_rtx_REG (DImode, 24);
7174 FRP (emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx,
7178 /* Save regs in stack order. Beginning with VMS PV. */
7179 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
7181 mem = gen_rtx_MEM (DImode, stack_pointer_rtx);
7182 set_mem_alias_set (mem, alpha_sr_alias_set);
7183 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_PV)));
7186 /* Save register RA next. */
7187 if (imask & (1L << REG_RA))
7189 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7190 set_mem_alias_set (mem, alpha_sr_alias_set);
7191 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
7192 imask &= ~(1L << REG_RA);
7196 /* Now save any other registers required to be saved. */
7197 for (i = 0; i < 32; i++)
7198 if (imask & (1L << i))
7200 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7201 set_mem_alias_set (mem, alpha_sr_alias_set);
7202 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7206 for (i = 0; i < 32; i++)
7207 if (fmask & (1L << i))
7209 mem = gen_rtx_MEM (DFmode, plus_constant (sa_reg, reg_offset));
7210 set_mem_alias_set (mem, alpha_sr_alias_set);
7211 FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7215 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
7217 /* The standard frame on the T3E includes space for saving registers.
7218 We just have to use it. We don't have to save the return address and
7219 the old frame pointer here - they are saved in the DSIB. */
7222 for (i = 9; i < 15; i++)
7223 if (imask & (1L << i))
7225 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
7227 set_mem_alias_set (mem, alpha_sr_alias_set);
7228 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, i)));
7231 for (i = 2; i < 10; i++)
7232 if (fmask & (1L << i))
7234 mem = gen_rtx_MEM (DFmode, plus_constant (hard_frame_pointer_rtx,
7236 set_mem_alias_set (mem, alpha_sr_alias_set);
7237 FRP (emit_move_insn (mem, gen_rtx_REG (DFmode, i+32)));
7242 if (TARGET_ABI_OPEN_VMS)
7244 if (alpha_procedure_type == PT_REGISTER)
7245 /* Register frame procedures save the fp.
7246 ?? Ought to have a dwarf2 save for this. */
7247 emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
7248 hard_frame_pointer_rtx);
7250 if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
7251 emit_insn (gen_force_movdi (gen_rtx_REG (DImode, vms_base_regno),
7252 gen_rtx_REG (DImode, REG_PV)));
7254 if (alpha_procedure_type != PT_NULL
7255 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
7256 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
7258 /* If we have to allocate space for outgoing args, do it now. */
7259 if (current_function_outgoing_args_size != 0)
7262 plus_constant (hard_frame_pointer_rtx,
7264 (current_function_outgoing_args_size)))));
7266 else if (!TARGET_ABI_UNICOSMK)
7268 /* If we need a frame pointer, set it from the stack pointer. */
7269 if (frame_pointer_needed)
7271 if (TARGET_CAN_FAULT_IN_PROLOGUE)
7272 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
7274 /* This must always be the last instruction in the
7275 prologue, thus we emit a special move + clobber. */
7276 FRP (emit_insn (gen_init_fp (hard_frame_pointer_rtx,
7277 stack_pointer_rtx, sa_reg)));
7281 /* The ABIs for VMS and OSF/1 say that while we can schedule insns into
7282 the prologue, for exception handling reasons, we cannot do this for
7283 any insn that might fault. We could prevent this for mems with a
7284 (clobber:BLK (scratch)), but this doesn't work for fp insns. So we
7285 have to prevent all such scheduling with a blockage.
7287 Linux, on the other hand, never bothered to implement OSF/1's
7288 exception handling, and so doesn't care about such things. Anyone
7289 planning to use dwarf2 frame-unwind info can also omit the blockage. */
7291 if (! TARGET_CAN_FAULT_IN_PROLOGUE)
7292 emit_insn (gen_blockage ());
7295 /* Output the textual info surrounding the prologue. */
7298 alpha_start_function (file, fnname, decl)
7301 tree decl ATTRIBUTE_UNUSED;
7303 unsigned long imask = 0;
7304 unsigned long fmask = 0;
7305 /* Stack space needed for pushing registers clobbered by us. */
7306 HOST_WIDE_INT sa_size;
7307 /* Complete stack size needed. */
7308 HOST_WIDE_INT frame_size;
7309 /* Offset from base reg to register save area. */
7310 HOST_WIDE_INT reg_offset;
7311 char *entry_label = (char *) alloca (strlen (fnname) + 6);
7314 /* Don't emit an extern directive for functions defined in the same file. */
7315 if (TARGET_ABI_UNICOSMK)
7318 name_tree = get_identifier (fnname);
7319 TREE_ASM_WRITTEN (name_tree) = 1;
7322 alpha_fnname = fnname;
7323 sa_size = alpha_sa_size ();
7325 frame_size = get_frame_size ();
7326 if (TARGET_ABI_OPEN_VMS)
7327 frame_size = ALPHA_ROUND (sa_size
7328 + (alpha_procedure_type == PT_STACK ? 8 : 0)
7330 + current_function_pretend_args_size);
7331 else if (TARGET_ABI_UNICOSMK)
7332 frame_size = ALPHA_ROUND (sa_size
7333 + (alpha_procedure_type == PT_STACK ? 48 : 0))
7334 + ALPHA_ROUND (frame_size
7335 + current_function_outgoing_args_size);
7337 frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7339 + ALPHA_ROUND (frame_size
7340 + current_function_pretend_args_size));
7342 if (TARGET_ABI_OPEN_VMS)
7345 reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7347 alpha_sa_mask (&imask, &fmask);
7349 /* Ecoff can handle multiple .file directives, so put out file and lineno.
7350 We have to do that before the .ent directive as we cannot switch
7351 files within procedures with native ecoff because line numbers are
7352 linked to procedure descriptors.
7353   Outputting the lineno helps debugging of one-line functions, as they
7354 would otherwise get no line number at all. Please note that we would
7355 like to put out last_linenum from final.c, but it is not accessible. */
7357 if (write_symbols == SDB_DEBUG)
7359 #ifdef ASM_OUTPUT_SOURCE_FILENAME
7360 ASM_OUTPUT_SOURCE_FILENAME (file,
7361 DECL_SOURCE_FILE (current_function_decl));
7363 #ifdef ASM_OUTPUT_SOURCE_LINE
7364 if (debug_info_level != DINFO_LEVEL_TERSE)
7365 ASM_OUTPUT_SOURCE_LINE (file,
7366 DECL_SOURCE_LINE (current_function_decl));
7370 /* Issue function start and label. */
7371 if (TARGET_ABI_OPEN_VMS
7372 || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
7374 fputs ("\t.ent ", file);
7375 assemble_name (file, fnname);
7378 /* If the function needs GP, we'll write the "..ng" label there.
7379 Otherwise, do it here. */
7381 && ! alpha_function_needs_gp
7382 && ! current_function_is_thunk)
7385 assemble_name (file, fnname);
7386 fputs ("..ng:\n", file);
7390 strcpy (entry_label, fnname);
7391 if (TARGET_ABI_OPEN_VMS)
7392 strcat (entry_label, "..en");
7394 /* For public functions, the label must be globalized by appending an
7395 additional colon. */
7396 if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
7397 strcat (entry_label, ":");
7399 ASM_OUTPUT_LABEL (file, entry_label);
7400 inside_function = TRUE;
7402 if (TARGET_ABI_OPEN_VMS)
7403 fprintf (file, "\t.base $%d\n", vms_base_regno);
7405 if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT
7406 && !flag_inhibit_size_directive)
7408 /* Set flags in procedure descriptor to request IEEE-conformant
7409 math-library routines. The value we set it to is PDSC_EXC_IEEE
7410 (/usr/include/pdsc.h). */
7411 fputs ("\t.eflag 48\n", file);
7414 /* Set up offsets to alpha virtual arg/local debugging pointer. */
7415 alpha_auto_offset = -frame_size + current_function_pretend_args_size;
7416 alpha_arg_offset = -frame_size + 48;
7418 /* Describe our frame. If the frame size is larger than an integer,
7419 print it as zero to avoid an assembler error. We won't be
7420 properly describing such a frame, but that's the best we can do. */
7421 if (TARGET_ABI_UNICOSMK)
7423 else if (TARGET_ABI_OPEN_VMS)
7425 fprintf (file, "\t.frame $%d,", vms_unwind_regno);
7426 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7427 frame_size >= ((HOST_WIDE_INT) 1 << 31) ? 0 : frame_size);
7428 fputs (",$26,", file);
7429 fprintf (file, HOST_WIDE_INT_PRINT_DEC, reg_offset);
7432 else if (!flag_inhibit_size_directive)
7434 fprintf (file, "\t.frame $%d,",
7435 (frame_pointer_needed
7436 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM));
7437 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7438 frame_size >= (1l << 31) ? 0 : frame_size);
7439 fprintf (file, ",$26,%d\n", current_function_pretend_args_size);
7442 /* Describe which registers were spilled. */
7443 if (TARGET_ABI_UNICOSMK)
7445 else if (TARGET_ABI_OPEN_VMS)
7448 /* ??? Does VMS care if mask contains ra? The old code didn't
7449 set it, so I don't here. */
7450 fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1L << REG_RA));
7452 fprintf (file, "\t.fmask 0x%lx,0\n", fmask);
7453 if (alpha_procedure_type == PT_REGISTER)
7454 fprintf (file, "\t.fp_save $%d\n", vms_save_fp_regno);
7456 else if (!flag_inhibit_size_directive)
7460 fprintf (file, "\t.mask 0x%lx,", imask);
7461 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7462 frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
7465 for (i = 0; i < 32; ++i)
7466 if (imask & (1L << i))
7472 fprintf (file, "\t.fmask 0x%lx,", fmask);
7473 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
7474 frame_size >= (1l << 31) ? 0 : reg_offset - frame_size);
7479 #if TARGET_ABI_OPEN_VMS
7480   /* Ifdef'ed because link_section is only available then.  */
7481 readonly_data_section ();
7482 fprintf (file, "\t.align 3\n");
7483 assemble_name (file, fnname); fputs ("..na:\n", file);
7484 fputs ("\t.ascii \"", file);
7485 assemble_name (file, fnname);
7486 fputs ("\\0\"\n", file);
7487 alpha_need_linkage (fnname, 1);
7492 /* Emit the .prologue note at the scheduled end of the prologue. */
7495 alpha_output_function_end_prologue (file)
7498 if (TARGET_ABI_UNICOSMK)
7500 else if (TARGET_ABI_OPEN_VMS)
7501 fputs ("\t.prologue\n", file);
7502 else if (TARGET_ABI_WINDOWS_NT)
7503 fputs ("\t.prologue 0\n", file);
7504 else if (!flag_inhibit_size_directive)
7505 fprintf (file, "\t.prologue %d\n",
7506 alpha_function_needs_gp || current_function_is_thunk);
7509 /* Write function epilogue. */
7511 /* ??? At some point we will want to support full unwind, and so will
7512 need to mark the epilogue as well. At the moment, we just confuse
7515 #define FRP(exp) exp
7518 alpha_expand_epilogue ()
7520 /* Registers to save. */
7521 unsigned long imask = 0;
7522 unsigned long fmask = 0;
7523 /* Stack space needed for pushing registers clobbered by us. */
7524 HOST_WIDE_INT sa_size;
7525 /* Complete stack size needed. */
7526 HOST_WIDE_INT frame_size;
7527 /* Offset from base reg to register save area. */
7528 HOST_WIDE_INT reg_offset;
7529 int fp_is_frame_pointer, fp_offset;
7530 rtx sa_reg, sa_reg_exp = NULL;
7531 rtx sp_adj1, sp_adj2, mem;
7535 sa_size = alpha_sa_size ();
7537 frame_size = get_frame_size ();
7538 if (TARGET_ABI_OPEN_VMS)
7539 frame_size = ALPHA_ROUND (sa_size
7540 + (alpha_procedure_type == PT_STACK ? 8 : 0)
7542 + current_function_pretend_args_size);
7543 else if (TARGET_ABI_UNICOSMK)
7544 frame_size = ALPHA_ROUND (sa_size
7545 + (alpha_procedure_type == PT_STACK ? 48 : 0))
7546 + ALPHA_ROUND (frame_size
7547 + current_function_outgoing_args_size);
7549 frame_size = (ALPHA_ROUND (current_function_outgoing_args_size)
7551 + ALPHA_ROUND (frame_size
7552 + current_function_pretend_args_size));
7554 if (TARGET_ABI_OPEN_VMS)
7556 if (alpha_procedure_type == PT_STACK)
7562 reg_offset = ALPHA_ROUND (current_function_outgoing_args_size);
7564 alpha_sa_mask (&imask, &fmask);
7567 = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
7568 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed));
7570 sa_reg = stack_pointer_rtx;
7572 if (current_function_calls_eh_return)
7573 eh_ofs = EH_RETURN_STACKADJ_RTX;
7577 if (!TARGET_ABI_UNICOSMK && sa_size)
7579 /* If we have a frame pointer, restore SP from it. */
7580 if ((TARGET_ABI_OPEN_VMS
7581 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM)
7582 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed))
7583 FRP (emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx));
7585 /* Cope with very large offsets to the register save area. */
7586 if (reg_offset + sa_size > 0x8000)
7588 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7591 if (low + sa_size <= 0x8000)
7592 bias = reg_offset - low, reg_offset = low;
7594 bias = reg_offset, reg_offset = 0;
7596 sa_reg = gen_rtx_REG (DImode, 22);
7597 sa_reg_exp = plus_constant (stack_pointer_rtx, bias);
7599 FRP (emit_move_insn (sa_reg, sa_reg_exp));
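	  /* A worked example of the sign-extension idiom above: for
	     reg_offset == 0x9000, ((0x9000 & 0xffff) ^ 0x8000) - 0x8000
	     gives low == -0x7000, i.e. the low 16 bits read as a signed
	     displacement.  The bias is then 0x9000 - (-0x7000) == 0x10000,
	     loaded into $22 once, and each save-area access uses the
	     in-range displacement -0x7000 from there.  */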
7602 /* Restore registers in order, excepting a true frame pointer. */
7604 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset));
7606 set_mem_alias_set (mem, alpha_sr_alias_set);
7607 FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
7610 imask &= ~(1L << REG_RA);
7612 for (i = 0; i < 32; ++i)
7613 if (imask & (1L << i))
7615 if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
7616 fp_offset = reg_offset;
7619 mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset));
7620 set_mem_alias_set (mem, alpha_sr_alias_set);
7621 FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
7626 for (i = 0; i < 32; ++i)
7627 if (fmask & (1L << i))
7629 mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset));
7630 set_mem_alias_set (mem, alpha_sr_alias_set);
7631 FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
7635 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
7637 /* Restore callee-saved general-purpose registers. */
7641 for (i = 9; i < 15; i++)
7642 if (imask & (1L << i))
7644 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
7646 set_mem_alias_set (mem, alpha_sr_alias_set);
7647 FRP (emit_move_insn (gen_rtx_REG (DImode, i), mem));
7651 for (i = 2; i < 10; i++)
7652 if (fmask & (1L << i))
7654 mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
7656 set_mem_alias_set (mem, alpha_sr_alias_set);
7657 FRP (emit_move_insn (gen_rtx_REG (DFmode, i+32), mem));
7661 /* Restore the return address from the DSIB. */
7663 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx, -8));
7664 set_mem_alias_set (mem, alpha_sr_alias_set);
7665 FRP (emit_move_insn (gen_rtx_REG (DImode, REG_RA), mem));
7668 if (frame_size || eh_ofs)
7670 sp_adj1 = stack_pointer_rtx;
7674 sp_adj1 = gen_rtx_REG (DImode, 23);
7675 emit_move_insn (sp_adj1,
7676 gen_rtx_PLUS (Pmode, stack_pointer_rtx, eh_ofs));
7679 /* If the stack size is large, begin computation into a temporary
7680 register so as not to interfere with a potential fp restore,
7681 which must be consecutive with an SP restore. */
7682 if (frame_size < 32768
7683 && ! (TARGET_ABI_UNICOSMK && current_function_calls_alloca))
7684 sp_adj2 = GEN_INT (frame_size);
7685 else if (TARGET_ABI_UNICOSMK)
7687 sp_adj1 = gen_rtx_REG (DImode, 23);
7688 FRP (emit_move_insn (sp_adj1, hard_frame_pointer_rtx));
7689 sp_adj2 = const0_rtx;
7691 else if (frame_size < 0x40007fffL)
7693 int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7695 sp_adj2 = plus_constant (sp_adj1, frame_size - low);
7696 if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
7700 sp_adj1 = gen_rtx_REG (DImode, 23);
7701 FRP (emit_move_insn (sp_adj1, sp_adj2));
7703 sp_adj2 = GEN_INT (low);
7707 rtx tmp = gen_rtx_REG (DImode, 23);
7708 FRP (sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3));
7711 /* We can't drop new things to memory this late, afaik,
7712 so build it up by pieces. */
7713 FRP (sp_adj2 = alpha_emit_set_long_const (tmp, frame_size,
7714 -(frame_size < 0)));
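      /* To recap the size tiers above with concrete numbers: a
	 frame_size of 0x4000 fits a single lda immediate; 0x12340
	 splits as low == 0x2340 with the remaining 0x10000 folded
	 into an ldah-style addend; at 0x40007fff and beyond the
	 constant is synthesized into $23 by alpha_emit_set_const,
	 or built up piecewise by alpha_emit_set_long_const.  */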
7720 /* From now on, things must be in order. So emit blockages. */
7722 /* Restore the frame pointer. */
7723 if (TARGET_ABI_UNICOSMK)
7725 emit_insn (gen_blockage ());
7726 mem = gen_rtx_MEM (DImode,
7727 plus_constant (hard_frame_pointer_rtx, -16));
7728 set_mem_alias_set (mem, alpha_sr_alias_set);
7729 FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
7731 else if (fp_is_frame_pointer)
7733 emit_insn (gen_blockage ());
7734 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
7735 set_mem_alias_set (mem, alpha_sr_alias_set);
7736 FRP (emit_move_insn (hard_frame_pointer_rtx, mem));
7738 else if (TARGET_ABI_OPEN_VMS)
7740 emit_insn (gen_blockage ());
7741 FRP (emit_move_insn (hard_frame_pointer_rtx,
7742 gen_rtx_REG (DImode, vms_save_fp_regno)));
7745 /* Restore the stack pointer. */
7746 emit_insn (gen_blockage ());
7747 if (sp_adj2 == const0_rtx)
7748 FRP (emit_move_insn (stack_pointer_rtx, sp_adj1));
7750 FRP (emit_move_insn (stack_pointer_rtx,
7751 gen_rtx_PLUS (DImode, sp_adj1, sp_adj2)));
7755 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_REGISTER)
7757 emit_insn (gen_blockage ());
7758 FRP (emit_move_insn (hard_frame_pointer_rtx,
7759 gen_rtx_REG (DImode, vms_save_fp_regno)));
7761 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
7763 /* Decrement the frame pointer if the function does not have a
7766 emit_insn (gen_blockage ());
7767 FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
7768 hard_frame_pointer_rtx, GEN_INT (-1))));
7773 #if TARGET_ABI_OPEN_VMS
7774 #include <splay-tree.h>
7776 /* Structure to collect function names for final output
7779 enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN};
7780 enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR};
7792 enum links_kind lkind;
7793 enum reloc_kind rkind;
7796 static splay_tree alpha_funcs_tree;
7797 static splay_tree alpha_links_tree;
7799 static int mark_alpha_links_node PARAMS ((splay_tree_node, void *));
7800 static void mark_alpha_links PARAMS ((void *));
7801 static int alpha_write_one_linkage PARAMS ((splay_tree_node, void *));
7803 static int alpha_funcs_num;
7806 /* Output the rest of the textual info surrounding the epilogue. */
7809 alpha_end_function (file, fnname, decl)
7814 /* End the function. */
7815 if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive)
7817 fputs ("\t.end ", file);
7818 assemble_name (file, fnname);
7821 inside_function = FALSE;
7823 #if TARGET_ABI_OPEN_VMS
7824 alpha_write_linkage (file, fnname, decl);
7827 /* Show that we know this function if it is called again.
7829 Do this only for functions whose symbols bind locally.
7831 Don't do this for functions not defined in the .text section, as
7832 otherwise it's not unlikely that the destination is out of range
7833 for a direct branch. */
7835 if ((*targetm.binds_local_p) (decl) && decl_in_text_section (decl))
7836 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
7838 /* Output jump tables and the static subroutine information block. */
7839 if (TARGET_ABI_UNICOSMK)
7841 unicosmk_output_ssib (file, fnname);
7842 unicosmk_output_deferred_case_vectors (file);
7846 /* Emit a tail call to FUNCTION after adjusting THIS by DELTA.
7848 In order to avoid the hordes of differences between generated code
7849 with and without TARGET_EXPLICIT_RELOCS, and to avoid duplicating
7850 lots of code loading up large constants, generate rtl and emit it
7851 instead of going straight to text.
7853 Not sure why this idea hasn't been explored before... */
7856 alpha_output_mi_thunk_osf (file, thunk_fndecl, delta, function)
7858 tree thunk_fndecl ATTRIBUTE_UNUSED;
7859 HOST_WIDE_INT delta;
7862 HOST_WIDE_INT hi, lo;
7863 rtx this, insn, funexp;
7865 /* We always require a valid GP. */
7866 emit_insn (gen_prologue_ldgp ());
7867 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7869 /* Find the "this" pointer. If the function returns a structure,
7870 the structure return pointer is in $16. */
7871 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
7872 this = gen_rtx_REG (Pmode, 17);
7874 this = gen_rtx_REG (Pmode, 16);
7876 /* Add DELTA. When possible we use ldah+lda. Otherwise load the
7877 entire constant for the add. */
7878 lo = ((delta & 0xffff) ^ 0x8000) - 0x8000;
7879 hi = (((delta - lo) & 0xffffffff) ^ 0x80000000) - 0x80000000;
7880 if (hi + lo == delta)
7883 emit_insn (gen_adddi3 (this, this, GEN_INT (hi)));
7885 emit_insn (gen_adddi3 (this, this, GEN_INT (lo)));
7889 rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0),
7890 delta, -(delta < 0));
7891 emit_insn (gen_adddi3 (this, this, tmp));
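  /* For example, delta == 0x123456 splits as lo == 0x3456 and
     hi == 0x120000 (0x12 << 16), so the adjustment is one ldah-class
     add of hi followed by one lda-class add of lo.  A delta that
     cannot be reassembled this way fails the hi + lo == delta check
     and is synthesized into $0 instead.  */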
7894 /* Generate a tail call to the target function. */
7895 if (! TREE_USED (function))
7897 assemble_external (function);
7898 TREE_USED (function) = 1;
7900 funexp = XEXP (DECL_RTL (function), 0);
7901 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
7902 insn = emit_call_insn (gen_sibcall (funexp, const0_rtx));
7903 SIBLING_CALL_P (insn) = 1;
7905 /* Run just enough of rest_of_compilation to get the insns emitted.
7906 There's not really enough bulk here to make other passes such as
7907 instruction scheduling worth while. Note that use_thunk calls
7908 assemble_start_function and assemble_end_function. */
7909 insn = get_insns ();
7910 shorten_branches (insn);
7911 final_start_function (insn, file, 1);
7912 final (insn, file, 1, 0);
7913 final_end_function ();
7916 /* Debugging support. */
7920 /* Count the number of sdb-related labels generated (to find block
7921 start and end boundaries). */
7923 int sdb_label_count = 0;
7925 /* Next label # for each statement. */
7927 static int sym_lineno = 0;
7929 /* Count the number of .file directives, so that .loc is up to date. */
7931 static int num_source_filenames = 0;
7933 /* Name of the file containing the current function. */
7935 static const char *current_function_file = "";
7937 /* Offsets to alpha virtual arg/local debugging pointers. */
7939 long alpha_arg_offset;
7940 long alpha_auto_offset;
7942 /* Emit a new filename to a stream. */
7945 alpha_output_filename (stream, name)
7949 static int first_time = TRUE;
7950 char ltext_label_name[100];
7955 ++num_source_filenames;
7956 current_function_file = name;
7957 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7958 output_quoted_string (stream, name);
7959 fprintf (stream, "\n");
7960 if (!TARGET_GAS && write_symbols == DBX_DEBUG)
7961 fprintf (stream, "\t#@stabs\n");
7964 else if (write_symbols == DBX_DEBUG)
7966 ASM_GENERATE_INTERNAL_LABEL (ltext_label_name, "Ltext", 0);
7967 fprintf (stream, "%s", ASM_STABS_OP);
7968 output_quoted_string (stream, name);
7969       fprintf (stream, ",%d,0,0,%s\n", N_SOL, &ltext_label_name[1]);
7972 else if (name != current_function_file
7973 && strcmp (name, current_function_file) != 0)
7975 if (inside_function && ! TARGET_GAS)
7976 fprintf (stream, "\t#.file\t%d ", num_source_filenames);
7979 ++num_source_filenames;
7980 current_function_file = name;
7981 fprintf (stream, "\t.file\t%d ", num_source_filenames);
7984 output_quoted_string (stream, name);
7985 fprintf (stream, "\n");
7989 /* Emit a linenumber to a stream. */
7992 alpha_output_lineno (stream, line)
7996 if (write_symbols == DBX_DEBUG)
7998 /* mips-tfile doesn't understand .stabd directives. */
8000 fprintf (stream, "$LM%d:\n%s%d,0,%d,$LM%d\n",
8001 sym_lineno, ASM_STABN_OP, N_SLINE, line, sym_lineno);
8004 fprintf (stream, "\n\t.loc\t%d %d\n", num_source_filenames, line);
8007 /* Structure to show the current status of registers and memory. */
8009 struct shadow_summary
8012 unsigned int i : 31; /* Mask of int regs */
8013 unsigned int fp : 31; /* Mask of fp regs */
8014 unsigned int mem : 1; /* mem == imem | fpmem */
8018 static void summarize_insn PARAMS ((rtx, struct shadow_summary *, int));
8019 static void alpha_handle_trap_shadows PARAMS ((rtx));
8021 /* Summarize the effects of expression X on the machine.  Update SUM, a pointer
8022 to the summary structure. SET is nonzero if the insn is setting the
8023 object, otherwise zero. */
8026 summarize_insn (x, sum, set)
8028 struct shadow_summary *sum;
8031 const char *format_ptr;
8037 switch (GET_CODE (x))
8039 /* ??? Note that this case would be incorrect if the Alpha had a
8040 ZERO_EXTRACT in SET_DEST. */
8042 summarize_insn (SET_SRC (x), sum, 0);
8043 summarize_insn (SET_DEST (x), sum, 1);
8047 summarize_insn (XEXP (x, 0), sum, 1);
8051 summarize_insn (XEXP (x, 0), sum, 0);
8055 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
8056 summarize_insn (ASM_OPERANDS_INPUT (x, i), sum, 0);
8060 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
8061 summarize_insn (XVECEXP (x, 0, i), sum, 0);
8065 summarize_insn (SUBREG_REG (x), sum, 0);
8070 int regno = REGNO (x);
8071 unsigned long mask = ((unsigned long) 1) << (regno % 32);
8073 if (regno == 31 || regno == 63)
8079 sum->defd.i |= mask;
8081 sum->defd.fp |= mask;
8086 sum->used.i |= mask;
8088 sum->used.fp |= mask;
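	/* Note that the mask folds the fp registers 32-63 onto bits 0-31
	   of the fp sets: e.g. regno 34 ($f2) sets bit 2 of sum->defd.fp
	   or sum->used.fp.  Regnos 31 and 63 are the always-zero
	   registers and are special-cased just above.  */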
8099 /* Find the regs used in memory address computation: */
8100 summarize_insn (XEXP (x, 0), sum, 0);
8103 case CONST_INT: case CONST_DOUBLE:
8104 case SYMBOL_REF: case LABEL_REF: case CONST:
8105 case SCRATCH: case ASM_INPUT:
8108 /* Handle common unary and binary ops for efficiency. */
8109 case COMPARE: case PLUS: case MINUS: case MULT: case DIV:
8110 case MOD: case UDIV: case UMOD: case AND: case IOR:
8111 case XOR: case ASHIFT: case ROTATE: case ASHIFTRT: case LSHIFTRT:
8112 case ROTATERT: case SMIN: case SMAX: case UMIN: case UMAX:
8113 case NE: case EQ: case GE: case GT: case LE:
8114 case LT: case GEU: case GTU: case LEU: case LTU:
8115 summarize_insn (XEXP (x, 0), sum, 0);
8116 summarize_insn (XEXP (x, 1), sum, 0);
8119 case NEG: case NOT: case SIGN_EXTEND: case ZERO_EXTEND:
8120 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE: case FLOAT:
8121 case FIX: case UNSIGNED_FLOAT: case UNSIGNED_FIX: case ABS:
8122 case SQRT: case FFS:
8123 summarize_insn (XEXP (x, 0), sum, 0);
8127 format_ptr = GET_RTX_FORMAT (GET_CODE (x));
8128 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
8129 switch (format_ptr[i])
8132 summarize_insn (XEXP (x, i), sum, 0);
8136 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8137 summarize_insn (XVECEXP (x, i, j), sum, 0);
8149 /* Ensure a sufficient number of `trapb' insns are in the code when
8150 the user requests code with a trap precision of functions or
8153 In naive mode, when the user requests a trap-precision of
8154 "instruction", a trapb is needed after every instruction that may
8155 generate a trap. This ensures that the code is resumption safe but
8158 When optimizations are turned on, we delay issuing a trapb as long
8159 as possible. In this context, a trap shadow is the sequence of
8160 instructions that starts with a (potentially) trap generating
8161 instruction and extends to the next trapb or call_pal instruction
8162 (but GCC never generates call_pal by itself). We can delay (and
8163 therefore sometimes omit) a trapb subject to the following
8166 (a) On entry to the trap shadow, if any Alpha register or memory
8167 location contains a value that is used as an operand value by some
8168 instruction in the trap shadow (live on entry), then no instruction
8169 in the trap shadow may modify the register or memory location.
8171 (b) Within the trap shadow, the computation of the base register
8172 for a memory load or store instruction may not involve using the
8173 result of an instruction that might generate an UNPREDICTABLE
8176 (c) Within the trap shadow, no register may be used more than once
8177 as a destination register. (This is to make life easier for the
8180 (d) The trap shadow may not include any branch instructions. */
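/* As a small hypothetical illustration of rule (a):

	addt $f0,$f1,$f2	# may trap
	addq $1,$2,$1		# modifies $1
	stq  $1,0($3)		# uses $1

   If $1 is live on entry to the shadow opened by the addt, the addq
   may not sit inside it: should the addt trap after the addq has
   executed, $1 no longer holds the value needed to recover, so a
   trapb would have to be issued before the addq instead.  */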
8183 alpha_handle_trap_shadows (insns)
8186 struct shadow_summary shadow;
8187 int trap_pending, exception_nesting;
8191 exception_nesting = 0;
8194 shadow.used.mem = 0;
8195 shadow.defd = shadow.used;
8197 for (i = insns; i ; i = NEXT_INSN (i))
8199 if (GET_CODE (i) == NOTE)
8201 switch (NOTE_LINE_NUMBER (i))
8203 case NOTE_INSN_EH_REGION_BEG:
8204 exception_nesting++;
8209 case NOTE_INSN_EH_REGION_END:
8210 exception_nesting--;
8215 case NOTE_INSN_EPILOGUE_BEG:
8216 if (trap_pending && alpha_tp >= ALPHA_TP_FUNC)
8221 else if (trap_pending)
8223 if (alpha_tp == ALPHA_TP_FUNC)
8225 if (GET_CODE (i) == JUMP_INSN
8226 && GET_CODE (PATTERN (i)) == RETURN)
8229 else if (alpha_tp == ALPHA_TP_INSN)
8233 struct shadow_summary sum;
8238 sum.defd = sum.used;
8240 switch (GET_CODE (i))
8243 /* Annoyingly, get_attr_trap will abort on these. */
8244 if (GET_CODE (PATTERN (i)) == USE
8245 || GET_CODE (PATTERN (i)) == CLOBBER)
8248 summarize_insn (PATTERN (i), &sum, 0);
8250 if ((sum.defd.i & shadow.defd.i)
8251 || (sum.defd.fp & shadow.defd.fp))
8253 /* (c) would be violated */
8257 /* Combine shadow with summary of current insn: */
8258 shadow.used.i |= sum.used.i;
8259 shadow.used.fp |= sum.used.fp;
8260 shadow.used.mem |= sum.used.mem;
8261 shadow.defd.i |= sum.defd.i;
8262 shadow.defd.fp |= sum.defd.fp;
8263 shadow.defd.mem |= sum.defd.mem;
8265 if ((sum.defd.i & shadow.used.i)
8266 || (sum.defd.fp & shadow.used.fp)
8267 || (sum.defd.mem & shadow.used.mem))
8269 /* (a) would be violated (also takes care of (b)) */
8270 if (get_attr_trap (i) == TRAP_YES
8271 && ((sum.defd.i & sum.used.i)
8272 || (sum.defd.fp & sum.used.fp)))
8291 n = emit_insn_before (gen_trapb (), i);
8292 PUT_MODE (n, TImode);
8293 PUT_MODE (i, TImode);
8297 shadow.used.mem = 0;
8298 shadow.defd = shadow.used;
8303 if ((exception_nesting > 0 || alpha_tp >= ALPHA_TP_FUNC)
8304 && GET_CODE (i) == INSN
8305 && GET_CODE (PATTERN (i)) != USE
8306 && GET_CODE (PATTERN (i)) != CLOBBER
8307 && get_attr_trap (i) == TRAP_YES)
8309 if (optimize && !trap_pending)
8310 summarize_insn (PATTERN (i), &shadow, 0);
8316 /* Alpha can only issue instruction groups simultaneously if they are
8317    suitably aligned.  This is very processor-specific.  */
8319 enum alphaev4_pipe {
8326 enum alphaev5_pipe {
8337 static enum alphaev4_pipe alphaev4_insn_pipe PARAMS ((rtx));
8338 static enum alphaev5_pipe alphaev5_insn_pipe PARAMS ((rtx));
8339 static rtx alphaev4_next_group PARAMS ((rtx, int *, int *));
8340 static rtx alphaev5_next_group PARAMS ((rtx, int *, int *));
8341 static rtx alphaev4_next_nop PARAMS ((int *));
8342 static rtx alphaev5_next_nop PARAMS ((int *));
8344 static void alpha_align_insns
8345 PARAMS ((rtx, unsigned int, rtx (*)(rtx, int *, int *), rtx (*)(int *)));
8347 static enum alphaev4_pipe
8348 alphaev4_insn_pipe (insn)
8351 if (recog_memoized (insn) < 0)
8353 if (get_attr_length (insn) != 4)
8356 switch (get_attr_type (insn))
8389 static enum alphaev5_pipe
8390 alphaev5_insn_pipe (insn)
8393 if (recog_memoized (insn) < 0)
8395 if (get_attr_length (insn) != 4)
8398 switch (get_attr_type (insn))
8438 /* IN_USE is a mask of the slots currently filled within the insn group.
8439 The mask bits come from alphaev4_pipe above. If EV4_IBX is set, then
8440 the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.
8442 LEN is, of course, the length of the group in bytes. */
8445 alphaev4_next_group (insn, pin_use, plen)
8447 int *pin_use, *plen;
8454 || GET_CODE (PATTERN (insn)) == CLOBBER
8455 || GET_CODE (PATTERN (insn)) == USE)
8460 enum alphaev4_pipe pipe;
8462 pipe = alphaev4_insn_pipe (insn);
8466 /* Force complex instructions to start new groups. */
8470 	  /* If this is a completely unrecognized insn, it's an asm.
8471 We don't know how long it is, so record length as -1 to
8472 signal a needed realignment. */
8473 if (recog_memoized (insn) < 0)
8476 len = get_attr_length (insn);
8480 if (in_use & EV4_IB0)
8482 if (in_use & EV4_IB1)
8487 in_use |= EV4_IB0 | EV4_IBX;
8491 if (in_use & EV4_IB0)
8493 if (!(in_use & EV4_IBX) || (in_use & EV4_IB1))
8501 if (in_use & EV4_IB1)
8511 /* Haifa doesn't do well scheduling branches. */
8512 if (GET_CODE (insn) == JUMP_INSN)
8516 insn = next_nonnote_insn (insn);
8518 if (!insn || ! INSN_P (insn))
8521 /* Let Haifa tell us where it thinks insn group boundaries are. */
8522 if (GET_MODE (insn) == TImode)
8525 if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
8530 insn = next_nonnote_insn (insn);
8538 /* IN_USE is a mask of the slots currently filled within the insn group.
8539 The mask bits come from alphaev5_pipe above. If EV5_E01 is set, then
8540 the insn in EV5_E0 can be swapped by the hardware into EV5_E1.
8542 LEN is, of course, the length of the group in bytes. */
8545 alphaev5_next_group (insn, pin_use, plen)
8547 int *pin_use, *plen;
8554 || GET_CODE (PATTERN (insn)) == CLOBBER
8555 || GET_CODE (PATTERN (insn)) == USE)
8560 enum alphaev5_pipe pipe;
8562 pipe = alphaev5_insn_pipe (insn);
8566 /* Force complex instructions to start new groups. */
8570 	  /* If this is a completely unrecognized insn, it's an asm.
8571 We don't know how long it is, so record length as -1 to
8572 signal a needed realignment. */
8573 if (recog_memoized (insn) < 0)
8576 len = get_attr_length (insn);
8579 /* ??? Most of the places below, we would like to abort, as
8580 it would indicate an error either in Haifa, or in the
8581 scheduling description. Unfortunately, Haifa never
8582 schedules the last instruction of the BB, so we don't
8583 have an accurate TI bit to go off. */
8585 if (in_use & EV5_E0)
8587 if (in_use & EV5_E1)
8592 in_use |= EV5_E0 | EV5_E01;
8596 if (in_use & EV5_E0)
8598 if (!(in_use & EV5_E01) || (in_use & EV5_E1))
8606 if (in_use & EV5_E1)
8612 if (in_use & EV5_FA)
8614 if (in_use & EV5_FM)
8619 in_use |= EV5_FA | EV5_FAM;
8623 if (in_use & EV5_FA)
8629 if (in_use & EV5_FM)
8642 /* Haifa doesn't do well scheduling branches. */
8643 /* ??? If this is predicted not-taken, slotting continues, except
8644 that no more IBR, FBR, or JSR insns may be slotted. */
8645 if (GET_CODE (insn) == JUMP_INSN)
8649 insn = next_nonnote_insn (insn);
8651 if (!insn || ! INSN_P (insn))
8654 /* Let Haifa tell us where it thinks insn group boundaries are. */
8655 if (GET_MODE (insn) == TImode)
8658 if (GET_CODE (insn) == CLOBBER || GET_CODE (insn) == USE)
8663 insn = next_nonnote_insn (insn);
8672 alphaev4_next_nop (pin_use)
8675 int in_use = *pin_use;
8678 if (!(in_use & EV4_IB0))
8683 else if ((in_use & (EV4_IBX|EV4_IB1)) == EV4_IBX)
8688 else if (TARGET_FP && !(in_use & EV4_IB1))
8701 alphaev5_next_nop (pin_use)
8704 int in_use = *pin_use;
8707 if (!(in_use & EV5_E1))
8712 else if (TARGET_FP && !(in_use & EV5_FA))
8717 else if (TARGET_FP && !(in_use & EV5_FM))
8729 /* The instruction group alignment main loop. */
8732 alpha_align_insns (insns, max_align, next_group, next_nop)
8734 unsigned int max_align;
8735 rtx (*next_group) PARAMS ((rtx, int *, int *));
8736 rtx (*next_nop) PARAMS ((int *));
8738 /* ALIGN is the known alignment for the insn group. */
8740 /* OFS is the offset of the current insn in the insn group. */
8742 int prev_in_use, in_use, len;
8745 /* Let shorten branches care for assigning alignments to code labels. */
8746 shorten_branches (insns);
8748 if (align_functions < 4)
8750 else if ((unsigned int) align_functions < max_align)
8751 align = align_functions;
8755 ofs = prev_in_use = 0;
8757 if (GET_CODE (i) == NOTE)
8758 i = next_nonnote_insn (i);
8762 next = (*next_group) (i, &in_use, &len);
8764 /* When we see a label, resync alignment etc. */
8765 if (GET_CODE (i) == CODE_LABEL)
8767 unsigned int new_align = 1 << label_to_alignment (i);
8769 if (new_align >= align)
8771 align = new_align < max_align ? new_align : max_align;
8775 else if (ofs & (new_align-1))
8776 ofs = (ofs | (new_align-1)) + 1;
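	    /* E.g. ofs == 5 with new_align == 4 rounds up to
	       (5 | 3) + 1 == 8, the next multiple of the new alignment.  */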
8781       /* Handle complex instructions specially.  */
8782 else if (in_use == 0)
8784 /* Asms will have length < 0. This is a signal that we have
8785 lost alignment knowledge. Assume, however, that the asm
8786 will not mis-align instructions. */
8795 /* If the known alignment is smaller than the recognized insn group,
8796 realign the output. */
8797 else if ((int) align < len)
8799 unsigned int new_log_align = len > 8 ? 4 : 3;
8802 where = prev = prev_nonnote_insn (i);
8803 if (!where || GET_CODE (where) != CODE_LABEL)
8806 /* Can't realign between a call and its gp reload. */
8807 if (! (TARGET_EXPLICIT_RELOCS
8808 && prev && GET_CODE (prev) == CALL_INSN))
8810 emit_insn_before (gen_realign (GEN_INT (new_log_align)), where);
8811 align = 1 << new_log_align;
8816 /* If the group won't fit in the same INT16 as the previous,
8817 we need to add padding to keep the group together. Rather
8818 than simply leaving the insn filling to the assembler, we
8819 can make use of the knowledge of what sorts of instructions
8820 were issued in the previous group to make sure that all of
8821 the added nops are really free. */
8822 else if (ofs + len > (int) align)
8824 int nop_count = (align - ofs) / 4;
8827 	  /* Insert nops before labels, branches, and calls to truly merge
8828 the execution of the nops with the previous instruction group. */
8829 where = prev_nonnote_insn (i);
8832 if (GET_CODE (where) == CODE_LABEL)
8834 rtx where2 = prev_nonnote_insn (where);
8835 if (where2 && GET_CODE (where2) == JUMP_INSN)
8838 else if (GET_CODE (where) == INSN)
8845 emit_insn_before ((*next_nop)(&prev_in_use), where);
8846 while (--nop_count);
8850 ofs = (ofs + len) & (align - 1);
8851 prev_in_use = in_use;
8856 /* Machine dependent reorg pass. */
8862 if (alpha_tp != ALPHA_TP_PROG || flag_exceptions)
8863 alpha_handle_trap_shadows (insns);
8865 /* Due to the number of extra trapb insns, don't bother fixing up
8866 alignment when trap precision is instruction. Moreover, we can
8867 only do our job when sched2 is run. */
8868 if (optimize && !optimize_size
8869 && alpha_tp != ALPHA_TP_INSN
8870 && flag_schedule_insns_after_reload)
8872 if (alpha_cpu == PROCESSOR_EV4)
8873 alpha_align_insns (insns, 8, alphaev4_next_group, alphaev4_next_nop);
8874 else if (alpha_cpu == PROCESSOR_EV5)
8875 alpha_align_insns (insns, 16, alphaev5_next_group, alphaev5_next_nop);
8879 /* Check a floating-point value for validity for a particular machine mode. */
8881 static const char * const float_strings[] =
8883 /* These are for FLOAT_VAX. */
8884 "1.70141173319264430e+38", /* 2^127 (2^24 - 1) / 2^24 */
8885 "-1.70141173319264430e+38",
8886 "2.93873587705571877e-39", /* 2^-128 */
8887 "-2.93873587705571877e-39",
8888 /* These are for the default broken IEEE mode, which traps
8889 on infinity or denormal numbers. */
8890 "3.402823466385288598117e+38", /* 2^128 (1 - 2^-24) */
8891 "-3.402823466385288598117e+38",
8892 "1.1754943508222875079687e-38", /* 2^-126 */
8893 "-1.1754943508222875079687e-38",
8896 static REAL_VALUE_TYPE float_values[8];
8897 static int inited_float_values = 0;
8900 check_float_value (mode, d, overflow)
8901 enum machine_mode mode;
8903 int overflow ATTRIBUTE_UNUSED;
8906 if (TARGET_IEEE || TARGET_IEEE_CONFORMANT || TARGET_IEEE_WITH_INEXACT)
8909 if (inited_float_values == 0)
8912 for (i = 0; i < 8; i++)
8913 float_values[i] = REAL_VALUE_ATOF (float_strings[i], DFmode);
8915 inited_float_values = 1;
8921 REAL_VALUE_TYPE *fvptr;
8923 if (TARGET_FLOAT_VAX)
8924 fvptr = &float_values[0];
8926 fvptr = &float_values[4];
8928 memcpy (&r, d, sizeof (REAL_VALUE_TYPE));
8929 if (REAL_VALUES_LESS (fvptr[0], r))
8931 memcpy (d, &fvptr[0], sizeof (REAL_VALUE_TYPE));
8934 else if (REAL_VALUES_LESS (r, fvptr[1]))
8936 memcpy (d, &fvptr[1], sizeof (REAL_VALUE_TYPE));
8939 else if (REAL_VALUES_LESS (dconst0, r)
8940 && REAL_VALUES_LESS (r, fvptr[2]))
8942 memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
8945 else if (REAL_VALUES_LESS (r, dconst0)
8946 && REAL_VALUES_LESS (fvptr[3], r))
8948 memcpy (d, &dconst0, sizeof (REAL_VALUE_TYPE));
8956 #ifdef OBJECT_FORMAT_ELF
8958 /* Switch to the section to which we should output X. The only thing
8959 special we do here is to honor small data. */
8962 alpha_elf_select_rtx_section (mode, x, align)
8963 enum machine_mode mode;
8965 unsigned HOST_WIDE_INT align;
8967 if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
8968 /* ??? Consider using mergable sdata sections. */
8971 default_elf_select_rtx_section (mode, x, align);
8974 #endif /* OBJECT_FORMAT_ELF */
8976 #if TARGET_ABI_OPEN_VMS
8978 /* Return the VMS argument type corresponding to MODE. */
8981 alpha_arg_type (mode)
8982 enum machine_mode mode;
8987 return TARGET_FLOAT_VAX ? FF : FS;
8989 return TARGET_FLOAT_VAX ? FD : FT;
8995 /* Return an rtx for an integer representing the VMS Argument Information
8999 alpha_arg_info_reg_val (cum)
9000 CUMULATIVE_ARGS cum;
9002 unsigned HOST_WIDE_INT regval = cum.num_args;
9005 for (i = 0; i < 6; i++)
9006 regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
9008 return GEN_INT (regval);
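/* For illustration: bits 0-7 of the AI register hold the argument
   count, and the 3-bit type field for argument slot I starts at
   bit 8 + 3 * I, so the type of the second argument (slot 1) lands
   in bits 11-13.  */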
9011 /* Protect alpha_links from garbage collection. */
9014 mark_alpha_links_node (node, data)
9015 splay_tree_node node;
9016 void *data ATTRIBUTE_UNUSED;
9018 struct alpha_links *links = (struct alpha_links *) node->value;
9019 ggc_mark_rtx (links->linkage);
9024 mark_alpha_links (ptr)
9027 splay_tree tree = *(splay_tree *) ptr;
9028 splay_tree_foreach (tree, mark_alpha_links_node, NULL);
9031 /* Make (or fake) .linkage entry for function call.
9033 IS_LOCAL is 0 if name is used in call, 1 if name is used in definition.
9035    Return a SYMBOL_REF rtx for the linkage.  */
9038 alpha_need_linkage (name, is_local)
9042 splay_tree_node node;
9043 struct alpha_links *al;
9044 struct alpha_funcs *cfaf;
9051 alpha_funcs_tree = splay_tree_new
9052 ((splay_tree_compare_fn) splay_tree_compare_pointers,
9053 (splay_tree_delete_key_fn) free,
9054 (splay_tree_delete_key_fn) free);
9056 cfaf = (struct alpha_funcs *) xmalloc (sizeof (struct alpha_funcs));
9059 cfaf->num = ++alpha_funcs_num;
9061 splay_tree_insert (alpha_funcs_tree,
9062 (splay_tree_key) current_function_decl,
9063 (splay_tree_value) cfaf);
9067 if (alpha_links_tree)
9069 /* Is this name already defined? */
9071 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9074 al = (struct alpha_links *) node->value;
9077 /* Defined here but external assumed. */
9078 if (al->lkind == KIND_EXTERN)
9079 al->lkind = KIND_LOCAL;
9083 /* Used here but unused assumed. */
9084 if (al->lkind == KIND_UNUSED)
9085 al->lkind = KIND_LOCAL;
9092 alpha_links_tree = splay_tree_new
9093 ((splay_tree_compare_fn) strcmp,
9094 (splay_tree_delete_key_fn) free,
9095 (splay_tree_delete_key_fn) free);
9097 ggc_add_root (&alpha_links_tree, 1, 1, mark_alpha_links);
9100 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9101 name = xstrdup (name);
9103 /* Assume external if no definition. */
9104 al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);
9106 /* Ensure we have an IDENTIFIER so assemble_name can mark it used. */
9107 get_identifier (name);
9109 /* Construct a SYMBOL_REF for us to call. */
9111 size_t name_len = strlen (name);
9112 char *linksym = alloca (name_len + 6);
9114 memcpy (linksym + 1, name, name_len);
9115 memcpy (linksym + 1 + name_len, "..lk", 5);
9116 al->linkage = gen_rtx_SYMBOL_REF (Pmode,
9117 ggc_alloc_string (linksym, name_len + 5));
9120 splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
9121 (splay_tree_value) al);
9127 alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9133 splay_tree_node cfunnode;
9134 struct alpha_funcs *cfaf;
9135 struct alpha_links *al;
9136 const char *name = XSTR (linkage, 0);
9138 cfaf = (struct alpha_funcs *) 0;
9139 al = (struct alpha_links *) 0;
9141 cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
9142 cfaf = (struct alpha_funcs *) cfunnode->value;
9146 splay_tree_node lnode;
9148 /* Is this name already defined? */
9150 lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name);
9152 al = (struct alpha_links *) lnode->value;
9156 cfaf->links = splay_tree_new
9157 ((splay_tree_compare_fn) strcmp,
9158 (splay_tree_delete_key_fn) free,
9159 (splay_tree_delete_key_fn) free);
9160 ggc_add_root (&cfaf->links, 1, 1, mark_alpha_links);
9169 splay_tree_node node = 0;
9170 struct alpha_links *anl;
9175 name_len = strlen (name);
9177 al = (struct alpha_links *) xmalloc (sizeof (struct alpha_links));
9178 al->num = cfaf->num;
9180 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9183 anl = (struct alpha_links *) node->value;
9184 al->lkind = anl->lkind;
9187 sprintf (buf, "$%d..%s..lk", cfaf->num, name);
9188 buflen = strlen (buf);
9189 linksym = alloca (buflen + 1);
9190 memcpy (linksym, buf, buflen + 1);
9192 al->linkage = gen_rtx_SYMBOL_REF
9193 (Pmode, ggc_alloc_string (linksym, buflen + 1));
9195 splay_tree_insert (cfaf->links, (splay_tree_key) name,
9196 (splay_tree_value) al);
9200 al->rkind = KIND_CODEADDR;
9202 al->rkind = KIND_LINKAGE;
9205 return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8));
9211 alpha_write_one_linkage (node, data)
9212 splay_tree_node node;
9215 const char *const name = (const char *) node->key;
9216 struct alpha_links *link = (struct alpha_links *) node->value;
9217 FILE *stream = (FILE *) data;
9219 fprintf (stream, "$%d..%s..lk:\n", link->num, name);
9220 if (link->rkind == KIND_CODEADDR)
9222 if (link->lkind == KIND_LOCAL)
9224 /* Local and used */
9225 fprintf (stream, "\t.quad %s..en\n", name);
9229 /* External and used, request code address. */
9230 fprintf (stream, "\t.code_address %s\n", name);
9235 if (link->lkind == KIND_LOCAL)
9237 /* Local and used, build linkage pair. */
9238 fprintf (stream, "\t.quad %s..en\n", name);
9239 fprintf (stream, "\t.quad %s\n", name);
9243 /* External and used, request linkage pair. */
9244 fprintf (stream, "\t.linkage %s\n", name);
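      /* So, roughly, a locally defined function `bar' referenced from
	 function number 3 comes out as

	 $3..bar..lk:
		.quad bar..en
		.quad bar

	 while an external one collapses to a single .linkage request.  */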
9252 alpha_write_linkage (stream, funname, fundecl)
9254 const char *funname;
9257 splay_tree_node node;
9258 struct alpha_funcs *func;
9261 fprintf (stream, "\t.align 3\n");
9262 node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl);
9263 func = (struct alpha_funcs *) node->value;
9265 fputs ("\t.name ", stream);
9266 assemble_name (stream, funname);
9267 fputs ("..na\n", stream);
9268 ASM_OUTPUT_LABEL (stream, funname);
9269 fprintf (stream, "\t.pdesc ");
9270 assemble_name (stream, funname);
9271 fprintf (stream, "..en,%s\n",
9272 alpha_procedure_type == PT_STACK ? "stack"
9273 : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
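  /* For a PT_STACK function `foo' the lines above produce,
     approximately:

	.name foo..na
     foo:
	.pdesc foo..en,stack  */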
9277 splay_tree_foreach (func->links, alpha_write_one_linkage, stream);
9278 /* splay_tree_delete (func->links); */
9282 /* Given a decl, a section name, and whether the decl initializer
9283 has relocs, choose attributes for the section. */
9285 #define SECTION_VMS_OVERLAY SECTION_FORGET
9286 #define SECTION_VMS_GLOBAL SECTION_MACH_DEP
9287 #define SECTION_VMS_INITIALIZE (SECTION_VMS_GLOBAL << 1)
9290 vms_section_type_flags (decl, name, reloc)
9295 unsigned int flags = default_section_type_flags (decl, name, reloc);
9297 if (decl && DECL_ATTRIBUTES (decl)
9298 && lookup_attribute ("overlaid", DECL_ATTRIBUTES (decl)))
9299 flags |= SECTION_VMS_OVERLAY;
9300 if (decl && DECL_ATTRIBUTES (decl)
9301 && lookup_attribute ("global", DECL_ATTRIBUTES (decl)))
9302 flags |= SECTION_VMS_GLOBAL;
9303 if (decl && DECL_ATTRIBUTES (decl)
9304 && lookup_attribute ("initialize", DECL_ATTRIBUTES (decl)))
9305 flags |= SECTION_VMS_INITIALIZE;
9310 /* Switch to an arbitrary section NAME with attributes as specified
9311 by FLAGS. ALIGN specifies any known alignment requirements for
9312 the section; 0 if the default should be used. */
9315 vms_asm_named_section (name, flags)
9319 fputc ('\n', asm_out_file);
9320 fprintf (asm_out_file, ".section\t%s", name);
9322 if (flags & SECTION_VMS_OVERLAY)
9323 fprintf (asm_out_file, ",OVR");
9324 if (flags & SECTION_VMS_GLOBAL)
9325 fprintf (asm_out_file, ",GBL");
9326 if (flags & SECTION_VMS_INITIALIZE)
9327 fprintf (asm_out_file, ",NOMOD");
9328 if (flags & SECTION_DEBUG)
9329 fprintf (asm_out_file, ",NOWRT");
9331 fputc ('\n', asm_out_file);
9334 /* Record an element in the table of global constructors. SYMBOL is
9335 a SYMBOL_REF of the function to be called; PRIORITY is a number
9336 between 0 and MAX_INIT_PRIORITY.
9338 Differs from default_ctors_section_asm_out_constructor in that the
9339 width of the .ctors entry is always 64 bits, rather than the 32 bits
9340 used by a normal pointer. */
9343 vms_asm_out_constructor (symbol, priority)
9345 int priority ATTRIBUTE_UNUSED;
9348 assemble_align (BITS_PER_WORD);
9349 assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
9353 vms_asm_out_destructor (symbol, priority)
9355 int priority ATTRIBUTE_UNUSED;
9358 assemble_align (BITS_PER_WORD);
9359 assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
9364 alpha_need_linkage (name, is_local)
9365 const char *name ATTRIBUTE_UNUSED;
9366 int is_local ATTRIBUTE_UNUSED;
9372 alpha_use_linkage (linkage, cfundecl, lflag, rflag)
9373 rtx linkage ATTRIBUTE_UNUSED;
9374 tree cfundecl ATTRIBUTE_UNUSED;
9375 int lflag ATTRIBUTE_UNUSED;
9376 int rflag ATTRIBUTE_UNUSED;
9381 #endif /* TARGET_ABI_OPEN_VMS */
9383 #if TARGET_ABI_UNICOSMK
9385 static void unicosmk_output_module_name PARAMS ((FILE *));
9386 static void unicosmk_output_default_externs PARAMS ((FILE *));
9387 static void unicosmk_output_dex PARAMS ((FILE *));
9388 static void unicosmk_output_externs PARAMS ((FILE *));
9389 static void unicosmk_output_addr_vec PARAMS ((FILE *, rtx));
9390 static const char *unicosmk_ssib_name PARAMS ((void));
9391 static int unicosmk_special_name PARAMS ((const char *));
9393 /* Define the offset between two registers, one to be eliminated, and the
9394 other its replacement, at the start of a routine. */
9397 unicosmk_initial_elimination_offset (from, to)
9403 fixed_size = alpha_sa_size();
9404 if (fixed_size != 0)
9407 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
9409 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
9411 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
9412 return (ALPHA_ROUND (current_function_outgoing_args_size)
9413 + ALPHA_ROUND (get_frame_size()));
9414 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
9415 return (ALPHA_ROUND (fixed_size)
9416 + ALPHA_ROUND (get_frame_size()
9417 + current_function_outgoing_args_size));
9422 /* Output the module name for .ident and .end directives. We have to strip
9423    directories and make sure that the module name starts with a letter
9427 unicosmk_output_module_name (file)
9432 /* Strip directories. */
9434 name = strrchr (main_input_filename, '/');
9438 name = main_input_filename;
9440 /* CAM only accepts module names that start with a letter or '$'. We
9441 prefix the module name with a '$' if necessary. */
9443 if (!ISALPHA (*name))
9445 output_clean_symbol_name (file, name);
9448 /* Output text to appear at the beginning of an assembler file.  */
9451 unicosmk_asm_file_start (file)
9456 fputs ("\t.ident\t", file);
9457 unicosmk_output_module_name (file);
9458 fputs ("\n\n", file);
9460 /* The Unicos/Mk assembler uses different register names. Instead of trying
9461 to support them, we simply use micro definitions. */
9463 /* CAM has different register names: rN for the integer register N and fN
9464 for the floating-point register N. Instead of trying to use these in
9465 alpha.md, we define the symbols $N and $fN to refer to the appropriate
9468 for (i = 0; i < 32; ++i)
9469 fprintf (file, "$%d <- r%d\n", i, i);
9471 for (i = 0; i < 32; ++i)
9472 fprintf (file, "$f%d <- f%d\n", i, i);
9476   /* The .align directive fills unused space with zeroes, which does not work
9477 in code sections. We define the macro 'gcc@code@align' which uses nops
9478 instead. Note that it assumes that code sections always have the
9479 biggest possible alignment since . refers to the current offset from
9480 the beginning of the section. */
9482 fputs ("\t.macro gcc@code@align n\n", file);
9483 fputs ("gcc@n@bytes = 1 << n\n", file);
9484 fputs ("gcc@here = . % gcc@n@bytes\n", file);
9485 fputs ("\t.if ne, gcc@here, 0\n", file);
9486 fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", file);
9487 fputs ("\tbis r31,r31,r31\n", file);
9488 fputs ("\t.endr\n", file);
9489 fputs ("\t.endif\n", file);
9490 fputs ("\t.endm gcc@code@align\n\n", file);
9492 /* Output extern declarations which should always be visible. */
9493 unicosmk_output_default_externs (file);
9495 /* Open a dummy section. We always need to be inside a section for the
9496 section-switching code to work correctly.
9497 ??? This should be a module id or something like that. I still have to
9498 figure out what the rules for those are. */
9499 fputs ("\n\t.psect\t$SG00000,data\n", file);
9502 /* Output text to appear at the end of an assembler file. This includes all
9503 pending extern declarations and DEX expressions. */
9506 unicosmk_asm_file_end (file)
9509 fputs ("\t.endp\n\n", file);
9511 /* Output all pending externs. */
9513 unicosmk_output_externs (file);
9515 /* Output dex definitions used for functions whose names conflict with
9518 unicosmk_output_dex (file);
9520 fputs ("\t.end\t", file);
9521 unicosmk_output_module_name (file);
9525 /* Output the definition of a common variable. */
9528 unicosmk_output_common (file, name, size, align)
9535 printf ("T3E__: common %s\n", name);
9538 fputs("\t.endp\n\n\t.psect ", file);
9539 assemble_name(file, name);
9540 fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
9541 fprintf(file, "\t.byte\t0:%d\n", size);
9543 /* Mark the symbol as defined in this module. */
9544 name_tree = get_identifier (name);
9545 TREE_ASM_WRITTEN (name_tree) = 1;
9548 #define SECTION_PUBLIC SECTION_MACH_DEP
9549 #define SECTION_MAIN (SECTION_PUBLIC << 1)
9550 static int current_section_align;
9553 unicosmk_section_type_flags (decl, name, reloc)
9556 int reloc ATTRIBUTE_UNUSED;
9558 unsigned int flags = default_section_type_flags (decl, name, reloc);
9563 if (TREE_CODE (decl) == FUNCTION_DECL)
9565 current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
9566 if (align_functions_log > current_section_align)
9567 current_section_align = align_functions_log;
9569 if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
9570 flags |= SECTION_MAIN;
9573 current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);
9575 if (TREE_PUBLIC (decl))
9576 flags |= SECTION_PUBLIC;
9581 /* Generate a section name for decl and associate it with the
9585 unicosmk_unique_section (decl, reloc)
9587 int reloc ATTRIBUTE_UNUSED;
9595 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
9596 name = alpha_strip_name_encoding (name);
9597 len = strlen (name);
9599 if (TREE_CODE (decl) == FUNCTION_DECL)
9603 /* It is essential that we prefix the section name here because
9604 otherwise the section names generated for constructors and
9605 destructors confuse collect2. */
9607 string = alloca (len + 6);
9608 sprintf (string, "code@%s", name);
9609 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9611 else if (TREE_PUBLIC (decl))
9612 DECL_SECTION_NAME (decl) = build_string (len, name);
9617 string = alloca (len + 6);
9618 sprintf (string, "data@%s", name);
9619 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
9623 /* Switch to an arbitrary section NAME with attributes as specified
9624 by FLAGS. ALIGN specifies any known alignment requirements for
9625 the section; 0 if the default should be used. */
9628 unicosmk_asm_named_section (name, flags)
9634 /* Close the previous section. */
9636 fputs ("\t.endp\n\n", asm_out_file);
9638 /* Find out what kind of section we are opening. */
9640 if (flags & SECTION_MAIN)
9641 fputs ("\t.start\tmain\n", asm_out_file);
9643 if (flags & SECTION_CODE)
9645 else if (flags & SECTION_PUBLIC)
9650 if (current_section_align != 0)
9651 fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
9652 current_section_align, kind);
9654 fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
9658 unicosmk_insert_attributes (decl, attr_ptr)
9660 tree *attr_ptr ATTRIBUTE_UNUSED;
9663 && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
9664 unicosmk_unique_section (decl, 0);
9667 /* Output an alignment directive. We have to use the macro 'gcc@code@align'
9668    in code sections because .align fills unused space with zeroes.  */
9671 unicosmk_output_align (file, align)
9675 if (inside_function)
9676 fprintf (file, "\tgcc@code@align\t%d\n", align);
9678 fprintf (file, "\t.align\t%d\n", align);
9681 /* Add a case vector to the current function's list of deferred case
9682 vectors. Case vectors have to be put into a separate section because CAM
9683 does not allow data definitions in code sections. */
9686 unicosmk_defer_case_vector (lab, vec)
9690 struct machine_function *machine = cfun->machine;
9692 vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
9693 machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
9694 machine->addr_list);
9697 /* Output a case vector. */
9700 unicosmk_output_addr_vec (file, vec)
9704 rtx lab = XEXP (vec, 0);
9705 rtx body = XEXP (vec, 1);
9706 int vlen = XVECLEN (body, 0);
9709 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (lab));
9711 for (idx = 0; idx < vlen; idx++)
9713 ASM_OUTPUT_ADDR_VEC_ELT
9714 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
9718 /* Output current function's deferred case vectors. */
9721 unicosmk_output_deferred_case_vectors (file)
9724 struct machine_function *machine = cfun->machine;
9727 if (machine->addr_list == NULL_RTX)
9731 for (t = machine->addr_list; t; t = XEXP (t, 1))
9732 unicosmk_output_addr_vec (file, XEXP (t, 0));
9735 /* Set up the dynamic subprogram information block (DSIB) and update the
9736 frame pointer register ($15) for subroutines which have a frame. If the
9737 subroutine doesn't have a frame, simply increment $15. */
9740 unicosmk_gen_dsib (imaskP)
9741 unsigned long * imaskP;
9743 if (alpha_procedure_type == PT_STACK)
9745 const char *ssib_name;
9748 /* Allocate 64 bytes for the DSIB. */
9750 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
9752 emit_insn (gen_blockage ());
9754 /* Save the return address. */
9756 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
9757 set_mem_alias_set (mem, alpha_sr_alias_set);
9758 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
9759 (*imaskP) &= ~(1L << REG_RA);
9761 /* Save the old frame pointer. */
9763 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
9764 set_mem_alias_set (mem, alpha_sr_alias_set);
9765 FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
9766 (*imaskP) &= ~(1L << HARD_FRAME_POINTER_REGNUM);
9768 emit_insn (gen_blockage ());
9770 /* Store the SSIB pointer. */
9772 ssib_name = ggc_strdup (unicosmk_ssib_name ());
9773 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
9774 set_mem_alias_set (mem, alpha_sr_alias_set);
9776 FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
9777 gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
9778 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));
9780 /* Save the CIW index. */
9782 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
9783 set_mem_alias_set (mem, alpha_sr_alias_set);
9784 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));
9786 emit_insn (gen_blockage ());
9788 /* Set the new frame pointer. */
9790 FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
9791 stack_pointer_rtx, GEN_INT (64))));
9796 /* Increment the frame pointer register to indicate that we do not
9799 FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
9800 hard_frame_pointer_rtx, GEN_INT (1))));
9804 #define SSIB_PREFIX "__SSIB_"
9805 #define SSIB_PREFIX_LEN 7
9807 /* Generate the name of the SSIB section for the current function. */
9810 unicosmk_ssib_name ()
9812 /* This is ok since CAM won't be able to deal with names longer than that
9815 static char name[256];
9821 x = DECL_RTL (cfun->decl);
9822 if (GET_CODE (x) != MEM)
9825 if (GET_CODE (x) != SYMBOL_REF)
9827 fnname = alpha_strip_name_encoding (XSTR (x, 0));
9829 len = strlen (fnname);
9830 if (len + SSIB_PREFIX_LEN > 255)
9831 len = 255 - SSIB_PREFIX_LEN;
9833 strcpy (name, SSIB_PREFIX);
9834 strncpy (name + SSIB_PREFIX_LEN, fnname, len);
9835 name[len + SSIB_PREFIX_LEN] = 0;
9840 /* Output the static subroutine information block for the current
9844 unicosmk_output_ssib (file, fnname)
9852 struct machine_function *machine = cfun->machine;
9855 fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
9856 unicosmk_ssib_name ());
9858 /* Some required stuff and the function name length. */
9860 len = strlen (fnname);
9861 fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);
9864 ??? We don't do that yet. */
9866 fputs ("\t.quad\t0\n", file);
9868 /* Function address. */
9870 fputs ("\t.quad\t", file);
9871 assemble_name (file, fnname);
9874 fputs ("\t.quad\t0\n", file);
9875 fputs ("\t.quad\t0\n", file);
9878 ??? We do it the same way Cray CC does it but this could be
9881   for (i = 0; i < len; i++)
9882     fprintf (file, "\t.byte\t%d\n", (int) fnname[i]);
9883   if ((len % 8) == 0)
9884 fputs ("\t.quad\t0\n", file);
9886 fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);
9888 /* All call information words used in the function. */
9890 for (x = machine->first_ciw; x; x = XEXP (x, 1))
9893 fprintf (file, "\t.quad\t");
9894 #if HOST_BITS_PER_WIDE_INT == 32
9895 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
9896 CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
9898 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (ciw));
9900 fprintf (file, "\n");
/* Add a call information word (CIW) to the list of the current function's
   CIWs and return its index.

   X is a CONST_INT or CONST_DOUBLE representing the CIW.  */

rtx
unicosmk_add_call_info_word (x)
     rtx x;
{
  rtx node;
  struct machine_function *machine = cfun->machine;

  node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
  if (machine->first_ciw == NULL_RTX)
    machine->first_ciw = node;
  else
    XEXP (machine->last_ciw, 1) = node;

  machine->last_ciw = node;
  ++machine->ciw_count;

  return GEN_INT (machine->ciw_count
		  + strlen (current_function_name)/8 + 5);
}
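/* A note on the returned index, inferred from unicosmk_output_ssib rather
   than stated in the original sources: the SSIB consists of five header
   quadwords followed by strlen (name)/8 + 1 quadwords of padded function
   name, so the N-th CIW added ends up at quadword index
   N + strlen (name)/8 + 5 of the SSIB.  */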
/* Buffer in which the section directives returned by the two functions
   below are built.  */

static char unicosmk_section_buf[100];

char *
unicosmk_text_section ()
{
  static int count = 0;
  sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@text___%d,code",
	   count++);
  return unicosmk_section_buf;
}

char *
unicosmk_data_section ()
{
  static int count = 1;
  sprintf (unicosmk_section_buf, "\t.endp\n\n\t.psect\tgcc@data___%d,data",
	   count++);
  return unicosmk_section_buf;
}
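/* Illustration only: the first call to unicosmk_text_section yields

	.endp

	.psect	gcc@text___0,code

   and each subsequent call bumps the counter, so every switch to the text
   or data section starts a fresh psect.  */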
/* The Cray assembler doesn't accept extern declarations for symbols which
   are defined in the same file. We have to keep track of all global
   symbols which are referenced and/or defined in a source file and output
   extern declarations for those which are referenced but not defined at
   the end of the file.  */

/* List of identifiers for which an extern declaration might have to be
   emitted.  */

struct unicosmk_extern_list
{
  struct unicosmk_extern_list *next;
  const char *name;
};

static struct unicosmk_extern_list *unicosmk_extern_head = 0;

/* Output extern declarations which are required for every asm file.  */

static void
unicosmk_output_default_externs (file)
     FILE *file;
{
  static const char *const externs[] =
    { "__T3E_MISMATCH" };

  int i;
  int n;

  n = ARRAY_SIZE (externs);

  for (i = 0; i < n; i++)
    fprintf (file, "\t.extern\t%s\n", externs[i]);
}
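/* With the single entry above, this emits exactly

	.extern	__T3E_MISMATCH
   */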
/* Output extern declarations for global symbols which have been
   referenced but not defined.  */

static void
unicosmk_output_externs (file)
     FILE *file;
{
  struct unicosmk_extern_list *p;
  const char *real_name;
  int len;
  tree name_tree;

  len = strlen (user_label_prefix);
  for (p = unicosmk_extern_head; p != 0; p = p->next)
    {
      /* We have to strip the encoding and possibly remove user_label_prefix
	 from the identifier in order to handle -fleading-underscore and
	 explicit asm names correctly (cf. gcc.dg/asm-names-1.c).  */
      real_name = alpha_strip_name_encoding (p->name);
      if (len && p->name[0] == '*'
	  && !memcmp (real_name, user_label_prefix, len))
	real_name += len;

      name_tree = get_identifier (real_name);
      if (! TREE_ASM_WRITTEN (name_tree))
	{
	  TREE_ASM_WRITTEN (name_tree) = 1;
	  fputs ("\t.extern\t", file);
	  assemble_name (file, p->name);
	  fputs ("\n", file);
	}
    }
}

/* Record an extern.  */

void
unicosmk_add_extern (name)
     const char *name;
{
  struct unicosmk_extern_list *p;

  p = (struct unicosmk_extern_list *)
       xmalloc (sizeof (struct unicosmk_extern_list));
  p->next = unicosmk_extern_head;
  p->name = name;
  unicosmk_extern_head = p;
}
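/* Note: TREE_ASM_WRITTEN on the identifier acts as an "already emitted"
   marker in unicosmk_output_externs, so a symbol recorded several times
   via unicosmk_add_extern still gets at most one .extern declaration.  */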
/* The Cray assembler generates incorrect code if identifiers which
   conflict with register names are used as instruction operands. We have
   to replace such identifiers with DEX expressions.  */

/* Structure to collect identifiers which have been replaced by DEX
   expressions.  */

struct unicosmk_dex {
  struct unicosmk_dex *next;
  const char *name;
};

/* List of identifiers which have been replaced by DEX expressions. The DEX
   number is determined by the position in the list.  */

static struct unicosmk_dex *unicosmk_dex_list = NULL;

/* The number of elements in the DEX list.  */

static int unicosmk_dex_count = 0;

/* Check if NAME must be replaced by a DEX expression.  */

static int
unicosmk_special_name (name)
     const char *name;
{
  if (name[0] == '*')
    ++name;

  if (name[0] == '$')
    ++name;

  if (name[0] != 'r' && name[0] != 'f' && name[0] != 'R' && name[0] != 'F')
    return 0;

  switch (name[1])
    {
    case '1': case '2':
      return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));

    case '3':
      return (name[2] == '\0'
	      || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));

    default:
      return (ISDIGIT (name[1]) && name[2] == '\0');
    }
}
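/* In other words (a summary added here, derived from the checks above):
   NAME is special if, after an optional '*' and/or '$' prefix, it looks
   like a register name -- 'r', 'R', 'f' or 'F' followed by a decimal
   number in the range 0..31, e.g. "r0", "$f31" or "R17".  */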
/* Return the DEX number if X must be replaced by a DEX expression and 0
   otherwise.  */

static int
unicosmk_need_dex (x)
     rtx x;
{
  struct unicosmk_dex *dex;
  const char *name;
  int i;

  if (GET_CODE (x) != SYMBOL_REF)
    return 0;

  name = XSTR (x, 0);
  if (! unicosmk_special_name (name))
    return 0;

  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      if (! strcmp (name, dex->name))
	return i;
      --i;
    }

  dex = (struct unicosmk_dex *) xmalloc (sizeof (struct unicosmk_dex));
  dex->name = name;
  dex->next = unicosmk_dex_list;
  unicosmk_dex_list = dex;

  ++unicosmk_dex_count;
  return unicosmk_dex_count;
}
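/* Numbering sketch, derived from the code above: the list is kept in
   LIFO order and I starts at unicosmk_dex_count, so the most recently
   recorded identifier has the highest DEX number and the first one ever
   recorded has number 1.  */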
/* Output the DEX definitions for this file.  */

static void
unicosmk_output_dex (file)
     FILE *file;
{
  struct unicosmk_dex *dex;
  int i;

  if (unicosmk_dex_list == NULL)
    return;

  fprintf (file, "\t.dexstart\n");

  i = unicosmk_dex_count;
  for (dex = unicosmk_dex_list; dex; dex = dex->next)
    {
      fprintf (file, "\tDEX (%d) = ", i);
      assemble_name (file, dex->name);
      putc ('\n', file);
      --i;
    }

  fprintf (file, "\t.dexend\n");
}
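/* Illustration only: if "$r5" was recorded first and "f0" second
   (unicosmk_dex_count == 2), the output is

	.dexstart
	DEX (2) = f0
	DEX (1) = $r5
	.dexend
   */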
#else

/* Dummy versions of the Unicos/Mk routines for the other ABIs.  */

static void
unicosmk_output_deferred_case_vectors (file)
     FILE *file ATTRIBUTE_UNUSED;
{}

static void
unicosmk_gen_dsib (imaskP)
     unsigned long * imaskP ATTRIBUTE_UNUSED;
{}

static void
unicosmk_output_ssib (file, fnname)
     FILE * file ATTRIBUTE_UNUSED;
     const char * fnname ATTRIBUTE_UNUSED;
{}

rtx
unicosmk_add_call_info_word (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return NULL_RTX;
}

static int
unicosmk_need_dex (x)
     rtx x ATTRIBUTE_UNUSED;
{
  return 0;
}

#endif /* TARGET_ABI_UNICOSMK */

#include "gt-alpha.h"