/* Output routines for GCC for ARM.
   Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
   and Martin Simmons (@harleqn.co.uk).
   More major hacks by Richard Earnshaw (rearnsha@arm.com).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 59 Temple Place - Suite 330,
   Boston, MA 02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-attr.h"
#include "integrate.h"
#include "target-def.h"
/* Forward definitions of types.  */
typedef struct minipool_node    Mnode;
typedef struct minipool_fixup   Mfix;

const struct attribute_spec arm_attribute_table[];
/* Forward function declarations.  */
static void arm_add_gc_roots (void);
static int arm_gen_constant (enum rtx_code, enum machine_mode, HOST_WIDE_INT,
			     rtx, rtx, int, int);
static unsigned bit_count (unsigned long);
static int arm_address_register_rtx_p (rtx, int);
static int arm_legitimate_index_p (enum machine_mode, rtx, int);
static int thumb_base_register_rtx_p (rtx, enum machine_mode, int);
inline static int thumb_index_register_rtx_p (rtx, int);
static int const_ok_for_op (HOST_WIDE_INT, enum rtx_code);
static rtx emit_multi_reg_push (int);
static rtx emit_sfm (int, int);
static bool arm_assemble_integer (rtx, unsigned int, int);
static const char *fp_const_from_val (REAL_VALUE_TYPE *);
static arm_cc get_arm_condition_code (rtx);
static void init_fpa_table (void);
static HOST_WIDE_INT int_log2 (HOST_WIDE_INT);
static rtx is_jump_table (rtx);
static const char *output_multi_immediate (rtx *, const char *, const char *,
					   int, HOST_WIDE_INT);
static void print_multi_reg (FILE *, const char *, int, int);
static const char *shift_op (rtx, HOST_WIDE_INT *);
static struct machine_function *arm_init_machine_status (void);
static int number_of_first_bit_set (int);
static void replace_symbols_in_block (tree, rtx, rtx);
static void thumb_exit (FILE *, int, rtx);
static void thumb_pushpop (FILE *, int, int);
static HOST_WIDE_INT get_jump_table_size (rtx);
static Mnode *move_minipool_fix_forward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_forward_ref (Mfix *);
static Mnode *move_minipool_fix_backward_ref (Mnode *, Mnode *, HOST_WIDE_INT);
static Mnode *add_minipool_backward_ref (Mfix *);
static void assign_minipool_offsets (Mfix *);
static void arm_print_value (FILE *, rtx);
static void dump_minipool (rtx);
static int arm_barrier_cost (rtx);
static Mfix *create_fix_barrier (Mfix *, HOST_WIDE_INT);
static void push_minipool_barrier (rtx, HOST_WIDE_INT);
static void push_minipool_fix (rtx, HOST_WIDE_INT, rtx *, enum machine_mode,
			       rtx);
static void arm_reorg (void);
static bool note_invalid_constants (rtx, HOST_WIDE_INT, int);
static int current_file_function_operand (rtx);
static unsigned long arm_compute_save_reg0_reg12_mask (void);
static unsigned long arm_compute_save_reg_mask (void);
static unsigned long arm_isr_value (tree);
static unsigned long arm_compute_func_type (void);
static tree arm_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree arm_handle_isr_attribute (tree *, tree, tree, int, bool *);
static void arm_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arm_output_function_prologue (FILE *, HOST_WIDE_INT);
static void thumb_output_function_prologue (FILE *, HOST_WIDE_INT);
static int arm_comp_type_attributes (tree, tree);
static void arm_set_default_type_attributes (tree);
static int arm_adjust_cost (rtx, rtx, rtx, int);
static int arm_use_dfa_pipeline_interface (void);
static int count_insns_for_constant (HOST_WIDE_INT, int);
static int arm_get_strip_length (int);
static bool arm_function_ok_for_sibcall (tree, tree);
static void arm_internal_label (FILE *, const char *, unsigned long);
static void arm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
				 tree);
static int arm_rtx_costs_1 (rtx, enum rtx_code, enum rtx_code);
static bool arm_rtx_costs (rtx, int, int, int *);
static int arm_address_cost (rtx);
static bool arm_memory_load_p (rtx);
static bool arm_cirrus_insn_p (rtx);
static void cirrus_reorg (rtx);
static void arm_init_builtins (void);
static rtx arm_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void arm_init_iwmmxt_builtins (void);
static rtx safe_vector_operand (rtx, enum machine_mode);
static rtx arm_expand_binop_builtin (enum insn_code, tree, rtx);
static rtx arm_expand_unop_builtin (enum insn_code, tree, rtx, int);
#ifdef OBJECT_FORMAT_ELF
static void arm_elf_asm_named_section (const char *, unsigned int);
#endif
#ifndef ARM_PE
static void arm_encode_section_info (tree, rtx, int);
#endif

#ifdef AOF_ASSEMBLER
static void aof_globalize_label (FILE *, const char *);
static void aof_dump_imports (FILE *);
static void aof_dump_pic_table (FILE *);
static void aof_file_start (void);
static void aof_file_end (void);
#endif
/* Initialize the GCC target structure.  */
#ifdef TARGET_DLLIMPORT_DECL_ATTRIBUTES
#undef  TARGET_MERGE_DECL_ATTRIBUTES
#define TARGET_MERGE_DECL_ATTRIBUTES merge_dllimport_decl_attributes
#endif

#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arm_attribute_table

#ifdef AOF_ASSEMBLER
#undef  TARGET_ASM_BYTE_OP
#define TARGET_ASM_BYTE_OP "\tDCB\t"
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\tDCW\t"
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\tDCD\t"
#undef  TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL aof_globalize_label
#undef  TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START aof_file_start
#undef  TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END aof_file_end
#else
#undef  TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP NULL
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arm_assemble_integer
#endif
#undef  TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arm_output_function_prologue

#undef  TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arm_output_function_epilogue

#undef  TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES arm_comp_type_attributes

#undef  TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
#define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES arm_set_default_type_attributes

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST arm_adjust_cost

#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE arm_use_dfa_pipeline_interface

#undef  TARGET_ENCODE_SECTION_INFO
#ifdef ARM_PE
#define TARGET_ENCODE_SECTION_INFO  arm_pe_encode_section_info
#else
#define TARGET_ENCODE_SECTION_INFO  arm_encode_section_info
#endif
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING arm_strip_name_encoding

#undef  TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arm_internal_label

#undef  TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL arm_function_ok_for_sibcall

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK arm_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arm_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arm_address_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG arm_reorg

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS arm_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN arm_expand_builtin

struct gcc_target targetm = TARGET_INITIALIZER;
/* Obstack for minipool constant handling.  */
static struct obstack minipool_obstack;
static char *         minipool_startobj;

/* The maximum number of insns skipped which
   will be conditionalised if possible.  */
static int max_insns_skipped = 5;
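
/* For illustration (an editorial sketch, not from the original sources):
   a short branched sequence such as

	cmp	r0, #0
	beq	1f
	mov	r1, #1
	mov	r2, #2
   1:

   can instead be emitted as conditionally executed instructions

	cmp	r0, #0
	movne	r1, #1
	movne	r2, #2

   provided the skipped body is no longer than max_insns_skipped insns.  */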
extern FILE * asm_out_file;

/* True if we are currently building a constant table.  */
int making_const_table;

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */
rtx arm_compare_op0, arm_compare_op1;

/* What type of floating point are we tuning for?  */
enum fputype arm_fpu_tune;

/* What type of floating point instructions are available?  */
enum fputype arm_fpu_arch;

/* What program mode is the cpu running in?  26-bit mode or 32-bit mode.  */
enum prog_mode_type arm_prgmode;

/* Set by the -mfp=... option.  */
const char * target_fp_name = NULL;

/* Used to parse -mstructure_size_boundary command line option.  */
const char * structure_size_string = NULL;
int arm_structure_size_boundary = DEFAULT_STRUCTURE_SIZE_BOUNDARY;
/* Bit values used to identify processor capabilities.  */
#define FL_CO_PROC    (1 << 0)        /* Has external co-processor bus.  */
#define FL_FAST_MULT  (1 << 1)        /* Fast multiply.  */
#define FL_MODE26     (1 << 2)        /* 26-bit mode support.  */
#define FL_MODE32     (1 << 3)        /* 32-bit mode support.  */
#define FL_ARCH4      (1 << 4)        /* Architecture rel 4.  */
#define FL_ARCH5      (1 << 5)        /* Architecture rel 5.  */
#define FL_THUMB      (1 << 6)        /* Thumb aware.  */
#define FL_LDSCHED    (1 << 7)        /* Load scheduling necessary.  */
#define FL_STRONG     (1 << 8)        /* StrongARM.  */
#define FL_ARCH5E     (1 << 9)        /* DSP extensions to v5.  */
#define FL_XSCALE     (1 << 10)       /* XScale.  */
#define FL_CIRRUS     (1 << 11)       /* Cirrus/DSP.  */
#define FL_IWMMXT     (1 << 29)       /* XScale v2 or "Intel Wireless MMX technology".  */
#define FL_ARCH6J     (1 << 12)       /* Architecture rel 6.  Adds
					 media instructions.  */
#define FL_VFPV2      (1 << 13)       /* Vector Floating Point V2.  */
/* The bits in this mask specify which
   instructions we are allowed to generate.  */
static unsigned long insn_flags = 0;

/* The bits in this mask specify which instruction scheduling options should
   be used.  Note - there is an overlap with FL_FAST_MULT.  For some
   hardware we want to be able to generate the multiply instructions, but to
   tune as if they were not present in the architecture.  */
static unsigned long tune_flags = 0;

/* The following are used in the arm.md file as equivalents to bits
   in the above two flag variables.  */
/* Nonzero if this is an "M" variant of the processor.  */
int arm_fast_multiply = 0;

/* Nonzero if this chip supports the ARM Architecture 4 extensions.  */
int arm_arch4 = 0;

/* Nonzero if this chip supports the ARM Architecture 5 extensions.  */
int arm_arch5 = 0;

/* Nonzero if this chip supports the ARM Architecture 5E extensions.  */
int arm_arch5e = 0;

/* Nonzero if this chip can benefit from load scheduling.  */
int arm_ld_sched = 0;

/* Nonzero if this chip is a StrongARM.  */
int arm_is_strong = 0;

/* Nonzero if this chip supports Intel Wireless MMX technology.  */
int arm_arch_iwmmxt = 0;

/* Nonzero if this chip is an XScale.  */
int arm_arch_xscale = 0;

/* Nonzero if tuning for XScale.  */
int arm_tune_xscale = 0;

/* Nonzero if this chip is an ARM6 or an ARM7.  */
int arm_is_6_or_7 = 0;

/* Nonzero if this chip is a Cirrus/DSP.  */
int arm_is_cirrus = 0;

/* Nonzero if generating Thumb instructions.  */
int thumb_code = 0;
/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* The register number to be used for the PIC offset register.  */
const char * arm_pic_register_string = NULL;
int arm_pic_register = INVALID_REGNUM;

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */
int return_used_this_function;

/* Set to 1 after arm_reorg has started.  Reset to start at the start of
   the next function.  */
static int after_arm_reorg = 0;

/* The maximum number of insns to be used when loading a constant.  */
static int arm_constant_limit = 3;
/* For an explanation of these variables, see final_prescan_insn below.  */
enum arm_cond_code arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
static const char * const arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

#define streq(string1, string2) (strcmp (string1, string2) == 0)
/* Initialization code.  */

struct processors
{
  const char *const name;
  const unsigned long flags;
};

/* Not all of these give usefully different compilation alternatives,
   but there is no simple way of generalizing them.  */
static const struct processors all_cores[] =
{
  /* ARM Cores */
  {"arm2",      FL_CO_PROC | FL_MODE26 },
  {"arm250",    FL_CO_PROC | FL_MODE26 },
  {"arm3",      FL_CO_PROC | FL_MODE26 },
  {"arm6",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm60",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm600",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm610",                 FL_MODE26 | FL_MODE32 },
  {"arm620",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7",      FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* arm7m doesn't exist on its own, but only with D, (and I), but
     those don't alter the code, so arm7m is sometimes used.  */
  {"arm7m",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7d",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dm",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm7di",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm7dmi",   FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  {"arm70",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700",    FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm700i",   FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  {"arm710",                 FL_MODE26 | FL_MODE32 },
  {"arm720",                 FL_MODE26 | FL_MODE32 },
  {"arm710c",                FL_MODE26 | FL_MODE32 },
  {"arm7100",                FL_MODE26 | FL_MODE32 },
  {"arm7500",                FL_MODE26 | FL_MODE32 },
  /* Doesn't have an external co-proc, but does have embedded fpa.  */
  {"arm7500fe", FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  /* V4 Architecture Processors */
  {"arm7tdmi",  FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm710t",                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm720t",                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm740t",                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  {"arm8",        FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm810",      FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm9",                   FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm920",                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"arm920t",                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm940t",                FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9tdmi",               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED },
  {"arm9e",                  FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED },
  {"ep9312",                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  {"strongarm",     FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm110",  FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1100", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  {"strongarm1110", FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_STRONG },
  /* V5 Architecture Processors */
  {"arm10tdmi",              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm1020t",               FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_ARCH5 },
  {"arm926ejs",              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  {"arm1026ejs",             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  {"xscale",                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE },
  {"iwmmxt",                 FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
  /* V6 Architecture Processors */
  {"arm1136js",              FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J },
  {"arm1136jfs",             FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J | FL_VFPV2 },

  {NULL, 0}
};
static const struct processors all_architectures[] =
{
  /* ARM Architectures */
  { "armv2",     FL_CO_PROC | FL_MODE26 },
  { "armv2a",    FL_CO_PROC | FL_MODE26 },
  { "armv3",     FL_CO_PROC | FL_MODE26 | FL_MODE32 },
  { "armv3m",    FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT },
  { "armv4",     FL_CO_PROC | FL_MODE26 | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 },
  /* Strictly, FL_MODE26 is a permitted option for v4t, but there are no
     implementations that support it, so we will leave it out for now.  */
  { "armv4t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB },
  { "armv5",     FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5t",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 },
  { "armv5te",   FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E },
  { "armv6j",    FL_CO_PROC | FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_ARCH5 | FL_ARCH5E | FL_ARCH6J },
  { "ep9312",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_LDSCHED | FL_CIRRUS },
  { "iwmmxt",    FL_MODE32 | FL_FAST_MULT | FL_ARCH4 | FL_THUMB | FL_LDSCHED | FL_STRONG | FL_ARCH5 | FL_ARCH5E | FL_XSCALE | FL_IWMMXT },
  { NULL, 0 }
};
/* This is a magic structure.  The 'string' field is magically filled in
   with a pointer to the value specified by the user on the command line
   assuming that the user has specified such a value.  */

struct arm_cpu_select arm_select[] =
{
  /* string	  name            processors  */
  { NULL,	"-mcpu=",	all_cores  },
  { NULL,	"-march=",	all_architectures },
  { NULL,	"-mtune=",	all_cores }
};
/* Return the number of bits set in VALUE.  */
static unsigned
bit_count (unsigned long value)
{
  unsigned long count = 0;

  while (value)
    {
      count++;
      value &= value - 1;  /* Clear the least-significant set bit.  */
    }

  return count;
}
/* Fix up any incompatible options that the user has specified.
   This has now turned into a maze.  */
void
arm_override_options (void)
{
  unsigned i;

  /* Set up the flags based on the cpu/architecture selected by the user.  */
  for (i = ARRAY_SIZE (arm_select); i--;)
    {
      struct arm_cpu_select * ptr = arm_select + i;

      if (ptr->string != NULL && ptr->string[0] != '\0')
	{
	  const struct processors * sel;

	  for (sel = ptr->processors; sel->name != NULL; sel++)
	    if (streq (ptr->string, sel->name))
	      {
		if (i == 2)
		  tune_flags = sel->flags;
		else
		  {
		    /* If we have been given an architecture and a processor
		       make sure that they are compatible.  We only generate
		       a warning though, and we prefer the CPU over the
		       architecture.  */
		    if (insn_flags != 0 && (insn_flags ^ sel->flags))
		      warning ("switch -mcpu=%s conflicts with -march= switch",
			       ptr->string);

		    insn_flags = sel->flags;
		  }

		break;
	      }

	  if (sel->name == NULL)
	    error ("bad value (%s) for %s switch", ptr->string, ptr->name);
	}
    }
  /* If the user did not specify a processor, choose one for them.  */
  if (insn_flags == 0)
    {
      const struct processors * sel;
      unsigned int sought;
      static const struct cpu_default
      {
	const int cpu;
	const char *const name;
      }
      cpu_defaults[] =
      {
	{ TARGET_CPU_arm2,        "arm2" },
	{ TARGET_CPU_arm6,        "arm6" },
	{ TARGET_CPU_arm610,      "arm610" },
	{ TARGET_CPU_arm710,      "arm710" },
	{ TARGET_CPU_arm7m,       "arm7m" },
	{ TARGET_CPU_arm7500fe,   "arm7500fe" },
	{ TARGET_CPU_arm7tdmi,    "arm7tdmi" },
	{ TARGET_CPU_arm8,        "arm8" },
	{ TARGET_CPU_arm810,      "arm810" },
	{ TARGET_CPU_arm9,        "arm9" },
	{ TARGET_CPU_strongarm,   "strongarm" },
	{ TARGET_CPU_xscale,      "xscale" },
	{ TARGET_CPU_ep9312,      "ep9312" },
	{ TARGET_CPU_iwmmxt,      "iwmmxt" },
	{ TARGET_CPU_arm926ej_s,  "arm926ej-s" },
	{ TARGET_CPU_arm1026ej_s, "arm1026ej-s" },
	{ TARGET_CPU_arm1136j_s,  "arm1136j_s" },
	{ TARGET_CPU_arm1136jf_s, "arm1136jf_s" },
	{ TARGET_CPU_generic,     "arm" },
	{ 0, 0 }
      };
      const struct cpu_default * def;

      /* Find the default.  */
      for (def = cpu_defaults; def->name; def++)
	if (def->cpu == TARGET_CPU_DEFAULT)
	  break;

      /* Make sure we found the default CPU.  */
      if (def->name == NULL)
	abort ();

      /* Find the default CPU's flags.  */
      for (sel = all_cores; sel->name != NULL; sel++)
	if (streq (def->name, sel->name))
	  break;

      if (sel->name == NULL)
	abort ();

      insn_flags = sel->flags;
      /* Now check to see if the user has specified some command line
	 switches that require certain abilities from the cpu.  */
      sought = 0;

      if (TARGET_INTERWORK || TARGET_THUMB)
	{
	  sought |= (FL_THUMB | FL_MODE32);

	  /* Force apcs-32 to be used for interworking.  */
	  target_flags |= ARM_FLAG_APCS_32;

	  /* There are no ARM processors that support both APCS-26 and
	     interworking.  Therefore we force FL_MODE26 to be removed
	     from insn_flags here (if it was set), so that the search
	     below will always be able to find a compatible processor.  */
	  insn_flags &= ~FL_MODE26;
	}
      else if (!TARGET_APCS_32)
	sought |= FL_MODE26;

      if (sought != 0 && ((sought & insn_flags) != sought))
	{
	  /* Try to locate a CPU type that supports all of the abilities
	     of the default CPU, plus the extra abilities requested by
	     the user.  */
	  for (sel = all_cores; sel->name != NULL; sel++)
	    if ((sel->flags & sought) == (sought | insn_flags))
	      break;

	  if (sel->name == NULL)
	    {
	      unsigned current_bit_count = 0;
	      const struct processors * best_fit = NULL;

	      /* Ideally we would like to issue an error message here
		 saying that it was not possible to find a CPU compatible
		 with the default CPU, but which also supports the command
		 line options specified by the programmer, and so they
		 ought to use the -mcpu=<name> command line option to
		 override the default CPU type.

		 Unfortunately this does not work with multilibing.  We
		 need to be able to support multilibs for -mapcs-26 and for
		 -mthumb-interwork and there is no CPU that can support both
		 options.  Instead if we cannot find a cpu that has both the
		 characteristics of the default cpu and the given command line
		 options we scan the array again looking for a best match.  */
	      for (sel = all_cores; sel->name != NULL; sel++)
		if ((sel->flags & sought) == sought)
		  {
		    unsigned count;

		    count = bit_count (sel->flags & insn_flags);

		    if (count >= current_bit_count)
		      {
			best_fit = sel;
			current_bit_count = count;
		      }
		  }

	      if (best_fit == NULL)
		abort ();
	      else
		sel = best_fit;
	    }

	  insn_flags = sel->flags;
	}
    }
  /* If tuning has not been specified, tune for whichever processor or
     architecture has been selected.  */
  if (tune_flags == 0)
    tune_flags = insn_flags;
  /* Make sure that the processor choice does not conflict with any of the
     other command line choices.  */
  if (TARGET_APCS_32 && !(insn_flags & FL_MODE32))
    {
      /* If APCS-32 was not the default then it must have been set by the
	 user, so issue a warning message.  If the user has specified
	 "-mapcs-32 -mcpu=arm2" then we lose here.  */
      if ((TARGET_DEFAULT & ARM_FLAG_APCS_32) == 0)
	warning ("target CPU does not support APCS-32" );
      target_flags &= ~ARM_FLAG_APCS_32;
    }
  else if (!TARGET_APCS_32 && !(insn_flags & FL_MODE26))
    {
      warning ("target CPU does not support APCS-26" );
      target_flags |= ARM_FLAG_APCS_32;
    }
  if (TARGET_INTERWORK && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support interworking" );
      target_flags &= ~ARM_FLAG_INTERWORK;
    }

  if (TARGET_THUMB && !(insn_flags & FL_THUMB))
    {
      warning ("target CPU does not support THUMB instructions");
      target_flags &= ~ARM_FLAG_THUMB;
    }

  if (TARGET_APCS_FRAME && TARGET_THUMB)
    {
      /* warning ("ignoring -mapcs-frame because -mthumb was used");  */
      target_flags &= ~ARM_FLAG_APCS_FRAME;
    }

  /* TARGET_BACKTRACE calls leaf_function_p, which causes a crash if done
     from here where no function is being compiled currently.  */
  if ((target_flags & (THUMB_FLAG_LEAF_BACKTRACE | THUMB_FLAG_BACKTRACE))
      && TARGET_ARM)
    warning ("enabling backtrace support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLEE_INTERWORKING)
    warning ("enabling callee interworking support is only meaningful when compiling for the Thumb");

  if (TARGET_ARM && TARGET_CALLER_INTERWORKING)
    warning ("enabling caller interworking support is only meaningful when compiling for the Thumb");

  /* If interworking is enabled then APCS-32 must be selected as well.  */
  if (TARGET_INTERWORK)
    {
      if (!TARGET_APCS_32)
	warning ("interworking forces APCS-32 to be used" );
      target_flags |= ARM_FLAG_APCS_32;
    }

  if (TARGET_APCS_STACK && !TARGET_APCS_FRAME)
    {
      warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
      target_flags |= ARM_FLAG_APCS_FRAME;
    }

  if (TARGET_POKE_FUNCTION_NAME)
    target_flags |= ARM_FLAG_APCS_FRAME;
  if (TARGET_APCS_REENT && flag_pic)
    error ("-fpic and -mapcs-reent are incompatible");

  if (TARGET_APCS_REENT)
    warning ("APCS reentrant code not supported.  Ignored");

  /* If this target is normally configured to use APCS frames, warn if they
     are turned off and debugging is turned on.  */
  if (TARGET_ARM
      && write_symbols != NO_DEBUG
      && !TARGET_APCS_FRAME
      && (TARGET_DEFAULT & ARM_FLAG_APCS_FRAME))
    warning ("-g with -mno-apcs-frame may not give sensible debugging");

  /* If stack checking is disabled, we can use r10 as the PIC register,
     which keeps r9 available.  */
  if (flag_pic)
    arm_pic_register = TARGET_APCS_STACK ? 9 : 10;

  if (TARGET_APCS_FLOAT)
    warning ("passing floating point arguments in fp regs not yet supported");
  /* Initialize boolean versions of the flags, for use in the arm.md file.  */
  arm_fast_multiply = (insn_flags & FL_FAST_MULT) != 0;
  arm_arch4         = (insn_flags & FL_ARCH4) != 0;
  arm_arch5         = (insn_flags & FL_ARCH5) != 0;
  arm_arch5e        = (insn_flags & FL_ARCH5E) != 0;
  arm_arch_xscale   = (insn_flags & FL_XSCALE) != 0;

  arm_ld_sched      = (tune_flags & FL_LDSCHED) != 0;
  arm_is_strong     = (tune_flags & FL_STRONG) != 0;
  thumb_code        = (TARGET_ARM == 0);
  arm_is_6_or_7     = (((tune_flags & (FL_MODE26 | FL_MODE32))
			&& !(tune_flags & FL_ARCH4))) != 0;
  arm_tune_xscale   = (tune_flags & FL_XSCALE) != 0;
  arm_is_cirrus     = (tune_flags & FL_CIRRUS) != 0;
  arm_arch_iwmmxt   = (insn_flags & FL_IWMMXT) != 0;

  if (TARGET_IWMMXT && (! TARGET_ATPCS))
    target_flags |= ARM_FLAG_ATPCS;
  if (arm_is_cirrus)
    {
      arm_fpu_tune = FPUTYPE_MAVERICK;

      /* Ignore -mhard-float if -mcpu=ep9312.  */
      if (TARGET_HARD_FLOAT)
	target_flags ^= ARM_FLAG_SOFT_FLOAT;
    }
  else
    /* Default value for floating point code... if no co-processor
       bus, then schedule for emulated floating point.  Otherwise,
       assume the user has an FPA.
       Note: this does not prevent use of floating point instructions,
       -msoft-float does that.  */
    arm_fpu_tune = (tune_flags & FL_CO_PROC) ? FPUTYPE_FPA : FPUTYPE_FPA_EMU3;
  if (target_fp_name)
    {
      if (streq (target_fp_name, "2"))
	arm_fpu_arch = FPUTYPE_FPA_EMU2;
      else if (streq (target_fp_name, "3"))
	arm_fpu_arch = FPUTYPE_FPA_EMU3;
      else
	error ("invalid floating point emulation option: -mfpe-%s",
	       target_fp_name);
    }
  else
    arm_fpu_arch = FPUTYPE_DEFAULT;
  if (TARGET_FPE)
    {
      if (arm_fpu_tune == FPUTYPE_FPA_EMU3)
	arm_fpu_tune = FPUTYPE_FPA_EMU2;
      else if (arm_fpu_tune == FPUTYPE_MAVERICK)
	warning ("-mfpe switch not supported by ep9312 target cpu - ignored.");
      else if (arm_fpu_tune != FPUTYPE_FPA)
	arm_fpu_tune = FPUTYPE_FPA_EMU2;
    }
  /* For arm2/3 there is no need to do any scheduling if there is only
     a floating point emulator, or we are doing software floating-point.  */
  if ((TARGET_SOFT_FLOAT || arm_fpu_tune != FPUTYPE_FPA)
      && (tune_flags & FL_MODE32) == 0)
    flag_schedule_insns = flag_schedule_insns_after_reload = 0;

  arm_prgmode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
  if (structure_size_string != NULL)
    {
      int size = strtol (structure_size_string, NULL, 0);

      if (size == 8 || size == 32)
	arm_structure_size_boundary = size;
      else
	warning ("structure size boundary can only be set to 8 or 32");
    }
  if (arm_pic_register_string != NULL)
    {
      int pic_register = decode_reg_name (arm_pic_register_string);

      if (!flag_pic)
	warning ("-mpic-register= is useless without -fpic");

      /* Prevent the user from choosing an obviously stupid PIC register.  */
      else if (pic_register < 0 || call_used_regs[pic_register]
	       || pic_register == HARD_FRAME_POINTER_REGNUM
	       || pic_register == STACK_POINTER_REGNUM
	       || pic_register >= PC_REGNUM)
	error ("unable to use '%s' for PIC register", arm_pic_register_string);
      else
	arm_pic_register = pic_register;
    }
  if (TARGET_THUMB && flag_schedule_insns)
    {
      /* Don't warn since it's on by default in -O2.  */
      flag_schedule_insns = 0;
    }
  if (optimize_size)
    {
      /* There's some dispute as to whether this should be 1 or 2.  However,
	 experiments seem to show that in pathological cases a setting of
	 1 degrades less severely than a setting of 2.  This could change if
	 other parts of the compiler change their behavior.  */
      arm_constant_limit = 1;

      /* If optimizing for size, bump the number of instructions that we
	 are prepared to conditionally execute (even on a StrongARM).  */
      max_insns_skipped = 6;
    }
  else
    {
      /* For processors with load scheduling, it never costs more than
	 2 cycles to load a constant, and the load scheduler may well
	 reduce that to 1.  */
      if (tune_flags & FL_LDSCHED)
	arm_constant_limit = 1;

      /* On XScale the longer latency of a load makes it more difficult
	 to achieve a good schedule, so it's faster to synthesize
	 constants that can be done in two insns.  */
      if (arm_tune_xscale)
	arm_constant_limit = 2;

      /* StrongARM has early execution of branches, so a sequence
	 that is worth skipping is shorter.  */
      if (arm_is_strong)
	max_insns_skipped = 3;
    }
  /* Register global variables with the garbage collector.  */
  arm_add_gc_roots ();
}

static void
arm_add_gc_roots (void)
{
  gcc_obstack_init (&minipool_obstack);
  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
}
/* A table of known ARM exception types.
   For use with the interrupt function attribute.  */

typedef struct
{
  const char *const  arg;
  const unsigned long  return_value;
}
isr_attribute_arg;

static const isr_attribute_arg isr_attribute_args [] =
{
  { "IRQ",   ARM_FT_ISR },
  { "irq",   ARM_FT_ISR },
  { "FIQ",   ARM_FT_FIQ },
  { "fiq",   ARM_FT_FIQ },
  { "ABORT", ARM_FT_ISR },
  { "abort", ARM_FT_ISR },
  { "UNDEF", ARM_FT_EXCEPTION },
  { "undef", ARM_FT_EXCEPTION },
  { "SWI",   ARM_FT_EXCEPTION },
  { "swi",   ARM_FT_EXCEPTION },
  { NULL,    ARM_FT_NORMAL }
};
/* Returns the (interrupt) function type of the current
   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */

static unsigned long
arm_isr_value (tree argument)
{
  const isr_attribute_arg * ptr;
  const char * arg;

  /* No argument - default to IRQ.  */
  if (argument == NULL_TREE)
    return ARM_FT_ISR;

  /* Get the value of the argument.  */
  if (TREE_VALUE (argument) == NULL_TREE
      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
    return ARM_FT_UNKNOWN;

  arg = TREE_STRING_POINTER (TREE_VALUE (argument));

  /* Check it against the list of known arguments.  */
  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
    if (streq (arg, ptr->arg))
      return ptr->return_value;

  /* An unrecognized interrupt type.  */
  return ARM_FT_UNKNOWN;
}
/* Computes the type of the current function.  */

static unsigned long
arm_compute_func_type (void)
{
  unsigned long type = ARM_FT_UNKNOWN;
  tree a;
  tree attr;

  if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
    abort ();

  /* Decide if the current function is volatile.  Such functions
     never return, and many memory cycles can be saved by not storing
     register values that will never be needed again.  This optimization
     was added to speed up context switching in a kernel application.  */
  if (optimize > 0
      && current_function_nothrow
      && TREE_THIS_VOLATILE (current_function_decl))
    type |= ARM_FT_VOLATILE;

  if (current_function_needs_context)
    type |= ARM_FT_NESTED;

  attr = DECL_ATTRIBUTES (current_function_decl);

  a = lookup_attribute ("naked", attr);
  if (a != NULL_TREE)
    type |= ARM_FT_NAKED;

  if (cfun->machine->eh_epilogue_sp_ofs != NULL_RTX)
    type |= ARM_FT_EXCEPTION_HANDLER;
  else
    {
      a = lookup_attribute ("isr", attr);
      if (a == NULL_TREE)
	a = lookup_attribute ("interrupt", attr);

      if (a == NULL_TREE)
	type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;
      else
	type |= arm_isr_value (TREE_VALUE (a));
    }

  return type;
}
/* Returns the type of the current function.  */

unsigned long
arm_current_func_type (void)
{
  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)
    cfun->machine->func_type = arm_compute_func_type ();

  return cfun->machine->func_type;
}
/* Return 1 if it is possible to return using a single instruction.  */

int
use_return_insn (int iscond)
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || ARM_FUNC_TYPE (func_type) == ARM_FT_EXCEPTION_HANDLER
      /* Or if the function calls alloca */
      || current_function_calls_alloca
      /* Or if there is a stack adjustment.  */
      || (arm_get_frame_size () + current_function_outgoing_args_size != 0))
    return 0;

  saved_int_regs = arm_compute_save_reg_mask ();

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_is_strong)
    {
      /* Conditional return when just the LR is stored is a simple
	 conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
	return 0;

      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPA regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT)
    for (regno = FIRST_ARM_FP_REGNUM; regno <= LAST_ARM_FP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  if (TARGET_REALLY_IWMMXT)
    for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs [regno])
	return 0;

  return 1;
}
/* Return TRUE if int I is a valid immediate ARM constant.  */

int
const_ok_for_arm (HOST_WIDE_INT i)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT)0xFF;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* Fast return for 0 and powers of 2 */
  if ((i & (i - 1)) == 0)
    return TRUE;

  do
    {
      if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
	return TRUE;
      mask =
	  (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
			 >> (32 - 2)) | ~(unsigned HOST_WIDE_INT) 0xffffffff;
    }
  while (mask != ~(unsigned HOST_WIDE_INT) 0xFF);

  return FALSE;
}
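
/* For illustration (an editorial sketch): an ARM data-processing immediate
   is an 8-bit value rotated right by an even amount, which is exactly what
   the rotating MASK in the loop above tests.  So 0x000000ff, 0x00ff0000,
   0xff000000 and 0x00000104 (0x41 rotated right by 30) are all valid, while
   0x00000101 is not - its set bits do not fit in 8 bits under any even
   rotation - and must be synthesized, e.g. as "mov r0, #0x100" followed by
   "orr r0, r0, #1" (register names here are purely illustrative).  */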
/* Return true if I is a valid constant for the operation CODE.  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      abort ();
    }
}
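
/* For illustration: (PLUS reg 0xffffff00) is accepted above even though
   0xffffff00 itself is not encodable, because its negation 0x100 is - the
   addition becomes "sub rd, rn, #0x100".  Likewise AND with 0xffffff00 is
   handled as "bic rd, rn, #0xff" via the inverted-constant check.  */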
/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.  */

int
arm_split_constant (enum rtx_code code, enum machine_mode mode,
		    HOST_WIDE_INT val, rtx target, rtx source, int subtargets)
{
  if (subtargets || code == SET
      || (GET_CODE (target) == REG && GET_CODE (source) == REG
	  && REGNO (target) != REGNO (source)))
    {
      /* After arm_reorg has been called, we can't fix up expensive
	 constants by pushing them into memory so we must synthesize
	 them in-line, regardless of the cost.  This is only likely to
	 be more costly on chips that have load delay slots and we are
	 compiling without running the scheduler (so no splitting
	 occurred before the final instruction emission).

	 Ref: gcc -O1 -mcpu=strongarm gcc.c-torture/compile/980506-2.c
      */
      if (!after_arm_reorg
	  && (arm_gen_constant (code, mode, val, target, source, 1, 0)
	      > arm_constant_limit + (code != SET)))
	{
	  if (code == SET)
	    {
	      /* Currently SET is the only monadic value for CODE, all
		 the rest are dyadic.  */
	      emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (val)));
	      return 1;
	    }
	  else
	    {
	      rtx temp = subtargets ? gen_reg_rtx (mode) : target;

	      emit_insn (gen_rtx_SET (VOIDmode, temp, GEN_INT (val)));
	      /* For MINUS, the value is subtracted from, since we never
		 have subtraction of a constant.  */
	      if (code == MINUS)
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx_MINUS (mode, temp, source)));
	      else
		emit_insn (gen_rtx_SET (VOIDmode, target,
					gen_rtx (code, mode, source, temp)));
	      return 2;
	    }
	}
    }

  return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
}
static int
count_insns_for_constant (HOST_WIDE_INT remainder, int i)
{
  HOST_WIDE_INT temp1;
  int num_insns = 0;
  do
    {
      int end;

      if (i <= 0)
	i += 32;
      if (remainder & (3 << (i - 2)))
	{
	  end = i - 8;
	  if (end < 0)
	    end += 32;
	  temp1 = remainder & ((0x0ff << end)
			       | ((i < end) ? (0xff >> (32 - end)) : 0));
	  remainder &= ~temp1;
	  num_insns++;
	  i -= 6;
	}
      i -= 2;
    }
  while (remainder);
  return num_insns;
}
/* As above, but extra parameter GENERATE which, if clear, suppresses
   RTL generation.  */

static int
arm_gen_constant (enum rtx_code code, enum machine_mode mode,
		  HOST_WIDE_INT val, rtx target, rtx source, int subtargets,
		  int generate)
{
  int can_invert = 0;
  int can_negate = 0;
  int can_negate_initial = 0;
  int can_shift = 0;
  int i;
  int num_bits_set = 0;
  int set_sign_bit_copies = 0;
  int clear_sign_bit_copies = 0;
  int clear_zero_bit_copies = 0;
  int set_zero_bit_copies = 0;
  int insns = 0;
  unsigned HOST_WIDE_INT temp1, temp2;
  unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
  /* Find out which operations are safe for a given CODE.  Also do a quick
     check for degenerate cases; these can occur when DImode operations
     are split.  */
  switch (code)
    {
    case SET:
      can_invert = 1;
      can_shift = 1;
      can_negate = 1;
      break;

    case PLUS:
      can_negate = 1;
      can_negate_initial = 1;
      break;

    case IOR:
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    GEN_INT (ARM_SIGN_EXTEND (val))));
	  return 1;
	}
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      break;

    case AND:
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      can_invert = 1;
      break;

    case XOR:
      if (remainder == 0)
	{
	  if (reload_completed && rtx_equal_p (target, source))
	    return 0;
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target, source));
	  return 1;
	}
      if (remainder == 0xffffffff)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NOT (mode, source)));
	  return 1;
	}

      /* We don't know how to handle this yet below.  */
      abort ();

    case MINUS:
      /* We treat MINUS as (val - source), since (source - val) is always
	 passed as (source + (-val)).  */
      if (remainder == 0)
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_NEG (mode, source)));
	  return 1;
	}
      if (const_ok_for_arm (val))
	{
	  if (generate)
	    emit_insn (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_MINUS (mode, GEN_INT (val),
						   source)));
	  return 1;
	}
      can_negate = 1;

      break;

    default:
      abort ();
    }
  /* If we can do it in one insn get out quickly.  */
  if (const_ok_for_arm (val)
      || (can_negate_initial && const_ok_for_arm (-val))
      || (can_invert && const_ok_for_arm (~val)))
    {
      if (generate)
	emit_insn (gen_rtx_SET (VOIDmode, target,
				(source ? gen_rtx (code, mode, source,
						   GEN_INT (val))
				 : GEN_INT (val))));
      return 1;
    }
  /* Calculate a few attributes that may be useful for specific
     optimizations.  */
  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) == 0)
	clear_sign_bit_copies++;
      else
	break;
    }

  for (i = 31; i >= 0; i--)
    {
      if ((remainder & (1 << i)) != 0)
	set_sign_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) == 0)
	clear_zero_bit_copies++;
      else
	break;
    }

  for (i = 0; i <= 31; i++)
    {
      if ((remainder & (1 << i)) != 0)
	set_zero_bit_copies++;
      else
	break;
    }
  switch (code)
    {
    case SET:
      /* See if we can do this by sign_extending a constant that is known
	 to be negative.  This is a good way of doing it, since the shift
	 may well merge into a subsequent insn.  */
      if (set_sign_bit_copies > 1)
	{
	  if (const_ok_for_arm
	      (temp1 = ARM_SIGN_EXTEND (remainder
					<< (set_sign_bit_copies - 1))))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	  /* For an inverted constant, we will need to set the low bits,
	     these will be shifted out of harm's way.  */
	  temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
	  if (const_ok_for_arm (~temp1))
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  emit_insn (gen_rtx_SET (VOIDmode, new_src,
					  GEN_INT (temp1)));
		  emit_insn (gen_ashrsi3 (target, new_src,
					  GEN_INT (set_sign_bit_copies - 1)));
		}
	      return 2;
	    }
	}
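
      /* For illustration: with val = 0xffffff80 the top 25 bits are
	 sign-bit copies, and 0xffffff80 << 24 == 0x80000000 is a valid
	 immediate, so the constant can be built in two insns as
	 "mov rT, #0x80000000; asr rD, rT, #24" (rT/rD are placeholder
	 registers in this sketch).  */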
      /* See if we can generate this by setting the bottom (or the top)
	 16 bits, and then shifting these into the other half of the
	 word.  We only look for the simplest cases, to do more would cost
	 too much.  Be careful, however, not to generate this when the
	 alternative would take fewer insns.  */
      if (val & 0xffff0000)
	{
	  temp1 = remainder & 0xffff0000;
	  temp2 = remainder & 0x0000ffff;

	  /* Overlaps outside this range are best done using other methods.  */
	  for (i = 9; i < 24; i++)
	    {
	      if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
		  && !const_ok_for_arm (temp2))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp2, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn (gen_rtx_SET
			       (VOIDmode, target,
				gen_rtx_IOR (mode,
					     gen_rtx_ASHIFT (mode, source,
							     GEN_INT (i)),
					     source)));
		  return insns + 1;
		}
	    }

	  /* Don't duplicate cases already considered.  */
	  for (i = 17; i < 24; i++)
	    {
	      if (((temp1 | (temp1 >> i)) == remainder)
		  && !const_ok_for_arm (temp1))
		{
		  rtx new_src = (subtargets
				 ? (generate ? gen_reg_rtx (mode) : NULL_RTX)
				 : target);
		  insns = arm_gen_constant (code, mode, temp1, new_src,
					    source, subtargets, generate);
		  source = new_src;
		  if (generate)
		    emit_insn
		      (gen_rtx_SET (VOIDmode, target,
				    gen_rtx_IOR
				    (mode,
				     gen_rtx_LSHIFTRT (mode, source,
						       GEN_INT (i)),
				     source)));
		  return insns + 1;
		}
	    }
	}
      break;
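
      /* For illustration: val = 0x12301230 is caught by the first loop
	 above with temp2 = 0x1230 and i = 16; the 16-bit half is built
	 once (e.g. "mov rT, #0x1200; orr rT, rT, #0x30") and combined
	 with "orr rD, rT, rT, lsl #16", rather than synthesizing both
	 halves of the word from scratch.  */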
    case IOR:
    case XOR:
      /* If we have IOR or XOR, and the constant can be loaded in a
	 single instruction, and we can find a temporary to put it in,
	 then this can be done in two instructions instead of 3-4.  */
      if (subtargets
	  /* TARGET can't be NULL if SUBTARGETS is 0 */
	  || (reload_completed && !reg_mentioned_p (target, source)))
	{
	  if (const_ok_for_arm (ARM_SIGN_EXTEND (~val)))
	    {
	      if (generate)
		{
		  rtx sub = subtargets ? gen_reg_rtx (mode) : target;

		  emit_insn (gen_rtx_SET (VOIDmode, sub, GEN_INT (val)));
		  emit_insn (gen_rtx_SET (VOIDmode, target,
					  gen_rtx (code, mode, source, sub)));
		}
	      return 2;
	    }
	}

      if (code == XOR)
	break;
      if (set_sign_bit_copies > 8
	  && (val & (-1 << (32 - set_sign_bit_copies))) == val)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_sign_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode,
								   source,
								   shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode, sub,
								     shift))));
	    }
	  return 2;
	}

      if (set_zero_bit_copies > 8
	  && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (set_zero_bit_copies);

	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode,
						   gen_rtx_LSHIFTRT (mode,
								     source,
								     shift))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode,
						   gen_rtx_ASHIFT (mode, sub,
								   shift))));
	    }
	  return 2;
	}
      if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~val)))
	{
	  if (generate)
	    {
	      rtx sub = subtargets ? gen_reg_rtx (mode) : target;
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_NOT (mode, source)));
	      source = sub;
	      if (subtargets)
		sub = gen_reg_rtx (mode);
	      emit_insn (gen_rtx_SET (VOIDmode, sub,
				      gen_rtx_AND (mode, source,
						   GEN_INT (temp1))));
	      emit_insn (gen_rtx_SET (VOIDmode, target,
				      gen_rtx_NOT (mode, sub)));
	    }
	  return 3;
	}
      break;

    case AND:
      /* See if two shifts will do 2 or more insn's worth of work.  */
      if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = ((0xffffffff
				       << (32 - clear_sign_bit_copies))
				      & 0xffffffff);

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;
		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_sign_bit_copies);

	      emit_insn (gen_ashlsi3 (new_src, source, shift));
	      emit_insn (gen_lshrsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
	{
	  HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;

	  if ((remainder | shift_mask) != 0xffffffff)
	    {
	      if (generate)
		{
		  rtx new_src = subtargets ? gen_reg_rtx (mode) : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    new_src, source, subtargets, 1);
		  source = new_src;
		}
	      else
		{
		  rtx targ = subtargets ? NULL_RTX : target;

		  insns = arm_gen_constant (AND, mode, remainder | shift_mask,
					    targ, source, subtargets, 0);
		}
	    }

	  if (generate)
	    {
	      rtx new_src = subtargets ? gen_reg_rtx (mode) : target;
	      rtx shift = GEN_INT (clear_zero_bit_copies);

	      emit_insn (gen_lshrsi3 (new_src, source, shift));
	      emit_insn (gen_ashlsi3 (target, new_src, shift));
	    }

	  return insns + 2;
	}

      break;

    default:
      break;
    }
  for (i = 0; i < 32; i++)
    if (remainder & (1 << i))
      num_bits_set++;

  if (code == AND || (can_invert && num_bits_set > 16))
    remainder = (~remainder) & 0xffffffff;
  else if (code == PLUS && num_bits_set > 16)
    remainder = (-remainder) & 0xffffffff;
  else
    {
      can_invert = 0;
      can_negate = 0;
    }
  /* Now try and find a way of doing the job in either two or three
     instructions.
     We start by looking for the largest block of zeros that are aligned on
     a 2-bit boundary, we then fill up the temps, wrapping around to the
     top of the word when we drop off the bottom.
     In the worst case this code should produce no more than four insns.  */
  {
    int best_start = 0;
    int best_consecutive_zeros = 0;

    for (i = 0; i < 32; i += 2)
      {
	int consecutive_zeros = 0;

	if (!(remainder & (3 << i)))
	  {
	    while ((i < 32) && !(remainder & (3 << i)))
	      {
		consecutive_zeros += 2;
		i += 2;
	      }
	    if (consecutive_zeros > best_consecutive_zeros)
	      {
		best_consecutive_zeros = consecutive_zeros;
		best_start = i - consecutive_zeros;
	      }
	    i -= 2;
	  }
      }
    /* So long as it won't require any more insns to do so, it's
       desirable to emit a small constant (in bits 0...9) in the last
       insn.  This way there is more chance that it can be combined with
       a later addressing insn to form a pre-indexed load or store
       operation.  Consider:

	       *((volatile int *)0xe0000100) = 1;
	       *((volatile int *)0xe0000110) = 2;

       We want this to wind up as:

		mov rA, #0xe0000000
		mov rB, #1
		str rB, [rA, #0x100]
		mov rB, #2
		str rB, [rA, #0x110]

       rather than having to synthesize both large constants from scratch.

       Therefore, we calculate how many insns would be required to emit
       the constant starting from `best_start', and also starting from
       zero (i.e. with bit 31 first to be output).  If `best_start' doesn't
       yield a shorter sequence, we may as well use zero.  */
    if (best_start != 0
	&& ((((unsigned HOST_WIDE_INT) 1) << best_start) < remainder)
	&& (count_insns_for_constant (remainder, 0) <=
	    count_insns_for_constant (remainder, best_start)))
      best_start = 0;
    /* Now start emitting the insns.  */
    i = best_start;
    do
      {
	int end;

	if (i <= 0)
	  i += 32;
	if (remainder & (3 << (i - 2)))
	  {
	    end = i - 8;
	    if (end < 0)
	      end += 32;
	    temp1 = remainder & ((0x0ff << end)
				 | ((i < end) ? (0xff >> (32 - end)) : 0));
	    remainder &= ~temp1;

	    if (generate)
	      {
		rtx new_src, temp1_rtx;

		if (code == SET || code == MINUS)
		  {
		    new_src = (subtargets ? gen_reg_rtx (mode) : target);
		    if (can_invert && code != MINUS)
		      temp1 = ~temp1;
		  }
		else
		  {
		    if (remainder && subtargets)
		      new_src = gen_reg_rtx (mode);
		    else
		      new_src = target;
		    if (can_invert)
		      temp1 = ~temp1;
		    else if (can_negate)
		      temp1 = -temp1;
		  }

		temp1 = trunc_int_for_mode (temp1, mode);
		temp1_rtx = GEN_INT (temp1);

		if (code == SET)
		  ;
		else if (code == MINUS)
		  temp1_rtx = gen_rtx_MINUS (mode, temp1_rtx, source);
		else
		  temp1_rtx = gen_rtx_fmt_ee (code, mode, source, temp1_rtx);

		emit_insn (gen_rtx_SET (VOIDmode, new_src, temp1_rtx));
		source = new_src;
	      }

	    if (code == SET)
	      {
		can_invert = 0;
		code = PLUS;
	      }
	    else if (code == MINUS)
	      code = PLUS;

	    insns++;
	    i -= 6;
	  }
	i -= 2;
      }
    while (remainder);
  }

  return insns;
}
/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */

enum rtx_code
arm_canonicalize_comparison (enum rtx_code code, rtx * op1)
{
  unsigned HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != ((((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (((unsigned HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~((unsigned HOST_WIDE_INT) 0)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (-(i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (-(i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
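
/* For illustration: "x > 0xfff" would need the unencodable constant 0xfff,
   but the equivalent "x >= 0x1000" compares against a valid immediate, so
   GT/4095 is rewritten above into GE/4096 ("cmp r0, #4096").  */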
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */
int
arm_return_in_memory (tree type)
{
  HOST_WIDE_INT size;

  if (!AGGREGATE_TYPE_P (type))
    /* All simple types are returned in registers.  */
    return 0;

  size = int_size_in_bytes (type);

  if (TARGET_ATPCS)
    {
      /* ATPCS returns aggregate types in memory only if they are
	 larger than a word (or are variable size).  */
      return (size < 0 || size > UNITS_PER_WORD);
    }

  /* For the arm-wince targets we choose to be compatible with Microsoft's
     ARM and Thumb compilers, which always return aggregates in memory.  */
#ifndef ARM_WINCE
  /* All structures/unions bigger than one word are returned in memory.
     Also catch the case where int_size_in_bytes returns -1.  In this case
     the aggregate is either huge or of variable size, and in either case
     we will want to return it via memory and not in a register.  */
  if (size < 0 || size > UNITS_PER_WORD)
    return 1;

  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct the APCS says that we only return in a register
	 if the type is 'integer like' and every addressable element
	 has an offset of zero.  For practical purposes this means
	 that the structure can have at most one non bit-field element
	 and that this element must be the first one in the structure.  */

      /* Find the first field, ignoring non FIELD_DECL things which will
	 have been created by C++.  */
      for (field = TYPE_FIELDS (type);
	   field && TREE_CODE (field) != FIELD_DECL;
	   field = TREE_CHAIN (field))
	continue;

      if (field == NULL)
	return 0; /* An empty structure.  Allowed by an extension to ANSI C.  */

      /* Check that the first field is valid for returning in a register.  */

      /* ... Floats are not allowed.  */
      if (FLOAT_TYPE_P (TREE_TYPE (field)))
	return 1;

      /* ... Aggregates that are not themselves valid for returning in
	 a register are not allowed.  */
      if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	return 1;

      /* Now check the remaining fields, if any.  Only bitfields are allowed,
	 since they are not addressable.  */
      for (field = TREE_CHAIN (field);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (!DECL_BIT_FIELD_TYPE (field))
	    return 1;
	}

      return 0;
    }

  if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is
	 integral, or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type);
	   field;
	   field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;

	  if (RETURN_IN_MEMORY (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }
#endif /* not ARM_WINCE */

  /* Return all other types in memory.  */
  return 1;
}
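
/* For illustration: under the rules above a struct containing a single int
   field is returned in a register (r0), while "struct { int a, b; }" (bigger
   than a word) or a struct whose first field is a float is returned via
   memory.  */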
/* Indicate whether or not words of a double are in big-endian order.  */

int
arm_float_words_big_endian (void)
{
  if (TARGET_CIRRUS)
    return 0;

  /* For FPA, float words are always big-endian.  For VFP, floats words
     follow the memory system mode.  */

  if (TARGET_HARD_FLOAT)
    {
      /* FIXME: TARGET_HARD_FLOAT currently implies FPA.  */
      return 1;
    }

  if (TARGET_VFP)
    return (TARGET_BIG_END ? 1 : 0);

  return 1;
}
/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is NULL.  */
void
arm_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
			  rtx libname ATTRIBUTE_UNUSED,
			  tree fndecl ATTRIBUTE_UNUSED)
{
  /* On the ARM, the offset starts at 0.  */
  pcum->nregs = ((fntype && aggregate_value_p (TREE_TYPE (fntype), fntype)) ? 1 : 0);
  pcum->iwmmxt_nregs = 0;

  pcum->call_cookie = CALL_NORMAL;

  if (TARGET_LONG_CALLS)
    pcum->call_cookie = CALL_LONG;

  /* Check for long call/short call attributes.  The attributes
     override any command line option.  */
  if (fntype)
    {
      if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_SHORT;
      else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (fntype)))
	pcum->call_cookie = CALL_LONG;
    }

  /* Varargs vectors are treated the same as long long.
     named_count avoids having to change the way arm handles 'named'.  */
  pcum->named_count = 0;
  pcum->nargs = 0;

  if (TARGET_REALLY_IWMMXT && fntype)
    {
      tree fn_arg;

      for (fn_arg = TYPE_ARG_TYPES (fntype);
	   fn_arg;
	   fn_arg = TREE_CHAIN (fn_arg))
	pcum->named_count += 1;

      if (! pcum->named_count)
	pcum->named_count = INT_MAX;
    }
}
/* Determine where to put an argument to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
    This is null for libcalls where that information may
    not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
    the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
    (otherwise it is an extra parameter matching an ellipsis).  */

rtx
arm_function_arg (CUMULATIVE_ARGS *pcum, enum machine_mode mode,
		  tree type ATTRIBUTE_UNUSED, int named)
{
  if (TARGET_REALLY_IWMMXT)
    {
      if (VECTOR_MODE_SUPPORTED_P (mode))
	{
	  /* varargs vectors are treated the same as long long.
	     named_count avoids having to change the way arm handles 'named' */
	  if (pcum->named_count <= pcum->nargs + 1)
	    {
	      if (pcum->nregs == 1)
		pcum->nregs += 1;
	      if (pcum->nregs <= 2)
		return gen_rtx_REG (mode, pcum->nregs);
	      else
		return NULL_RTX;
	    }
	  else if (pcum->iwmmxt_nregs <= 9)
	    return gen_rtx_REG (mode, pcum->iwmmxt_nregs + FIRST_IWMMXT_REGNUM);
	  else
	    return NULL_RTX;
	}
      else if ((mode == DImode || mode == DFmode) && pcum->nregs & 1)
	pcum->nregs += 1;
    }

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (pcum->call_cookie);

  if (!named || pcum->nregs >= NUM_ARG_REGS)
    return NULL_RTX;

  return gen_rtx_REG (mode, pcum->nregs);
}
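
/* For illustration: the first NUM_ARG_REGS argument words (four under the
   APCS) go in r0-r3, so f(int a, int b) passes A in r0 and B in r1, and a
   fifth argument word goes on the stack (NULL_RTX above).  Under the iWMMXt
   rules above, a DImode/DFmode argument is also bumped to an even-numbered
   register first.  */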
/* Variable sized types are passed by reference.  This is a GCC
   extension to the ARM ABI.  */

int
arm_function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
				    enum machine_mode mode ATTRIBUTE_UNUSED,
				    tree type, int named ATTRIBUTE_UNUSED)
{
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}

/* Implement va_arg.  */

rtx
arm_va_arg (tree valist, tree type)
{
  /* Variable sized types are passed by reference.  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    {
      rtx addr = std_expand_builtin_va_arg (valist, build_pointer_type (type));
      return gen_rtx_MEM (ptr_mode, force_reg (Pmode, addr));
    }
2110 if (FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), NULL) == IWMMXT_ALIGNMENT)
2115 /* Maintain 64-bit alignment of the valist pointer by
2116 constructing: valist = ((valist + (8 - 1)) & -8). */
2117 minus_eight = build_int_2 (- (IWMMXT_ALIGNMENT / BITS_PER_UNIT), -1);
2118 t = build_int_2 ((IWMMXT_ALIGNMENT / BITS_PER_UNIT) - 1, 0);
2119 t = build (PLUS_EXPR, TREE_TYPE (valist), valist, t);
2120 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, minus_eight);
2121 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
2122 TREE_SIDE_EFFECTS (t) = 1;
2123 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2125 /* This is to stop the combine pass optimizing
2126 away the alignment adjustment. */
2127 mark_reg_pointer (arg_pointer_rtx, PARM_BOUNDARY);
2130 return std_expand_builtin_va_arg (valist, type);
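
/* Editor's worked example (a sketch, not original source): with
   IWMMXT_ALIGNMENT / BITS_PER_UNIT == 8, the expression
   ((valist + 7) & -8) rounds the va_list pointer up to the next
   multiple of 8:

     0x1005 + 7 = 0x100c,  0x100c & -8 = 0x1008

   and an already aligned value such as 0x1008 maps to itself.  */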

/* Encode the current state of the #pragma [no_]long_calls.  */
typedef enum
{
  OFF,    /* No #pragma [no_]long_calls is in effect.  */
  LONG,   /* #pragma long_calls is in effect.  */
  SHORT   /* #pragma no_long_calls is in effect.  */
} arm_pragma_enum;

static arm_pragma_enum arm_pragma_long_calls = OFF;

void
arm_pr_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
  arm_pragma_long_calls = LONG;
}

void
arm_pr_no_long_calls (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
  arm_pragma_long_calls = SHORT;
}

void
arm_pr_long_calls_off (struct cpp_reader * pfile ATTRIBUTE_UNUSED)
{
  arm_pragma_long_calls = OFF;
}
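
/* Editor's illustrative usage (a sketch, not original source; the
   function name is hypothetical):

     #pragma long_calls
     extern void far_away (void);     (calls to it become long calls)
     #pragma long_calls_off

   #pragma no_long_calls forces short calls in the same way.  */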

/* Table of machine attributes.  */
const struct attribute_spec arm_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  /* Function calls made to this symbol must be done indirectly, because
     it may lie outside of the 26 bit addressing range of a normal function
     call.  */
  { "long_call",    0, 0, false, true,  true,  NULL },
  /* Whereas these functions are always known to reside within the 26 bit
     addressing range.  */
  { "short_call",   0, 0, false, true,  true,  NULL },
  /* Interrupt Service Routines have special prologue and epilogue
     requirements.  */
  { "isr",          0, 1, false, false, false, arm_handle_isr_attribute },
  { "interrupt",    0, 1, false, false, false, arm_handle_isr_attribute },
  { "naked",        0, 0, true,  false, false, arm_handle_fndecl_attribute },
#ifdef ARM_PE
  /* ARM/PE has three new attributes:
     interfacearm - ?
     dllexport - for exporting a function/variable that will live in a dll
     dllimport - for importing a function/variable from a dll

     Microsoft allows multiple declspecs in one __declspec, separating
     them with spaces.  We do NOT support this.  Instead, use __declspec
     multiple times.  */
  { "dllimport",    0, 0, true,  false, false, NULL },
  { "dllexport",    0, 0, true,  false, false, NULL },
  { "interfacearm", 0, 0, true,  false, false, arm_handle_fndecl_attribute },
#endif
  { NULL,           0, 0, false, false, false, NULL }
};
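
/* Editor's illustrative usage (a sketch, not original source; the
   names are hypothetical):

     extern int far_fn (void) __attribute__ ((long_call));
     void handler (void) __attribute__ ((interrupt ("IRQ")));

   The optional string argument of "interrupt"/"isr" is the value
   validated by arm_isr_value.  */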

/* Handle an attribute requiring a FUNCTION_DECL;
   arguments as in struct attribute_spec.handler.  */
static tree
arm_handle_fndecl_attribute (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
                             int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning ("`%s' attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle an "interrupt" or "isr" attribute;
   arguments as in struct attribute_spec.handler.  */
static tree
arm_handle_isr_attribute (tree *node, tree name, tree args, int flags,
                          bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) != FUNCTION_DECL)
        {
          warning ("`%s' attribute only applies to functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
      /* FIXME: the argument if any is checked for type attributes;
         should it be checked for decl ones?  */
    }
  else
    {
      if (TREE_CODE (*node) == FUNCTION_TYPE
          || TREE_CODE (*node) == METHOD_TYPE)
        {
          if (arm_isr_value (args) == ARM_FT_UNKNOWN)
            {
              warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
              *no_add_attrs = true;
            }
        }
      else if (TREE_CODE (*node) == POINTER_TYPE
               && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
                   || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
               && arm_isr_value (args) != ARM_FT_UNKNOWN)
        {
          *node = build_type_copy (*node);
          TREE_TYPE (*node) = build_type_attribute_variant
            (TREE_TYPE (*node),
             tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
          *no_add_attrs = true;
        }
      else
        {
          /* Possibly pass this attribute on from the type to a decl.  */
          if (flags & ((int) ATTR_FLAG_DECL_NEXT
                       | (int) ATTR_FLAG_FUNCTION_NEXT
                       | (int) ATTR_FLAG_ARRAY_NEXT))
            {
              *no_add_attrs = true;
              return tree_cons (name, args, NULL_TREE);
            }
          else
            warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
        }
    }

  return NULL_TREE;
}

/* Return 0 if the attributes for two types are incompatible, 1 if they
   are compatible, and 2 if they are nearly compatible (which causes a
   warning to be generated).  */
static int
arm_comp_type_attributes (tree type1, tree type2)
{
  int l1, l2, s1, s2;

  /* Check for mismatch of non-default calling convention.  */
  if (TREE_CODE (type1) != FUNCTION_TYPE)
    return 1;

  /* Check for mismatched call attributes.  */
  l1 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("long_call", TYPE_ATTRIBUTES (type2)) != NULL;
  s1 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type1)) != NULL;
  s2 = lookup_attribute ("short_call", TYPE_ATTRIBUTES (type2)) != NULL;

  /* Only bother to check if an attribute is defined.  */
  if (l1 | l2 | s1 | s2)
    {
      /* If one type has an attribute, the other must have the same
         attribute.  */
      if ((l1 != l2) || (s1 != s2))
        return 0;

      /* Disallow mixed attributes.  */
      if ((l1 & s2) || (l2 & s1))
        return 0;
    }

  /* Check for mismatched ISR attribute.  */
  l1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
  if (! l1)
    l1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
  l2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
  if (! l2)
    l2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
  if (l1 != l2)
    return 0;

  return 1;
}

/* Encode long_call or short_call attribute by prefixing
   symbol name in DECL with a special character FLAG.  */
static void
arm_encode_call_attribute (tree decl, int flag)
{
  const char * str = XSTR (XEXP (DECL_RTL (decl), 0), 0);
  int          len = strlen (str);
  char *       newstr;

  /* Do not allow weak functions to be treated as short call.  */
  if (DECL_WEAK (decl) && flag == SHORT_CALL_FLAG_CHAR)
    return;

  newstr = alloca (len + 2);
  newstr[0] = flag;
  strcpy (newstr + 1, str);

  newstr = (char *) ggc_alloc_string (newstr, len + 1);
  XSTR (XEXP (DECL_RTL (decl), 0), 0) = newstr;
}
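
/* Editor's note (a sketch, not original source): for a function "foo"
   given the long_call attribute, the assembler name is rewritten as

     "foo"  ->  "<LONG_CALL_FLAG_CHAR>foo"

   and the ENCODED_*_CALL_ATTR_P macros later test that leading
   character.  */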

/* Assigns default attributes to newly defined type.  This is used to
   set short_call/long_call attributes for function types of
   functions defined inside corresponding #pragma scopes.  */
static void
arm_set_default_type_attributes (tree type)
{
  /* Add __attribute__ ((long_call)) to all functions when inside
     #pragma long_calls, or __attribute__ ((short_call)) when inside
     #pragma no_long_calls.  */
  if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
    {
      tree type_attr_list, attr_name;
      type_attr_list = TYPE_ATTRIBUTES (type);

      if (arm_pragma_long_calls == LONG)
        attr_name = get_identifier ("long_call");
      else if (arm_pragma_long_calls == SHORT)
        attr_name = get_identifier ("short_call");
      else
        return;

      type_attr_list = tree_cons (attr_name, NULL_TREE, type_attr_list);
      TYPE_ATTRIBUTES (type) = type_attr_list;
    }
}

/* Return 1 if the operand is a SYMBOL_REF for a function known to be
   defined within the current compilation unit.  If this cannot be
   determined, then 0 is returned.  */
static int
current_file_function_operand (rtx sym_ref)
{
  /* This is a bit of a fib.  A function will have a short call flag
     applied to its name if it has the short call attribute, or it has
     already been defined within the current compilation unit.  */
  if (ENCODED_SHORT_CALL_ATTR_P (XSTR (sym_ref, 0)))
    return 1;

  /* The current function is always defined within the current compilation
     unit.  If it is a weak definition, however, then this may not be the
     real definition of the function, and so we have to say no.  */
  if (sym_ref == XEXP (DECL_RTL (current_function_decl), 0)
      && !DECL_WEAK (current_function_decl))
    return 1;

  /* We cannot make the determination - default to returning 0.  */
  return 0;
}

/* Return nonzero if a 32 bit "long_call" should be generated for
   this call.  We generate a long_call if the function:

        a.  has an __attribute__ ((long_call))
     or b.  is within the scope of a #pragma long_calls
     or c.  the -mlong-calls command line switch has been specified

   However we do not generate a long call if the function:

        d.  has an __attribute__ ((short_call))
     or e.  is inside the scope of a #pragma no_long_calls
     or f.  has an __attribute__ ((section))
     or g.  is defined within the current compilation unit.

   This function will be called by C fragments contained in the machine
   description file.  CALL_REF and CALL_COOKIE correspond to the matched
   rtl operands.  CALL_SYMBOL is used to distinguish between
   two different callers of the function.  It is set to 1 in the
   "call_symbol" and "call_symbol_value" patterns and to 0 in the "call"
   and "call_value" patterns.  This is because of the difference in the
   SYM_REFs passed by these patterns.  */
int
arm_is_longcall_p (rtx sym_ref, int call_cookie, int call_symbol)
{
  if (!call_symbol)
    {
      if (GET_CODE (sym_ref) != MEM)
        return 0;

      sym_ref = XEXP (sym_ref, 0);
    }

  if (GET_CODE (sym_ref) != SYMBOL_REF)
    return 0;

  if (call_cookie & CALL_SHORT)
    return 0;

  if (TARGET_LONG_CALLS && flag_function_sections)
    return 1;

  if (current_file_function_operand (sym_ref))
    return 0;

  return (call_cookie & CALL_LONG)
    || ENCODED_LONG_CALL_ATTR_P (XSTR (sym_ref, 0))
    || TARGET_LONG_CALLS;
}

/* Return nonzero if it is ok to make a tail-call to DECL.  */
static bool
arm_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  int call_type = TARGET_LONG_CALLS ? CALL_LONG : CALL_NORMAL;

  if (cfun->machine->sibcall_blocked)
    return false;

  /* Never tailcall something for which we have no decl, or if we
     are in Thumb mode.  */
  if (decl == NULL || TARGET_THUMB)
    return false;

  /* Get the calling method.  */
  if (lookup_attribute ("short_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_SHORT;
  else if (lookup_attribute ("long_call", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
    call_type = CALL_LONG;

  /* Cannot tail-call to long calls, since these are out of range of
     a branch instruction.  However, if not compiling PIC, we know
     we can reach the symbol if it is in this compilation unit.  */
  if (call_type == CALL_LONG && (flag_pic || !TREE_ASM_WRITTEN (decl)))
    return false;

  /* If we are interworking and the function is not declared static
     then we can't tail-call it unless we know that it exists in this
     compilation unit (since it might be a Thumb routine).  */
  if (TARGET_INTERWORK && TREE_PUBLIC (decl) && !TREE_ASM_WRITTEN (decl))
    return false;

  /* Never tailcall from an ISR routine - it needs a special exit
     sequence.  */
  if (IS_INTERRUPT (arm_current_func_type ()))
    return false;

  /* Everything else is ok.  */
  return true;
}
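
/* Editor's illustrative example (a sketch, not original source): in

     int g (int);
     int f (int x) { return g (x + 1); }

   the call to g is a tail-call candidate; any of the conditions above
   (Thumb mode, a long call, interworking with a public symbol not yet
   written out, or an ISR caller) forces a normal call instead.  */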

/* Addressing mode support functions.  */

/* Return nonzero if X is a legitimate immediate operand when compiling
   PIC code.  */
int
legitimate_pic_operand_p (rtx x)
{
  if (CONSTANT_P (x)
      && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
          || (GET_CODE (x) == CONST
              && GET_CODE (XEXP (x, 0)) == PLUS
              && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
  if (GET_CODE (orig) == SYMBOL_REF
      || GET_CODE (orig) == LABEL_REF)
    {
#ifndef AOF_ASSEMBLER
      rtx pic_ref, address;
#endif
      rtx insn;
      int subregs = 0;

      if (reg == 0)
        {
          if (no_new_pseudos)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
          subregs = 1;
        }

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
         understands that the PIC register has to be added into the offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
        address = gen_reg_rtx (Pmode);
      else
        address = reg;

      if (TARGET_ARM)
        emit_insn (gen_pic_load_addr_arm (address, orig));
      else
        emit_insn (gen_pic_load_addr_thumb (address, orig));

      if ((GET_CODE (orig) == LABEL_REF
           || (GET_CODE (orig) == SYMBOL_REF &&
               SYMBOL_REF_LOCAL_P (orig)))
          && NEED_GOT_RELOC)
        pic_ref = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, address);
      else
        {
          pic_ref = gen_rtx_MEM (Pmode,
                                 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
                                               address));
          RTX_UNCHANGING_P (pic_ref) = 1;
        }

      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized
         by loop.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
                                            REG_NOTES (insn));
      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        {
          if (no_new_pseudos)
            abort ();
          else
            reg = gen_reg_rtx (Pmode);
        }

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                           base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (offset) == CONST_INT)
        {
          /* The base register doesn't really matter, we only want to
             test the index for the appropriate mode.  */
          if (!arm_legitimate_index_p (mode, offset, 0))
            {
              if (!no_new_pseudos)
                offset = force_reg (Pmode, offset);
              else
                abort ();
            }

          if (GET_CODE (offset) == CONST_INT)
            return plus_constant (base, INTVAL (offset));
        }

      if (GET_MODE_SIZE (mode) > 4
          && (GET_MODE_CLASS (mode) == MODE_INT
              || TARGET_SOFT_FLOAT))
        {
          emit_insn (gen_addsi3 (reg, base, offset));
          return reg;
        }

      return gen_rtx_PLUS (Pmode, base, offset);
    }

  return orig;
}

/* Generate code to load the PIC register.  PROLOGUE is true if
   called from arm_expand_prologue (in which case we want the
   generated insns at the start of the function); false if called
   by an exception receiver that needs the PIC register reloaded
   (in which case the insns are just dumped at the current location).  */
void
arm_finalize_pic (int prologue ATTRIBUTE_UNUSED)
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq, pic_rtx;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0 || TARGET_SINGLE_PIC_BASE)
    return;

  if (!flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* On the ARM the PC register contains 'dot + 8' at the time of the
     addition, on the Thumb it is 'dot + 4'.  */
  pic_tmp = plus_constant (gen_rtx_LABEL_REF (Pmode, l1), TARGET_ARM ? 8 : 4);
  if (GOT_PCREL)
    pic_tmp2 = gen_rtx_CONST (VOIDmode,
                              gen_rtx_PLUS (Pmode, global_offset_table, pc_rtx));
  else
    pic_tmp2 = gen_rtx_CONST (VOIDmode, global_offset_table);

  pic_rtx = gen_rtx_CONST (Pmode, gen_rtx_MINUS (Pmode, pic_tmp2, pic_tmp));

  if (TARGET_ARM)
    {
      emit_insn (gen_pic_load_addr_arm (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_eight (pic_offset_table_rtx, l1));
    }
  else
    {
      emit_insn (gen_pic_load_addr_thumb (pic_offset_table_rtx, pic_rtx));
      emit_insn (gen_pic_add_dot_plus_four (pic_offset_table_rtx, l1));
    }

  seq = get_insns ();
  end_sequence ();
  if (prologue)
    emit_insn_after (seq, get_insns ());
  else
    emit_insn (seq);

  /* Need to emit this whether or not we obey regdecls,
     since setjmp/longjmp can cause life info to screw up.  */
  emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}
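
/* Editor's sketch of the ARM-state sequence emitted above (assembly
   shown for illustration only):

       ldr     sl, .LPIC        @ sl := &GOT - (.LPIC0 + 8)
     .LPIC0:
       add     sl, pc, sl       @ pc reads as .LPIC0 + 8, so sl := &GOT

   which is why the offset is computed against 'dot + 8' ('dot + 4'
   in Thumb state).  */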

/* Return nonzero if X is valid as an ARM state addressing register.  */
static int
arm_address_register_rtx_p (rtx x, int strict_p)
{
  int regno;

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);

  if (strict_p)
    return ARM_REGNO_OK_FOR_BASE_P (regno);

  return (regno <= LAST_ARM_REGNUM
          || regno >= FIRST_PSEUDO_REGISTER
          || regno == FRAME_POINTER_REGNUM
          || regno == ARG_POINTER_REGNUM);
}

/* Return nonzero if X is a valid ARM state address operand.  */
int
arm_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
{
  if (arm_address_register_rtx_p (x, strict_p))
    return 1;

  else if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_DEC)
    return arm_address_register_rtx_p (XEXP (x, 0), strict_p);

  else if ((GET_CODE (x) == POST_MODIFY || GET_CODE (x) == PRE_MODIFY)
           && GET_MODE_SIZE (mode) <= 4
           && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
           && GET_CODE (XEXP (x, 1)) == PLUS
           && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
    return arm_legitimate_index_p (mode, XEXP (XEXP (x, 1), 1), strict_p);

  /* After reload constants split into minipools will have addresses
     from a LABEL_REF.  */
  else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
           && (GET_CODE (x) == LABEL_REF
               || (GET_CODE (x) == CONST
                   && GET_CODE (XEXP (x, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
                   && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
    return 1;

  else if (mode == TImode)
    return 0;

  else if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
    {
      if (GET_CODE (x) == PLUS
          && arm_address_register_rtx_p (XEXP (x, 0), strict_p)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT val = INTVAL (XEXP (x, 1));

          if (val == 4 || val == -4 || val == -8)
            return 1;
        }
    }

  else if (GET_CODE (x) == PLUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      return ((arm_address_register_rtx_p (xop0, strict_p)
               && arm_legitimate_index_p (mode, xop1, strict_p))
              || (arm_address_register_rtx_p (xop1, strict_p)
                  && arm_legitimate_index_p (mode, xop0, strict_p)));
    }

#if 0
  /* Reload currently can't handle MINUS, so disable this for now.  */
  else if (GET_CODE (x) == MINUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      return (arm_address_register_rtx_p (xop0, strict_p)
              && arm_legitimate_index_p (mode, xop1, strict_p));
    }
#endif

  else if (GET_MODE_CLASS (mode) != MODE_FLOAT
           && GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x)
           && ! (flag_pic
                 && symbol_mentioned_p (get_pool_constant (x))))
    return 1;

  else if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_DEC)
           && (GET_MODE_SIZE (mode) <= 4)
           && arm_address_register_rtx_p (XEXP (x, 0), strict_p))
    return 1;

  return 0;
}
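
/* Editor's examples (a sketch, not original source) of SImode
   addresses accepted above, in assembler syntax:

     [r0]                plain base register
     [r0, #4]            base plus immediate index
     [r0, r1, lsl #2]    base plus scaled register index
     [r0], #4            post-increment forms (POST_INC/POST_MODIFY)

   TImode addresses are rejected outright, and DImode/DFmode are
   restricted to the small offsets tested above.  */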

/* Return nonzero if INDEX is valid for an address index operand in
   ARM state.  */
static int
arm_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
{
  HOST_WIDE_INT range;
  enum rtx_code code = GET_CODE (index);

  if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
    return (code == CONST_INT && INTVAL (index) < 1024
            && INTVAL (index) > -1024
            && (INTVAL (index) & 3) == 0);

  if (TARGET_CIRRUS
      && (GET_MODE_CLASS (mode) == MODE_FLOAT || mode == DImode))
    return (code == CONST_INT
            && INTVAL (index) < 255
            && INTVAL (index) > -255);

  if (arm_address_register_rtx_p (index, strict_p)
      && GET_MODE_SIZE (mode) <= 4)
    return 1;

  if (TARGET_REALLY_IWMMXT && VALID_IWMMXT_REG_MODE (mode))
    return (code == CONST_INT
            && INTVAL (index) < 256
            && INTVAL (index) > -256);

  /* XXX What about ldrsb?  */
  if (GET_MODE_SIZE (mode) <= 4 && code == MULT
      && (!arm_arch4 || (mode) != HImode))
    {
      rtx xiop0 = XEXP (index, 0);
      rtx xiop1 = XEXP (index, 1);

      return ((arm_address_register_rtx_p (xiop0, strict_p)
               && power_of_two_operand (xiop1, SImode))
              || (arm_address_register_rtx_p (xiop1, strict_p)
                  && power_of_two_operand (xiop0, SImode)));
    }

  if (GET_MODE_SIZE (mode) <= 4
      && (code == LSHIFTRT || code == ASHIFTRT
          || code == ASHIFT || code == ROTATERT)
      && (!arm_arch4 || (mode) != HImode))
    {
      rtx op = XEXP (index, 1);

      return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
              && GET_CODE (op) == CONST_INT
              && INTVAL (op) > 0
              && INTVAL (op) <= 31);
    }

  /* XXX For ARM v4 we may be doing a sign-extend operation during the
     load, but that has a restricted addressing range and we are unable
     to tell here whether that is the case.  To be safe we restrict all
     loads to that range.  */
  range = ((mode) == HImode || (mode) == QImode)
    ? (arm_arch4 ? 256 : 4095) : 4096;

  return (code == CONST_INT
          && INTVAL (index) < range
          && INTVAL (index) > -range);
}

/* Return nonzero if X is valid as a Thumb state base register.  */
static int
thumb_base_register_rtx_p (rtx x, enum machine_mode mode, int strict_p)
{
  int regno;

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);

  if (strict_p)
    return THUMB_REGNO_MODE_OK_FOR_BASE_P (regno, mode);

  return (regno <= LAST_LO_REGNUM
          || regno >= FIRST_PSEUDO_REGISTER
          || regno == FRAME_POINTER_REGNUM
          || (GET_MODE_SIZE (mode) >= 4
              && (regno == STACK_POINTER_REGNUM
                  || x == hard_frame_pointer_rtx
                  || x == arg_pointer_rtx)));
}

/* Return nonzero if X is a legitimate index register.  This is the case
   for any base register that can access a QImode object.  */
inline static int
thumb_index_register_rtx_p (rtx x, int strict_p)
{
  return thumb_base_register_rtx_p (x, QImode, strict_p);
}

/* Return nonzero if X is a legitimate Thumb-state address.

   The AP may be eliminated to either the SP or the FP, so we use the
   least common denominator, e.g. SImode, and offsets from 0 to 64.

   ??? Verify whether the above is the right approach.

   ??? Also, the FP may be eliminated to the SP, so perhaps that
   needs special handling also.

   ??? Look at how the mips16 port solves this problem.  It probably uses
   better ways to solve some of these problems.

   Although it is not incorrect, we don't accept QImode and HImode
   addresses based on the frame pointer or arg pointer until the
   reload pass starts.  This is so that eliminating such addresses
   into stack based ones won't produce impossible code.  */
int
thumb_legitimate_address_p (enum machine_mode mode, rtx x, int strict_p)
{
  /* ??? Not clear if this is right.  Experiment.  */
  if (GET_MODE_SIZE (mode) < 4
      && !(reload_in_progress || reload_completed)
      && (reg_mentioned_p (frame_pointer_rtx, x)
          || reg_mentioned_p (arg_pointer_rtx, x)
          || reg_mentioned_p (virtual_incoming_args_rtx, x)
          || reg_mentioned_p (virtual_outgoing_args_rtx, x)
          || reg_mentioned_p (virtual_stack_dynamic_rtx, x)
          || reg_mentioned_p (virtual_stack_vars_rtx, x)))
    return 0;

  /* Accept any base register.  SP only in SImode or larger.  */
  else if (thumb_base_register_rtx_p (x, mode, strict_p))
    return 1;

  /* This is PC relative data before arm_reorg runs.  */
  else if (GET_MODE_SIZE (mode) >= 4 && CONSTANT_P (x)
           && GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x) && ! flag_pic)
    return 1;

  /* This is PC relative data after arm_reorg runs.  */
  else if (GET_MODE_SIZE (mode) >= 4 && reload_completed
           && (GET_CODE (x) == LABEL_REF
               || (GET_CODE (x) == CONST
                   && GET_CODE (XEXP (x, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
                   && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
    return 1;

  /* Post-inc indexing only supported for SImode and larger.  */
  else if (GET_CODE (x) == POST_INC && GET_MODE_SIZE (mode) >= 4
           && thumb_index_register_rtx_p (XEXP (x, 0), strict_p))
    return 1;

  else if (GET_CODE (x) == PLUS)
    {
      /* REG+REG address can be any two index registers.  */
      /* We disallow FRAME+REG addressing since we know that FRAME
         will be replaced with STACK, and SP relative addressing only
         permits SP+OFFSET.  */
      if (GET_MODE_SIZE (mode) <= 4
          && XEXP (x, 0) != frame_pointer_rtx
          && XEXP (x, 1) != frame_pointer_rtx
          && XEXP (x, 0) != virtual_stack_vars_rtx
          && XEXP (x, 1) != virtual_stack_vars_rtx
          && thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
          && thumb_index_register_rtx_p (XEXP (x, 1), strict_p))
        return 1;

      /* REG+const has 5-7 bit offset for non-SP registers.  */
      else if ((thumb_index_register_rtx_p (XEXP (x, 0), strict_p)
                || XEXP (x, 0) == arg_pointer_rtx)
               && GET_CODE (XEXP (x, 1)) == CONST_INT
               && thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
        return 1;

      /* REG+const has 10 bit offset for SP, but only SImode and
         larger is supported.  */
      /* ??? Should probably check for DI/DFmode overflow here
         just like GO_IF_LEGITIMATE_OFFSET does.  */
      else if (GET_CODE (XEXP (x, 0)) == REG
               && REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
               && GET_MODE_SIZE (mode) >= 4
               && GET_CODE (XEXP (x, 1)) == CONST_INT
               && INTVAL (XEXP (x, 1)) >= 0
               && INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
               && (INTVAL (XEXP (x, 1)) & 3) == 0)
        return 1;

      else if (GET_CODE (XEXP (x, 0)) == REG
               && REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
               && GET_MODE_SIZE (mode) >= 4
               && GET_CODE (XEXP (x, 1)) == CONST_INT
               && (INTVAL (XEXP (x, 1)) & 3) == 0)
        return 1;
    }

  else if (GET_MODE_CLASS (mode) != MODE_FLOAT
           && GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x)
           && ! (flag_pic
                 && symbol_mentioned_p (get_pool_constant (x))))
    return 1;

  return 0;
}

/* Return nonzero if VAL can be used as an offset in a Thumb-state address
   instruction of mode MODE.  */
int
thumb_legitimate_offset_p (enum machine_mode mode, HOST_WIDE_INT val)
{
  switch (GET_MODE_SIZE (mode))
    {
    case 1:
      return val >= 0 && val < 32;

    case 2:
      return val >= 0 && val < 64 && (val & 1) == 0;

    default:
      return (val >= 0
              && (val + GET_MODE_SIZE (mode)) <= 128
              && (val & 3) == 0);
    }
}
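
/* Editor's worked examples (a sketch, not original source) of the
   three ranges above:

     ldrb  r0, [r1, #31]     @ size 1: 0 <= val < 32
     ldrh  r0, [r1, #62]     @ size 2: 0 <= val < 64, val even
     ldr   r0, [r1, #124]    @ size 4: 124 + 4 <= 128, val a multiple of 4  */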

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.  */
rtx
arm_legitimize_address (rtx x, rtx orig_x, enum machine_mode mode)
{
  if (GET_CODE (x) == PLUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      if (CONSTANT_P (xop0) && !symbol_mentioned_p (xop0))
        xop0 = force_reg (SImode, xop0);

      if (CONSTANT_P (xop1) && !symbol_mentioned_p (xop1))
        xop1 = force_reg (SImode, xop1);

      if (ARM_BASE_REGISTER_RTX_P (xop0)
          && GET_CODE (xop1) == CONST_INT)
        {
          HOST_WIDE_INT n, low_n;
          rtx base_reg, val;
          n = INTVAL (xop1);

          if (mode == DImode || (TARGET_SOFT_FLOAT && mode == DFmode))
            {
              low_n = n & 0x0f;
              n &= ~0x0f;
              if (low_n > 4)
                {
                  n += 16;
                  low_n -= 16;
                }
            }
          else
            {
              low_n = ((mode) == TImode ? 0
                       : n >= 0 ? (n & 0xfff) : -((-n) & 0xfff));
              n -= low_n;
            }

          base_reg = gen_reg_rtx (SImode);
          val = force_operand (gen_rtx_PLUS (SImode, xop0,
                                             GEN_INT (n)), NULL_RTX);
          emit_move_insn (base_reg, val);
          x = (low_n == 0 ? base_reg
               : gen_rtx_PLUS (SImode, base_reg, GEN_INT (low_n)));
        }
      else if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
        x = gen_rtx_PLUS (SImode, xop0, xop1);
    }

  /* XXX We don't allow MINUS any more -- see comment in
     arm_legitimate_address_p ().  */
  else if (GET_CODE (x) == MINUS)
    {
      rtx xop0 = XEXP (x, 0);
      rtx xop1 = XEXP (x, 1);

      if (CONSTANT_P (xop0))
        xop0 = force_reg (SImode, xop0);

      if (CONSTANT_P (xop1) && ! symbol_mentioned_p (xop1))
        xop1 = force_reg (SImode, xop1);

      if (xop0 != XEXP (x, 0) || xop1 != XEXP (x, 1))
        x = gen_rtx_MINUS (SImode, xop0, xop1);
    }

  if (flag_pic)
    {
      /* We need to find and carefully transform any SYMBOL and LABEL
         references; so go back to the original address expression.  */
      rtx new_x = legitimize_pic_address (orig_x, mode, NULL_RTX);

      if (new_x != orig_x)
        x = new_x;
    }

  return x;
}
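
/* Editor's worked example (a sketch, not original source): for a word
   access at xop0 + 0x1234, the offset is split so that the low part
   fits the 12-bit immediate range:

     n = 0x1234  ->  n = 0x1000, low_n = 0x234

   giving "base_reg = xop0 + 0x1000" and the final address
   (plus base_reg 0x234).  */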

#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
   (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N)  ((N) * 4 - 2)
#endif

/* Worker routine for arm_rtx_costs.  */
static inline int
arm_rtx_costs_1 (rtx x, enum rtx_code code, enum rtx_code outer)
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  if (TARGET_THUMB)
    {
      switch (code)
        {
        case ASHIFT:
        case ASHIFTRT:
        case LSHIFTRT:
        case ROTATERT:
        case PLUS:
        case MINUS:
        case COMPARE:
        case NEG:
        case NOT:
          return COSTS_N_INSNS (1);

        case MULT:
          if (GET_CODE (XEXP (x, 1)) == CONST_INT)
            {
              int cycles = 0;
              unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));

              while (i)
                {
                  i >>= 2;
                  cycles++;
                }
              return COSTS_N_INSNS (2) + cycles;
            }
          return COSTS_N_INSNS (1) + 16;

        case SET:
          return (COSTS_N_INSNS (1)
                  + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
                         + (GET_CODE (SET_DEST (x)) == MEM)));

        case CONST_INT:
          if (outer == SET)
            {
              if ((unsigned HOST_WIDE_INT) INTVAL (x) < 256)
                return 0;
              if (thumb_shiftable_const (INTVAL (x)))
                return COSTS_N_INSNS (2);
              return COSTS_N_INSNS (3);
            }
          else if ((outer == PLUS || outer == COMPARE)
                   && INTVAL (x) < 256 && INTVAL (x) > -256)
            return 0;
          else if (outer == AND
                   && INTVAL (x) < 256 && INTVAL (x) >= -256)
            return COSTS_N_INSNS (1);
          else if (outer == ASHIFT || outer == ASHIFTRT
                   || outer == LSHIFTRT)
            return 0;
          return COSTS_N_INSNS (2);

        case CONST:
        case CONST_DOUBLE:
        case LABEL_REF:
        case SYMBOL_REF:
          return COSTS_N_INSNS (3);

        case UDIV:
        case UMOD:
        case DIV:
        case MOD:
          return 100;

        case TRUNCATE:
          return 99;

        case AND:
        case XOR:
        case IOR:
          /* XXX guess.  */
          return 8;

        case ADDRESSOF:
        case MEM:
          /* XXX another guess.  */
          /* Memory costs quite a lot for the first word, but subsequent words
             load at the equivalent of a single insn each.  */
          return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
                  + ((GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
                     ? 4 : 0));

        case IF_THEN_ELSE:
          /* XXX a guess.  */
          if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
            return 14;
          return 2;

        case ZERO_EXTEND:
          /* XXX still guessing.  */
          switch (GET_MODE (XEXP (x, 0)))
            {
            case QImode:
              return (1 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case HImode:
              return (4 + (mode == DImode ? 4 : 0)
                      + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            case SImode:
              return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));

            default:
              return 99;
            }

        default:
          return 99;
        }
    }

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
         load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
              + (GET_CODE (x) == SYMBOL_REF
                 && CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      return optimize_size ? COSTS_N_INSNS (2) : 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
        return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
        return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
        return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
                + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
                    || (GET_CODE (XEXP (x, 0)) == SUBREG
                        && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
                   ? 0 : 4)
              + ((GET_CODE (XEXP (x, 1)) == REG
                  || (GET_CODE (XEXP (x, 1)) == SUBREG
                      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
                  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
                 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
        return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_INT
                        && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
                   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                          && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
                     ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 0))
                    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpa (XEXP (x, 0))))
                   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
            && const_ok_for_arm (INTVAL (XEXP (x, 0)))
            && REG_OR_SUBREG_REG (XEXP (x, 1))))
          || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
               || subcode == ASHIFTRT || subcode == LSHIFTRT
               || subcode == ROTATE || subcode == ROTATERT
               || (subcode == MULT
                   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
                   && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
                        (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
              && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
              && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
                  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
              && REG_OR_SUBREG_REG (XEXP (x, 0))))
        return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
        return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
                        && const_double_rtx_ok_for_fpa (XEXP (x, 1))))
                   ? 0 : 8));

      /* Fall through */
    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be split into reg+const during
         reload, so it is a bad idea to combine them with other instructions,
         since then they might not be moved outside of loops.  As a compromise
         we allow integration with ops that have a constant as their second
         operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
           && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
           && GET_CODE (XEXP (x, 1)) != CONST_INT)
          || (REG_OR_SUBREG_REG (XEXP (x, 0))
              && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
        extra_cost = 4;

      if (mode == DImode)
        return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
        return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
                + ((REG_OR_SUBREG_REG (XEXP (x, 1))
                    || (GET_CODE (XEXP (x, 1)) == CONST_INT
                        && const_ok_for_op (INTVAL (XEXP (x, 1)), code)))
                   ? 0 : 4));
      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
        return (1 + extra_cost
                + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
                     || subcode == LSHIFTRT || subcode == ASHIFTRT
                     || subcode == ROTATE || subcode == ROTATERT
                     || (subcode == MULT
                         && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
                         && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
                              (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0)))
                    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
                    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
                        || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
                   ? 0 : 4));
      return 8;

    case MULT:
      /* There is no point basing this on the tuning, since it is always the
         fast variant if it exists at all.  */
      if (arm_fast_multiply && mode == DImode
          && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
          && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
              || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))