1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 59 Temple Place - Suite 330,
22 Boston, MA 02111-1307, USA. */
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-flags.h"
36 #include "insn-attr.h"
/* NOTE(review): this is an elided listing -- embedded numbers are original file
   line numbers and many lines are missing.  Code below kept byte-identical;
   only review comments added.  */
/* Forward declarations for the static helpers defined later in this file,
   plus the compare-operand globals used by the branch expanders.  */
42 /* The maximum number of insns skipped which will be conditionalised if
44 #define MAX_INSNS_SKIPPED 5
46 /* Some function declarations. */
47 extern FILE *asm_out_file;
49 static HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
50 static char *output_multi_immediate PROTO ((rtx *, char *, char *, int,
52 static int arm_gen_constant PROTO ((enum rtx_code, enum machine_mode,
53 HOST_WIDE_INT, rtx, rtx, int, int));
54 static int arm_naked_function_p PROTO ((tree));
55 static void init_fpa_table PROTO ((void));
56 static enum machine_mode select_dominance_cc_mode PROTO ((enum rtx_code, rtx,
58 static HOST_WIDE_INT add_constant PROTO ((rtx, enum machine_mode));
59 static void dump_table PROTO ((rtx));
60 static int fixit PROTO ((rtx, enum machine_mode, int));
61 static rtx find_barrier PROTO ((rtx, int));
62 static int broken_move PROTO ((rtx));
63 static char *fp_const_from_val PROTO ((REAL_VALUE_TYPE *));
64 static int eliminate_lr2ip PROTO ((rtx *));
65 static char *shift_op PROTO ((rtx, HOST_WIDE_INT *));
66 static int pattern_really_clobbers_lr PROTO ((rtx));
67 static int function_really_clobbers_lr PROTO ((rtx));
68 static void emit_multi_reg_push PROTO ((int));
69 static enum arm_cond_code get_arm_condition_code PROTO ((rtx));
71 /* Define the information needed to generate branch insns. This is
72 stored from the compare operation. */
74 rtx arm_compare_op0, arm_compare_op1;
/* Backend-wide configuration state, filled in by arm_override_options and
   consulted throughout code generation.  (Elided listing: some declarations
   between the visible lines are missing.)  */
77 /* What type of cpu are we compiling for? */
78 enum processor_type arm_cpu;
80 /* What type of floating point are we compiling for? */
81 enum floating_point_type arm_fpu;
83 /* What program mode is the cpu running in? 26-bit mode or 32-bit mode */
84 enum prog_mode_type arm_prgmode;
86 char *target_cpu_name = ARM_CPU_NAME;
87 char *target_fpe_name = NULL;
89 /* Nonzero if this is an "M" variant of the processor. */
90 int arm_fast_multiply = 0;
92 /* Nonzero if this chip supports the ARM Architecture 4 extensions */
95 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
96 must report the mode of the memory reference from PRINT_OPERAND to
97 PRINT_OPERAND_ADDRESS. */
98 enum machine_mode output_memory_reference_mode;
100 /* Nonzero if the prologue must setup `fp'. */
101 int current_function_anonymous_args;
102 /* NOTE(review): r9 is the APCS default; arm_override_options may bump this
   to r10 when stack checking is off -- see that function.  */
103 /* The register number to be used for the PIC offset register. */
104 int arm_pic_register = 9;
106 /* Location counter of .text segment. */
107 int arm_text_location = 0;
109 /* Set to one if we think that lr is only saved because of subroutine calls,
110 but all of these can be `put after' return insns */
111 int lr_save_eliminated;
113 /* Set to 1 when a return insn is output, this means that the epilogue
116 static int return_used_this_function;
118 static int arm_constant_limit = 3;
120 /* For an explanation of these variables, see final_prescan_insn below. */
122 enum arm_cond_code arm_current_cc;
124 int arm_target_label;
126 /* The condition codes of the ARM, and the inverse function. */
127 char *arm_condition_codes[] =
129 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
130 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
133 static enum arm_cond_code get_arm_condition_code ();
/* CPU-selection tables: the -mcpu/-mtune switch descriptors, the FL_ feature
   flag bits, and the per-processor capability table.  (Elided listing: the
   struct processors definition and several initializer lines are missing;
   flag values shown are what arm_override_options tests below.)  */
136 /* Initialization code */
138 struct arm_cpu_select arm_select[3] =
140 /* switch name, tune arch */
141 { (char *)0, "--with-cpu=", 1, 1 },
142 { (char *)0, "-mcpu=", 1, 1 },
143 { (char *)0, "-mtune=", 1, 0 },
146 #define FL_CO_PROC 0x01 /* Has external co-processor bus */
147 #define FL_FAST_MULT 0x02 /* Fast multiply */
148 #define FL_MODE26 0x04 /* 26-bit mode support */
149 #define FL_MODE32 0x08 /* 32-bit mode support */
150 #define FL_ARCH4 0x10 /* Architecture rel 4 */
151 #define FL_THUMB 0x20 /* Thumb aware */
156 enum processor_type type;
160 /* Not all of these give usefully different compilation alternatives,
161 but there is no simple way of generalizing them. */
162 static struct processors all_procs[] =
164 {"arm2", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
165 {"arm250", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
166 {"arm3", PROCESSOR_ARM2, FL_CO_PROC | FL_MODE26},
167 {"arm6", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
168 {"arm600", PROCESSOR_ARM6, FL_CO_PROC | FL_MODE32 | FL_MODE26},
169 {"arm610", PROCESSOR_ARM6, FL_MODE32 | FL_MODE26},
170 {"arm7", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
171 /* arm7m doesn't exist on its own, only in conjuction with D, (and I), but
172 those don't alter the code, so it is sometimes known as the arm7m */
173 {"arm7m", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
175 {"arm7dm", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
177 {"arm7dmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
179 {"arm700", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
180 {"arm710", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
181 {"arm7100", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
182 {"arm7500", PROCESSOR_ARM7, FL_MODE32 | FL_MODE26},
183 /* Doesn't really have an external co-proc, but does have embedded fpu */
184 {"arm7500fe", PROCESSOR_ARM7, FL_CO_PROC | FL_MODE32 | FL_MODE26},
185 {"arm7tdmi", PROCESSOR_ARM7, (FL_CO_PROC | FL_FAST_MULT | FL_MODE32
186 | FL_ARCH4 | FL_THUMB)},
187 {"arm8", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
189 {"arm810", PROCESSOR_ARM8, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
191 {"strongarm", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
193 {"strongarm110", PROCESSOR_STARM, (FL_FAST_MULT | FL_MODE32 | FL_MODE26
198 /* Fix up any incompatible options that the user has specified.
199 This has now turned into a maze. */
/* Validate and reconcile the -mcpu/-mtune/-mfpe/APCS command-line switches,
   then derive the capability globals (arm_fast_multiply, arm_arch4, ...)
   from the selected processor's FL_ flag word.  (Elided listing: code kept
   as shown except for the precedence fix noted below.)  */
201 arm_override_options ()
203 int arm_thumb_aware = 0;
206 struct arm_cpu_select *ptr;
207 static struct cpu_default {
211 { TARGET_CPU_arm2, "arm2" },
212 { TARGET_CPU_arm6, "arm6" },
213 { TARGET_CPU_arm610, "arm610" },
214 { TARGET_CPU_arm7dm, "arm7dm" },
215 { TARGET_CPU_arm7500fe, "arm7500fe" },
216 { TARGET_CPU_arm7tdmi, "arm7tdmi" },
217 { TARGET_CPU_arm8, "arm8" },
218 { TARGET_CPU_arm810, "arm810" },
219 { TARGET_CPU_strongarm, "strongarm" },
222 struct cpu_default *def;
224 /* Set the default. */
225 for (def = &cpu_defaults[0]; def->name; ++def)
226 if (def->cpu == TARGET_CPU_DEFAULT)
231 arm_select[0].string = def->name;
233 for (i = 0; i < sizeof (arm_select) / sizeof (arm_select[0]); i++)
235 ptr = &arm_select[i];
236 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
238 struct processors *sel;
240 for (sel = all_procs; sel->name != NULL; sel++)
241 if (! strcmp (ptr->string, sel->name))
251 if (sel->name == NULL)
252 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
256 if (write_symbols != NO_DEBUG && flag_omit_frame_pointer)
257 warning ("-g with -fomit-frame-pointer may not give sensible debugging");
259 if (TARGET_POKE_FUNCTION_NAME)
260 target_flags |= ARM_FLAG_APCS_FRAME;
263 warning ("Option '-m6' deprecated. Use: '-mapcs-32' or -mcpu=<proc>");
266 warning ("Option '-m3' deprecated. Use: '-mapcs-26' or -mcpu=<proc>");
268 if (TARGET_APCS_REENT && flag_pic)
269 fatal ("-fpic and -mapcs-reent are incompatible");
271 if (TARGET_APCS_REENT)
272 warning ("APCS reentrant code not supported.");
274 /* If stack checking is disabled, we can use r10 as the PIC register,
275 which keeps r9 available. */
276 if (flag_pic && ! TARGET_APCS_STACK)
277 arm_pic_register = 10;
279 /* Well, I'm about to have a go, but pic is NOT going to be compatible
280 with APCS reentrancy, since that requires too much support in the
281 assembler and linker, and the ARMASM assembler seems to lack some
282 required directives. */
284 warning ("Position independent code not supported. Ignored");
286 if (TARGET_APCS_FLOAT)
287 warning ("Passing floating point arguments in fp regs not yet supported");
289 if (TARGET_APCS_STACK && ! TARGET_APCS)
291 warning ("-mapcs-stack-check incompatible with -mno-apcs-frame");
292 target_flags |= ARM_FLAG_APCS_FRAME;
297 /* Default value for floating point code... if no co-processor
298 bus, then schedule for emulated floating point. Otherwise,
299 assume the user has an FPA, unless overridden with -mfpe-... */
/* FIX(review): was `flags & FL_CO_PROC == 0'; `==' binds tighter than `&',
   so the old test was `flags & 0' -- always false, and the emulated-FP
   default could never be selected.  Parenthesize the AND.  */
300 if ((flags & FL_CO_PROC) == 0)
304 arm_fast_multiply = (flags & FL_FAST_MULT) != 0;
305 arm_arch4 = (flags & FL_ARCH4) != 0;
306 arm_thumb_aware = (flags & FL_THUMB) != 0;
310 if (strcmp (target_fpe_name, "2") == 0)
312 else if (strcmp (target_fpe_name, "3") == 0)
315 fatal ("Invalid floating point emulation option: -mfpe-%s",
319 if (TARGET_THUMB_INTERWORK && ! arm_thumb_aware)
321 warning ("This processor variant does not support Thumb interworking");
322 target_flags &= ~ARM_FLAG_THUMB;
325 if (TARGET_FPE && arm_fpu != FP_HARD)
328 /* For arm2/3 there is no need to do any scheduling if there is only
329 a floating point emulator, or we are doing software floating-point. */
330 if ((TARGET_SOFT_FLOAT || arm_fpu != FP_HARD) && arm_cpu == PROCESSOR_ARM2)
331 flag_schedule_insns = flag_schedule_insns_after_reload = 0;
333 arm_prog_mode = TARGET_APCS_32 ? PROG_MODE_PROG32 : PROG_MODE_PROG26;
/* Three small predicates (elided listing; function headers partly missing):
   use_return_insn -- can the epilogue be a single instruction?
   const_ok_for_arm -- is I encodable as an 8-bit value rotated by 2*n?
   const_ok_for_op -- is I usable (possibly negated/inverted) for CODE?  */
337 /* Return 1 if it is possible to return using a single instruction */
344 if (!reload_completed ||current_function_pretend_args_size
345 || current_function_anonymous_args
346 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
349 /* Can't be done if any of the FPU regs are pushed, since this also
351 for (regno = 20; regno < 24; regno++)
352 if (regs_ever_live[regno])
355 /* If a function is naked, don't use the "return" insn. */
356 if (arm_naked_function_p (current_function_decl))
362 /* Return TRUE if int I is a valid immediate ARM constant. */
368 unsigned HOST_WIDE_INT mask = ~0xFF;
370 /* Fast return for 0 and powers of 2 */
371 if ((i & (i - 1)) == 0)
376 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
378 /* NOTE(review): rotate the 0xFF window two bits at a time around the
   32-bit word, keeping the bits above bit 31 set so the loop test works
   when HOST_WIDE_INT is wider than 32 bits.  */
379 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
380 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
381 } while (mask != ~0xFF);
386 /* Return true if I is a valid constant for the operation CODE. */
388 const_ok_for_op (i, code, mode)
391 enum machine_mode mode;
393 if (const_ok_for_arm (i))
399 return const_ok_for_arm (ARM_SIGN_EXTEND (-i));
401 case MINUS: /* Should only occur with (MINUS I reg) => rsb */
407 return const_ok_for_arm (ARM_SIGN_EXTEND (~i));
414 /* Emit a sequence of insns to handle a large constant.
415 CODE is the code of the operation required, it can be any of SET, PLUS,
416 IOR, AND, XOR, MINUS;
417 MODE is the mode in which the operation is being performed;
418 VAL is the integer to operate on;
419 SOURCE is the other operand (a register, or a null-pointer for SET);
420 SUBTARGETS means it is safe to create scratch registers if that will
421 either produce a simpler sequence, or we will want to cse the values.
422 Return value is the number of insns emitted. */
/* NOTE(review): elided listing -- when the open-coded sequence would exceed
   arm_constant_limit insns, the constant is instead emitted as a literal
   (leaving it to the pool) and combined with SOURCE in a scratch register;
   otherwise the work is delegated to arm_gen_constant with generate=1.  */
425 arm_split_constant (code, mode, val, target, source, subtargets)
427 enum machine_mode mode;
433 if (subtargets || code == SET
434 || (GET_CODE (target) == REG && GET_CODE (source) == REG
435 && REGNO (target) != REGNO (source)))
439 if (arm_gen_constant (code, mode, val, target, source, 1, 0)
440 > arm_constant_limit + (code != SET))
444 /* Currently SET is the only monadic value for CODE, all
445 the rest are diadic. */
446 emit_insn (gen_rtx (SET, VOIDmode, target, GEN_INT (val)));
451 rtx temp = subtargets ? gen_reg_rtx (mode) : target;
453 emit_insn (gen_rtx (SET, VOIDmode, temp, GEN_INT (val)));
454 /* For MINUS, the value is subtracted from, since we never
455 have subtraction of a constant. */
457 emit_insn (gen_rtx (SET, VOIDmode, target,
458 gen_rtx (code, mode, temp, source)));
460 emit_insn (gen_rtx (SET, VOIDmode, target,
461 gen_rtx (code, mode, source, temp)));
467 return arm_gen_constant (code, mode, val, target, source, subtargets, 1);
470 /* As above, but extra parameter GENERATE which, if clear, suppresses
/* NOTE(review): elided listing; logic is order-sensitive so the code is kept
   byte-identical.  Strategy overview (grounded in the visible code): handle
   degenerate constants per-CODE first, then one-insn cases, then cheap
   shift/invert tricks, and finally fall through to the generic loop that
   emits the value in 8-bit chunks aligned on 2-bit boundaries, starting at
   the largest block of zeros.  Returns the number of insns (counted even
   when GENERATE is clear, so callers can cost a sequence without emitting).  */
473 arm_gen_constant (code, mode, val, target, source, subtargets, generate)
475 enum machine_mode mode;
485 int can_negate_initial = 0;
488 int num_bits_set = 0;
489 int set_sign_bit_copies = 0;
490 int clear_sign_bit_copies = 0;
491 int clear_zero_bit_copies = 0;
492 int set_zero_bit_copies = 0;
495 unsigned HOST_WIDE_INT temp1, temp2;
496 unsigned HOST_WIDE_INT remainder = val & 0xffffffff;
498 /* find out which operations are safe for a given CODE. Also do a quick
499 check for degenerate cases; these can occur when DImode operations
511 can_negate_initial = 1;
515 if (remainder == 0xffffffff)
518 emit_insn (gen_rtx (SET, VOIDmode, target,
519 GEN_INT (ARM_SIGN_EXTEND (val))));
524 if (reload_completed && rtx_equal_p (target, source))
527 emit_insn (gen_rtx (SET, VOIDmode, target, source));
536 emit_insn (gen_rtx (SET, VOIDmode, target, const0_rtx));
539 if (remainder == 0xffffffff)
541 if (reload_completed && rtx_equal_p (target, source))
544 emit_insn (gen_rtx (SET, VOIDmode, target, source));
553 if (reload_completed && rtx_equal_p (target, source))
556 emit_insn (gen_rtx (SET, VOIDmode, target, source))
559 if (remainder == 0xffffffff)
562 emit_insn (gen_rtx (SET, VOIDmode, target,
563 gen_rtx (NOT, mode, source)));
567 /* We don't know how to handle this yet below. */
571 /* We treat MINUS as (val - source), since (source - val) is always
572 passed as (source + (-val)). */
576 emit_insn (gen_rtx (SET, VOIDmode, target,
577 gen_rtx (NEG, mode, source)));
580 if (const_ok_for_arm (val))
583 emit_insn (gen_rtx (SET, VOIDmode, target,
584 gen_rtx (MINUS, mode, GEN_INT (val), source)));
595 /* If we can do it in one insn get out quickly */
596 if (const_ok_for_arm (val)
597 || (can_negate_initial && const_ok_for_arm (-val))
598 || (can_invert && const_ok_for_arm (~val)))
601 emit_insn (gen_rtx (SET, VOIDmode, target,
602 (source ? gen_rtx (code, mode, source,
609 /* Calculate a few attributes that may be useful for specific
612 for (i = 31; i >= 0; i--)
614 if ((remainder & (1 << i)) == 0)
615 clear_sign_bit_copies++;
620 for (i = 31; i >= 0; i--)
622 if ((remainder & (1 << i)) != 0)
623 set_sign_bit_copies++;
628 for (i = 0; i <= 31; i++)
630 if ((remainder & (1 << i)) == 0)
631 clear_zero_bit_copies++;
636 for (i = 0; i <= 31; i++)
638 if ((remainder & (1 << i)) != 0)
639 set_zero_bit_copies++;
647 /* See if we can do this by sign_extending a constant that is known
648 to be negative. This is a good, way of doing it, since the shift
649 may well merge into a subsequent insn. */
650 if (set_sign_bit_copies > 1)
653 (temp1 = ARM_SIGN_EXTEND (remainder
654 << (set_sign_bit_copies - 1))))
658 new_src = subtargets ? gen_reg_rtx (mode) : target;
659 emit_insn (gen_rtx (SET, VOIDmode, new_src,
661 emit_insn (gen_ashrsi3 (target, new_src,
662 GEN_INT (set_sign_bit_copies - 1)));
666 /* For an inverted constant, we will need to set the low bits,
667 these will be shifted out of harm's way. */
668 temp1 |= (1 << (set_sign_bit_copies - 1)) - 1;
669 if (const_ok_for_arm (~temp1))
673 new_src = subtargets ? gen_reg_rtx (mode) : target;
674 emit_insn (gen_rtx (SET, VOIDmode, new_src,
676 emit_insn (gen_ashrsi3 (target, new_src,
677 GEN_INT (set_sign_bit_copies - 1)));
683 /* See if we can generate this by setting the bottom (or the top)
684 16 bits, and then shifting these into the other half of the
685 word. We only look for the simplest cases, to do more would cost
686 too much. Be careful, however, not to generate this when the
687 alternative would take fewer insns. */
688 if (val & 0xffff0000)
690 temp1 = remainder & 0xffff0000;
691 temp2 = remainder & 0x0000ffff;
693 /* Overlaps outside this range are best done using other methods. */
694 for (i = 9; i < 24; i++)
696 if ((((temp2 | (temp2 << i)) & 0xffffffff) == remainder)
697 && ! const_ok_for_arm (temp2))
699 insns = arm_gen_constant (code, mode, temp2,
700 new_src = (subtargets
703 source, subtargets, generate);
706 emit_insn (gen_rtx (SET, VOIDmode, target,
708 gen_rtx (ASHIFT, mode, source,
715 /* Don't duplicate cases already considered. */
716 for (i = 17; i < 24; i++)
718 if (((temp1 | (temp1 >> i)) == remainder)
719 && ! const_ok_for_arm (temp1))
721 insns = arm_gen_constant (code, mode, temp1,
722 new_src = (subtargets
725 source, subtargets, generate);
728 emit_insn (gen_rtx (SET, VOIDmode, target,
730 gen_rtx (LSHIFTRT, mode,
731 source, GEN_INT (i)),
741 /* If we have IOR or XOR, and the constant can be loaded in a
742 single instruction, and we can find a temporary to put it in,
743 then this can be done in two instructions instead of 3-4. */
745 || (reload_completed && ! reg_mentioned_p (target, source)))
747 if (const_ok_for_arm (ARM_SIGN_EXTEND (~ val)))
751 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
753 emit_insn (gen_rtx (SET, VOIDmode, sub, GEN_INT (val)));
754 emit_insn (gen_rtx (SET, VOIDmode, target,
755 gen_rtx (code, mode, source, sub)));
764 if (set_sign_bit_copies > 8
765 && (val & (-1 << (32 - set_sign_bit_copies))) == val)
769 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
770 rtx shift = GEN_INT (set_sign_bit_copies);
772 emit_insn (gen_rtx (SET, VOIDmode, sub,
774 gen_rtx (ASHIFT, mode, source,
776 emit_insn (gen_rtx (SET, VOIDmode, target,
778 gen_rtx (LSHIFTRT, mode, sub,
784 if (set_zero_bit_copies > 8
785 && (remainder & ((1 << set_zero_bit_copies) - 1)) == remainder)
789 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
790 rtx shift = GEN_INT (set_zero_bit_copies);
792 emit_insn (gen_rtx (SET, VOIDmode, sub,
794 gen_rtx (LSHIFTRT, mode, source,
796 emit_insn (gen_rtx (SET, VOIDmode, target,
798 gen_rtx (ASHIFT, mode, sub,
804 if (const_ok_for_arm (temp1 = ARM_SIGN_EXTEND (~ val)))
808 rtx sub = subtargets ? gen_reg_rtx (mode) : target;
809 emit_insn (gen_rtx (SET, VOIDmode, sub,
810 gen_rtx (NOT, mode, source)));
813 sub = gen_reg_rtx (mode);
814 emit_insn (gen_rtx (SET, VOIDmode, sub,
815 gen_rtx (AND, mode, source,
817 emit_insn (gen_rtx (SET, VOIDmode, target,
818 gen_rtx (NOT, mode, sub)));
825 /* See if two shifts will do 2 or more insn's worth of work. */
826 if (clear_sign_bit_copies >= 16 && clear_sign_bit_copies < 24)
828 HOST_WIDE_INT shift_mask = ((0xffffffff
829 << (32 - clear_sign_bit_copies))
834 if ((remainder | shift_mask) != 0xffffffff)
838 new_source = subtargets ? gen_reg_rtx (mode) : target;
839 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
840 new_source, source, subtargets, 1);
844 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
845 new_source, source, subtargets, 0);
850 shift = GEN_INT (clear_sign_bit_copies);
851 new_source = subtargets ? gen_reg_rtx (mode) : target;
852 emit_insn (gen_ashlsi3 (new_source, source, shift));
853 emit_insn (gen_lshrsi3 (target, new_source, shift));
859 if (clear_zero_bit_copies >= 16 && clear_zero_bit_copies < 24)
861 HOST_WIDE_INT shift_mask = (1 << clear_zero_bit_copies) - 1;
865 if ((remainder | shift_mask) != 0xffffffff)
869 new_source = subtargets ? gen_reg_rtx (mode) : target;
870 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
871 new_source, source, subtargets, 1);
875 insns = arm_gen_constant (AND, mode, remainder | shift_mask,
876 new_source, source, subtargets, 0);
881 shift = GEN_INT (clear_zero_bit_copies);
882 new_source = subtargets ? gen_reg_rtx (mode) : target;
883 emit_insn (gen_lshrsi3 (new_source, source, shift));
884 emit_insn (gen_ashlsi3 (target, new_source, shift));
896 for (i = 0; i < 32; i++)
897 if (remainder & (1 << i))
900 if (code == AND || (can_invert && num_bits_set > 16))
901 remainder = (~remainder) & 0xffffffff;
902 else if (code == PLUS && num_bits_set > 16)
903 remainder = (-remainder) & 0xffffffff;
910 /* Now try and find a way of doing the job in either two or three
912 We start by looking for the largest block of zeros that are aligned on
913 a 2-bit boundary, we then fill up the temps, wrapping around to the
914 top of the word when we drop off the bottom.
915 In the worst case this code should produce no more than four insns. */
918 int best_consecutive_zeros = 0;
920 for (i = 0; i < 32; i += 2)
922 int consecutive_zeros = 0;
924 if (! (remainder & (3 << i)))
926 while ((i < 32) && ! (remainder & (3 << i)))
928 consecutive_zeros += 2;
931 if (consecutive_zeros > best_consecutive_zeros)
933 best_consecutive_zeros = consecutive_zeros;
934 best_start = i - consecutive_zeros;
940 /* Now start emitting the insns, starting with the one with the highest
941 bit set: we do this so that the smallest number will be emitted last;
942 this is more likely to be combinable with addressing insns. */
950 if (remainder & (3 << (i - 2)))
955 temp1 = remainder & ((0x0ff << end)
956 | ((i < end) ? (0xff >> (32 - end)) : 0));
962 emit_insn (gen_rtx (SET, VOIDmode,
963 new_src = (subtargets
966 GEN_INT (can_invert ? ~temp1 : temp1)));
970 else if (code == MINUS)
973 emit_insn (gen_rtx (SET, VOIDmode,
974 new_src = (subtargets
977 gen_rtx (code, mode, GEN_INT (temp1),
984 emit_insn (gen_rtx (SET, VOIDmode,
990 gen_rtx (code, mode, source,
991 GEN_INT (can_invert ? ~temp1
1002 } while (remainder);
1007 /* Canonicalize a comparison so that we are more likely to recognize it.
1008 This can be done for a few constant compares, where we can make the
1009 immediate value easier to load. */
/* GT/LE may be turned into GE/LT (and GTU/LEU into GEU/LTU, GEU/LTU into
   GTU/LEU) by nudging the constant by one, when the adjusted constant is
   loadable and the original is not the extreme value (so the nudge cannot
   overflow).  Returns the possibly-replaced comparison code and rewrites
   *op1 in place.  (Elided listing: only the visible lines are shown.)  */
1011 arm_canonicalize_comparison (code, op1)
1015 HOST_WIDE_INT i = INTVAL (*op1);
/* FIX(review): the guard read `1 << (HOST_BITS_PER_WIDE_INT - 1) - 1', which
   parenthesizes as `1 << (bits - 2)' -- not the maximum HOST_WIDE_INT -- and
   shifts a plain int by more than its width when HOST_WIDE_INT is 64 bits
   (undefined behaviour).  Use a HOST_WIDE_INT one and the intended parens.  */
1025 if (i != ((((HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1)) - 1)
1026 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1028 *op1 = GEN_INT (i+1);
1029 return code == GT ? GE : LT;
/* FIX(review): likewise, the minimum-value guard shifted a plain int; cast
   to HOST_WIDE_INT before shifting into the sign bit.  */
1035 if (i != (((HOST_WIDE_INT) 1) << (HOST_BITS_PER_WIDE_INT - 1))
1036 && (const_ok_for_arm (i-1) || const_ok_for_arm (- (i-1))))
1038 *op1 = GEN_INT (i-1);
1039 return code == GE ? GT : LE;
1046 && (const_ok_for_arm (i+1) || const_ok_for_arm (- (i+1))))
1048 *op1 = GEN_INT (i + 1);
1049 return code == GTU ? GEU : LTU;
1056 && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
1058 *op1 = GEN_INT (i - 1);
1059 return code == GEU ? GTU : LEU;
1071 /* Handle aggregates that are not laid out in a BLKmode element.
1072 This is a sub-element of RETURN_IN_MEMORY. */
/* NOTE(review): elided listing -- structs return in registers only when no
   field is a bit-field (per the visible loop); unions only when every field
   is integral and itself register-returnable.  The return statements inside
   the loops are among the elided lines.  */
1074 arm_return_in_memory (type)
1077 if (TREE_CODE (type) == RECORD_TYPE)
1081 /* For a struct, we can return in a register if every element was a
1083 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1084 if (TREE_CODE (field) != FIELD_DECL
1085 || ! DECL_BIT_FIELD_TYPE (field))
1090 else if (TREE_CODE (type) == UNION_TYPE)
1094 /* Unions can be returned in registers if every element is
1095 integral, or can be returned in an integer register. */
1096 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1098 if (TREE_CODE (field) != FIELD_DECL
1099 || (AGGREGATE_TYPE_P (TREE_TYPE (field))
1100 && RETURN_IN_MEMORY (TREE_TYPE (field)))
1101 || FLOAT_TYPE_P (TREE_TYPE (field)))
1106 /* XXX Not sure what should be done for other aggregates, so put them in
/* Predicate: X is a constant that needs PIC treatment (a SYMBOL_REF, or a
   CONST (PLUS (SYMBOL_REF ...)) expression) when -fpic is in force.  */
1112 legitimate_pic_operand_p (x)
1115 if (CONSTANT_P (x) && flag_pic
1116 && (GET_CODE (x) == SYMBOL_REF
1117 || (GET_CODE (x) == CONST
1118 && GET_CODE (XEXP (x, 0)) == PLUS
1119 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
/* Rewrite address ORIG for PIC: a SYMBOL_REF becomes a load through the GOT
   (PIC register + per-symbol address), and a CONST (PLUS sym, off) is split
   into a legitimized base plus offset.  REG is a scratch register, or 0 to
   allocate one (only legal before reload).  (Elided listing.)  */
1126 legitimize_pic_address (orig, mode, reg)
1128 enum machine_mode mode;
1131 if (GET_CODE (orig) == SYMBOL_REF)
1133 rtx pic_ref, address;
1139 if (reload_in_progress || reload_completed)
1142 reg = gen_reg_rtx (Pmode);
1147 #ifdef AOF_ASSEMBLER
1148 /* The AOF assembler can generate relocations for these directly, and
1149 understands that the PIC register has to be added into the offset.
1151 insn = emit_insn (gen_pic_load_addr_based (reg, orig));
1154 address = gen_reg_rtx (Pmode);
1158 emit_insn (gen_pic_load_addr (address, orig));
1160 pic_ref = gen_rtx (MEM, Pmode,
1161 gen_rtx (PLUS, Pmode, pic_offset_table_rtx, address));
1162 RTX_UNCHANGING_P (pic_ref) = 1;
1163 insn = emit_move_insn (reg, pic_ref);
1165 current_function_uses_pic_offset_table = 1;
1166 /* Put a REG_EQUAL note on this insn, so that it can be optimized
1168 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
1172 else if (GET_CODE (orig) == CONST)
1176 if (GET_CODE (XEXP (orig, 0)) == PLUS
1177 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
1182 if (reload_in_progress || reload_completed)
1185 reg = gen_reg_rtx (Pmode);
1188 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1190 base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
1191 offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
1192 base == reg ? 0 : reg);
1197 if (GET_CODE (offset) == CONST_INT)
1199 /* The base register doesn't really matter, we only want to
1200 test the index for the appropriate mode. */
1201 GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);
1203 if (! reload_in_progress && ! reload_completed)
1204 offset = force_reg (Pmode, offset);
1209 if (GET_CODE (offset) == CONST_INT)
1210 return plus_constant_for_output (base, INTVAL (offset));
1213 if (GET_MODE_SIZE (mode) > 4
1214 && (GET_MODE_CLASS (mode) == MODE_INT
1215 || TARGET_SOFT_FLOAT))
1217 emit_insn (gen_addsi3 (reg, base, offset));
1221 return gen_rtx (PLUS, Pmode, base, offset);
1223 else if (GET_CODE (orig) == LABEL_REF)
1224 current_function_uses_pic_offset_table = 1;
/* NOTE(review): body of the PIC-prologue routine (its header line is among
   the elided lines).  Builds pic_rtx = &_GLOBAL_OFFSET_TABLE_ relative to a
   local label, loads it into the PIC register, and prepends the sequence to
   the function's insns; the trailing USE keeps the PIC register live.  */
1243 #ifndef AOF_ASSEMBLER
1244 rtx l1, pic_tmp, pic_tmp2, seq;
1245 rtx global_offset_table;
1247 if (current_function_uses_pic_offset_table == 0)
1254 l1 = gen_label_rtx ();
1256 global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
1257 pic_tmp = gen_rtx (CONST, VOIDmode,
1258 gen_rtx (PLUS, Pmode,
1259 gen_rtx (LABEL_REF, VOIDmode, l1),
1261 pic_tmp2 = gen_rtx (CONST, VOIDmode,
1262 gen_rtx (PLUS, Pmode,
1263 global_offset_table,
1266 pic_rtx = gen_rtx (CONST, Pmode,
1267 gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));
1269 emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
1270 emit_jump_insn (gen_pic_add_dot_plus_eight(l1, pic_offset_table_rtx));
1273 seq = gen_sequence ();
1275 emit_insn_after (seq, get_insns ());
1277 /* Need to emit this whether or not we obey regdecls,
1278 since setjmp/longjmp can cause life info to screw up. */
1279 emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
1280 #endif /* AOF_ASSEMBLER */
/* Helper macros for the cost functions below: classify an operand as a
   (possibly SUBREG-wrapped) REG, unwrap it, and test for frame registers.  */
1283 #define REG_OR_SUBREG_REG(X) \
1284 (GET_CODE (X) == REG \
1285 || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
1287 #define REG_OR_SUBREG_RTX(X) \
1288 (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
1290 #define ARM_FRAME_RTX(X) \
1291 ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx \
1292 || (X) == arg_pointer_rtx)
/* RTX_COSTS hook: estimate the cost of expression X (code CODE, inside
   OUTER_CODE).  Units appear to be quarter-insns as is conventional for
   this hook -- TODO confirm against the target's COSTS_N_INSNS definition.
   (Elided listing: several case labels and returns are missing; code kept
   byte-identical.)  */
1295 arm_rtx_costs (x, code, outer_code)
1297 enum rtx_code code, outer_code;
1299 enum machine_mode mode = GET_MODE (x);
1300 enum rtx_code subcode;
1306 /* Memory costs quite a lot for the first word, but subsequent words
1307 load at the equivalent of a single insn each. */
1308 return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
1309 + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));
1316 if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
1323 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
1325 return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
1326 + ((GET_CODE (XEXP (x, 0)) == REG
1327 || (GET_CODE (XEXP (x, 0)) == SUBREG
1328 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1330 return (1 + ((GET_CODE (XEXP (x, 0)) == REG
1331 || (GET_CODE (XEXP (x, 0)) == SUBREG
1332 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
1334 + ((GET_CODE (XEXP (x, 1)) == REG
1335 || (GET_CODE (XEXP (x, 1)) == SUBREG
1336 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
1337 || (GET_CODE (XEXP (x, 1)) == CONST_INT))
1342 return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
1343 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1344 || (GET_CODE (XEXP (x, 0)) == CONST_INT
1345 && const_ok_for_arm (INTVAL (XEXP (x, 0)))))
1348 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1349 return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1350 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1351 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1353 + ((REG_OR_SUBREG_REG (XEXP (x, 0))
1354 || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
1355 && const_double_rtx_ok_for_fpu (XEXP (x, 0))))
1358 if (((GET_CODE (XEXP (x, 0)) == CONST_INT
1359 && const_ok_for_arm (INTVAL (XEXP (x, 0)))
1360 && REG_OR_SUBREG_REG (XEXP (x, 1))))
1361 || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
1362 || subcode == ASHIFTRT || subcode == LSHIFTRT
1363 || subcode == ROTATE || subcode == ROTATERT
1365 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
1366 && ((INTVAL (XEXP (XEXP (x, 1), 1)) &
1367 (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
1368 && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
1369 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
1370 || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
1371 && REG_OR_SUBREG_REG (XEXP (x, 0))))
1376 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1377 return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1378 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1379 || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
1380 && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
1384 case AND: case XOR: case IOR:
1387 /* Normally the frame registers will be spilt into reg+const during
1388 reload, so it is a bad idea to combine them with other instructions,
1389 since then they might not be moved outside of loops. As a compromise
1390 we allow integration with ops that have a constant as their second
1392 if ((REG_OR_SUBREG_REG (XEXP (x, 0))
1393 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
1394 && GET_CODE (XEXP (x, 1)) != CONST_INT)
1395 || (REG_OR_SUBREG_REG (XEXP (x, 0))
1396 && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
1400 return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
1401 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1402 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1403 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1406 if (REG_OR_SUBREG_REG (XEXP (x, 0)))
1407 return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
1408 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
1409 || (GET_CODE (XEXP (x, 1)) == CONST_INT
1410 && const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
1413 else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
1414 return (1 + extra_cost
1415 + ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
1416 || subcode == LSHIFTRT || subcode == ASHIFTRT
1417 || subcode == ROTATE || subcode == ROTATERT
1419 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1420 && ((INTVAL (XEXP (XEXP (x, 0), 1)) &
1421 (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
1422 && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
1423 && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
1424 || GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
1430 if (arm_fast_multiply && mode == DImode
1431 && (GET_CODE (XEXP (x, 0)) == GET_CODE (XEXP (x, 1)))
1432 && (GET_CODE (XEXP (x, 0)) == ZERO_EXTEND
1433 || GET_CODE (XEXP (x, 0)) == SIGN_EXTEND))
1436 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1440 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1442 unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
1443 & (unsigned HOST_WIDE_INT) 0xffffffff);
1444 int add_cost = const_ok_for_arm (i) ? 4 : 8;
1446 int booth_unit_size = (arm_fast_multiply ? 8 : 2);
1448 for (j = 0; i && j < 32; j += booth_unit_size)
1450 i >>= booth_unit_size;
1457 return ((arm_fast_multiply ? 8 : 30)
1458 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4)
1459 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 4));
1462 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1463 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 6);
1467 return 4 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1469 return 1 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 4);
1472 if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
1480 return 4 + (mode == DImode ? 4 : 0);
1483 if (GET_MODE (XEXP (x, 0)) == QImode)
1484 return (4 + (mode == DImode ? 4 : 0)
1485 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1488 switch (GET_MODE (XEXP (x, 0)))
1491 return (1 + (mode == DImode ? 4 : 0)
1492 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1495 return (4 + (mode == DImode ? 4 : 0)
1496 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
1499 return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
/* Scheduler cost-adjustment hook: INSN depends on DEP through LINK with
   default COST.  When INSN is a load and DEP is a store, the dependence
   cost is adjusted if the load reads from an area assumed cached (the
   constant pool, or a stack/frame-pointer-relative slot).
   NOTE(review): this listing is an excerpt with interior lines elided,
   so the actual values returned are not visible here -- confirm against
   the full source.  */
1509 arm_adjust_cost (insn, link, dep, cost)
1517   if ((i_pat = single_set (insn)) != NULL
1518 && GET_CODE (SET_SRC (i_pat)) == MEM
1519 && (d_pat = single_set (dep)) != NULL
1520 && GET_CODE (SET_DEST (d_pat)) == MEM)
1522       /* This is a load after a store, there is no conflict if the load reads
1523 from a cached area. Assume that loads from the stack, and from the
1524 constant pool are cached, and that others will miss. This is a
/* Disabled debug output, retained from the original source.  */
1527 /* debug_rtx (insn);
1530 fprintf (stderr, "costs %d\n", cost); */
/* Treat constant-pool and stack/frame-relative loads as cache hits.  */
1532 if (CONSTANT_POOL_ADDRESS_P (XEXP (SET_SRC (i_pat), 0))
1533 || reg_mentioned_p (stack_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1534 || reg_mentioned_p (frame_pointer_rtx, XEXP (SET_SRC (i_pat), 0))
1535 || reg_mentioned_p (hard_frame_pointer_rtx,
1536 XEXP (SET_SRC (i_pat), 0)))
1538 /* fprintf (stderr, "***** Now 1\n"); */
1546 /* This code has been fixed for cross compilation. */
/* Non-zero once init_fpa_table has filled values_fpa from strings_fpa.  */
1548 static int fpa_consts_inited = 0;
/* Textual forms of the eight FPA immediate floating-point constants.
   Only the last four initializers are visible in this excerpt;
   presumably the first four are "0".."3" -- confirm against the full
   source.  */
1550 char *strings_fpa[8] = {
1552 "4", "5", "0.5", "10"
/* Parsed REAL_VALUE_TYPE forms of strings_fpa, filled lazily below.  */
1555 static REAL_VALUE_TYPE values_fpa[8];
/* (Interior of init_fpa_table: parse each string into values_fpa.)  */
1563 for (i = 0; i < 8; i++)
1565 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
1569 fpa_consts_inited = 1;
1572 /* Return TRUE if rtx X is a valid immediate FPU constant. */
/* X is a CONST_DOUBLE; it is valid iff it equals one of the eight FPA
   constants in values_fpa and is not minus zero.  The table is built
   on first use via fpa_consts_inited.  */
1575 const_double_rtx_ok_for_fpu (x)
1581   if (!fpa_consts_inited)
1584   REAL_VALUE_FROM_CONST_DOUBLE (r, x);
/* Minus zero is rejected explicitly (it compares equal to plus zero).  */
1585   if (REAL_VALUE_MINUS_ZERO (r))
1588   for (i = 0; i < 8; i++)
1589     if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1595 /* Return TRUE if the negation of rtx X is a valid immediate FPU
   constant.  (The original comment was a copy-paste of the one above
   and omitted "negated"; the code clearly negates R before the table
   lookup.) */
1598 neg_const_double_rtx_ok_for_fpu (x)
1604   if (!fpa_consts_inited)
1607   REAL_VALUE_FROM_CONST_DOUBLE (r, x);
/* Negate first, then apply the same checks as
   const_double_rtx_ok_for_fpu.  */
1608   r = REAL_VALUE_NEGATE (r);
1609   if (REAL_VALUE_MINUS_ZERO (r))
1612   for (i = 0; i < 8; i++)
1613     if (REAL_VALUES_EQUAL (r, values_fpa[i]))
1619 /* Predicates for `match_operand' and `match_operator'. */
1621 /* s_register_operand is the same as register_operand, but it doesn't accept
1624 This function exists because at the time it was put in it led to better
1625 code. SUBREG(MEM) always needs a reload in the places where
1626 s_register_operand is used, and this seemed to lead to excessive
/* Predicate: OP is a register (or SUBREG of a register) of mode MODE,
   excluding SUBREG (MEM) -- see the comment block above.  */
1630 s_register_operand (op, mode)
1632      enum machine_mode mode;
1634   if (GET_MODE (op) != mode && mode != VOIDmode)
/* Strip a SUBREG wrapper; note SUBREG (MEM) thereby fails the REG
   test below, unlike register_operand.  */
1637   if (GET_CODE (op) == SUBREG)
1638     op = SUBREG_REG (op);
1640   /* We don't consider registers whose class is NO_REGS
1641      to be a register operand. */
1642   return (GET_CODE (op) == REG
1643 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1644 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1647 /* Only accept reg, subreg(reg), const_int. */
/* Predicate: OP is a CONST_INT, or a register/SUBREG (reg) of mode
   MODE (same register test as s_register_operand).  */
1650 reg_or_int_operand (op, mode)
1652      enum machine_mode mode;
/* Any CONST_INT is accepted regardless of MODE.  */
1654   if (GET_CODE (op) == CONST_INT)
1657   if (GET_MODE (op) != mode && mode != VOIDmode)
1660   if (GET_CODE (op) == SUBREG)
1661     op = SUBREG_REG (op);
1663   /* We don't consider registers whose class is NO_REGS
1664      to be a register operand. */
1665   return (GET_CODE (op) == REG
1666 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1667 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1670 /* Return 1 if OP is an item in memory, given that we are in reload. */
/* Predicate used during reload: TRUE if OP is (or will become) a memory
   reference -- a non-constant that is either already memory or a pseudo
   register (which reload may put on the stack).  NOTE(review): interior
   lines are elided in this excerpt; the use of true_regnum suggests a
   check on REGNO as well -- confirm against the full source.  */
1673 reload_memory_operand (op, mode)
1675      enum machine_mode mode;
1677   int regno = true_regnum (op);
1679   return (! CONSTANT_P (op)
1681 || (GET_CODE (op) == REG
1682 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1685 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
/* Predicate: OP is valid as the second operand of a data-processing
   instruction -- an s_register_operand or a CONST_INT representable as
   an ARM immediate (const_ok_for_arm).  */
1688 arm_rhs_operand (op, mode)
1690      enum machine_mode mode;
1692   return (s_register_operand (op, mode)
1693 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
1696 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
/* Predicate: like arm_rhs_operand, but additionally accepts a memory
   operand (for insns that can take a load as the rhs).  */
1700 arm_rhsm_operand (op, mode)
1702      enum machine_mode mode;
1704   return (s_register_operand (op, mode)
1705 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
1706 || memory_operand (op, mode));
1709 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
1710 constant that is valid when negated. */
/* Predicate: OP is valid for an add -- register, or CONST_INT valid
   either directly or when negated (an ADD can be emitted as a SUB).  */
1713 arm_add_operand (op, mode)
1715      enum machine_mode mode;
1717   return (s_register_operand (op, mode)
1718 || (GET_CODE (op) == CONST_INT
1719 && (const_ok_for_arm (INTVAL (op))
1720 || const_ok_for_arm (-INTVAL (op)))));
/* Predicate: OP is valid for a logical op -- register, or CONST_INT
   valid either directly or when bitwise-complemented (MOV vs MVN,
   AND vs BIC, etc.).  */
1724 arm_not_operand (op, mode)
1726      enum machine_mode mode;
1728   return (s_register_operand (op, mode)
1729 || (GET_CODE (op) == CONST_INT
1730 && (const_ok_for_arm (INTVAL (op))
1731 || const_ok_for_arm (~INTVAL (op)))));
1734 /* Return TRUE if the operand is a memory reference which contains an
1735 offsettable address. */
/* Predicate: OP is a MEM of mode MODE whose address is offsettable
   (per offsettable_address_p), taking the reload phase into account.  */
1737 offsettable_memory_operand (op, mode)
1739      enum machine_mode mode;
/* VOIDmode means "use OP's own mode".  */
1741   if (mode == VOIDmode)
1742     mode = GET_MODE (op);
1744   return (mode == GET_MODE (op)
1745 && GET_CODE (op) == MEM
1746 && offsettable_address_p (reload_completed | reload_in_progress,
1747 mode, XEXP (op, 0)));
1750 /* Return TRUE if the operand is a memory reference which is, or can be
1751 made word aligned by adjusting the offset. */
/* Predicate: OP is a MEM whose address is a (possibly SUBREG-wrapped)
   register, or register plus CONST_INT, where the base register is
   known to be at least word aligned (REGNO_POINTER_ALIGN >= 4) -- so
   the access can be made word aligned by adjusting the offset.  */
1753 alignable_memory_operand (op, mode)
1755      enum machine_mode mode;
1759   if (mode == VOIDmode)
1760     mode = GET_MODE (op);
1762   if (mode != GET_MODE (op) || GET_CODE (op) != MEM)
/* Accept reg, subreg(reg), or plus(reg/subreg(reg), const_int); the
   embedded assignments capture the base register in REG for the
   alignment test at the end.  */
1767   return ((GET_CODE (reg = op) == REG
1768 || (GET_CODE (op) == SUBREG
1769 && GET_CODE (reg = SUBREG_REG (op)) == REG)
1770 || (GET_CODE (op) == PLUS
1771 && GET_CODE (XEXP (op, 1)) == CONST_INT
1772 && (GET_CODE (reg = XEXP (op, 0)) == REG
1773 || (GET_CODE (XEXP (op, 0)) == SUBREG
1774 && GET_CODE (reg = SUBREG_REG (XEXP (op, 0))) == REG))))
1775 && REGNO_POINTER_ALIGN (REGNO (reg)) >= 4);
1778 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
/* Predicate: OP is valid as the rhs of an FPA instruction -- a register
   or a CONST_DOUBLE matching one of the FPA immediate constants.  */
1781 fpu_rhs_operand (op, mode)
1783      enum machine_mode mode;
1785   if (s_register_operand (op, mode))
1787   else if (GET_CODE (op) == CONST_DOUBLE)
1788     return (const_double_rtx_ok_for_fpu (op));
/* Predicate: like fpu_rhs_operand, but for additive ops also accepts a
   CONST_DOUBLE whose negation is an FPA immediate (ADF vs SUF).  */
1794 fpu_add_operand (op, mode)
1796      enum machine_mode mode;
1798   if (s_register_operand (op, mode))
1800   else if (GET_CODE (op) == CONST_DOUBLE)
1801     return (const_double_rtx_ok_for_fpu (op)
1802 || neg_const_double_rtx_ok_for_fpu (op));
1807 /* Return nonzero if OP is a constant power of two. */
/* Predicate: OP is a CONST_INT that is a non-zero power of two
   (standard value & (value - 1) trick).  */
1810 power_of_two_operand (op, mode)
1812      enum machine_mode mode;
1814   if (GET_CODE (op) == CONST_INT)
1816       HOST_WIDE_INT value = INTVAL(op);
1817       return value != 0 && (value & (value - 1)) == 0;
1822 /* Return TRUE for a valid operand of a DImode operation.
1823 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1824 Note that this disallows MEM(REG+REG), but allows
1825 MEM(PRE/POST_INC/DEC(REG)). */
/* Predicate for DImode operands: register, CONST_DOUBLE, or a MEM whose
   address is valid for DImode (see the comment block above; this
   excludes MEM (REG+REG)).  NOTE(review): the switch cases other than
   the MEM arm are elided in this excerpt.  */
1828 di_operand (op, mode)
1830      enum machine_mode mode;
1832   if (s_register_operand (op, mode))
1835   switch (GET_CODE (op))
1842       return memory_address_p (DImode, XEXP (op, 0));
1849 /* Return TRUE for a valid operand of a DFmode operation when -msoft-float.
1850 Either: REG, CONST_DOUBLE or MEM(DImode_address).
1851 Note that this disallows MEM(REG+REG), but allows
1852 MEM(PRE/POST_INC/DEC(REG)). */
/* Predicate for DFmode operands under -msoft-float: same shape as
   di_operand but validates the address against DFmode.  NOTE(review):
   switch cases other than the MEM arm are elided in this excerpt.  */
1855 soft_df_operand (op, mode)
1857      enum machine_mode mode;
1859   if (s_register_operand (op, mode))
1862   switch (GET_CODE (op))
1868       return memory_address_p (DFmode, XEXP (op, 0));
1875 /* Return TRUE for valid index operands. */
/* Predicate: OP is a valid index -- a register, or an immediate in the
   open interval (-4096, 4096) (the ARM load/store offset range).  */
1878 index_operand (op, mode)
1880      enum machine_mode mode;
1882   return (s_register_operand(op, mode)
1883 || (immediate_operand (op, mode)
1884 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
1887 /* Return TRUE for valid shifts by a constant. This also accepts any
1888 power of two on the (somewhat overly relaxed) assumption that the
1889 shift operator in this case was a mult. */
/* Predicate: OP is a constant shift amount in 1..31, or any power of
   two (accepted on the assumption the shift was written as a MULT --
   see the comment block above).  */
1892 const_shift_operand (op, mode)
1894      enum machine_mode mode;
1896   return (power_of_two_operand (op, mode)
1897 || (immediate_operand (op, mode)
1898 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
1901 /* Return TRUE for arithmetic operators which can be combined with a multiply
/* match_operator predicate: X is PLUS, MINUS, IOR, XOR or AND of mode
   MODE -- the operators that can absorb a shifted operand.  */
1905 shiftable_operator (x, mode)
1907      enum machine_mode mode;
1909   if (GET_MODE (x) != mode)
1913       enum rtx_code code = GET_CODE (x);
1915       return (code == PLUS || code == MINUS
1916 || code == IOR || code == XOR || code == AND);
1920 /* Return TRUE for shift operators. */
/* match_operator predicate: X is a shift/rotate operator of mode MODE.
   A MULT by a power of two also counts as a shift (the canonical form
   combine produces) -- the elided case before line 1934 presumably
   tests for MULT; confirm against the full source.  */
1923 shift_operator (x, mode)
1925      enum machine_mode mode;
1927   if (GET_MODE (x) != mode)
1931       enum rtx_code code = GET_CODE (x);
1934 return power_of_two_operand (XEXP (x, 1));
1936       return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT
1937 || code == ROTATERT);
/* match_operator predicate: X is an EQ or NE comparison.  (MODE is
   unused here.)  */
1941 int equality_operator (x, mode)
1943      enum machine_mode mode;
1945   return GET_CODE (x) == EQ || GET_CODE (x) == NE;
1948 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
/* match_operator predicate: X is SMIN, SMAX, UMIN or UMAX of mode
   MODE.  */
1951 minmax_operator (x, mode)
1953      enum machine_mode mode;
1955   enum rtx_code code = GET_CODE (x);
1957   if (GET_MODE (x) != mode)
1960   return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
1963 /* return TRUE if x is EQ or NE */
1965 /* Return TRUE if this is the condition code register, if we aren't given
1966 a mode, accept any class CCmode register */
/* Predicate: X is the condition-code register (hard register 24).
   With MODE == VOIDmode, any MODE_CC mode on X is accepted.  */
1969 cc_register (x, mode)
1971      enum machine_mode mode;
1973   if (mode == VOIDmode)
1975       mode = GET_MODE (x);
1976       if (GET_MODE_CLASS (mode) != MODE_CC)
/* Register 24 is the ARM CC register in this port.  */
1980   if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
1986 /* Return TRUE if this is the condition code register, if we aren't given
1987 a mode, accept any class CCmode register which indicates a dominance
/* Predicate: like cc_register, but only accepts the CC_D* "dominance"
   modes produced by select_dominance_cc_mode for combined
   comparisons.  */
1991 dominant_cc_register (x, mode)
1993      enum machine_mode mode;
1995   if (mode == VOIDmode)
1997       mode = GET_MODE (x);
1998       if (GET_MODE_CLASS (mode) != MODE_CC)
/* Reject any CC mode that is not one of the dominance modes.  */
2002   if (mode != CC_DNEmode && mode != CC_DEQmode
2003       && mode != CC_DLEmode && mode != CC_DLTmode
2004       && mode != CC_DGEmode && mode != CC_DGTmode
2005       && mode != CC_DLEUmode && mode != CC_DLTUmode
2006       && mode != CC_DGEUmode && mode != CC_DGTUmode)
2009   if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
2015 /* Return TRUE if X references a SYMBOL_REF. */
/* Return TRUE if X contains a SYMBOL_REF anywhere: recursive walk over
   the rtx using its format string ('e' = subexpression, 'E' =
   vector).  */
2017 symbol_mentioned_p (x)
2023   if (GET_CODE (x) == SYMBOL_REF)
2026   fmt = GET_RTX_FORMAT (GET_CODE (x));
2027   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2033 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2034 if (symbol_mentioned_p (XVECEXP (x, i, j)))
2037       else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
2044 /* Return TRUE if X references a LABEL_REF. */
/* Return TRUE if X contains a LABEL_REF anywhere; same recursive walk
   as symbol_mentioned_p.  */
2046 label_mentioned_p (x)
2052   if (GET_CODE (x) == LABEL_REF)
2055   fmt = GET_RTX_FORMAT (GET_CODE (x));
2056   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2062 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2063 if (label_mentioned_p (XVECEXP (x, i, j)))
2066       else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
/* NOTE(review): interior of a function whose header is elided in this
   excerpt -- it maps a min/max rtx code to a value (presumably the
   condition code used to implement it, cf. minmax_operator above);
   confirm against the full source.  */
2077   enum rtx_code code = GET_CODE (x);
2081   else if (code == SMIN)
2083   else if (code == UMIN)
2085   else if (code == UMAX)
2091 /* Return 1 if memory locations are adjacent */
/* Return 1 if MEMs A and B address adjacent words: both addresses are
   REG or PLUS (REG, CONST_INT), with the same base register and offsets
   differing by exactly 4 in either direction.  */
2094 adjacent_mem_locations (a, b)
2097   int val0 = 0, val1 = 0;
2100   if ((GET_CODE (XEXP (a, 0)) == REG
2101        || (GET_CODE (XEXP (a, 0)) == PLUS
2102 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
2103       && (GET_CODE (XEXP (b, 0)) == REG
2104 || (GET_CODE (XEXP (b, 0)) == PLUS
2105 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
/* Extract base register and offset for A (offset defaults to 0).  */
2107       if (GET_CODE (XEXP (a, 0)) == PLUS)
2109 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
2110 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
2113 reg0 = REGNO (XEXP (a, 0));
/* Likewise for B.  */
2114       if (GET_CODE (XEXP (b, 0)) == PLUS)
2116 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
2117 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
2120 reg1 = REGNO (XEXP (b, 0));
2121       return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
2126 /* Return 1 if OP is a load multiple operation. It is known to be
2127 parallel and the first section will be tested. */
/* Predicate: OP is a PARALLEL representing a load-multiple (LDM).
   Verifies an optional write-back of the base register (first SET plus
   trailing CLOBBER), then that the remaining SETs load consecutive
   SImode registers from consecutive word offsets off one base address.
   NOTE(review): some interior lines (early returns, base/i updates in
   the write-back arm) are elided in this excerpt.  */
2130 load_multiple_operation (op, mode)
2132      enum machine_mode mode;
2134   HOST_WIDE_INT count = XVECLEN (op, 0);
2137   HOST_WIDE_INT i = 1, base = 0;
2141       || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2144   /* Check to see if this might be a write-back */
2145   if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2150       /* Now check it more carefully */
/* The write-back SET must be base = base + (count-2)*4, matched by a
   CLOBBER of the same register as the last element.  */
2151       if (GET_CODE (SET_DEST (elt)) != REG
2152 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2153 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2154 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2155 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2156 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2157 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2158 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2159 != REGNO (SET_DEST (elt)))
2165   /* Perform a quick check so we don't blow up below. */
2167       || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2168       || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
2169       || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
/* First real element fixes the destination register base and the
   common source address.  */
2172   dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
2173   src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
2175   for (; i < count; i++)
2177       rtx elt = XVECEXP (op, 0, i);
/* Each element must be reg[dest_regno + n] = mem[src_addr + n*4].  */
2179       if (GET_CODE (elt) != SET
2180 || GET_CODE (SET_DEST (elt)) != REG
2181 || GET_MODE (SET_DEST (elt)) != SImode
2182 || REGNO (SET_DEST (elt)) != dest_regno + i - base
2183 || GET_CODE (SET_SRC (elt)) != MEM
2184 || GET_MODE (SET_SRC (elt)) != SImode
2185 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
2186 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
2187 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
2188 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
2195 /* Return 1 if OP is a store multiple operation. It is known to be
2196 parallel and the first section will be tested. */
/* Predicate: OP is a PARALLEL representing a store-multiple (STM).
   Mirror image of load_multiple_operation: optional base write-back,
   then consecutive SImode registers stored to consecutive word offsets.
   NOTE(review): some interior lines are elided in this excerpt.  */
2199 store_multiple_operation (op, mode)
2201      enum machine_mode mode;
2203   HOST_WIDE_INT count = XVECLEN (op, 0);
2206   HOST_WIDE_INT i = 1, base = 0;
2210       || GET_CODE (XVECEXP (op, 0, 0)) != SET)
2213   /* Check to see if this might be a write-back */
2214   if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
2219       /* Now check it more carefully */
2220       if (GET_CODE (SET_DEST (elt)) != REG
2221 || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
2222 || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
2223 || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
2224 || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
2225 || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
2226 || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
2227 || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
2228 != REGNO (SET_DEST (elt)))
2234   /* Perform a quick check so we don't blow up below. */
2236       || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
2237       || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
2238       || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
/* First real element fixes the source register base and the common
   destination address.  */
2241   src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
2242   dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
2244   for (; i < count; i++)
2246       elt = XVECEXP (op, 0, i);
/* Each element must be mem[dest_addr + n*4] = reg[src_regno + n].  */
2248       if (GET_CODE (elt) != SET
2249 || GET_CODE (SET_SRC (elt)) != REG
2250 || GET_MODE (SET_SRC (elt)) != SImode
2251 || REGNO (SET_SRC (elt)) != src_regno + i - base
2252 || GET_CODE (SET_DEST (elt)) != MEM
2253 || GET_MODE (SET_DEST (elt)) != SImode
2254 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2255 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2256 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2257 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
/* Examine NOPS pairs in OPERANDS (destinations in [0..nops), their MEMs
   in [nops..2*nops)) and decide whether they can be combined into one
   LDM.  On success fills REGS with the sorted register numbers, *BASE
   with the base register, *LOAD_OFFSET with the lowest offset, and
   returns a code: 1 = ldmia, 2 = ldmib, 3 = ldmda, 4 = ldmdb,
   5 = needs a one-insn base adjustment first; 0 = can't combine.
   NOTE(review): interior lines (early returns, order[0] updates) are
   elided in this excerpt.  */
2265 load_multiple_sequence (operands, nops, regs, base, load_offset)
2270      HOST_WIDE_INT *load_offset;
2272   int unsorted_regs[4];
2273   HOST_WIDE_INT unsorted_offsets[4];
2278   /* Can only handle 2, 3, or 4 insns at present, though could be easily
2279      extended if required. */
2280   if (nops < 2 || nops > 4)
2283   /* Loop over the operands and check that the memory references are
2284      suitable (ie immediate offsets from the same base register). At
2285      the same time, extract the target register, and the memory
2287   for (i = 0; i < nops; i++)
2292       if (GET_CODE (operands[nops + i]) != MEM)
2295       /* Don't reorder volatile memory references; it doesn't seem worth
2296 looking for the case where the order is ok anyway. */
2297       if (MEM_VOLATILE_P (operands[nops + i]))
2300       offset = const0_rtx;
/* Accept address of the form reg/subreg(reg) (offset 0) or
   plus (reg/subreg(reg), const_int); embedded assignments capture the
   base REG and the OFFSET.  */
2302       if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2303 || (GET_CODE (reg) == SUBREG
2304 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2305 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2306 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2308 || (GET_CODE (reg) == SUBREG
2309 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2310 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
/* First iteration establishes the base register.  */
2315 base_reg = REGNO(reg);
2316 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2317 ? REGNO (operands[i])
2318 : REGNO (SUBREG_REG (operands[i])));
2323 if (base_reg != REGNO (reg))
2324 /* Not addressed from the same base register. */
2327 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2328 ? REGNO (operands[i])
2329 : REGNO (SUBREG_REG (operands[i])));
2330 if (unsorted_regs[i] < unsorted_regs[order[0]])
2334 /* If it isn't an integer register, or if it overwrites the
2335 base register but isn't the last insn in the list, then
2336 we can't do this. */
2337 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14
2338 || (i != nops - 1 && unsorted_regs[i] == base_reg))
2341 unsorted_offsets[i] = INTVAL (offset);
2344 /* Not a suitable memory address. */
2348   /* All the useful information has now been extracted from the
2349      operands into unsorted_regs and unsorted_offsets; additionally,
2350      order[0] has been set to the lowest numbered register in the
2351      list. Sort the registers into order, and check that the memory
2352      offsets are ascending and adjacent. */
/* Selection-sort the register indices into ORDER.  */
2354   for (i = 1; i < nops; i++)
2358       order[i] = order[i - 1];
2359       for (j = 0; j < nops; j++)
2360 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2361 && (order[i] == order[i - 1]
2362 || unsorted_regs[j] < unsorted_regs[order[i]]))
2365       /* Have we found a suitable register? if not, one must be used more
2367       if (order[i] == order[i - 1])
2370       /* Is the memory address adjacent and ascending? */
2371       if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2379       for (i = 0; i < nops; i++)
2380 regs[i] = unsorted_regs[order[i]];
2382       *load_offset = unsorted_offsets[order[0]];
/* Choose the LDM addressing variant from the offset of the first /
   last element.  */
2385   if (unsorted_offsets[order[0]] == 0)
2386     return 1; /* ldmia */
2388   if (unsorted_offsets[order[0]] == 4)
2389     return 2; /* ldmib */
2391   if (unsorted_offsets[order[nops - 1]] == 0)
2392     return 3; /* ldmda */
2394   if (unsorted_offsets[order[nops - 1]] == -4)
2395     return 4; /* ldmdb */
2397   /* Can't do it without setting up the offset, only do this if it takes
2398      no more than one insn. */
2399   return (const_ok_for_arm (unsorted_offsets[order[0]])
2400 || const_ok_for_arm (-unsorted_offsets[order[0]])) ? 5 : 0;
/* Emit the assembler text for a combinable load sequence: dispatch on
   load_multiple_sequence's return code, possibly emit a base-adjusting
   add/sub first (code 5), then build and output the ldm with its
   register list.  NOTE(review): the fallthrough/return lines of the
   switch are elided in this excerpt.  */
2404 emit_ldm_seq (operands, nops)
2410   HOST_WIDE_INT offset;
2414   switch (load_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2417       strcpy (buf, "ldm%?ia\t");
2421       strcpy (buf, "ldm%?ib\t");
2425       strcpy (buf, "ldm%?da\t");
2429       strcpy (buf, "ldm%?db\t");
/* Code 5: materialise the base+offset into the first destination
   register, then ldmia from it.  */
2434       sprintf (buf, "add%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2435 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2438       sprintf (buf, "sub%%?\t%s%s, %s%s, #%ld", REGISTER_PREFIX,
2439 reg_names[regs[0]], REGISTER_PREFIX, reg_names[base_reg],
2441       output_asm_insn (buf, operands);
2443       strcpy (buf, "ldm%?ia\t");
/* Append "base, {r0, r1, ...}" to the mnemonic.  */
2450   sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2451 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2453   for (i = 1; i < nops; i++)
2454     sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2455 reg_names[regs[i]]);
2457   strcat (buf, "}\t%@ phole ldm");
2459   output_asm_insn (buf, operands);
/* Counterpart of load_multiple_sequence for stores (STM).  Returns
   1 = stmia, 2 = stmib, 3 = stmda, 4 = stmdb, 0 = can't combine (no
   base-adjust variant for stores).  Unlike the load case, a register
   equal to the base is not special-cased since nothing is overwritten.
   NOTE(review): interior lines are elided in this excerpt.  */
2464 store_multiple_sequence (operands, nops, regs, base, load_offset)
2469      HOST_WIDE_INT *load_offset;
2471   int unsorted_regs[4];
2472   HOST_WIDE_INT unsorted_offsets[4];
2477   /* Can only handle 2, 3, or 4 insns at present, though could be easily
2478      extended if required. */
2479   if (nops < 2 || nops > 4)
2482   /* Loop over the operands and check that the memory references are
2483      suitable (ie immediate offsets from the same base register). At
2484      the same time, extract the target register, and the memory
2486   for (i = 0; i < nops; i++)
2491       if (GET_CODE (operands[nops + i]) != MEM)
2494       /* Don't reorder volatile memory references; it doesn't seem worth
2495 looking for the case where the order is ok anyway. */
2496       if (MEM_VOLATILE_P (operands[nops + i]))
2499       offset = const0_rtx;
/* Same address-shape test as in load_multiple_sequence.  */
2501       if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
2502 || (GET_CODE (reg) == SUBREG
2503 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2504 || (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
2505 && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
2507 || (GET_CODE (reg) == SUBREG
2508 && GET_CODE (reg = SUBREG_REG (reg)) == REG))
2509 && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
2514 base_reg = REGNO(reg);
2515 unsorted_regs[0] = (GET_CODE (operands[i]) == REG
2516 ? REGNO (operands[i])
2517 : REGNO (SUBREG_REG (operands[i])));
2522 if (base_reg != REGNO (reg))
2523 /* Not addressed from the same base register. */
2526 unsorted_regs[i] = (GET_CODE (operands[i]) == REG
2527 ? REGNO (operands[i])
2528 : REGNO (SUBREG_REG (operands[i])));
2529 if (unsorted_regs[i] < unsorted_regs[order[0]])
2533 /* If it isn't an integer register, then we can't do this. */
2534 if (unsorted_regs[i] < 0 || unsorted_regs[i] > 14)
2537 unsorted_offsets[i] = INTVAL (offset);
2540 /* Not a suitable memory address. */
2544   /* All the useful information has now been extracted from the
2545      operands into unsorted_regs and unsorted_offsets; additionally,
2546      order[0] has been set to the lowest numbered register in the
2547      list. Sort the registers into order, and check that the memory
2548      offsets are ascending and adjacent. */
/* Selection-sort the register indices into ORDER.  */
2550   for (i = 1; i < nops; i++)
2554       order[i] = order[i - 1];
2555       for (j = 0; j < nops; j++)
2556 if (unsorted_regs[j] > unsorted_regs[order[i - 1]]
2557 && (order[i] == order[i - 1]
2558 || unsorted_regs[j] < unsorted_regs[order[i]]))
2561       /* Have we found a suitable register? if not, one must be used more
2563       if (order[i] == order[i - 1])
2566       /* Is the memory address adjacent and ascending? */
2567       if (unsorted_offsets[order[i]] != unsorted_offsets[order[i - 1]] + 4)
2575       for (i = 0; i < nops; i++)
2576 regs[i] = unsorted_regs[order[i]];
2578       *load_offset = unsorted_offsets[order[0]];
2581   if (unsorted_offsets[order[0]] == 0)
2582     return 1; /* stmia */
2584   if (unsorted_offsets[order[0]] == 4)
2585     return 2; /* stmib */
2587   if (unsorted_offsets[order[nops - 1]] == 0)
2588     return 3; /* stmda */
2590   if (unsorted_offsets[order[nops - 1]] == -4)
2591     return 4; /* stmdb */
/* Emit the assembler text for a combinable store sequence: dispatch on
   store_multiple_sequence's return code (no base-adjust case, unlike
   emit_ldm_seq), then build and output the stm with its register list.
   NOTE(review): the switch's break/return lines are elided in this
   excerpt.  */
2597 emit_stm_seq (operands, nops)
2603   HOST_WIDE_INT offset;
2607   switch (store_multiple_sequence (operands, nops, regs, &base_reg, &offset))
2610       strcpy (buf, "stm%?ia\t");
2614       strcpy (buf, "stm%?ib\t");
2618       strcpy (buf, "stm%?da\t");
2622       strcpy (buf, "stm%?db\t");
/* Append "base, {r0, r1, ...}" to the mnemonic.  */
2629   sprintf (buf + strlen (buf), "%s%s, {%s%s", REGISTER_PREFIX,
2630 reg_names[base_reg], REGISTER_PREFIX, reg_names[regs[0]]);
2632   for (i = 1; i < nops; i++)
2633     sprintf (buf + strlen (buf), ", %s%s", REGISTER_PREFIX,
2634 reg_names[regs[i]]);
2636   strcat (buf, "}\t%@ phole stm");
2638   output_asm_insn (buf, operands);
/* Predicate: OP is a PARALLEL whose first element is a SET from an
   UNSPEC with index 2 -- the pattern used for a multi-register push.  */
2643 multi_register_push (op, mode)
2645      enum machine_mode mode;
2647   if (GET_CODE (op) != PARALLEL
2648       || (GET_CODE (XVECEXP (op, 0, 0)) != SET)
2649       || (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC)
2650       || (XINT (SET_SRC (XVECEXP (op, 0, 0)), 1) != 2))
2657 /* Routines for use with attributes */
2659 /* Return nonzero if ATTR is a valid attribute for DECL.
2660 ATTRIBUTES are any existing attributes and ARGS are the arguments
2663 Supported attributes:
2665 naked: don't output any prologue or epilogue code, the user is assumed
2666 to do the right thing. */
/* Validate a machine-specific declaration attribute (see the comment
   block above).  Only "naked", with no arguments, on a FUNCTION_DECL
   is accepted.  */
2669 arm_valid_machine_decl_attribute (decl, attributes, attr, args)
/* "naked" takes no arguments.  */
2675   if (args != NULL_TREE)
2678   if (is_attribute_p ("naked", attr))
2679     return TREE_CODE (decl) == FUNCTION_DECL;
2683 /* Return non-zero if FUNC is a naked function. */
/* Return non-zero if FUNC is a FUNCTION_DECL carrying the "naked"
   attribute (no prologue/epilogue should be emitted for it).  */
2686 arm_naked_function_p (func)
2691   if (TREE_CODE (func) != FUNCTION_DECL)
2694   a = lookup_attribute ("naked", DECL_MACHINE_ATTRIBUTES (func));
2695   return a != NULL_TREE;
2698 /* Routines for use in generating RTL */
/* Build a PARALLEL rtx loading COUNT consecutive registers starting at
   BASE_REGNO from memory at FROM, ascending if UP else descending.
   If WRITE_BACK, the first element updates FROM and a trailing CLOBBER
   of FROM is added (hence count + 2 vector slots).  NOTE(review): the
   initialisation of the element index i and the final return are
   elided in this excerpt.  */
2701 arm_gen_load_multiple (base_regno, count, from, up, write_back)
2710   int sign = up ? 1 : -1;
2712   result = gen_rtx (PARALLEL, VOIDmode,
2713 rtvec_alloc (count + (write_back ? 2 : 0)));
/* Optional write-back: from = from + count*4*sign.  */
2716       XVECEXP (result, 0, 0)
2717 = gen_rtx (SET, GET_MODE (from), from,
2718 plus_constant (from, count * 4 * sign));
/* One SET per register: reg[base_regno+j] = mem[from + j*4*sign].  */
2723   for (j = 0; i < count; i++, j++)
2725       XVECEXP (result, 0, i)
2726 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
2727 gen_rtx (MEM, SImode,
2728 plus_constant (from, j * 4 * sign)));
2732     XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
/* Mirror of arm_gen_load_multiple: build a PARALLEL storing COUNT
   consecutive registers starting at BASE_REGNO to memory at TO,
   ascending if UP else descending, with optional write-back of TO.
   NOTE(review): the initialisation of the element index i and the
   final return are elided in this excerpt.  */
2738 arm_gen_store_multiple (base_regno, count, to, up, write_back)
2747   int sign = up ? 1 : -1;
2749   result = gen_rtx (PARALLEL, VOIDmode,
2750 rtvec_alloc (count + (write_back ? 2 : 0)));
2753       XVECEXP (result, 0, 0)
2754 = gen_rtx (SET, GET_MODE (to), to,
2755 plus_constant (to, count * 4 * sign));
/* One SET per register: mem[to + j*4*sign] = reg[base_regno+j].  */
2760   for (j = 0; i < count; i++, j++)
2762       XVECEXP (result, 0, i)
2763 = gen_rtx (SET, VOIDmode,
2764 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
2765 gen_rtx (REG, SImode, base_regno + j));
2769     XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
/* Expand a block move (movstrqi): operands[0]/[1] are dest/src MEMs,
   operands[2] the byte count, operands[3] the alignment.  Only handles
   counts <= 64 with word alignment; moves words in batches of up to 4
   via load/store-multiple using hard regs 0-3, then handles up to 3
   trailing bytes by shifting them out of part_bytes_reg.  NOTE(review):
   this excerpt elides several interior lines (failure returns, loop
   closings, the little-endian trailing-byte loop header), so control
   flow here is partial.  */
2775 arm_gen_movstrqi (operands)
2778   HOST_WIDE_INT in_words_to_go, out_words_to_go, last_bytes;
2781   rtx st_src, st_dst, end_src, end_dst, fin_src, fin_dst;
2782   rtx part_bytes_reg = NULL;
2783   extern int optimize;
/* Bail out unless count and alignment are literal, small, and
   word-aligned.  */
2785   if (GET_CODE (operands[2]) != CONST_INT
2786       || GET_CODE (operands[3]) != CONST_INT
2787       || INTVAL (operands[2]) > 64
2788       || INTVAL (operands[3]) & 3)
2791   st_dst = XEXP (operands[0], 0);
2792   st_src = XEXP (operands[1], 0);
2793   fin_dst = dst = copy_to_mode_reg (SImode, st_dst);
2794   fin_src = src = copy_to_mode_reg (SImode, st_src);
2796   in_words_to_go = (INTVAL (operands[2]) + 3) / 4;
2797   out_words_to_go = INTVAL (operands[2]) / 4;
2798   last_bytes = INTVAL (operands[2]) & 3;
/* If the last input word is only partially stored, the hard register
   that will hold it becomes part_bytes_reg for the byte tail.  */
2800   if (out_words_to_go != in_words_to_go && ((in_words_to_go - 1) & 3) != 0)
2801     part_bytes_reg = gen_rtx (REG, SImode, (in_words_to_go - 1) & 3);
/* Main loop: move up to 4 words per iteration through regs 0-3.  */
2803   for (i = 0; in_words_to_go >= 2; i+=4)
2805       if (in_words_to_go > 4)
2806 emit_insn (arm_gen_load_multiple (0, 4, src, TRUE, TRUE));
2808 emit_insn (arm_gen_load_multiple (0, in_words_to_go, src, TRUE,
2811       if (out_words_to_go)
2813 if (out_words_to_go > 4)
2814 emit_insn (arm_gen_store_multiple (0, 4, dst, TRUE, TRUE));
2815 else if (out_words_to_go != 1)
2816 emit_insn (arm_gen_store_multiple (0, out_words_to_go,
/* Single remaining word: plain store, then step dst if bytes follow.  */
2822 emit_move_insn (gen_rtx (MEM, SImode, dst),
2823 gen_rtx (REG, SImode, 0));
2824 if (last_bytes != 0)
2825 emit_insn (gen_addsi3 (dst, dst, GEN_INT (4)));
2829       in_words_to_go -= in_words_to_go < 4 ? in_words_to_go : 4;
2830       out_words_to_go -= out_words_to_go < 4 ? out_words_to_go : 4;
2833   /* OUT_WORDS_TO_GO will be zero here if there are byte stores to do. */
2834   if (out_words_to_go)
/* One full word left over: move it via a fresh pseudo and compute the
   final addresses.  */
2838 emit_move_insn (sreg = gen_reg_rtx (SImode), gen_rtx (MEM, SImode, src));
2839 emit_move_insn (fin_src = gen_reg_rtx (SImode), plus_constant (src, 4));
2840 emit_move_insn (gen_rtx (MEM, SImode, dst), sreg);
2841 emit_move_insn (fin_dst = gen_reg_rtx (SImode), plus_constant (dst, 4));
2844   if (in_words_to_go) /* Sanity check */
2850       if (in_words_to_go < 0)
2853       part_bytes_reg = copy_to_mode_reg (SImode, gen_rtx (MEM, SImode, src));
/* Big-endian tail: the wanted bytes sit at the top of the word, so
   shift them down first, then store highest-address byte first.  */
2856   if (BYTES_BIG_ENDIAN && last_bytes)
2858       rtx tmp = gen_reg_rtx (SImode);
2860       if (part_bytes_reg == NULL)
2863       /* The bytes we want are in the top end of the word */
2864       emit_insn (gen_lshrsi3 (tmp, part_bytes_reg,
2865 GEN_INT (8 * (4 - last_bytes))));
2866       part_bytes_reg = tmp;
2870 emit_move_insn (gen_rtx (MEM, QImode,
2871 plus_constant (dst, last_bytes - 1)),
2872 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2875 tmp = gen_reg_rtx (SImode);
2876 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2877 part_bytes_reg = tmp;
/* Little-endian tail: store lowest byte, shift right 8, repeat.  */
2886       if (part_bytes_reg == NULL)
2889       emit_move_insn (gen_rtx (MEM, QImode, dst),
2890 gen_rtx (SUBREG, QImode, part_bytes_reg, 0));
2893 rtx tmp = gen_reg_rtx (SImode);
2895 emit_insn (gen_addsi3 (dst, dst, const1_rtx));
2896 emit_insn (gen_lshrsi3 (tmp, part_bytes_reg, GEN_INT (8)));
2897 part_bytes_reg = tmp;
2905 /* Generate a memory reference for a half word, such that it will be loaded
2906 into the top 16 bits of the word. We can assume that the address is
2907 known to be alignable and of the form reg, or plus (reg, const). */
/* See the comment block above: build a MEM (or ROTATE of one) that
   loads a half word into the top 16 bits of a word.  MEMREF's address
   is reg or reg+const; the word-aligned base is loaded and rotated by
   16 when the half word sits in the other half for the current
   endianness.  */
2909 gen_rotated_half_load (memref)
2912   HOST_WIDE_INT offset = 0;
2913   rtx base = XEXP (memref, 0);
2915   if (GET_CODE (base) == PLUS)
2917       offset = INTVAL (XEXP (base, 1));
2918       base = XEXP (base, 0);
2921   /* If we aren't allowed to generate unalligned addresses, then fail. */
2922   if (TARGET_SHORT_BY_BYTES
2923       && ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 0)))
/* Load the containing aligned word; offset & ~2 masks off the
   half-word selector bit.  */
2926   base = gen_rtx (MEM, SImode, plus_constant (base, offset & ~2));
2928   if ((BYTES_BIG_ENDIAN ? 1 : 0) ^ ((offset & 2) == 2))
2931   return gen_rtx (ROTATE, SImode, base, GEN_INT (16));
/* Choose a CC_D* mode for a combined conditional compare: X and Y are
   two simple comparisons joined by AND (COND_OR == 0) or IOR
   (COND_OR != 0), OP applies to the combined result.  Requires one
   condition to dominate the other (comparison_dominates_p), possibly
   after swapping; returns the dominance CC mode encoding the dominant
   condition.  NOTE(review): several interior lines (failure returns,
   the swap of cond2, the CC_DNE/CC_DEQ and remaining dominance cases)
   are elided in this excerpt.  */
2934 static enum machine_mode
2935 select_dominance_cc_mode (op, x, y, cond_or)
2939      HOST_WIDE_INT cond_or;
2941   enum rtx_code cond1, cond2;
2944   /* Currently we will probably get the wrong result if the individual
2945      comparisons are not simple. This also ensures that it is safe to
2946      reverse a comparions if necessary. */
2947   if ((arm_select_cc_mode (cond1 = GET_CODE (x), XEXP (x, 0), XEXP (x, 1))
2949       || (arm_select_cc_mode (cond2 = GET_CODE (y), XEXP (y, 0), XEXP (y, 1))
/* For the AND case the conditions are reversed (De Morgan) so the
   dominance machinery below can treat both cases alike.  */
2954     cond1 = reverse_condition (cond1);
2956   /* If the comparisons are not equal, and one doesn't dominate the other,
2957      then we can't do this. */
2959       && ! comparison_dominates_p (cond1, cond2)
2960       && (swapped = 1, ! comparison_dominates_p (cond2, cond1)))
2965       enum rtx_code temp = cond1;
/* Dispatch on the dominant condition; each arm returns the matching
   CC_D* mode.  */
2973       if (cond2 == EQ || ! cond_or)
2978 case LE: return CC_DLEmode;
2979 case LEU: return CC_DLEUmode;
2980 case GE: return CC_DGEmode;
2981 case GEU: return CC_DGEUmode;
2987       if (cond2 == LT || ! cond_or)
2996       if (cond2 == GT || ! cond_or)
3005       if (cond2 == LTU || ! cond_or)
3014       if (cond2 == GTU || ! cond_or)
3022   /* The remaining cases only occur when both comparisons are the
/* Return the CC mode to use for comparing X against Y under operator
   OP.  Special CC modes encode what the comparison instruction can and
   cannot set correctly (FP compares, swapped shift compares, byte-load
   splits, arithmetic side-effect compares, dominance compares, carry
   compares).  NOTE(review): the specific mode returned by each arm is
   on lines elided from this excerpt; only the classifying conditions
   are visible.  */
3044 arm_select_cc_mode (op, x, y)
3049   /* All floating point compares return CCFP if it is an equality
3050      comparison, and CCFPE otherwise. */
3051   if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3052     return (op == EQ || op == NE) ? CCFPmode : CCFPEmode;
3054   /* A compare with a shifted operand. Because of canonicalization, the
3055      comparison will have to be swapped when we emit the assembler. */
3056   if (GET_MODE (y) == SImode && GET_CODE (y) == REG
3057       && (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3058 || GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
3059 || GET_CODE (x) == ROTATERT))
3062   /* This is a special case, that is used by combine to alow a
3063      comarison of a shifted byte load to be split into a zero-extend
3064      followed by a comparison of the shifted integer (only valid for
3065      equalities and unsigned inequalites. */
3066   if (GET_MODE (x) == SImode
3067       && GET_CODE (x) == ASHIFT
3068       && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
3069       && GET_CODE (XEXP (x, 0)) == SUBREG
3070       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
3071       && GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
3072       && (op == EQ || op == NE
3073 || op == GEU || op == GTU || op == LTU || op == LEU)
3074       && GET_CODE (y) == CONST_INT)
3077   /* An operation that sets the condition codes as a side-effect, the
3078      V flag is not set correctly, so we can only use comparisons where
3079      this doesn't matter. (For LT and GE we can use "mi" and "pl"
3081   if (GET_MODE (x) == SImode
3083       && (op == EQ || op == NE || op == LT || op == GE)
3084       && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
3085 || GET_CODE (x) == AND || GET_CODE (x) == IOR
3086 || GET_CODE (x) == XOR || GET_CODE (x) == MULT
3087 || GET_CODE (x) == NOT || GET_CODE (x) == NEG
3088 || GET_CODE (x) == LSHIFTRT
3089 || GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
3090 || GET_CODE (x) == ROTATERT || GET_CODE (x) == ZERO_EXTRACT))
3093   /* A construct for a conditional compare, if the false arm contains
3094      0, then both conditions must be true, otherwise either condition
3095      must be true. Not all conditions are possible, so CCmode is
3096      returned if it can't be done. */
3097   if (GET_CODE (x) == IF_THEN_ELSE
3098       && (XEXP (x, 2) == const0_rtx
3099 || XEXP (x, 2) == const1_rtx)
3100       && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3101       && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
/* Delegate the combined-compare case to select_dominance_cc_mode;
   the third operand (0 or 1) selects AND vs IOR semantics.  */
3102     return select_dominance_cc_mode (op, XEXP (x, 0), XEXP (x, 1),
3103 INTVAL (XEXP (x, 2)));
3105   if (GET_MODE (x) == QImode && (op == EQ || op == NE))
/* Addition compared against one of its own operands: the result is in
   the carry flag.  */
3108   if (GET_MODE (x) == SImode && (op == LTU || op == GEU)
3109       && GET_CODE (x) == PLUS
3110       && (rtx_equal_p (XEXP (x, 0), y) || rtx_equal_p (XEXP (x, 1), y)))
3116 /* X and Y are two things to compare using CODE. Emit the compare insn and
3117 return the rtx for register 0 in the proper mode. FP means this is a
3118 floating point compare: I don't think that it is needed on the arm. */
/* Emits (set cc_reg (compare x y)) in the CC mode chosen by
   SELECT_CC_MODE.  Hard register 24 is used for the result —
   presumably the ARM condition-code register; confirm against
   REGISTER_NAMES in the backend headers.  */
3121 gen_compare_reg (code, x, y, fp)
3125 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
3126 rtx cc_reg = gen_rtx (REG, mode, 24);
3128 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
3129 gen_rtx (COMPARE, mode, x, y)));
/* Reload helper: synthesize an HImode load from OPERANDS[1] into
   OPERANDS[0] as two QImode zero-extending loads (base and base+1)
   merged with a shift and IOR, using OPERANDS[2] as scratch.  The
   BYTES_BIG_ENDIAN branch chooses which byte lands in the high half.
   NOTE(review): listing elides several argument lines of the gen_rtx
   calls; code preserved verbatim.  */
3135 arm_reload_in_hi (operands)
3138 rtx base = find_replacement (&XEXP (operands[1], 0));
3140 emit_insn (gen_zero_extendqisi2 (operands[2], gen_rtx (MEM, QImode, base)));
3141 emit_insn (gen_zero_extendqisi2 (gen_rtx (SUBREG, SImode, operands[0], 0),
3142 gen_rtx (MEM, QImode,
3143 plus_constant (base, 1))));
3144 if (BYTES_BIG_ENDIAN)
3145 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3147 gen_rtx (IOR, SImode,
3148 gen_rtx (ASHIFT, SImode,
3149 gen_rtx (SUBREG, SImode,
3154 emit_insn (gen_rtx (SET, VOIDmode, gen_rtx (SUBREG, SImode,
3156 gen_rtx (IOR, SImode,
3157 gen_rtx (ASHIFT, SImode,
3160 gen_rtx (SUBREG, SImode, operands[0], 0))));
/* Reload helper: store an HImode value OPERANDS[1] to memory OPERANDS[0]
   as two QImode stores.  The low byte is stored directly; the high byte
   is extracted into scratch OPERANDS[2] with a logical right shift (by 8,
   presumably — the shift-amount line is elided) and stored at base+1.
   Byte order of the two stores depends on BYTES_BIG_ENDIAN.  */
3164 arm_reload_out_hi (operands)
3167 rtx base = find_replacement (&XEXP (operands[0], 0));
3169 if (BYTES_BIG_ENDIAN)
3171 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3172 gen_rtx (SUBREG, QImode, operands[1], 0)));
3173 emit_insn (gen_lshrsi3 (operands[2],
3174 gen_rtx (SUBREG, SImode, operands[1], 0),
3176 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3177 gen_rtx (SUBREG, QImode, operands[2], 0)));
3181 emit_insn (gen_movqi (gen_rtx (MEM, QImode, base),
3182 gen_rtx (SUBREG, QImode, operands[1], 0)));
3183 emit_insn (gen_lshrsi3 (operands[2],
3184 gen_rtx (SUBREG, SImode, operands[1], 0),
3186 emit_insn (gen_movqi (gen_rtx (MEM, QImode, plus_constant (base, 1)),
3187 gen_rtx (SUBREG, QImode, operands[2], 0)));
3191 /* Routines for manipulation of the constant pool. */
3192 /* This is unashamedly hacked from the version in sh.c, since the problem is
3193 extremely similar. */
3195 /* Arm instructions cannot load a large constant into a register,
3196 constants have to come from a pc relative load. The reference of a pc
3197 relative load instruction must be less than 1k in front of the instruction.
3198 This means that we often have to dump a constant inside a function, and
3199 generate code to branch around it.
3201 It is important to minimize this, since the branches will slow things
3202 down and make things bigger.
3204 Worst case code looks like:
3220 We fix this by performing a scan before scheduling, which notices which
3221 instructions need to have their operands fetched from the constant table
3222 and builds the table.
3227 scan, find an instruction which needs a pcrel move. Look forward, find the
3228 last barrier which is within MAX_COUNT bytes of the requirement.
3229 If there isn't one, make one. Process all the instructions between
3230 the find and the barrier.
3232 In the above example, we can tell that L3 is within 1k of L1, so
3233 the first move can be shrunk from the 2 insn+constant sequence into
3234 just 1 insn, and the constant moved to L3 to make:
3245 Then the second move becomes the target for the shortening process.
/* One entry of the in-function constant pool (fields of the pool_node
   struct; the typedef's opening/closing lines are elided in this listing). */
3251 rtx value; /* Value in table */
3252 HOST_WIDE_INT next_offset; /* Offset just past this entry */
3253 enum machine_mode mode; /* Mode of value */
3256 /* The maximum number of constants that can fit into one pool, since
3257 the pc relative range is 0...1020 bytes and constants are at least 4
3260 #define MAX_POOL_SIZE (1020/4)
3261 static pool_node pool_vector[MAX_POOL_SIZE]; /* The pending pool entries */
3262 static int pool_size; /* Number of entries currently in pool_vector */
3263 static rtx pool_vector_label; /* Label placed at the head of the pool */
3265 /* Add a constant to the pool and return its label. */
/* Actually returns the byte offset of X within the pool (per next_offset
   bookkeeping below), not a label rtx; the pool's label lives in
   pool_vector_label.  Duplicate values in the same mode are shared.
   NOTE(review): listing elides several lines (loop braces, pool_size
   increment, final return); code preserved verbatim.  */
3266 static HOST_WIDE_INT
3267 add_constant (x, mode)
3269 enum machine_mode mode;
3273 HOST_WIDE_INT offset;
/* Strip a constant-pool MEM down to the constant itself, and (non-AOF)
   unwrap the PIC UNSPEC (XINT(x,1) == 3) to its operand.  */
3275 if (mode == SImode && GET_CODE (x) == MEM && CONSTANT_P (XEXP (x, 0))
3276 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3277 x = get_pool_constant (XEXP (x, 0));
3278 #ifndef AOF_ASSEMBLER
3279 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == 3)
3280 x = XVECEXP (x, 0, 0);
3283 #ifdef AOF_ASSEMBLER
3284 /* PIC Symbol references need to be converted into offsets into the
3286 if (flag_pic && GET_CODE (x) == SYMBOL_REF)
3287 x = aof_pic_entry (x);
3288 #endif /* AOF_ASSEMBLER */
3290 /* First see if we've already got it */
3291 for (i = 0; i < pool_size; i++)
3293 if (GET_CODE (x) == pool_vector[i].value->code
3294 && mode == pool_vector[i].mode)
/* CODE_LABELs are compared by their label number (XINT field 3)
   rather than by rtx_equal_p.  */
3296 if (GET_CODE (x) == CODE_LABEL)
3298 if (XINT (x, 3) != XINT (pool_vector[i].value, 3))
3301 if (rtx_equal_p (x, pool_vector[i].value))
3302 return pool_vector[i].next_offset - GET_MODE_SIZE (mode);
3306 /* Need a new one */
3307 pool_vector[pool_size].next_offset = GET_MODE_SIZE (mode);
/* First entry also creates the pool's label.  */
3310 pool_vector_label = gen_label_rtx ();
3312 pool_vector[pool_size].next_offset
3313 += (offset = pool_vector[pool_size - 1].next_offset);
3315 pool_vector[pool_size].value = x;
3316 pool_vector[pool_size].mode = mode;
3321 /* Output the literal table */
/* Emits the accumulated constant pool after SCAN (the function header
   "static void dump_table (scan)" is elided in this listing): an
   alignment directive, the pool label, one consttable_4/8 insn per
   entry, a terminator, and a barrier.  Presumably also resets
   pool_size in an elided line — confirm against full source.  */
3328 scan = emit_label_after (gen_label_rtx (), scan);
3329 scan = emit_insn_after (gen_align_4 (), scan);
3330 scan = emit_label_after (pool_vector_label, scan);
3332 for (i = 0; i < pool_size; i++)
3334 pool_node *p = pool_vector + i;
3336 switch (GET_MODE_SIZE (p->mode))
3339 scan = emit_insn_after (gen_consttable_4 (p->value), scan);
3343 scan = emit_insn_after (gen_consttable_8 (p->value), scan);
3352 scan = emit_insn_after (gen_consttable_end (), scan);
3353 scan = emit_barrier_after (scan);
3357 /* Non zero if the src operand needs to be fixed up */
/* True when SRC cannot be loaded directly and must be rewritten as a
   pc-relative constant-pool load: immediates not encodable (directly or
   complemented) as ARM immediates, FP constants not valid for the FPU,
   anything mentioning a symbol, PIC unspecs, and SImode constant-pool
   MEM references.  DESTREG is unused in the visible lines.  */
3359 fixit (src, mode, destreg)
3361 enum machine_mode mode;
3364 if (CONSTANT_P (src))
3366 if (GET_CODE (src) == CONST_INT)
3367 return (! const_ok_for_arm (INTVAL (src))
3368 && ! const_ok_for_arm (~INTVAL (src)));
3369 if (GET_CODE (src) == CONST_DOUBLE)
3370 return (GET_MODE (src) == VOIDmode
3372 || (! const_double_rtx_ok_for_fpu (src)
3373 && ! neg_const_double_rtx_ok_for_fpu (src)));
3374 return symbol_mentioned_p (src);
3376 #ifndef AOF_ASSEMBLER
3377 else if (GET_CODE (src) == UNSPEC && XINT (src, 1) == 3)
3381 return (mode == SImode && GET_CODE (src) == MEM
3382 && GET_CODE (XEXP (src, 0)) == SYMBOL_REF
3383 && CONSTANT_POOL_ADDRESS_P (XEXP (src, 0)));
3386 /* Find the last barrier less than MAX_COUNT bytes from FROM, or create one. */
/* Scans forward from FROM accumulating insn lengths; remembers the last
   BARRIER seen.  If no barrier exists within range, one is manufactured:
   back up past any trailing jump/note/label, emit an unconditional jump
   over a new label, and put a barrier between them so the constant pool
   can be dumped there.
   NOTE(review): listing elides several lines (count init, the
   constant-pool special case body, loop close); code preserved verbatim. */
3388 find_barrier (from, max_count)
3393 rtx found_barrier = 0;
3395 while (from && count < max_count)
3397 if (GET_CODE (from) == BARRIER)
3398 found_barrier = from;
3400 /* Count the length of this insn */
3401 if (GET_CODE (from) == INSN
3402 && GET_CODE (PATTERN (from)) == SET
3403 && CONSTANT_P (SET_SRC (PATTERN (from)))
3404 && CONSTANT_POOL_ADDRESS_P (SET_SRC (PATTERN (from))))
3406 rtx src = SET_SRC (PATTERN (from));
3410 count += get_attr_length (from);
3412 from = NEXT_INSN (from);
3417 /* We didn't find a barrier in time to
3418 dump our stuff, so we'll make one */
3419 rtx label = gen_label_rtx ();
3422 from = PREV_INSN (from);
3424 from = get_last_insn ();
3426 /* Walk back to be just before any jump */
3427 while (GET_CODE (from) == JUMP_INSN
3428 || GET_CODE (from) == NOTE
3429 || GET_CODE (from) == CODE_LABEL)
3430 from = PREV_INSN (from);
3432 from = emit_jump_insn_after (gen_jump (label), from);
3433 JUMP_LABEL (from) = label;
3434 found_barrier = emit_barrier_after (from);
3435 emit_label_after (label, found_barrier);
3436 return found_barrier;
3439 return found_barrier;
3442 /* Non zero if the insn is a move instruction which needs to be fixed. */
/* A "broken move" is a live single-SET insn whose source fixit() says
   must come from the constant pool.  The destination register number
   (or that of the SUBREG'd register) is passed through as DESTREG.
   NOTE(review): the function header line ("static int broken_move
   (insn)") is elided in this listing.  */
3447 if (!INSN_DELETED_P (insn)
3448 && GET_CODE (insn) == INSN
3449 && GET_CODE (PATTERN (insn)) == SET)
3451 rtx pat = PATTERN (insn);
3452 rtx src = SET_SRC (pat);
3453 rtx dst = SET_DEST (pat);
3455 enum machine_mode mode = GET_MODE (dst);
3459 if (GET_CODE (dst) == REG)
3460 destreg = REGNO (dst);
3461 else if (GET_CODE (dst) == SUBREG && GET_CODE (SUBREG_REG (dst)) == REG)
3462 destreg = REGNO (SUBREG_REG (dst));
3464 return fixit (src, mode, destreg);
/* Machine-dependent reorg pass body (the function header is elided in
   this listing — presumably arm_reorg/machine_dependent_reorg; confirm
   against full source).  Walks all insns; for each broken move it finds
   or creates a barrier within range, converts every broken move up to
   that barrier into a pc-relative load from the constant pool, then
   dumps the pool at the barrier.  */
3478 /* The ldr instruction can work with up to a 4k offset, and most constants
3479 will be loaded with one of these instructions; however, the adr
3480 instruction and the ldf instructions only work with a 1k offset. This
3481 code needs to be rewritten to use the 4k offset when possible, and to
3482 adjust when a 1k offset is needed. For now we just use a 1k offset
3486 /* Floating point operands can't work further than 1024 bytes from the
3487 PC, so to make things simple we restrict all loads for such functions.
3489 if (TARGET_HARD_FLOAT)
3490 for (regno = 16; regno < 24; regno++)
3491 if (regs_ever_live[regno])
3500 for (insn = first; insn; insn = NEXT_INSN (insn))
3502 if (broken_move (insn))
3504 /* This is a broken move instruction, scan ahead looking for
3505 a barrier to stick the constant table behind */
3507 rtx barrier = find_barrier (insn, count_size);
3509 /* Now find all the moves between the points and modify them */
3510 for (scan = insn; scan != barrier; scan = NEXT_INSN (scan))
3512 if (broken_move (scan))
3514 /* This is a broken move instruction, add it to the pool */
3515 rtx pat = PATTERN (scan);
3516 rtx src = SET_SRC (pat);
3517 rtx dst = SET_DEST (pat);
3518 enum machine_mode mode = GET_MODE (dst);
3519 HOST_WIDE_INT offset;
3525 /* If this is an HImode constant load, convert it into
3526 an SImode constant load. Since the register is always
3527 32 bits this is safe. We have to do this, since the
3528 load pc-relative instruction only does a 32-bit load. */
3532 if (GET_CODE (dst) != REG)
3534 PUT_MODE (dst, SImode);
3537 offset = add_constant (src, mode);
3538 addr = plus_constant (gen_rtx (LABEL_REF, VOIDmode,
3542 /* For wide moves to integer regs we need to split the
3543 address calculation off into a separate insn, so that
3544 the load can then be done with a load-multiple. This is
3545 safe, since we have already noted the length of such
3546 insns to be 8, and we are immediately over-writing the
3547 scratch we have grabbed with the final result. */
3548 if (GET_MODE_SIZE (mode) > 4
3549 && (scratch = REGNO (dst)) < 16)
3551 rtx reg = gen_rtx (REG, SImode, scratch);
3552 newinsn = emit_insn_after (gen_movaddr (reg, addr),
3557 newsrc = gen_rtx (MEM, mode, addr);
3559 /* Build a jump insn wrapper around the move instead
3560 of an ordinary insn, because we want to have room for
3561 the target label rtx in fld[7], which an ordinary
3562 insn doesn't have. */
3563 newinsn = emit_jump_insn_after (gen_rtx (SET, VOIDmode,
3566 JUMP_LABEL (newinsn) = pool_vector_label;
3568 /* But it's still an ordinary insn */
3569 PUT_CODE (newinsn, INSN);
3576 dump_table (barrier);
3583 /* Routines to output assembly language. */
3585 /* If the rtx is the correct value then return the string of the number.
3586 In this way we can ensure that valid double constants are generated even
3587 when cross compiling. */
/* Looks X up among the eight FPA immediate constants and returns the
   matching spelling from strings_fpa; the table is lazily built by
   init_fpa_table (call is in an elided line after the !fpa_consts_inited
   test).  */
3589 fp_immediate_constant (x)
3595 if (!fpa_consts_inited)
3598 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
3599 for (i = 0; i < 8; i++)
3600 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
3601 return strings_fpa[i];
3606 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
/* R points at a REAL_VALUE_TYPE; returns the FPA spelling of the
   matching immediate constant from strings_fpa.  */
3608 fp_const_from_val (r)
3613 if (! fpa_consts_inited)
3616 for (i = 0; i < 8; i++)
3617 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
3618 return strings_fpa[i];
3623 /* Output the operands of a LDM/STM instruction to STREAM.
3624 MASK is the ARM register set mask of which only bits 0-15 are important.
3625 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
3626 must follow the register list. */
/* Writes e.g. "\tldmfd sp!, {r4, r5, lr}^" — INSTR is a printf format
   taking REGISTER_PREFIX; registers are listed in ascending bit order.  */
3629 print_multi_reg (stream, instr, mask, hat)
3635 int not_first = FALSE;
3637 fputc ('\t', stream);
3638 fprintf (stream, instr, REGISTER_PREFIX);
3639 fputs (", {", stream);
3640 for (i = 0; i < 16; i++)
3641 if (mask & (1 << i))
3644 fprintf (stream, ", "); /* separator before every reg after the first */
3645 fprintf (stream, "%s%s", REGISTER_PREFIX, reg_names[i]);
3649 fprintf (stream, "}%s\n", hat ? "^" : "");
3652 /* Output a 'call' insn. */
/* Emits the ARM call sequence for a call through a register: save the
   return address with "mov lr, pc" then branch via "mov pc, <reg>".
   Calls through lr itself are first moved into ip (r12), which the
   callee may clobber anyway.  */
3655 output_call (operands)
3658 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
3660 if (REGNO (operands[0]) == 14)
3662 operands[0] = gen_rtx (REG, SImode, 12);
3663 output_asm_insn ("mov%?\t%0, %|lr", operands);
3665 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3666 output_asm_insn ("mov%?\t%|pc, %0", operands);
/* Recursively rewrite references to lr (r14) inside *X into ip (r12);
   returns nonzero if anything was replaced.  Used so a memory call
   operand that mentions lr can still be used after lr is overwritten.
   NOTE(review): the function header line ("static int eliminate_lr2ip
   (x)") is elided in this listing.  */
3674 int something_changed = 0;
3676 int code = GET_CODE (x0);
3683 if (REGNO (x0) == 14)
3685 *x = gen_rtx (REG, SImode, 12);
3690 /* Scan through the sub-elements and change any references there */
3691 fmt = GET_RTX_FORMAT (code);
3692 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3694 something_changed |= eliminate_lr2ip (&XEXP (x0, i));
3695 else if (fmt[i] == 'E')
3696 for (j = 0; j < XVECLEN (x0, i); j++)
3697 something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
3698 return something_changed;
3702 /* Output a 'call' insn that is a reference in memory. */
/* Like output_call, but the target address is loaded from memory:
   if the operand mentions lr, rewrite it to use ip first, then
   "mov lr, pc" / "ldr pc, <mem>".  Operand is copied so the rewrite
   doesn't corrupt shared rtl.  */
3705 output_call_mem (operands)
3708 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
3709 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
3711 if (eliminate_lr2ip (&operands[0]))
3712 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
3714 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
3715 output_asm_insn ("ldr%?\t%|pc, %0", operands);
3720 /* Output a move from arm registers to an fpu registers.
3721 OPERANDS[0] is an fpu register.
3722 OPERANDS[1] is the first registers of an arm register pair. */
/* XFmode (long double) transfer: push the three ARM registers holding
   the value, then pop them into the FPA register with ldfe.  */
3725 output_mov_long_double_fpu_from_arm (operands)
3728 int arm_reg0 = REGNO (operands[1]);
3734 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3735 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3736 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3738 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
3739 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
3743 /* Output a move from an fpu register to arm registers.
3744 OPERANDS[0] is the first registers of an arm register pair.
3745 OPERANDS[1] is an fpu register. */
/* Inverse of the routine above: spill the FPA register to the stack
   with stfe, then pop the 12 bytes into three consecutive ARM regs.  */
3748 output_mov_long_double_arm_from_fpu (operands)
3751 int arm_reg0 = REGNO (operands[0]);
3757 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3758 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3759 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
3761 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
3762 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
3766 /* Output a move from arm registers to arm registers of a long double
3767 OPERANDS[0] is the destination.
3768 OPERANDS[1] is the source. */
/* Copies a 3-register (XFmode) value between ARM register triples,
   choosing copy direction so an overlapping source is never clobbered
   before it is read.  */
3770 output_mov_long_double_arm_from_arm (operands)
3773 /* We have to be careful here because the two might overlap */
3774 int dest_start = REGNO (operands[0]);
3775 int src_start = REGNO (operands[1]);
3779 if (dest_start < src_start)
3781 for (i = 0; i < 3; i++) /* copy low-to-high: dest below src is safe */
3783 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3784 ops[1] = gen_rtx (REG, SImode, src_start + i);
3785 output_asm_insn ("mov%?\t%0, %1", ops);
3790 for (i = 2; i >= 0; i--) /* copy high-to-low when dest overlaps above */
3792 ops[0] = gen_rtx (REG, SImode, dest_start + i);
3793 ops[1] = gen_rtx (REG, SImode, src_start + i);
3794 output_asm_insn ("mov%?\t%0, %1", ops);
3802 /* Output a move from arm registers to an fpu registers.
3803 OPERANDS[0] is an fpu register.
3804 OPERANDS[1] is the first registers of an arm register pair. */
/* DFmode transfer: push the ARM register pair, pop into the FPA
   register with ldfd.  */
3807 output_mov_double_fpu_from_arm (operands)
3810 int arm_reg0 = REGNO (operands[1]);
3815 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3816 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3817 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
3818 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
3822 /* Output a move from an fpu register to arm registers.
3823 OPERANDS[0] is the first registers of an arm register pair.
3824 OPERANDS[1] is an fpu register. */
/* DFmode transfer the other way: spill the FPA register with stfd,
   then pop the 8 bytes into the ARM register pair.  */
3827 output_mov_double_arm_from_fpu (operands)
3830 int arm_reg0 = REGNO (operands[0]);
3836 ops[0] = gen_rtx (REG, SImode, arm_reg0);
3837 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
3838 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
3839 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
3843 /* Output a move between double words.
3844 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
3845 or MEM<-REG and all MEMs must be offsettable addresses. */
/* Dispatches on the codes of the two operands and emits the matching
   two-word sequence: mov pairs, immediate synthesis, ldm/stm with the
   appropriate addressing suffix, or a pair of ldr/str.
   NOTE(review): this listing elides many lines (case labels, braces,
   returns); code preserved verbatim — too fragmentary to restyle.  */
3848 output_move_double (operands)
3851 enum rtx_code code0 = GET_CODE (operands[0]);
3852 enum rtx_code code1 = GET_CODE (operands[1]);
3857 int reg0 = REGNO (operands[0]);
3859 otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
3862 int reg1 = REGNO (operands[1]);
3866 /* Ensure the second source is not overwritten */
3867 if (reg1 == reg0 + (WORDS_BIG_ENDIAN ? -1 : 1))
3868 output_asm_insn("mov%?\t%Q0, %Q1\n\tmov%?\t%R0, %R1", operands);
3870 output_asm_insn("mov%?\t%R0, %R1\n\tmov%?\t%Q0, %Q1", operands);
3872 else if (code1 == CONST_DOUBLE)
3874 if (GET_MODE (operands[1]) == DFmode)
3877 union real_extract u;
3879 bcopy ((char *) &CONST_DOUBLE_LOW (operands[1]), (char *) &u,
3881 REAL_VALUE_TO_TARGET_DOUBLE (u.d, l);
3882 otherops[1] = GEN_INT(l[1]);
3883 operands[1] = GEN_INT(l[0]);
3885 else if (GET_MODE (operands[1]) != VOIDmode)
3887 else if (WORDS_BIG_ENDIAN)
3890 otherops[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3891 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3896 otherops[1] = GEN_INT (CONST_DOUBLE_HIGH (operands[1]));
3897 operands[1] = GEN_INT (CONST_DOUBLE_LOW (operands[1]));
3899 output_mov_immediate (operands);
3900 output_mov_immediate (otherops);
3902 else if (code1 == CONST_INT)
3904 /* sign extend the intval into the high-order word */
3905 if (WORDS_BIG_ENDIAN)
3907 otherops[1] = operands[1];
3908 operands[1] = (INTVAL (operands[1]) < 0
3909 ? constm1_rtx : const0_rtx);
3912 otherops[1] = INTVAL (operands[1]) < 0 ? constm1_rtx : const0_rtx;
3913 output_mov_immediate (otherops);
3914 output_mov_immediate (operands);
3916 else if (code1 == MEM)
3918 switch (GET_CODE (XEXP (operands[1], 0)))
3921 output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
3925 abort (); /* Should never happen now */
3929 output_asm_insn ("ldm%?db\t%m1!, %M0", operands);
3933 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
3937 abort (); /* Should never happen now */
3942 output_asm_insn ("adr%?\t%0, %1", operands);
3943 output_asm_insn ("ldm%?ia\t%0, %M0", operands);
3947 if (arm_add_operand (XEXP (XEXP (operands[1], 0), 1)))
3949 otherops[0] = operands[0];
3950 otherops[1] = XEXP (XEXP (operands[1], 0), 0);
3951 otherops[2] = XEXP (XEXP (operands[1], 0), 1);
3952 if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
3954 if (GET_CODE (otherops[2]) == CONST_INT)
3956 switch (INTVAL (otherops[2]))
3959 output_asm_insn ("ldm%?db\t%1, %M0", otherops);
3962 output_asm_insn ("ldm%?da\t%1, %M0", otherops);
3965 output_asm_insn ("ldm%?ib\t%1, %M0", otherops);
3968 if (!(const_ok_for_arm (INTVAL (otherops[2]))))
3969 output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
3971 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3974 output_asm_insn ("add%?\t%0, %1, %2", otherops);
3977 output_asm_insn ("sub%?\t%0, %1, %2", otherops);
3978 return "ldm%?ia\t%0, %M0";
3982 otherops[1] = adj_offsettable_operand (operands[1], 4);
3983 /* Take care of overlapping base/data reg. */
3984 if (reg_mentioned_p (operands[0], operands[1]))
3986 output_asm_insn ("ldr%?\t%0, %1", otherops);
3987 output_asm_insn ("ldr%?\t%0, %1", operands);
3991 output_asm_insn ("ldr%?\t%0, %1", operands);
3992 output_asm_insn ("ldr%?\t%0, %1", otherops);
3998 abort(); /* Constraints should prevent this */
4000 else if (code0 == MEM && code1 == REG)
4002 if (REGNO (operands[1]) == 12)
4005 switch (GET_CODE (XEXP (operands[0], 0)))
4008 output_asm_insn ("stm%?ia\t%m0, %M1", operands);
4012 abort (); /* Should never happen now */
4016 output_asm_insn ("stm%?db\t%m0!, %M1", operands);
4020 output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
4024 abort (); /* Should never happen now */
4028 if (GET_CODE (XEXP (XEXP (operands[0], 0), 1)) == CONST_INT)
4030 switch (INTVAL (XEXP (XEXP (operands[0], 0), 1)))
4033 output_asm_insn ("stm%?db\t%m0, %M1", operands);
4037 output_asm_insn ("stm%?da\t%m0, %M1", operands);
4041 output_asm_insn ("stm%?ib\t%m0, %M1", operands);
4048 otherops[0] = adj_offsettable_operand (operands[0], 4);
4049 otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
4050 output_asm_insn ("str%?\t%1, %0", operands);
4051 output_asm_insn ("str%?\t%1, %0", otherops);
4055 abort(); /* Constraints should prevent this */
4061 /* Output an arbitrary MOV reg, #n.
4062 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
/* Emission strategy: single MOV if N is a valid ARM immediate, single
   MVN if ~N is, otherwise a multi-instruction sequence of MOV+ORRs or
   MVN+BICs — whichever needs fewer ones (popcount > 16 favours MVN/BIC).
   NOTE(review): the bit-counting loop body is elided in this listing.  */
4065 output_mov_immediate (operands)
4068 HOST_WIDE_INT n = INTVAL (operands[1]);
4072 /* Try to use one MOV */
4073 if (const_ok_for_arm (n))
4075 output_asm_insn ("mov%?\t%0, %1", operands);
4079 /* Try to use one MVN */
4080 if (const_ok_for_arm (~n))
4082 operands[1] = GEN_INT (~n);
4083 output_asm_insn ("mvn%?\t%0, %1", operands);
4087 /* If all else fails, make it out of ORRs or BICs as appropriate. */
4089 for (i=0; i < 32; i++)
4093 if (n_ones > 16) /* Shorter to use MVN with BIC in this case. */
4094 output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
4097 output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
4104 /* Output an ADD r, s, #n where n may be too big for one instruction. If
4105 adding zero to one register, output nothing. */
/* Negative N is emitted as a SUB sequence of its absolute value;
   the sign test/negation lines are elided in this listing.  */
4108 output_add_immediate (operands)
4111 HOST_WIDE_INT n = INTVAL (operands[2]);
4113 if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
4116 output_multi_immediate (operands,
4117 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
4120 output_multi_immediate (operands,
4121 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
4128 /* Output a multiple immediate operation.
4129 OPERANDS is the vector of operands referred to in the output patterns.
4130 INSTR1 is the output pattern to use for the first constant.
4131 INSTR2 is the output pattern to use for subsequent constants.
4132 IMMED_OP is the index of the constant slot in OPERANDS.
4133 N is the constant value. */
/* Splits N into ARM-encodable 8-bit chunks at even rotations and emits
   one instruction per nonzero chunk; after the first, INSTR2 accumulates
   onto the destination.  A zero N emits INSTR1 with #0 once.  */
4136 output_multi_immediate (operands, instr1, instr2, immed_op, n)
4138 char *instr1, *instr2;
4142 #if HOST_BITS_PER_WIDE_INT > 32
/* Truncation of N to 32 bits happens in an elided line here.  */
4148 operands[immed_op] = const0_rtx;
4149 output_asm_insn (instr1, operands); /* Quick and easy output */
4154 char *instr = instr1;
4156 /* Note that n is never zero here (which would give no output) */
4157 for (i = 0; i < 32; i += 2)
4161 operands[immed_op] = GEN_INT (n & (255 << i));
4162 output_asm_insn (instr, operands);
4172 /* Return the appropriate ARM instruction for the operation code.
4173 The returned result should not be overwritten. OP is the rtx of the
4174 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
/* ...was a shift (comment continues in an elided line); for MINUS that
   selects "rsb" (reverse subtract) instead of "sub".  Other case arms
   (add/and/orr/eor, presumably) are elided in this listing.  */
4178 arithmetic_instr (op, shift_first_arg)
4180 int shift_first_arg;
4182 switch (GET_CODE (op))
4188 return shift_first_arg ? "rsb" : "sub";
4205 /* Ensure valid constant shifts and return the appropriate shift mnemonic
4206 for the operation code. The returned result should not be overwritten.
4207 OP is the rtx code of the shift.
4208 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
/* ...shift amount otherwise (comment continues in an elided line).
   MULT by a power of two is treated as a left shift; rotates are
   reduced modulo 32; over-wide shift amounts are canonicalized in
   elided lines so the emitted mnemonic is always legal.  */
4212 shift_op (op, amountp)
4214 HOST_WIDE_INT *amountp;
4217 enum rtx_code code = GET_CODE (op);
4219 if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
4221 else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
4222 *amountp = INTVAL (XEXP (op, 1));
4245 /* We never have to worry about the amount being other than a
4246 power of 2, since this case can never be reloaded from a reg. */
4248 *amountp = int_log2 (*amountp);
4259 /* This is not 100% correct, but follows from the desire to merge
4260 multiplication by a power of 2 with the recognizer for a
4261 shift. >=32 is not a valid shift for "asl", so we must try and
4262 output a shift that produces the correct arithmetical result.
4263 Using lsr #32 is identical except for the fact that the carry bit
4264 is not set correctly if we set the flags; but we never use the
4265 carry bit from such an operation, so we can ignore that. */
4266 if (code == ROTATERT)
4267 *amountp &= 31; /* Rotate is just modulo 32 */
4268 else if (*amountp != (*amountp & 31))
4275 /* Shifts of 0 are no-ops. */
4284 /* Obtain the shift from the POWER of two. */
/* Returns log2 of POWER by scanning for the set bit; POWER is assumed
   to be an exact power of two (the loop would not terminate sanely
   otherwise).  Bounds check and return are in elided lines.  */
4286 static HOST_WIDE_INT
4288 HOST_WIDE_INT power;
4290 HOST_WIDE_INT shift = 0;
4292 while (((((HOST_WIDE_INT) 1) << shift) & power) == 0)
4302 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
4303 /bin/as is horribly restrictive. */
/* Emits P[0..LEN) as a series of .ascii directives of at most ~50
   source characters each; quote/backslash are escaped and non-printable
   bytes emitted as octal escapes.  len_so_far starts high on purpose so
   the first iteration opens the first .ascii line.  */
4306 output_ascii_pseudo_op (stream, p, len)
4312 int len_so_far = 1000;
4313 int chars_so_far = 0;
4315 for (i = 0; i < len; i++)
4317 register int c = p[i];
4319 if (len_so_far > 50)
4322 fputs ("\"\n", stream);
4323 fputs ("\t.ascii\t\"", stream);
4328 if (c == '\"' || c == '\\')
4334 if (c >= ' ' && c < 0177)
4341 fprintf (stream, "\\%03o", c);
4348 fputs ("\"\n", stream);
4352 /* Try to determine whether a pattern really clobbers the link register.
4353 This information is useful when peepholing, so that lr need not be pushed
4354 if we combine a call followed by a return.
4355 NOTE: This code does not check for side-effect expressions in a SET_SRC:
4356 such a check should not be needed because these only update an existing
4357 value within a register; the register must still be set elsewhere within
/* Recurses into SET destinations, PARALLELs and CLOBBERs looking for a
   write to hard register 14 (lr) — including pre/post-modify addresses
   that modify lr.  Several case labels and returns are elided in this
   listing.  */
4361 pattern_really_clobbers_lr (x)
4366 switch (GET_CODE (x))
4369 switch (GET_CODE (SET_DEST (x)))
4372 return REGNO (SET_DEST (x)) == 14;
4375 if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
4376 return REGNO (XEXP (SET_DEST (x), 0)) == 14;
4378 if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
4387 for (i = 0; i < XVECLEN (x, 0); i++)
4388 if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
4393 switch (GET_CODE (XEXP (x, 0)))
4396 return REGNO (XEXP (x, 0)) == 14;
4399 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
4400 return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
/* Walk the whole insn chain from FIRST and decide whether lr (r14) is
   ever really clobbered.  Ordinary insns are checked with
   pattern_really_clobbers_lr; a CALL_INSN counts as clobbering lr
   unless it is the tail of the function (call followed, modulo a USE
   of the result, by a return).  Returns in elided lines; falls through
   to "not clobbered" at the end.  */
4416 function_really_clobbers_lr (first)
4421 for (insn = first; insn; insn = next_nonnote_insn (insn))
4423 switch (GET_CODE (insn))
4428 case JUMP_INSN: /* Jump insns only change the PC (and conds) */
4433 if (pattern_really_clobbers_lr (PATTERN (insn)))
4438 /* Don't yet know how to handle those calls that are not to a
4440 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4443 switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
4446 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
4452 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
4458 default: /* Don't recognize it, be safe */
4462 /* A call can be made (by peepholing) not to clobber lr iff it is
4463 followed by a return. There may, however, be a use insn iff
4464 we are returning the result of the call.
4465 If we run off the end of the insn chain, then that means the
4466 call was at the end of the function. Unfortunately we don't
4467 have a return insn for the peephole to recognize, so we
4468 must reject this. (Can this be fixed by adding our own insn?) */
4469 if ((next = next_nonnote_insn (insn)) == NULL)
4472 /* No need to worry about lr if the call never returns */
4473 if (GET_CODE (next) == BARRIER)
4476 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
4477 && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
4478 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
4479 == REGNO (XEXP (PATTERN (next), 0))))
4480 if ((next = next_nonnote_insn (next)) == NULL)
4483 if (GET_CODE (next) == JUMP_INSN
4484 && GET_CODE (PATTERN (next)) == RETURN)
4493 /* We have reached the end of the chain so lr was _not_ clobbered */
/* Emit the assembler for a return: either a conditional branch to
   abort() for volatile (never-returning) functions, an ldm that
   restores saved registers (popping straight into pc when
   REALLY_RETURN), or a bare "mov pc, lr".  REVERSE selects the
   inverted condition code (%D0 vs %d0).  Builds the ldm register list
   textually in `instr`.
   NOTE(review): this listing elides many lines (instr buffer decl,
   sprintf of the ldm opening, returns); code preserved verbatim.  */
4498 output_return_instruction (operand, really_return, reverse)
4504 int reg, live_regs = 0;
4505 int volatile_func = (optimize > 0
4506 && TREE_THIS_VOLATILE (current_function_decl));
4508 return_used_this_function = 1;
4513 /* If this function was declared non-returning, and we have found a tail
4514 call, then we have to trust that the called function won't return. */
4515 if (! really_return)
4518 /* Otherwise, trap an attempted return by aborting. */
4520 ops[1] = gen_rtx (SYMBOL_REF, Pmode, "abort");
4521 assemble_external_libcall (ops[1]);
4522 output_asm_insn (reverse ? "bl%D0\t%a1" : "bl%d0\t%a1", ops);
4526 if (current_function_calls_alloca && ! really_return)
4529 for (reg = 0; reg <= 10; reg++)
4530 if (regs_ever_live[reg] && ! call_used_regs[reg])
4533 if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
4536 if (frame_pointer_needed)
4541 if (lr_save_eliminated || ! regs_ever_live[14])
4544 if (frame_pointer_needed)
4546 reverse ? "ldm%?%D0ea\t%|fp, {" : "ldm%?%d0ea\t%|fp, {");
4549 reverse ? "ldm%?%D0fd\t%|sp!, {" : "ldm%?%d0fd\t%|sp!, {");
4551 for (reg = 0; reg <= 10; reg++)
4552 if (regs_ever_live[reg] && ! call_used_regs[reg])
4554 strcat (instr, "%|");
4555 strcat (instr, reg_names[reg]);
4557 strcat (instr, ", ");
4560 if (frame_pointer_needed)
4562 strcat (instr, "%|");
4563 strcat (instr, reg_names[11]);
4564 strcat (instr, ", ");
4565 strcat (instr, "%|");
4566 strcat (instr, reg_names[13]);
4567 strcat (instr, ", ");
4568 strcat (instr, "%|");
4569 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4573 strcat (instr, "%|");
4574 strcat (instr, really_return ? reg_names[15] : reg_names[14]);
4576 strcat (instr, (TARGET_APCS_32 || !really_return) ? "}" : "}^");
4577 output_asm_insn (instr, &operand);
4579 else if (really_return)
4581 sprintf (instr, "mov%%?%%%s0%s\t%%|pc, %%|lr",
4582 reverse ? "D" : "d", TARGET_APCS_32 ? "" : "s");
4583 output_asm_insn (instr, &operand);
4589 /* Return nonzero if optimizing and the current function is volatile.
4590 Such functions never return, and many memory cycles can be saved
4591 by not storing register values that will never be needed again.
4592 This optimization was added to speed up context switching in a
4593 kernel application. */
4596 arm_volatile_func ()
4598 return (optimize > 0 && TREE_THIS_VOLATILE (current_function_decl));
4601 /* The amount of stack adjustment that happens here, in output_return and in
4602 output_epilogue must be exactly the same as was calculated during reload,
4603 or things will point to the wrong place. The only time we can safely
4604 ignore this constraint is when a function has no arguments on the stack,
4605 no stack frame requirement and no live registers except for `lr'. If we
4606 can guarantee that by making all function calls into tail calls and that
4607 lr is not clobbered in any other way, then there is no need to push lr
4611 output_func_prologue (f, frame_size)
4615 int reg, live_regs_mask = 0;
4617 int volatile_func = (optimize > 0
4618 && TREE_THIS_VOLATILE (current_function_decl));
4620 /* Nonzero if we must stuff some register arguments onto the stack as if
4621 they were passed there. */
4622 int store_arg_regs = 0;
4624 if (arm_ccfsm_state || arm_target_insn)
4625 abort (); /* Sanity check */
4627 if (arm_naked_function_p (current_function_decl))
4630 return_used_this_function = 0;
4631 lr_save_eliminated = 0;
4633 fprintf (f, "\t%s args = %d, pretend = %d, frame = %d\n",
4634 ASM_COMMENT_START, current_function_args_size,
4635 current_function_pretend_args_size, frame_size);
4636 fprintf (f, "\t%s frame_needed = %d, current_function_anonymous_args = %d\n",
4637 ASM_COMMENT_START, frame_pointer_needed,
4638 current_function_anonymous_args);
4641 fprintf (f, "\t%s Volatile function.\n", ASM_COMMENT_START);
4643 if (current_function_anonymous_args && current_function_pretend_args_size)
4646 for (reg = 0; reg <= 10; reg++)
4647 if (regs_ever_live[reg] && ! call_used_regs[reg])
4648 live_regs_mask |= (1 << reg);
4650 if (frame_pointer_needed)
4651 live_regs_mask |= 0xD800;
4652 else if (regs_ever_live[14])
4654 if (! current_function_args_size
4655 && ! function_really_clobbers_lr (get_insns ()))
4656 lr_save_eliminated = 1;
4658 live_regs_mask |= 0x4000;
4663 /* if a di mode load/store multiple is used, and the base register
4664 is r3, then r4 can become an ever live register without lr
4665 doing so, in this case we need to push lr as well, or we
4666 will fail to get a proper return. */
4668 live_regs_mask |= 0x4000;
4669 lr_save_eliminated = 0;
4673 if (lr_save_eliminated)
4674 fprintf (f,"\t%s I don't think this function clobbers lr\n",
4677 #ifdef AOF_ASSEMBLER
4679 fprintf (f, "\tmov\t%sip, %s%s\n", REGISTER_PREFIX, REGISTER_PREFIX,
4680 reg_names[PIC_OFFSET_TABLE_REGNUM]);
/* Emit the function epilogue to stream F.  FRAME_SIZE is the local
   frame size in bytes.  Restores any saved FPA float registers (ldfe)
   and the integer registers, unwinds the pretend-args area, and
   returns via pc.  The stack adjustments must mirror those of the
   prologue exactly (see the comment above output_func_prologue).  */
4686 output_func_epilogue (f, frame_size)
4690 int reg, live_regs_mask = 0, code_size = 0;
4691 /* If we need this then it will always be at least this much */
4692 int floats_offset = 24;
4694 int volatile_func = (optimize > 0
4695 && TREE_THIS_VOLATILE (current_function_decl));
/* If a return insn has already been used, there is nothing to emit.  */
4697 if (use_return_insn() && return_used_this_function)
4699 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
4706 /* Naked functions don't have epilogues. */
4707 if (arm_naked_function_p (current_function_decl))
4710 /* A volatile function should never return. Call abort. */
4713 rtx op = gen_rtx (SYMBOL_REF, Pmode, "abort");
4714 assemble_external_libcall (op);
4715 output_asm_insn ("bl\t%a0", &op);
/* Recompute the live call-saved integer register mask.  */
4720 for (reg = 0; reg <= 10; reg++)
4721 if (regs_ever_live[reg] && ! call_used_regs[reg])
4723 live_regs_mask |= (1 << reg);
4727 if (frame_pointer_needed)
/* Reload FPA registers (hard regs 16-23) from fp-relative slots;
   each extended-precision register occupies 12 bytes.  */
4729 for (reg = 23; reg > 15; reg--)
4730 if (regs_ever_live[reg] && ! call_used_regs[reg])
4732 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", REGISTER_PREFIX,
4733 reg_names[reg], REGISTER_PREFIX, floats_offset);
4734 floats_offset += 12;
/* 0xA800 = bits 11, 13, 15 -- fp, sp and pc: pop frame and return.  */
4738 live_regs_mask |= 0xA800;
4739 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
4740 TARGET_APCS_32 ? FALSE : TRUE);
4745 /* Restore stack pointer if necessary. */
4748 operands[0] = operands[1] = stack_pointer_rtx;
4749 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
4750 output_add_immediate (operands);
/* No frame pointer: pop the FPA registers off the stack instead.  */
4753 for (reg = 16; reg < 24; reg++)
4754 if (regs_ever_live[reg] && ! call_used_regs[reg])
4756 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", REGISTER_PREFIX,
4757 reg_names[reg], REGISTER_PREFIX);
4760 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
4762 if (lr_save_eliminated)
4763 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
4764 : "\tmovs\t%spc, %slr\n"),
/* NOTE(review): the trailing `f' argument is surplus -- the format
   string has only two %s conversions.  Harmless but should go.  */
4765 REGISTER_PREFIX, REGISTER_PREFIX, f);
/* Pop straight into pc (0x8000) to return in one instruction.  */
4767 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
4768 TARGET_APCS_32 ? FALSE : TRUE);
4773 if (live_regs_mask || regs_ever_live[14])
4775 /* Restore the integer regs, and the return address into lr */
4776 if (! lr_save_eliminated)
4777 live_regs_mask |= 0x4000;
4779 if (live_regs_mask != 0)
4781 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
4785 if (current_function_pretend_args_size)
4787 /* Unwind the pre-pushed regs */
4788 operands[0] = operands[1] = stack_pointer_rtx;
4789 operands[2] = gen_rtx (CONST_INT, VOIDmode,
4790 current_function_pretend_args_size);
4791 output_add_immediate (operands);
4793 /* And finally, go home */
4794 fprintf (f, (TARGET_APCS_32 ? "\tmov\t%spc, %slr\n"
4795 : "\tmovs\t%spc, %slr\n"),
/* NOTE(review): surplus `f' argument again, as above.  */
4796 REGISTER_PREFIX, REGISTER_PREFIX, f);
/* Reset per-function state for the next function.  */
4803 current_function_anonymous_args = 0;
/* Emit RTL for a push (stmfd) of every register whose bit is set in
   MASK (bit i == register ri).  Builds a PARALLEL whose first element
   is the SET storing the lowest-numbered register via a pre-decrement
   of sp, and whose remaining elements mark the other registers.  */
4807 emit_multi_reg_push (mask)
/* Count the registers requested.  */
4814 for (i = 0; i < 16; i++)
4815 if (mask & (1 << i))
/* An empty or over-full mask is a caller bug.  */
4818 if (num_regs == 0 || num_regs > 16)
4821 par = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num_regs));
/* First element: store the lowest set register with sp pre-decrement;
   the UNSPEC wrapper keeps the multi-store pattern intact.  */
4823 for (i = 0; i < 16; i++)
4825 if (mask & (1 << i))
4828 = gen_rtx (SET, VOIDmode, gen_rtx (MEM, BLKmode,
4829 gen_rtx (PRE_DEC, BLKmode,
4830 stack_pointer_rtx)),
4831 gen_rtx (UNSPEC, BLKmode,
4832 gen_rtvec (1, gen_rtx (REG, SImode, i)),
/* Remaining elements: a USE for each further register in the mask.  */
4838 for (j = 1, i++; j < num_regs; i++)
4840 if (mask & (1 << i))
4843 = gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, i));
/* Generate RTL for the function prologue: push the pretend-args
   registers, save live call-saved integer registers (plus lr), save
   FPA registers, set up the frame pointer, and allocate the local
   frame.  Must agree exactly with output_func_epilogue's unwinding.  */
4851 arm_expand_prologue ()
/* Negative frame size: the sp adjustment is an addition below.  */
4854 rtx amount = GEN_INT (- get_frame_size ());
4857 int live_regs_mask = 0;
4858 int store_arg_regs = 0;
4859 int volatile_func = (optimize > 0
4860 && TREE_THIS_VOLATILE (current_function_decl));
4862 /* Naked functions don't have prologues. */
4863 if (arm_naked_function_p (current_function_decl))
4866 if (current_function_anonymous_args && current_function_pretend_args_size)
/* A volatile (never-returning) function saves no registers.  */
4869 if (! volatile_func)
4870 for (reg = 0; reg <= 10; reg++)
4871 if (regs_ever_live[reg] && ! call_used_regs[reg])
4872 live_regs_mask |= 1 << reg;
4874 if (! volatile_func && regs_ever_live[14])
4875 live_regs_mask |= 0x4000;
4877 if (frame_pointer_needed)
/* 0xD800 = fp, ip, lr, pc; keep a copy of the incoming sp in ip
   (r12) so fp can be computed after the pushes.  */
4879 live_regs_mask |= 0xD800;
4880 emit_insn (gen_movsi (gen_rtx (REG, SImode, 12),
4881 stack_pointer_rtx));
4884 if (current_function_pretend_args_size)
/* Push the argument registers backing the pretend-args area
   (apparently a suffix of r0-r3 -- the masking continuation line is
   not shown in this extract; confirm against the full source).  */
4887 emit_multi_reg_push ((0xf0 >> (current_function_pretend_args_size / 4))
4890 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
4891 GEN_INT (-current_function_pretend_args_size)));
4896 /* If we have to push any regs, then we must push lr as well, or
4897 we won't get a proper return. */
4898 live_regs_mask |= 0x4000;
4899 emit_multi_reg_push (live_regs_mask);
4902 /* For now the integer regs are still pushed in output_func_epilogue (). */
4904 if (! volatile_func)
/* Save live FPA registers (hard regs 16-23) with pre-decrement
   stores in XFmode.  */
4905 for (reg = 23; reg > 15; reg--)
4906 if (regs_ever_live[reg] && ! call_used_regs[reg])
4907 emit_insn (gen_rtx (SET, VOIDmode,
4908 gen_rtx (MEM, XFmode,
4909 gen_rtx (PRE_DEC, XFmode,
4910 stack_pointer_rtx)),
4911 gen_rtx (REG, XFmode, reg)));
4913 if (frame_pointer_needed)
/* fp = saved sp (in ip) - 4 - pretend-args size.  */
4914 emit_insn (gen_addsi3 (hard_frame_pointer_rtx, gen_rtx (REG, SImode, 12),
4916 (-(4 + current_function_pretend_args_size)))));
4918 if (amount != const0_rtx)
4920 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, amount));
/* Clobber memory at sp so the scheduler cannot move stack accesses
   across the frame allocation.  */
4921 emit_insn (gen_rtx (CLOBBER, VOIDmode,
4922 gen_rtx (MEM, BLKmode, stack_pointer_rtx)));
4925 /* If we are profiling, make sure no instructions are scheduled before
4926 the call to mcount. */
4927 if (profile_flag || profile_block_flag)
4928 emit_insn (gen_blockage ());
4932 /* If CODE is 'd', then the X is a condition operand and the instruction
4933 should only be executed if the condition is true.
4934 if CODE is 'D', then the X is a condition operand and the instruction
4935 should only be executed if the condition is false: however, if the mode
4936 of the comparison is CCFPEmode, then always execute the instruction -- we
4937 do this because in these circumstances !GE does not necessarily imply LT;
4938 in these cases the instruction pattern will take care to make sure that
4939 an instruction containing %d will follow, thereby undoing the effects of
4940 doing this instruction unconditionally.
4941 If CODE is 'N' then X is a floating point operand that must be negated
4943 If CODE is 'B' then output a bitwise inverted value of X (a const int).
4944 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
/* Print operand X to STREAM, modified by letter CODE (see comment
   above).  NOTE(review): this extract has lost the switch/case labels
   for CODE, so the groups below are annotated by what they print.  */
4947 arm_print_operand (stream, x, code)
/* Assembler comment leader and register prefix escapes.  */
4955 fputs (ASM_COMMENT_START, stream);
4959 fputs (REGISTER_PREFIX, stream);
/* Current condition code, only while conditionalising (FSM state 3/4).  */
4963 if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
4964 fputs (arm_condition_codes[arm_current_cc], stream);
/* Negated floating-point constant ('N').  */
4970 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
4971 r = REAL_VALUE_NEGATE (r);
4972 fprintf (stream, "%s", fp_const_from_val (&r));
/* Bitwise-inverted constant ('B').  */
4977 if (GET_CODE (x) == CONST_INT)
4979 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
4984 ARM_SIGN_EXTEND (~ INTVAL (x)));
4988 output_addr_const (stream, x);
/* Arithmetic instruction mnemonics, shifted-operand forms.  */
4993 fprintf (stream, "%s", arithmetic_instr (x, 1));
4997 fprintf (stream, "%s", arithmetic_instr (x, 0));
5003 char *shift = shift_op (x, &val);
5007 fprintf (stream, ", %s ", shift_op (x, &val));
5009 arm_print_operand (stream, XEXP (x, 1), 0);
5012 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
/* Low/high word of a double-word register pair, endian-aware.  */
5025 fputs (REGISTER_PREFIX, stream);
5026 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 1 : 0)], stream);
5032 fputs (REGISTER_PREFIX, stream);
5033 fputs (reg_names[REGNO (x) + (WORDS_BIG_ENDIAN ? 0 : 1)], stream);
/* Base register of a memory operand.  */
5037 fputs (REGISTER_PREFIX, stream);
5038 if (GET_CODE (XEXP (x, 0)) == REG)
5039 fputs (reg_names[REGNO (XEXP (x, 0))], stream);
5041 fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
/* ldm/stm style register range ('M'): {rFIRST-rLAST}, where the range
   length is the operand size rounded up to whole SImode words.  */
5045 fprintf (stream, "{%s%s-%s%s}", REGISTER_PREFIX, reg_names[REGNO (x)],
5046 REGISTER_PREFIX, reg_names[REGNO (x) - 1
5047 + ((GET_MODE_SIZE (GET_MODE (x))
5048 + GET_MODE_SIZE (SImode) - 1)
5049 / GET_MODE_SIZE (SImode))]);
/* Condition ('d') and inverted condition ('D') suffixes.  */
5054 fputs (arm_condition_codes[get_arm_condition_code (x)],
5060 fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
5061 (get_arm_condition_code (x))],
/* Default: plain register, memory reference, or constant.  */
5069 if (GET_CODE (x) == REG)
5071 fputs (REGISTER_PREFIX, stream);
5072 fputs (reg_names[REGNO (x)], stream);
5074 else if (GET_CODE (x) == MEM)
5076 output_memory_reference_mode = GET_MODE (x);
5077 output_address (XEXP (x, 0));
5079 else if (GET_CODE (x) == CONST_DOUBLE)
5080 fprintf (stream, "#%s", fp_immediate_constant (x));
5081 else if (GET_CODE (x) == NEG)
5082 abort (); /* This should never happen now. */
5085 fputc ('#', stream);
5086 output_addr_const (stream, x);
5091 /* Output a label definition. */
/* Thin wrapper around the ARM_OUTPUT_LABEL target macro; writes the
   label NAME to STREAM.  */
5094 arm_asm_output_label (stream, name)
5098 ARM_OUTPUT_LABEL (stream, name);
5101 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
5102 directive hence this hack, which works by reserving some `.space' in the
5103 bss segment directly.
5105 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
5106 define STATIC COMMON space but merely STATIC BSS space. */
/* Reserve SIZE bytes of bss for NAME on STREAM, aligned to ALIGN bits.  */
5109 output_lcomm_directive (stream, name, size, align)
5115 ASM_OUTPUT_ALIGN (stream, floor_log2 (align / BITS_PER_UNIT));
5116 ARM_OUTPUT_LABEL (stream, name);
5117 fprintf (stream, "\t.space\t%d\n", size);
5120 /* A finite state machine takes care of noticing whether or not instructions
5121 can be conditionally executed, and thus decrease execution time and code
5122 size by deleting branch instructions. The fsm is controlled by
5123 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
5125 /* The state of the fsm controlling condition codes are:
5126 0: normal, do nothing special
5127 1: make ASM_OUTPUT_OPCODE not output this instruction
5128 2: make ASM_OUTPUT_OPCODE not output this instruction
5129 3: make instructions conditional
5130 4: make instructions conditional
5132 State transitions (state->state by whom under condition):
5133 0 -> 1 final_prescan_insn if the `target' is a label
5134 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
5135 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
5136 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
5137 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
5138 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
5139 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
5140 (the target insn is arm_target_insn).
5142 If the jump clobbers the conditions then we use states 2 and 4.
5144 A similar thing can be done with conditional return insns.
5146 XXX In case the `target' is an unconditional branch, this conditionalising
5147 of the instructions always reduces code size, but not always execution
5148 time. But then, I want to reduce the code size to somewhere near what
5149 /bin/cc produces. */
5151 /* Returns the index of the ARM condition code string in
5152 `arm_condition_codes'. COMPARISON should be an rtx like
5153 `(eq (...) (...))'. */
5155 static enum arm_cond_code
5156 get_arm_condition_code (comparison)
5159 enum machine_mode mode = GET_MODE (XEXP (comparison, 0));
5161 register enum rtx_code comp_code = GET_CODE (comparison);
/* If the operand carries no CC mode, derive one from the comparison.  */
5163 if (GET_MODE_CLASS (mode) != MODE_CC)
5164 mode = SELECT_CC_MODE (comp_code, XEXP (comparison, 0),
5165 XEXP (comparison, 1));
/* Dominance CC modes: the mode encodes the dominating condition; the
   comparison itself may only test EQ or NE against it.  */
5169 case CC_DNEmode: code = ARM_NE; goto dominance;
5170 case CC_DEQmode: code = ARM_EQ; goto dominance;
5171 case CC_DGEmode: code = ARM_GE; goto dominance;
5172 case CC_DGTmode: code = ARM_GT; goto dominance;
5173 case CC_DLEmode: code = ARM_LE; goto dominance;
5174 case CC_DLTmode: code = ARM_LT; goto dominance;
5175 case CC_DGEUmode: code = ARM_CS; goto dominance;
5176 case CC_DGTUmode: code = ARM_HI; goto dominance;
5177 case CC_DLEUmode: code = ARM_LS; goto dominance;
5178 case CC_DLTUmode: code = ARM_CC;
5181 if (comp_code != EQ && comp_code != NE)
/* EQ means the dominated condition is false: invert.  */
5184 if (comp_code == EQ)
5185 return ARM_INVERSE_CONDITION_CODE (code);
/* NOTE(review): the case labels for the remaining groups (other CC
   modes) are on lines not shown in this extract; each group below is
   one such mode's rtx-code -> ARM-condition mapping.  */
5191 case NE: return ARM_NE;
5192 case EQ: return ARM_EQ;
5193 case GE: return ARM_PL;
5194 case LT: return ARM_MI;
5202 case NE: return ARM_NE;
5203 case EQ: return ARM_EQ;
5210 case GE: return ARM_GE;
5211 case GT: return ARM_GT;
5212 case LE: return ARM_LS;
5213 case LT: return ARM_MI;
/* Swapped-operand group: conditions are mirrored (GE<->LE, etc.).  */
5220 case NE: return ARM_NE;
5221 case EQ: return ARM_EQ;
5222 case GE: return ARM_LE;
5223 case GT: return ARM_LT;
5224 case LE: return ARM_GE;
5225 case LT: return ARM_GT;
5226 case GEU: return ARM_LS;
5227 case GTU: return ARM_CC;
5228 case LEU: return ARM_CS;
5229 case LTU: return ARM_HI;
5236 case LTU: return ARM_CS;
5237 case GEU: return ARM_CC;
/* Plain CCmode: the direct mapping.  */
5244 case NE: return ARM_NE;
5245 case EQ: return ARM_EQ;
5246 case GE: return ARM_GE;
5247 case GT: return ARM_GT;
5248 case LE: return ARM_LE;
5249 case LT: return ARM_LT;
5250 case GEU: return ARM_CS;
5251 case GTU: return ARM_HI;
5252 case LEU: return ARM_LS;
5253 case LTU: return ARM_CC;
/* Drive the conditional-execution finite state machine described in
   the comment block above.  Called by final for each INSN before it
   is output; may set arm_ccfsm_state, arm_target_label/insn and
   arm_current_cc so that a short conditional branch and the insns it
   skips are replaced by conditionally-executed insns.  OPVEC and
   NOPERANDS come from final but the recognised pattern is re-recog'd
   at the end to keep final.c's recog_operand array intact.  */
5265 final_prescan_insn (insn, opvec, noperands)
5270 /* BODY will hold the body of INSN. */
5271 register rtx body = PATTERN (insn);
5273 /* This will be 1 if trying to repeat the trick, and things need to be
5274 reversed if it appears to fail. */
5277 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
5278 taken are clobbered, even if the rtl suggests otherwise. It also
5279 means that we have to grub around within the jump expression to find
5280 out what the conditions are when the jump isn't taken. */
5281 int jump_clobbers = 0;
5283 /* If we start with a return insn, we only succeed if we find another one. */
5284 int seeking_return = 0;
5286 /* START_INSN will hold the insn from where we start looking. This is the
5287 first insn after the following code_label if REVERSE is true. */
5288 rtx start_insn = insn;
5290 /* If in state 4, check if the target branch is reached, in order to
5291 change back to state 0. */
5292 if (arm_ccfsm_state == 4)
5294 if (insn == arm_target_insn)
5296 arm_target_insn = NULL;
5297 arm_ccfsm_state = 0;
5302 /* If in state 3, it is possible to repeat the trick, if this insn is an
5303 unconditional branch to a label, and immediately following this branch
5304 is the previous target label which is only used once, and the label this
5305 branch jumps to is not too far off. */
5306 if (arm_ccfsm_state == 3)
5308 if (simplejump_p (insn))
5310 start_insn = next_nonnote_insn (start_insn);
5311 if (GET_CODE (start_insn) == BARRIER)
5313 /* XXX Isn't this always a barrier? */
5314 start_insn = next_nonnote_insn (start_insn);
5316 if (GET_CODE (start_insn) == CODE_LABEL
5317 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5318 && LABEL_NUSES (start_insn) == 1)
/* A RETURN in state 3 can also repeat the trick.  */
5323 else if (GET_CODE (body) == RETURN)
5325 start_insn = next_nonnote_insn (start_insn);
5326 if (GET_CODE (start_insn) == BARRIER)
5327 start_insn = next_nonnote_insn (start_insn);
5328 if (GET_CODE (start_insn) == CODE_LABEL
5329 && CODE_LABEL_NUMBER (start_insn) == arm_target_label
5330 && LABEL_NUSES (start_insn) == 1)
5342 if (arm_ccfsm_state != 0 && !reverse)
5344 if (GET_CODE (insn) != JUMP_INSN)
5347 /* This jump might be paralleled with a clobber of the condition codes
5348 the jump should always come first */
5349 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
5350 body = XVECEXP (body, 0, 0);
5353 /* If this is a conditional return then we don't want to know */
5354 if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5355 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
5356 && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
5357 || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
5362 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
5363 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
5366 int fail = FALSE, succeed = FALSE;
5367 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
5368 int then_not_else = TRUE;
5369 rtx this_insn = start_insn, label = 0;
5371 if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
5373 /* The code below is wrong for these, and I haven't time to
5374 fix it now. So we just do the safe thing and return. This
5375 whole function needs re-writing anyway. */
5380 /* Register the insn jumped to. */
5383 if (!seeking_return)
5384 label = XEXP (SET_SRC (body), 0);
5386 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
5387 label = XEXP (XEXP (SET_SRC (body), 1), 0);
5388 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
5390 label = XEXP (XEXP (SET_SRC (body), 2), 0);
5391 then_not_else = FALSE;
5393 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
5395 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
5398 then_not_else = FALSE;
5403 /* See how many insns this branch skips, and what kind of insns. If all
5404 insns are okay, and the label or unconditional branch to the same
5405 label is not too far away, succeed. */
5406 for (insns_skipped = 0;
5407 !fail && !succeed && insns_skipped++ < MAX_INSNS_SKIPPED;)
5411 this_insn = next_nonnote_insn (this_insn);
5415 scanbody = PATTERN (this_insn);
/* NOTE(review): the case labels of this switch (CODE_LABEL, BARRIER,
   CALL_INSN, JUMP_INSN, INSN, ...) are on lines not shown here.  */
5417 switch (GET_CODE (this_insn))
5420 /* Succeed if it is the target label, otherwise fail since
5421 control falls in from somewhere else. */
5422 if (this_insn == label)
5426 arm_ccfsm_state = 2;
5427 this_insn = next_nonnote_insn (this_insn);
5430 arm_ccfsm_state = 1;
5438 /* Succeed if the following insn is the target label.
5440 If return insns are used then the last insn in a function
5441 will be a barrier. */
5442 this_insn = next_nonnote_insn (this_insn);
5443 if (this_insn && this_insn == label)
5447 arm_ccfsm_state = 2;
5448 this_insn = next_nonnote_insn (this_insn);
5451 arm_ccfsm_state = 1;
5459 /* If using 32-bit addresses the cc is not preserved over
5463 /* Succeed if the following insn is the target label,
5464 or if the following two insns are a barrier and
5465 the target label. */
5466 this_insn = next_nonnote_insn (this_insn);
5467 if (this_insn && GET_CODE (this_insn) == BARRIER)
5468 this_insn = next_nonnote_insn (this_insn);
5470 if (this_insn && this_insn == label
5471 && insns_skipped < MAX_INSNS_SKIPPED)
5475 arm_ccfsm_state = 2;
5476 this_insn = next_nonnote_insn (this_insn);
5479 arm_ccfsm_state = 1;
5488 /* If this is an unconditional branch to the same label, succeed.
5489 If it is to another label, do nothing. If it is conditional,
5491 /* XXX Probably, the test for the SET and the PC are unnecessary. */
5493 if (GET_CODE (scanbody) == SET
5494 && GET_CODE (SET_DEST (scanbody)) == PC)
5496 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
5497 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
5499 arm_ccfsm_state = 2;
5502 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
5505 else if (GET_CODE (scanbody) == RETURN
5508 arm_ccfsm_state = 2;
5511 else if (GET_CODE (scanbody) == PARALLEL)
5513 switch (get_attr_conds (this_insn))
5525 /* Instructions using or affecting the condition codes make it
5527 if ((GET_CODE (scanbody) == SET
5528 || GET_CODE (scanbody) == PARALLEL)
5529 && get_attr_conds (this_insn) != CONDS_NOCOND)
/* Record where conditionalising must stop.  */
5539 if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
5540 arm_target_label = CODE_LABEL_NUMBER (label);
5541 else if (seeking_return || arm_ccfsm_state == 2)
5543 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
5545 this_insn = next_nonnote_insn (this_insn);
5546 if (this_insn && (GET_CODE (this_insn) == BARRIER
5547 || GET_CODE (this_insn) == CODE_LABEL))
5552 /* Oh, dear! we ran off the end.. give up */
5553 recog (PATTERN (insn), insn, NULL_PTR);
5554 arm_ccfsm_state = 0;
5555 arm_target_insn = NULL;
5558 arm_target_insn = this_insn;
/* Work out the condition under which the skipped insns execute; for
   jump_clobbers the condition lives deeper in the jump expression.  */
5567 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
5569 if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
5570 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5571 if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
5572 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5576 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
5579 arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
5583 if (reverse || then_not_else)
5584 arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
5586 /* restore recog_operand (getting the attributes of other insns can
5587 destroy this array, but final.c assumes that it remains intact
5588 across this call; since the insn has been recognized already we
5589 call recog direct). */
5590 recog (PATTERN (insn), insn, NULL_PTR);
5594 #ifdef AOF_ASSEMBLER
5595 /* Special functions only needed when producing AOF syntax assembler. */
/* SYMBOL_REF for the base label of the PIC address-constant pool.  */
5597 rtx aof_pic_label = NULL_RTX;
/* Linked list of symbols given a slot in the PIC pool; each slot is 4
   bytes (see the `offset += 4' below).  */
5600 struct pic_chain *next;
5604 static struct pic_chain *aof_pic_chain = NULL;
/* Return a PIC-pool reference (aof_pic_label + offset) for the symbol
   X, allocating a new chain entry if X is not yet in the pool.
   NOTE(review): the function header line is not shown in this extract;
   presumably this is aof_pic_entry -- confirm against the full file.  */
5610 struct pic_chain **chainp;
5613 if (aof_pic_label == NULL_RTX)
5615 /* This needs to persist throughout the compilation. */
5616 end_temporary_allocation ();
5617 aof_pic_label = gen_rtx (SYMBOL_REF, Pmode, "x$adcons");
5618 resume_temporary_allocation ();
/* Search for an existing slot; symbol names are pointer-compared.  */
5621 for (offset = 0, chainp = &aof_pic_chain; *chainp;
5622 offset += 4, chainp = &(*chainp)->next)
5623 if ((*chainp)->symname == XSTR (x, 0))
5624 return plus_constant (aof_pic_label, offset);
/* Not found: append a new entry at the current offset.  */
5626 *chainp = (struct pic_chain *) xmalloc (sizeof (struct pic_chain));
5627 (*chainp)->next = NULL;
5628 (*chainp)->symname = XSTR (x, 0);
5629 return plus_constant (aof_pic_label, offset);
/* Emit the accumulated PIC address-constant pool to stream F as an
   AOF AREA of DCD entries, one per recorded symbol.  No-op when the
   pool is empty.  */
5633 aof_dump_pic_table (f)
5636 struct pic_chain *chain;
5638 if (aof_pic_chain == NULL)
5641 fprintf (f, "\tAREA |%s$$adcons|, BASED %s%s\n",
5642 reg_names[PIC_OFFSET_TABLE_REGNUM], REGISTER_PREFIX,
5643 reg_names[PIC_OFFSET_TABLE_REGNUM]);
5644 fputs ("|x$adcons|\n", f);
5646 for (chain = aof_pic_chain; chain; chain = chain->next)
5648 fputs ("\tDCD\t", f);
5649 assemble_name (f, chain->symname);
/* Counters for generating uniquely-named AOF code/data AREAs.
   NOTE(review): the function header lines for the two section-name
   builders below are not shown in this extract; presumably these are
   aof_text_section and aof_data_section -- confirm in the full file.  */
5654 int arm_text_section_count = 1;
/* Build the AREA directive naming a fresh READONLY code section;
   adds PIC attributes when appropriate (the guard line is elided).  */
5659 static char buf[100];
5660 sprintf (buf, "\tAREA |C$$code%d|, CODE, READONLY",
5661 arm_text_section_count++);
5663 strcat (buf, ", PIC, REENTRANT");
5667 static int arm_data_section_count = 1;
/* Build the AREA directive naming a fresh DATA section.  */
5672 static char buf[100];
5673 sprintf (buf, "\tAREA |C$$data%d|, DATA", arm_data_section_count++);
5677 /* The AOF assembler is religiously strict about declarations of
5678 imported and exported symbols, so that it is impossible to declare
5679 a function as imported near the beginning of the file, and then to
5680 export it later on. It is, however, possible to delay the decision
5681 until all the functions in the file have been compiled. To get
5682 around this, we maintain a list of the imports and exports, and
5683 delete from it any that are subsequently defined. At the end of
5684 compilation we spit the remainder of the list out before the END
/* Singly-linked list node and head for pending imported symbols.  */
5689 struct import *next;
5693 static struct import *imports_list = NULL;
/* Record NAME as an imported symbol, unless it is already listed
   (names are pointer-compared).  */
5696 aof_add_import (name)
5701 for (new = imports_list; new; new = new->next)
5702 if (new->name == name)
5705 new = (struct import *) xmalloc (sizeof (struct import));
5706 new->next = imports_list;
/* Remove NAME from the pending-imports list (called when the symbol
   turns out to be defined in this file).  Names are pointer-compared;
   the matching node is unlinked.  */
5712 aof_delete_import (name)
5715 struct import **old;
5717 for (old = &imports_list; *old; old = & (*old)->next)
5719 if ((*old)->name == name)
5721 *old = (*old)->next;
/* Nonzero once a `main' function has been seen in this file.  */
5727 int arm_main_function = 0;
/* At end of compilation, write out IMPORT directives for every symbol
   still on the pending-imports list, plus the __main startup hook if
   this file defines main.  */
5730 aof_dump_imports (f)
5733 /* The AOF assembler needs this to cause the startup code to be extracted
5734 from the library. Bringing in __main causes the whole thing to work
5736 if (arm_main_function)
5739 fputs ("\tIMPORT __main\n", f);
5740 fputs ("\tDCD __main\n", f);
5743 /* Now dump the remaining imports. */
5744 while (imports_list)
5746 fprintf (f, "\tIMPORT\t");
5747 assemble_name (f, imports_list->name);
5749 imports_list = imports_list->next;
5752 #endif /* AOF_ASSEMBLER */