1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009, 2010 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
43 #include "diagnostic-core.h"
49 #include "target-def.h"
50 #include "langhooks.h"
51 #include "tm-constrs.h"
54 struct lm32_frame_info
56 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
57 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
58 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
59 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
60 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
61 unsigned int reg_save_mask; /* mask of saved registers. */
64 /* Prototypes for static functions. */
65 static rtx emit_add (rtx dest, rtx src0, rtx src1);
66 static void expand_save_restore (struct lm32_frame_info *info, int op);
67 static void stack_adjust (HOST_WIDE_INT amount);
68 static bool lm32_in_small_data_p (const_tree);
69 static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
70 enum machine_mode mode, tree type,
71 int *pretend_size, int no_rtl);
72 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
74 static bool lm32_can_eliminate (const int, const int);
76 lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
77 static HOST_WIDE_INT lm32_compute_frame_size (int size);
78 static void lm32_option_override (void);
79 static rtx lm32_function_arg (CUMULATIVE_ARGS * cum,
80 enum machine_mode mode, const_tree type,
82 static void lm32_function_arg_advance (CUMULATIVE_ARGS * cum,
83 enum machine_mode mode,
84 const_tree type, bool named);
86 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
87 static const struct default_options lm32_option_optimization_table[] =
89 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
90 { OPT_LEVELS_NONE, 0, NULL, 0 }
93 #undef TARGET_OPTION_OVERRIDE
94 #define TARGET_OPTION_OVERRIDE lm32_option_override
95 #undef TARGET_OPTION_OPTIMIZATION_TABLE
96 #define TARGET_OPTION_OPTIMIZATION_TABLE lm32_option_optimization_table
97 #undef TARGET_ADDRESS_COST
98 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
99 #undef TARGET_RTX_COSTS
100 #define TARGET_RTX_COSTS lm32_rtx_costs
101 #undef TARGET_IN_SMALL_DATA_P
102 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
103 #undef TARGET_PROMOTE_FUNCTION_MODE
104 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
105 #undef TARGET_SETUP_INCOMING_VARARGS
106 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
107 #undef TARGET_FUNCTION_ARG
108 #define TARGET_FUNCTION_ARG lm32_function_arg
109 #undef TARGET_FUNCTION_ARG_ADVANCE
110 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
111 #undef TARGET_PROMOTE_PROTOTYPES
112 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
113 #undef TARGET_MIN_ANCHOR_OFFSET
114 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
115 #undef TARGET_MAX_ANCHOR_OFFSET
116 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
117 #undef TARGET_CAN_ELIMINATE
118 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
119 #undef TARGET_LEGITIMATE_ADDRESS_P
120 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
122 struct gcc_target targetm = TARGET_INITIALIZER;
124 /* Current frame information calculated by lm32_compute_frame_size. */
125 static struct lm32_frame_info current_frame_info;
127 /* Return non-zero if the given return type should be returned in memory. */
130 lm32_return_in_memory (tree type)
134 if (!AGGREGATE_TYPE_P (type))
136 /* All simple types are returned in registers. */
140 size = int_size_in_bytes (type);
141 if (size >= 0 && size <= UNITS_PER_WORD)
143 /* If it can fit in one register. */
150 /* Generate an emit a word sized add instruction. */
153 emit_add (rtx dest, rtx src0, rtx src1)
156 insn = emit_insn (gen_addsi3 (dest, src0, src1));
160 /* Generate the code to compare (and possibly branch) two integer values
161 TEST_CODE is the comparison code we are trying to emulate
162 (or implement directly)
163 RESULT is where to store the result of the comparison,
164 or null to emit a branch
165 CMP0 CMP1 are the two comparison operands
166 DESTINATION is the destination of the branch, or null to only compare
170 gen_int_relational (enum rtx_code code,
176 enum machine_mode mode;
179 mode = GET_MODE (cmp0);
180 if (mode == VOIDmode)
181 mode = GET_MODE (cmp1);
183 /* Is this a branch or compare. */
184 branch_p = (destination != 0);
186 /* Instruction set doesn't support LE or LT, so swap operands and use
197 code = swap_condition (code);
209 rtx insn, cond, label;
211 /* Operands must be in registers. */
212 if (!register_operand (cmp0, mode))
213 cmp0 = force_reg (mode, cmp0);
214 if (!register_operand (cmp1, mode))
215 cmp1 = force_reg (mode, cmp1);
217 /* Generate conditional branch instruction. */
218 cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
219 label = gen_rtx_LABEL_REF (VOIDmode, destination);
220 insn = gen_rtx_SET (VOIDmode, pc_rtx,
221 gen_rtx_IF_THEN_ELSE (VOIDmode,
222 cond, label, pc_rtx));
223 emit_jump_insn (insn);
227 /* We can't have const_ints in cmp0, other than 0. */
228 if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
229 cmp0 = force_reg (mode, cmp0);
231 /* If the comparison is against an int not in legal range
232 move it into a register. */
233 if (GET_CODE (cmp1) == CONST_INT)
243 if (!satisfies_constraint_K (cmp1))
244 cmp1 = force_reg (mode, cmp1);
250 if (!satisfies_constraint_L (cmp1))
251 cmp1 = force_reg (mode, cmp1);
258 /* Generate compare instruction. */
259 emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
263 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
264 and OPERAND[3]. Store the result in OPERANDS[0]. */
267 lm32_expand_scc (rtx operands[])
269 rtx target = operands[0];
270 enum rtx_code code = GET_CODE (operands[1]);
271 rtx op0 = operands[2];
272 rtx op1 = operands[3];
274 gen_int_relational (code, target, op0, op1, NULL_RTX);
277 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
278 CODE and jump to OPERANDS[3] if the condition holds. */
281 lm32_expand_conditional_branch (rtx operands[])
283 enum rtx_code code = GET_CODE (operands[0]);
284 rtx op0 = operands[1];
285 rtx op1 = operands[2];
286 rtx destination = operands[3];
288 gen_int_relational (code, NULL_RTX, op0, op1, destination);
291 /* Generate and emit RTL to save or restore callee save registers. */
293 expand_save_restore (struct lm32_frame_info *info, int op)
295 unsigned int reg_save_mask = info->reg_save_mask;
297 HOST_WIDE_INT offset;
300 /* Callee saves are below locals and above outgoing arguments. */
301 offset = info->args_size + info->callee_size;
302 for (regno = 0; regno <= 31; regno++)
304 if ((reg_save_mask & (1 << regno)) != 0)
309 offset_rtx = GEN_INT (offset);
310 if (satisfies_constraint_K (offset_rtx))
312 mem = gen_rtx_MEM (word_mode,
319 /* r10 is caller saved so it can be used as a temp reg. */
322 r10 = gen_rtx_REG (word_mode, 10);
323 insn = emit_move_insn (r10, offset_rtx);
325 RTX_FRAME_RELATED_P (insn) = 1;
326 insn = emit_add (r10, r10, stack_pointer_rtx);
328 RTX_FRAME_RELATED_P (insn) = 1;
329 mem = gen_rtx_MEM (word_mode, r10);
333 insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
335 insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
337 /* only prologue instructions which set the sp fp or save a
338 register should be marked as frame related. */
340 RTX_FRAME_RELATED_P (insn) = 1;
341 offset -= UNITS_PER_WORD;
347 stack_adjust (HOST_WIDE_INT amount)
351 if (!IN_RANGE (amount, -32776, 32768))
353 /* r10 is caller saved so it can be used as a temp reg. */
355 r10 = gen_rtx_REG (word_mode, 10);
356 insn = emit_move_insn (r10, GEN_INT (amount));
358 RTX_FRAME_RELATED_P (insn) = 1;
359 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
361 RTX_FRAME_RELATED_P (insn) = 1;
365 insn = emit_add (stack_pointer_rtx,
366 stack_pointer_rtx, GEN_INT (amount));
368 RTX_FRAME_RELATED_P (insn) = 1;
373 /* Create and emit instructions for a functions prologue. */
375 lm32_expand_prologue (void)
379 lm32_compute_frame_size (get_frame_size ());
381 if (current_frame_info.total_size > 0)
383 /* Add space on stack new frame. */
384 stack_adjust (-current_frame_info.total_size);
386 /* Save callee save registers. */
387 if (current_frame_info.reg_save_mask != 0)
388 expand_save_restore (¤t_frame_info, 0);
390 /* Setup frame pointer if it's needed. */
391 if (frame_pointer_needed == 1)
393 /* Load offset - Don't use total_size, as that includes pretend_size,
394 which isn't part of this frame? */
396 emit_move_insn (frame_pointer_rtx,
397 GEN_INT (current_frame_info.args_size +
398 current_frame_info.callee_size +
399 current_frame_info.locals_size));
400 RTX_FRAME_RELATED_P (insn) = 1;
403 insn = emit_add (frame_pointer_rtx,
404 frame_pointer_rtx, stack_pointer_rtx);
405 RTX_FRAME_RELATED_P (insn) = 1;
408 /* Prevent prologue from being scheduled into function body. */
409 emit_insn (gen_blockage ());
413 /* Create an emit instructions for a functions epilogue. */
415 lm32_expand_epilogue (void)
417 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
419 lm32_compute_frame_size (get_frame_size ());
421 if (current_frame_info.total_size > 0)
423 /* Prevent stack code from being reordered. */
424 emit_insn (gen_blockage ());
426 /* Restore callee save registers. */
427 if (current_frame_info.reg_save_mask != 0)
428 expand_save_restore (¤t_frame_info, 1);
430 /* Deallocate stack. */
431 stack_adjust (current_frame_info.total_size);
433 /* Return to calling function. */
434 emit_jump_insn (gen_return_internal (ra_rtx));
438 /* Return to calling function. */
439 emit_jump_insn (gen_return_internal (ra_rtx));
443 /* Return the bytes needed to compute the frame pointer from the current
446 lm32_compute_frame_size (int size)
449 HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
450 unsigned int reg_save_mask;
453 args_size = crtl->outgoing_args_size;
454 pretend_size = crtl->args.pretend_args_size;
458 /* Build mask that actually determines which regsiters we save
459 and calculate size required to store them in the stack. */
460 for (regno = 1; regno < SP_REGNUM; regno++)
462 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
464 reg_save_mask |= 1 << regno;
465 callee_size += UNITS_PER_WORD;
468 if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
471 reg_save_mask |= 1 << RA_REGNUM;
472 callee_size += UNITS_PER_WORD;
474 if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
476 reg_save_mask |= 1 << FP_REGNUM;
477 callee_size += UNITS_PER_WORD;
480 /* Compute total frame size. */
481 total_size = pretend_size + args_size + locals_size + callee_size;
483 /* Align frame to appropriate boundary. */
484 total_size = (total_size + 3) & ~3;
486 /* Save computed information. */
487 current_frame_info.total_size = total_size;
488 current_frame_info.callee_size = callee_size;
489 current_frame_info.pretend_size = pretend_size;
490 current_frame_info.locals_size = locals_size;
491 current_frame_info.args_size = args_size;
492 current_frame_info.reg_save_mask = reg_save_mask;
498 lm32_print_operand (FILE * file, rtx op, int letter)
502 code = GET_CODE (op);
504 if (code == SIGN_EXTEND)
505 op = XEXP (op, 0), code = GET_CODE (op);
506 else if (code == REG || code == SUBREG)
513 regnum = true_regnum (op);
515 fprintf (file, "%s", reg_names[regnum]);
517 else if (code == HIGH)
518 output_addr_const (file, XEXP (op, 0));
519 else if (code == MEM)
520 output_address (XEXP (op, 0));
521 else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
522 fprintf (file, "%s", reg_names[0]);
523 else if (GET_CODE (op) == CONST_DOUBLE)
525 if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
526 output_operand_lossage ("only 0.0 can be loaded as an immediate");
531 fprintf (file, "e ");
533 fprintf (file, "ne ");
535 fprintf (file, "g ");
536 else if (code == GTU)
537 fprintf (file, "gu ");
539 fprintf (file, "l ");
540 else if (code == LTU)
541 fprintf (file, "lu ");
543 fprintf (file, "ge ");
544 else if (code == GEU)
545 fprintf (file, "geu");
547 fprintf (file, "le ");
548 else if (code == LEU)
549 fprintf (file, "leu");
551 output_addr_const (file, op);
554 /* A C compound statement to output to stdio stream STREAM the
555 assembler syntax for an instruction operand that is a memory
556 reference whose address is ADDR. ADDR is an RTL expression.
558 On some machines, the syntax for a symbolic address depends on
559 the section that the address refers to. On these machines,
560 define the macro `ENCODE_SECTION_INFO' to store the information
561 into the `symbol_ref', and then check for it here. */
564 lm32_print_operand_address (FILE * file, rtx addr)
566 switch (GET_CODE (addr))
569 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
573 output_address (XEXP (addr, 0));
578 rtx arg0 = XEXP (addr, 0);
579 rtx arg1 = XEXP (addr, 1);
581 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
583 if (GET_CODE (arg1) == CONST_INT)
584 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
588 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
589 output_addr_const (file, arg1);
593 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
594 output_addr_const (file, addr);
596 fatal_insn ("bad operand", addr);
601 if (SYMBOL_REF_SMALL_P (addr))
603 fprintf (file, "gp(");
604 output_addr_const (file, addr);
608 fatal_insn ("can't use non gp relative absolute address", addr);
612 fatal_insn ("invalid addressing mode", addr);
617 /* Determine where to put an argument to a function.
618 Value is zero to push the argument on the stack,
619 or a hard register in which to store the argument.
621 MODE is the argument's machine mode.
622 TYPE is the data type of the argument (as a tree).
623 This is null for libcalls where that information may
625 CUM is a variable of type CUMULATIVE_ARGS which gives info about
626 the preceding args and about the function being called.
627 NAMED is nonzero if this argument is a named parameter
628 (otherwise it is an extra parameter matching an ellipsis). */
631 lm32_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
632 const_tree type, bool named)
634 if (mode == VOIDmode)
635 /* Compute operand 2 of the call insn. */
638 if (targetm.calls.must_pass_in_stack (mode, type))
641 if (!named || (*cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
644 return gen_rtx_REG (mode, *cum + LM32_FIRST_ARG_REG);
648 lm32_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
649 const_tree type, bool named ATTRIBUTE_UNUSED)
651 *cum += LM32_NUM_REGS2 (mode, type);
655 lm32_compute_initial_elimination_offset (int from, int to)
657 HOST_WIDE_INT offset = 0;
661 case ARG_POINTER_REGNUM:
664 case FRAME_POINTER_REGNUM:
667 case STACK_POINTER_REGNUM:
669 lm32_compute_frame_size (get_frame_size ()) -
670 current_frame_info.pretend_size;
684 lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
685 tree type, int *pretend_size, int no_rtl)
690 fntype = TREE_TYPE (current_function_decl);
692 if (stdarg_p (fntype))
693 first_anon_arg = *cum + LM32_FIRST_ARG_REG;
696 /* this is the common case, we have been passed details setup
697 for the last named argument, we want to skip over the
698 registers, if any used in passing this named paramter in
699 order to determine which is the first registers used to pass
700 anonymous arguments. */
704 size = int_size_in_bytes (type);
706 size = GET_MODE_SIZE (mode);
709 *cum + LM32_FIRST_ARG_REG +
710 ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
713 if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
715 int first_reg_offset = first_anon_arg;
716 int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
719 regblock = gen_rtx_MEM (BLKmode,
720 plus_constant (arg_pointer_rtx,
721 FIRST_PARM_OFFSET (0)));
722 move_block_from_reg (first_reg_offset, regblock, size);
724 *pretend_size = size * UNITS_PER_WORD;
728 /* Override command line options. */
730 lm32_option_override (void)
732 /* We must have sign-extend enabled if barrel-shift isn't. */
733 if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
734 target_flags |= MASK_SIGN_EXTEND_ENABLED;
737 /* Return nonzero if this function is known to have a null epilogue.
738 This allows the optimizer to omit jumps to jumps if no stack
741 lm32_can_use_return (void)
743 if (!reload_completed)
746 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
749 if (lm32_compute_frame_size (get_frame_size ()) != 0)
755 /* Support function to determine the return address of the function
756 'count' frames back up the stack. */
758 lm32_return_addr_rtx (int count, rtx frame)
763 if (!df_regs_ever_live_p (RA_REGNUM))
764 r = gen_rtx_REG (Pmode, RA_REGNUM);
767 r = gen_rtx_MEM (Pmode,
768 gen_rtx_PLUS (Pmode, frame,
769 GEN_INT (-2 * UNITS_PER_WORD)));
770 set_mem_alias_set (r, get_frame_alias_set ());
773 else if (flag_omit_frame_pointer)
777 r = gen_rtx_MEM (Pmode,
778 gen_rtx_PLUS (Pmode, frame,
779 GEN_INT (-2 * UNITS_PER_WORD)));
780 set_mem_alias_set (r, get_frame_alias_set ());
785 /* Return true if EXP should be placed in the small data section. */
788 lm32_in_small_data_p (const_tree exp)
790 /* We want to merge strings, so we never consider them small data. */
791 if (TREE_CODE (exp) == STRING_CST)
794 /* Functions are never in the small data area. Duh. */
795 if (TREE_CODE (exp) == FUNCTION_DECL)
798 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
800 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
801 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
806 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
808 /* If this is an incomplete type with size 0, then we can't put it
809 in sdata because it might be too big when completed. */
810 if (size > 0 && size <= g_switch_value)
817 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
818 Assume that the areas do not overlap. */
821 lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
822 HOST_WIDE_INT alignment)
824 HOST_WIDE_INT offset, delta;
825 unsigned HOST_WIDE_INT bits;
827 enum machine_mode mode;
830 /* Work out how many bits to move at a time. */
844 mode = mode_for_size (bits, MODE_INT, 0);
845 delta = bits / BITS_PER_UNIT;
847 /* Allocate a buffer for the temporary registers. */
848 regs = XALLOCAVEC (rtx, length / delta);
850 /* Load as many BITS-sized chunks as possible. */
851 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
853 regs[i] = gen_reg_rtx (mode);
854 emit_move_insn (regs[i], adjust_address (src, mode, offset));
857 /* Copy the chunks to the destination. */
858 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
859 emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
861 /* Mop up any left-over bytes. */
864 src = adjust_address (src, BLKmode, offset);
865 dest = adjust_address (dest, BLKmode, offset);
866 move_by_pieces (dest, src, length - offset,
867 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
871 /* Expand string/block move operations.
873 operands[0] is the pointer to the destination.
874 operands[1] is the pointer to the source.
875 operands[2] is the number of bytes to move.
876 operands[3] is the alignment. */
879 lm32_expand_block_move (rtx * operands)
881 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
883 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
884 INTVAL (operands[3]));
890 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
891 isn't protected by a PIC unspec. */
893 nonpic_symbol_mentioned_p (rtx x)
898 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
899 || GET_CODE (x) == PC)
902 /* We don't want to look into the possible MEM location of a
903 CONST_DOUBLE, since we're not going to use it, in general. */
904 if (GET_CODE (x) == CONST_DOUBLE)
907 if (GET_CODE (x) == UNSPEC)
910 fmt = GET_RTX_FORMAT (GET_CODE (x));
911 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
917 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
918 if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
921 else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
928 /* Compute a (partial) cost for rtx X.  Return true if the complete
929    cost has been computed, and false if subexpressions should be
930    scanned.  In either case, *TOTAL contains the cost result.  */
/* NOTE(review): this block appears truncated in extraction — the switch
   on the rtx code has lost nearly all of its `case` labels, braces and
   returns, so the mapping below from conditions to costs cannot be
   safely reconstructed here; compare against the upstream lm32 backend
   before editing.  Code lines are left byte-identical.  */
933 lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
935   enum machine_mode mode = GET_MODE (x);
/* Per-operation latencies (cycles) used when optimizing for speed;
   libcall_size_cost is the size estimate for an out-of-line helper.  */
938   const int arithmetic_latency = 1;
939   const int shift_latency = 1;
940   const int compare_latency = 2;
941   const int multiply_latency = 3;
942   const int load_latency = 3;
943   const int libcall_size_cost = 5;
945   /* Determine if we can handle the given mode size in a single instruction.  */
946   small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
959       *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
962 	  COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
969 	*total = COSTS_N_INSNS (1);
971 	*total = COSTS_N_INSNS (compare_latency);
975 	  /* FIXME. Guessing here.  */
976 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
/* Shift costs: single instruction with a hardware barrel shifter,
   otherwise shift-by-constant loops or a libcall.  */
983       if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
986 	    *total = COSTS_N_INSNS (1);
988 	    *total = COSTS_N_INSNS (shift_latency);
990       else if (TARGET_BARREL_SHIFT_ENABLED)
992 	  /* FIXME: Guessing here.  */
993 	  *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
995       else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
997 	  *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
1003 	    *total = COSTS_N_INSNS (libcall_size_cost);
1005 	    *total = COSTS_N_INSNS (100);
/* Multiply: hardware multiplier or libcall.  */
1010       if (TARGET_MULTIPLY_ENABLED && small_mode)
1013 	    *total = COSTS_N_INSNS (1);
1015 	    *total = COSTS_N_INSNS (multiply_latency);
1021 	    *total = COSTS_N_INSNS (libcall_size_cost);
1023 	    *total = COSTS_N_INSNS (100);
/* Divide/modulo: hardware divider (data-dependent cycle count for
   constant divisors), otherwise libcall.  */
1031       if (TARGET_DIVIDE_ENABLED && small_mode)
1034 	    *total = COSTS_N_INSNS (1);
1037 	      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1040 		  unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1047 		  if (IN_RANGE (i, 0, 65536))
1048 		    *total = COSTS_N_INSNS (1 + 1 + cycles);
1050 		    *total = COSTS_N_INSNS (2 + 1 + cycles);
1053 	      else if (GET_CODE (XEXP (x, 1)) == REG)
1055 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1060 		  *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1069 	    *total = COSTS_N_INSNS (libcall_size_cost);
1071 	    *total = COSTS_N_INSNS (100);
1078 	*total = COSTS_N_INSNS (1);
1080 	*total = COSTS_N_INSNS (arithmetic_latency);
1084       if (MEM_P (XEXP (x, 0)))
1085 	*total = COSTS_N_INSNS (0);
1086       else if (small_mode)
1089 	    *total = COSTS_N_INSNS (1);
1091 	    *total = COSTS_N_INSNS (arithmetic_latency);
1094 	*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1103       *total = COSTS_N_INSNS (0);
/* Constant costs depend on the outer operation: immediates that fit
   the L (unsigned 16-bit) or K (signed 16-bit) constraints are free,
   anything else needs an extra instruction pair to materialize.  */
1114 	  if (satisfies_constraint_L (x))
1115 	    *total = COSTS_N_INSNS (0);
1117 	    *total = COSTS_N_INSNS (2);
1124 	  if (satisfies_constraint_K (x))
1125 	    *total = COSTS_N_INSNS (0);
1127 	    *total = COSTS_N_INSNS (2);
1131 	  if (TARGET_MULTIPLY_ENABLED)
1133 	      if (satisfies_constraint_K (x))
1134 		*total = COSTS_N_INSNS (0);
1136 		*total = COSTS_N_INSNS (2);
1142 	      if (satisfies_constraint_K (x))
1143 		*total = COSTS_N_INSNS (1);
1145 		*total = COSTS_N_INSNS (2);
1156       *total = COSTS_N_INSNS (0);
1163       *total = COSTS_N_INSNS (0);
1172       *total = COSTS_N_INSNS (2);
1176       *total = COSTS_N_INSNS (1);
1181 	*total = COSTS_N_INSNS (1);
1183 	*total = COSTS_N_INSNS (load_latency);
1191 /* Implemenent TARGET_CAN_ELIMINATE. */
1194 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1196 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1199 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1202 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1205 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1207 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1211 if (GET_CODE (x) == PLUS
1212 && REG_P (XEXP (x, 0))
1213 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1214 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1215 && GET_CODE (XEXP (x, 1)) == CONST_INT
1216 && satisfies_constraint_K (XEXP ((x), 1)))
1220 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1226 /* Check a move is not memory to memory. */
1229 lm32_move_ok (enum machine_mode mode, rtx operands[2]) {
1230 if (memory_operand (operands[0], mode))
1231 return register_or_zero_operand (operands[1], mode);
1235 /* Implement LEGITIMATE_CONSTANT_P. */
1238 lm32_legitimate_constant_p (rtx x)
1240 /* 32-bit addresses require multiple instructions. */
1241 if (!flag_pic && reloc_operand (x, GET_MODE (x)))