1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
48 #include "target-def.h"
49 #include "langhooks.h"
50 #include "tm-constrs.h"
/* Per-function stack-frame layout, filled in by lm32_compute_frame_size
   and consumed by the prologue/epilogue expanders below.
   NOTE(review): this extraction is missing lines from the original file
   (struct braces, some prototype lines); code kept byte-identical.  */
53 struct lm32_frame_info
55 HOST_WIDE_INT total_size; /* number of bytes of entire frame. */
56 HOST_WIDE_INT callee_size; /* number of bytes to save callee saves. */
57 HOST_WIDE_INT pretend_size; /* number of bytes we pretend caller did. */
58 HOST_WIDE_INT args_size; /* number of bytes for outgoing arguments. */
59 HOST_WIDE_INT locals_size; /* number of bytes for local variables. */
60 unsigned int reg_save_mask; /* mask of saved registers. */
63 /* Prototypes for static functions. */
64 static rtx emit_add (rtx dest, rtx src0, rtx src1);
65 static void expand_save_restore (struct lm32_frame_info *info, int op);
66 static void stack_adjust (HOST_WIDE_INT amount);
67 static bool lm32_in_small_data_p (const_tree);
68 static void lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum,
69 enum machine_mode mode, tree type,
70 int *pretend_size, int no_rtl);
71 static bool lm32_rtx_costs (rtx x, int code, int outer_code, int *total,
73 static bool lm32_can_eliminate (const int, const int);
75 lm32_legitimate_address_p (enum machine_mode mode, rtx x, bool strict);
76 static HOST_WIDE_INT lm32_compute_frame_size (int size);
/* Target hook vector: each #undef/#define pair overrides a default hook
   with the lm32-specific implementation; TARGET_INITIALIZER then collects
   them into the global `targetm' structure used by the middle end.  */
78 #undef TARGET_ADDRESS_COST
79 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
80 #undef TARGET_RTX_COSTS
81 #define TARGET_RTX_COSTS lm32_rtx_costs
82 #undef TARGET_IN_SMALL_DATA_P
83 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
84 #undef TARGET_PROMOTE_FUNCTION_MODE
85 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
86 #undef TARGET_SETUP_INCOMING_VARARGS
87 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
88 #undef TARGET_PROMOTE_PROTOTYPES
89 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
/* +/-32K anchor range matches the 16-bit signed displacement of lm32
   load/store instructions.  */
90 #undef TARGET_MIN_ANCHOR_OFFSET
91 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
92 #undef TARGET_MAX_ANCHOR_OFFSET
93 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
94 #undef TARGET_CAN_ELIMINATE
95 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
96 #undef TARGET_LEGITIMATE_ADDRESS_P
97 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
99 struct gcc_target targetm = TARGET_INITIALIZER;
101 /* Current frame information calculated by lm32_compute_frame_size. */
102 static struct lm32_frame_info current_frame_info;
104 /* Return non-zero if the given return type should be returned in memory. */
/* NOTE(review): extraction dropped the return type, braces and return
   statements of this function.  From what is visible: non-aggregates are
   returned in registers, and aggregates of at most one word presumably
   also fit in a register — confirm against the original file.  */
107 lm32_return_in_memory (tree type)
111 if (!AGGREGATE_TYPE_P (type))
113 /* All simple types are returned in registers. */
117 size = int_size_in_bytes (type);
118 if (size >= 0 && size <= UNITS_PER_WORD)
120 /* If it can fit in one register. */
127 /* Generate an emit a word sized add instruction. */
130 emit_add (rtx dest, rtx src0, rtx src1)
133 insn = emit_insn (gen_addsi3 (dest, src0, src1));
137 /* Generate the code to compare (and possibly branch) two integer values
138 TEST_CODE is the comparison code we are trying to emulate
139 (or implement directly)
140 RESULT is where to store the result of the comparison,
141 or null to emit a branch
142 CMP0 CMP1 are the two comparison operands
143 DESTINATION is the destination of the branch, or null to only compare
/* NOTE(review): many interior lines (the full parameter list, locals,
   braces and the branch/compare if-else structure) were lost in this
   extraction; code below kept byte-identical to what was recovered.  */
147 gen_int_relational (enum rtx_code code,
153 enum machine_mode mode;
156 mode = GET_MODE (cmp0);
157 if (mode == VOIDmode)
158 mode = GET_MODE (cmp1);
160 /* Is this a branch or compare. */
161 branch_p = (destination != 0);
163 /* Instruction set doesn't support LE or LT, so swap operands and use
171 code = swap_condition (code);
184 /* Operands must be in registers. */
185 if (!register_operand (cmp0, mode))
186 cmp0 = force_reg (mode, cmp0);
187 if (!register_operand (cmp1, mode))
188 cmp1 = force_reg (mode, cmp1);
190 /* Generate conditional branch instruction. */
191 rtx cond = gen_rtx_fmt_ee (code, mode, cmp0, cmp1);
192 rtx label = gen_rtx_LABEL_REF (VOIDmode, destination);
193 insn = gen_rtx_SET (VOIDmode, pc_rtx,
194 gen_rtx_IF_THEN_ELSE (VOIDmode,
195 cond, label, pc_rtx));
196 emit_jump_insn (insn);
200 /* We can't have const_ints in cmp0, other than 0. */
201 if ((GET_CODE (cmp0) == CONST_INT) && (INTVAL (cmp0) != 0))
202 cmp0 = force_reg (mode, cmp0);
204 /* If the comparison is against an int not in legal range
205 move it into a register. */
206 if (GET_CODE (cmp1) == CONST_INT)
/* NOTE(review): the choice between constraint K (signed 16-bit) and
   L (unsigned 16-bit) presumably depends on an elided switch over the
   comparison code — confirm against the original file.  */
216 if (!satisfies_constraint_K (cmp1))
217 cmp1 = force_reg (mode, cmp1);
223 if (!satisfies_constraint_L (cmp1))
224 cmp1 = force_reg (mode, cmp1);
231 /* Generate compare instruction. */
232 emit_move_insn (result, gen_rtx_fmt_ee (code, mode, cmp0, cmp1));
236 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
237 and OPERAND[3]. Store the result in OPERANDS[0]. */
240 lm32_expand_scc (rtx operands[])
242 rtx target = operands[0];
243 enum rtx_code code = GET_CODE (operands[1]);
244 rtx op0 = operands[2];
245 rtx op1 = operands[3];
247 gen_int_relational (code, target, op0, op1, NULL_RTX);
250 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
251 CODE and jump to OPERANDS[3] if the condition holds. */
254 lm32_expand_conditional_branch (rtx operands[])
256 enum rtx_code code = GET_CODE (operands[0]);
257 rtx op0 = operands[1];
258 rtx op1 = operands[2];
259 rtx destination = operands[3];
261 gen_int_relational (code, NULL_RTX, op0, op1, destination);
264 /* Generate and emit RTL to save or restore callee save registers. */
/* NOTE(review): lines are missing from this extraction (locals, braces,
   and the save-vs-restore dispatch on OP).  OP presumably selects between
   saving (line 306) and restoring (line 308) — confirm in original.  */
266 expand_save_restore (struct lm32_frame_info *info, int op)
268 unsigned int reg_save_mask = info->reg_save_mask;
270 HOST_WIDE_INT offset;
273 /* Callee saves are below locals and above outgoing arguments. */
274 offset = info->args_size + info->callee_size;
275 for (regno = 0; regno <= 31; regno++)
277 if ((reg_save_mask & (1 << regno)) != 0)
282 offset_rtx = GEN_INT (offset);
283 if (satisfies_constraint_K (offset_rtx))
285 mem = gen_rtx_MEM (word_mode,
292 /* r10 is caller saved so it can be used as a temp reg. */
295 r10 = gen_rtx_REG (word_mode, 10);
296 insn = emit_move_insn (r10, offset_rtx);
298 RTX_FRAME_RELATED_P (insn) = 1;
299 insn = emit_add (r10, r10, stack_pointer_rtx);
301 RTX_FRAME_RELATED_P (insn) = 1;
302 mem = gen_rtx_MEM (word_mode, r10);
306 insn = emit_move_insn (mem, gen_rtx_REG (word_mode, regno));
308 insn = emit_move_insn (gen_rtx_REG (word_mode, regno), mem);
310 /* only prologue instructions which set the sp fp or save a
311 register should be marked as frame related. */
313 RTX_FRAME_RELATED_P (insn) = 1;
314 offset -= UNITS_PER_WORD;
/* Adjust the stack pointer by AMOUNT bytes.  Small adjustments use an
   immediate add; out-of-range amounts go through temp register r10.
   NOTE(review): the range bounds -32776/32768 look asymmetric for a
   16-bit signed immediate — confirm against the original file that this
   is intentional and not an extraction artifact.  */
320 stack_adjust (HOST_WIDE_INT amount)
324 if (!IN_RANGE (amount, -32776, 32768))
326 /* r10 is caller saved so it can be used as a temp reg. */
328 r10 = gen_rtx_REG (word_mode, 10);
329 insn = emit_move_insn (r10, GEN_INT (amount));
331 RTX_FRAME_RELATED_P (insn) = 1;
332 insn = emit_add (stack_pointer_rtx, stack_pointer_rtx, r10);
334 RTX_FRAME_RELATED_P (insn) = 1;
/* In-range case: single add of the immediate to sp.  */
338 insn = emit_add (stack_pointer_rtx,
339 stack_pointer_rtx, GEN_INT (amount));
341 RTX_FRAME_RELATED_P (insn) = 1;
346 /* Create and emit instructions for a functions prologue. */
348 lm32_expand_prologue (void)
352 lm32_compute_frame_size (get_frame_size ());
354 if (current_frame_info.total_size > 0)
356 /* Add space on stack new frame. */
357 stack_adjust (-current_frame_info.total_size);
359 /* Save callee save registers. */
360 if (current_frame_info.reg_save_mask != 0)
361 expand_save_restore (¤t_frame_info, 0);
363 /* Setup frame pointer if it's needed. */
364 if (frame_pointer_needed == 1)
366 /* Load offset - Don't use total_size, as that includes pretend_size,
367 which isn't part of this frame? */
369 emit_move_insn (frame_pointer_rtx,
370 GEN_INT (current_frame_info.args_size +
371 current_frame_info.callee_size +
372 current_frame_info.locals_size));
373 RTX_FRAME_RELATED_P (insn) = 1;
376 insn = emit_add (frame_pointer_rtx,
377 frame_pointer_rtx, stack_pointer_rtx);
378 RTX_FRAME_RELATED_P (insn) = 1;
381 /* Prevent prologue from being scheduled into function body. */
382 emit_insn (gen_blockage ());
386 /* Create an emit instructions for a functions epilogue. */
388 lm32_expand_epilogue (void)
390 rtx ra_rtx = gen_rtx_REG (Pmode, RA_REGNUM);
392 lm32_compute_frame_size (get_frame_size ());
394 if (current_frame_info.total_size > 0)
396 /* Prevent stack code from being reordered. */
397 emit_insn (gen_blockage ());
399 /* Restore callee save registers. */
400 if (current_frame_info.reg_save_mask != 0)
401 expand_save_restore (¤t_frame_info, 1);
403 /* Deallocate stack. */
404 stack_adjust (current_frame_info.total_size);
406 /* Return to calling function. */
407 emit_jump_insn (gen_return_internal (ra_rtx));
411 /* Return to calling function. */
412 emit_jump_insn (gen_return_internal (ra_rtx));
416 /* Return the bytes needed to compute the frame pointer from the current
/* Computes the frame layout and caches it in current_frame_info.
   NOTE(review): initializations of reg_save_mask/callee_size and the
   locals_size assignment from SIZE are among the elided lines.  */
419 lm32_compute_frame_size (int size)
422 HOST_WIDE_INT total_size, locals_size, args_size, pretend_size, callee_size;
423 unsigned int reg_save_mask;
426 args_size = crtl->outgoing_args_size;
427 pretend_size = crtl->args.pretend_args_size;
431 /* Build mask that actually determines which registers we save
432 and calculate size required to store them in the stack. */
433 for (regno = 1; regno < SP_REGNUM; regno++)
435 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
437 reg_save_mask |= 1 << regno;
438 callee_size += UNITS_PER_WORD;
/* The return address is saved for non-leaf functions (calls clobber RA).  */
441 if (df_regs_ever_live_p (RA_REGNUM) || !current_function_is_leaf
444 reg_save_mask |= 1 << RA_REGNUM;
445 callee_size += UNITS_PER_WORD;
447 if (!(reg_save_mask & (1 << FP_REGNUM)) && frame_pointer_needed)
449 reg_save_mask |= 1 << FP_REGNUM;
450 callee_size += UNITS_PER_WORD;
453 /* Compute total frame size. */
454 total_size = pretend_size + args_size + locals_size + callee_size;
456 /* Align frame to appropriate boundary (4 bytes). */
457 total_size = (total_size + 3) & ~3;
459 /* Save computed information. */
460 current_frame_info.total_size = total_size;
461 current_frame_info.callee_size = callee_size;
462 current_frame_info.pretend_size = pretend_size;
463 current_frame_info.locals_size = locals_size;
464 current_frame_info.args_size = args_size;
465 current_frame_info.reg_save_mask = reg_save_mask;
/* Print operand OP to assembler stream FILE, formatted according to
   LETTER.  Comparison codes are printed as lm32 condition suffixes.
   NOTE(review): the if/else chain's comparison-code tests (EQ, NE, GT,
   LT, GE, LE) are partially elided; "geu"/"leu" lack the trailing space
   the other suffixes have — confirm against the original whether that is
   intentional.  */
471 lm32_print_operand (FILE * file, rtx op, int letter)
475 code = GET_CODE (op);
477 if (code == SIGN_EXTEND)
478 op = XEXP (op, 0), code = GET_CODE (op);
479 else if (code == REG || code == SUBREG)
486 regnum = true_regnum (op);
488 fprintf (file, "%s", reg_names[regnum]);
490 else if (code == HIGH)
491 output_addr_const (file, XEXP (op, 0));
492 else if (code == MEM)
493 output_address (XEXP (op, 0));
494 else if (letter == 'z' && GET_CODE (op) == CONST_INT && INTVAL (op) == 0)
495 fprintf (file, "%s", reg_names[0]);
496 else if (GET_CODE (op) == CONST_DOUBLE)
498 if ((CONST_DOUBLE_LOW (op) != 0) || (CONST_DOUBLE_HIGH (op) != 0))
499 output_operand_lossage ("Only 0.0 can be loaded as an immediate");
504 fprintf (file, "e ");
506 fprintf (file, "ne ");
508 fprintf (file, "g ");
509 else if (code == GTU)
510 fprintf (file, "gu ");
512 fprintf (file, "l ");
513 else if (code == LTU)
514 fprintf (file, "lu ");
516 fprintf (file, "ge ");
517 else if (code == GEU)
518 fprintf (file, "geu");
520 fprintf (file, "le ");
521 else if (code == LEU)
522 fprintf (file, "leu");
524 output_addr_const (file, op);
527 /* A C compound statement to output to stdio stream STREAM the
528 assembler syntax for an instruction operand that is a memory
529 reference whose address is ADDR. ADDR is an RTL expression.
531 On some machines, the syntax for a symbolic address depends on
532 the section that the address refers to. On these machines,
533 define the macro `ENCODE_SECTION_INFO' to store the information
534 into the `symbol_ref', and then check for it here. */
/* NOTE(review): the switch's case labels (REG, MEM, PLUS, SYMBOL_REF,
   default) and closing braces were elided by the extraction.  */
537 lm32_print_operand_address (FILE * file, rtx addr)
539 switch (GET_CODE (addr))
542 fprintf (file, "(%s+0)", reg_names[REGNO (addr)]);
546 output_address (XEXP (addr, 0));
551 rtx arg0 = XEXP (addr, 0);
552 rtx arg1 = XEXP (addr, 1);
554 if (GET_CODE (arg0) == REG && CONSTANT_P (arg1))
556 if (GET_CODE (arg1) == CONST_INT)
557 fprintf (file, "(%s+%ld)", reg_names[REGNO (arg0)],
561 fprintf (file, "(%s+", reg_names[REGNO (arg0)]);
562 output_addr_const (file, arg1);
566 else if (CONSTANT_P (arg0) && CONSTANT_P (arg1))
567 output_addr_const (file, addr);
569 fatal_insn ("bad operand", addr);
/* Absolute symbols are only addressable gp-relative (small data).  */
574 if (SYMBOL_REF_SMALL_P (addr))
576 fprintf (file, "gp(");
577 output_addr_const (file, addr);
581 fatal_insn ("can't use non gp relative absolute address", addr);
585 fatal_insn ("invalid addressing mode", addr);
590 /* Determine where to put an argument to a function.
591 Value is zero to push the argument on the stack,
592 or a hard register in which to store the argument.
594 MODE is the argument's machine mode.
595 TYPE is the data type of the argument (as a tree).
596 This is null for libcalls where that information may
598 CUM is a variable of type CUMULATIVE_ARGS which gives info about
599 the preceding args and about the function being called.
600 NAMED is nonzero if this argument is a named parameter
601 (otherwise it is an extra parameter matching an ellipsis). */
/* NOTE(review): the return-NULL paths (must-pass-in-stack, unnamed or
   out-of-register arguments) were elided by the extraction.  */
604 lm32_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
605 tree type, int named)
607 if (mode == VOIDmode)
608 /* Compute operand 2 of the call insn. */
611 if (targetm.calls.must_pass_in_stack (mode, type))
614 if (!named || (cum + LM32_NUM_REGS2 (mode, type) > LM32_NUM_ARG_REGS))
617 return gen_rtx_REG (mode, cum + LM32_FIRST_ARG_REG)ĵ;
/* Return the byte offset between eliminable register pairs FROM/TO,
   used by the ELIMINABLE_REGS machinery.
   NOTE(review): the switch body is partially elided; only the
   arg-pointer/frame-pointer-to-stack-pointer case is visible.  */
621 lm32_compute_initial_elimination_offset (int from, int to)
623 HOST_WIDE_INT offset = 0;
627 case ARG_POINTER_REGNUM:
630 case FRAME_POINTER_REGNUM:
633 case STACK_POINTER_REGNUM:
/* pretend_size is excluded: the caller already allocated those bytes.  */
635 lm32_compute_frame_size (get_frame_size ()) -
636 current_frame_info.pretend_size;
/* Implement TARGET_SETUP_INCOMING_VARARGS: spill any anonymous-argument
   registers to the stack so va_arg can walk them contiguously, and report
   the pretend size.  NOTE(review): locals and several control-flow lines
   are elided in this extraction.  */
650 lm32_setup_incoming_varargs (CUMULATIVE_ARGS * cum, enum machine_mode mode,
651 tree type, int *pretend_size, int no_rtl)
657 fntype = TREE_TYPE (current_function_decl);
658 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
659 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
663 first_anon_arg = *cum + LM32_FIRST_ARG_REG;
666 /* this is the common case, we have been passed details setup
667 for the last named argument, we want to skip over the
668 registers, if any used in passing this named parameter in
669 order to determine which is the first registers used to pass
670 anonymous arguments. */
674 size = int_size_in_bytes (type);
676 size = GET_MODE_SIZE (mode);
679 *cum + LM32_FIRST_ARG_REG +
680 ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
683 if ((first_anon_arg < (LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS)) && !no_rtl)
685 int first_reg_offset = first_anon_arg;
686 int size = LM32_FIRST_ARG_REG + LM32_NUM_ARG_REGS - first_anon_arg;
689 regblock = gen_rtx_MEM (BLKmode,
690 plus_constant (arg_pointer_rtx,
691 FIRST_PARM_OFFSET (0)));
692 move_block_from_reg (first_reg_offset, regblock, size);
694 *pretend_size = size * UNITS_PER_WORD;
698 /* Override command line options. */
700 lm32_override_options (void)
702 /* We must have sign-extend enabled if barrel-shift isn't. */
703 if (!TARGET_BARREL_SHIFT_ENABLED && !TARGET_SIGN_EXTEND_ENABLED)
704 target_flags |= MASK_SIGN_EXTEND_ENABLED;
707 /* Return nonzero if this function is known to have a null epilogue.
708 This allows the optimizer to omit jumps to jumps if no stack
/* NOTE(review): the return statements for each early-out test were
   elided; visibly the function bails out before reload, when RA is live
   or profiling is on, and when the frame is non-empty.  */
711 lm32_can_use_return (void)
713 if (!reload_completed)
716 if (df_regs_ever_live_p (RA_REGNUM) || crtl->profile)
719 if (lm32_compute_frame_size (get_frame_size ()) != 0)
725 /* Support function to determine the return address of the function
726 'count' frames back up the stack. */
/* NOTE(review): the COUNT==0 branch structure and return statement are
   partially elided.  The -2*UNITS_PER_WORD slot is where the prologue
   saved RA relative to FRAME — confirm against expand_save_restore.  */
728 lm32_return_addr_rtx (int count, rtx frame)
733 if (!df_regs_ever_live_p (RA_REGNUM))
734 r = gen_rtx_REG (Pmode, RA_REGNUM);
737 r = gen_rtx_MEM (Pmode,
738 gen_rtx_PLUS (Pmode, frame,
739 GEN_INT (-2 * UNITS_PER_WORD)));
740 set_mem_alias_set (r, get_frame_alias_set ());
743 else if (flag_omit_frame_pointer)
747 r = gen_rtx_MEM (Pmode,
748 gen_rtx_PLUS (Pmode, frame,
749 GEN_INT (-2 * UNITS_PER_WORD)));
750 set_mem_alias_set (r, get_frame_alias_set ());
755 /* Return true if EXP should be placed in the small data section. */
/* NOTE(review): the true/false return statements are elided; visible
   logic: strings and functions are never small data, explicit
   .sdata/.sbss section placement qualifies, otherwise objects no larger
   than -G (g_switch_value) qualify.  */
758 lm32_in_small_data_p (const_tree exp)
760 /* We want to merge strings, so we never consider them small data. */
761 if (TREE_CODE (exp) == STRING_CST)
764 /* Functions are never in the small data area. Duh. */
765 if (TREE_CODE (exp) == FUNCTION_DECL)
768 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
770 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp));
771 if (strcmp (section, ".sdata") == 0 || strcmp (section, ".sbss") == 0)
776 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
778 /* If this is an incomplete type with size 0, then we can't put it
779 in sdata because it might be too big when completed. */
780 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
787 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
788 Assume that the areas do not overlap. */
/* NOTE(review): the code that picks BITS from ALIGNMENT/LENGTH (between
   lines 800 and 814) is elided.  Chunks are first loaded into fresh
   pseudos, then stored, so overlapping src/dest would still be wrong —
   matching the stated precondition.  */
791 lm32_block_move_inline (rtx dest, rtx src, HOST_WIDE_INT length,
792 HOST_WIDE_INT alignment)
794 HOST_WIDE_INT offset, delta;
795 unsigned HOST_WIDE_INT bits;
797 enum machine_mode mode;
800 /* Work out how many bits to move at a time. */
814 mode = mode_for_size (bits, MODE_INT, 0);
815 delta = bits / BITS_PER_UNIT;
817 /* Allocate a buffer for the temporary registers. */
818 regs = alloca (sizeof (rtx) * length / delta);
820 /* Load as many BITS-sized chunks as possible. */
821 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
823 regs[i] = gen_reg_rtx (mode);
824 emit_move_insn (regs[i], adjust_address (src, mode, offset));
827 /* Copy the chunks to the destination. */
828 for (offset = 0, i = 0; offset + delta <= length; offset += delta, i++)
829 emit_move_insn (adjust_address (dest, mode, offset), regs[i]);
831 /* Mop up any left-over bytes. */
834 src = adjust_address (src, BLKmode, offset);
835 dest = adjust_address (dest, BLKmode, offset);
836 move_by_pieces (dest, src, length - offset,
837 MIN (MEM_ALIGN (src), MEM_ALIGN (dest)), 0);
841 /* Expand string/block move operations.
843 operands[0] is the pointer to the destination.
844 operands[1] is the pointer to the source.
845 operands[2] is the number of bytes to move.
846 operands[3] is the alignment. */
/* NOTE(review): the return statements are elided; presumably returns
   nonzero after inlining small (<= 32 byte) constant-length moves and
   zero otherwise so the caller falls back to a library call.  */
849 lm32_expand_block_move (rtx * operands)
851 if ((GET_CODE (operands[2]) == CONST_INT) && (INTVAL (operands[2]) <= 32))
853 lm32_block_move_inline (operands[0], operands[1], INTVAL (operands[2]),
854 INTVAL (operands[3]));
860 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
861 isn't protected by a PIC unspec. */
/* Recursive walk over the rtx format string: 'E' entries are vectors,
   'e' entries are sub-expressions.  NOTE(review): return statements and
   local declarations were elided by the extraction.  */
863 nonpic_symbol_mentioned_p (rtx x)
868 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF
869 || GET_CODE (x) == PC)
872 /* We don't want to look into the possible MEM location of a
873 CONST_DOUBLE, since we're not going to use it, in general. */
874 if (GET_CODE (x) == CONST_DOUBLE)
877 if (GET_CODE (x) == UNSPEC)
880 fmt = GET_RTX_FORMAT (GET_CODE (x));
881 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
887 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
888 if (nonpic_symbol_mentioned_p (XVECEXP (x, i, j)))
891 else if (fmt[i] == 'e' && nonpic_symbol_mentioned_p (XEXP (x, i)))
898 /* Compute a (partial) cost for rtx X. Return true if the complete
899 cost has been computed, and false if subexpressions should be
900 scanned. In either case, *TOTAL contains the cost result. */
/* Implements TARGET_RTX_COSTS.  NOTE(review): the switch's case labels
   (PLUS/MINUS, COMPARE, shifts, MULT, DIV/MOD, CONST_INT, MEM, etc.),
   the speed-vs-size if/else scaffolding, and the return statements were
   elided by the extraction; the cost assignments below are kept
   byte-identical.  */
903 lm32_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed)
905 enum machine_mode mode = GET_MODE (x);
/* Latencies (in insns) for -Ospeed costing; libcall_size_cost models
   the call overhead when costing for size.  */
908 const int arithmetic_latency = 1;
909 const int shift_latency = 1;
910 const int compare_latency = 2;
911 const int multiply_latency = 3;
912 const int load_latency = 3;
913 const int libcall_size_cost = 5;
915 /* Determine if we can handle the given mode size in a single instruction. */
916 small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
929 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
932 COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
939 *total = COSTS_N_INSNS (1);
941 *total = COSTS_N_INSNS (compare_latency);
945 /* FIXME. Guessing here. */
946 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
953 if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
956 *total = COSTS_N_INSNS (1);
958 *total = COSTS_N_INSNS (shift_latency);
960 else if (TARGET_BARREL_SHIFT_ENABLED)
962 /* FIXME: Guessing here. */
963 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
965 else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
967 *total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
973 *total = COSTS_N_INSNS (libcall_size_cost);
975 *total = COSTS_N_INSNS (100);
980 if (TARGET_MULTIPLY_ENABLED && small_mode)
983 *total = COSTS_N_INSNS (1);
985 *total = COSTS_N_INSNS (multiply_latency);
991 *total = COSTS_N_INSNS (libcall_size_cost);
993 *total = COSTS_N_INSNS (100);
1001 if (TARGET_DIVIDE_ENABLED && small_mode)
1004 *total = COSTS_N_INSNS (1);
1007 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1010 unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
1017 if (IN_RANGE (i, 0, 65536))
1018 *total = COSTS_N_INSNS (1 + 1 + cycles);
1020 *total = COSTS_N_INSNS (2 + 1 + cycles);
1023 else if (GET_CODE (XEXP (x, 1)) == REG)
1025 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1030 *total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
1039 *total = COSTS_N_INSNS (libcall_size_cost);
1041 *total = COSTS_N_INSNS (100);
1048 *total = COSTS_N_INSNS (1);
1050 *total = COSTS_N_INSNS (arithmetic_latency);
1054 if (MEM_P (XEXP (x, 0)))
1055 *total = COSTS_N_INSNS (0);
1056 else if (small_mode)
1059 *total = COSTS_N_INSNS (1);
1061 *total = COSTS_N_INSNS (arithmetic_latency);
1064 *total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
1073 *total = COSTS_N_INSNS (0);
/* Constants matching K (signed 16-bit) or L (unsigned 16-bit) fit in an
   immediate field and are free; others need an extra insn to build.  */
1084 if (satisfies_constraint_L (x))
1085 *total = COSTS_N_INSNS (0);
1087 *total = COSTS_N_INSNS (2);
1094 if (satisfies_constraint_K (x))
1095 *total = COSTS_N_INSNS (0);
1097 *total = COSTS_N_INSNS (2);
1101 if (TARGET_MULTIPLY_ENABLED)
1103 if (satisfies_constraint_K (x))
1104 *total = COSTS_N_INSNS (0);
1106 *total = COSTS_N_INSNS (2);
1112 if (satisfies_constraint_K (x))
1113 *total = COSTS_N_INSNS (1);
1115 *total = COSTS_N_INSNS (2);
1126 *total = COSTS_N_INSNS (0);
1133 *total = COSTS_N_INSNS (0);
1142 *total = COSTS_N_INSNS (2);
1146 *total = COSTS_N_INSNS (1);
1151 *total = COSTS_N_INSNS (1);
1153 *total = COSTS_N_INSNS (load_latency);
1161 /* Implemenent TARGET_CAN_ELIMINATE. */
1164 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
1166 return (to == STACK_POINTER_REGNUM && frame_pointer_needed) ? false : true;
1169 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
/* Valid lm32 addresses: a base register, base register plus a signed
   16-bit constant (constraint K), or a gp-relative (small-data) symbol.
   NOTE(review): the return statements after each accepted form were
   elided by the extraction.  */
1172 lm32_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x, bool strict)
1175 if (strict && REG_P (x) && STRICT_REG_OK_FOR_BASE_P (x))
1177 if (!strict && REG_P (x) && NONSTRICT_REG_OK_FOR_BASE_P (x))
1181 if (GET_CODE (x) == PLUS
1182 && REG_P (XEXP (x, 0))
1183 && ((strict && STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0)))
1184 || (!strict && NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))))
1185 && GET_CODE (XEXP (x, 1)) == CONST_INT
1186 && satisfies_constraint_K (XEXP ((x), 1)))
1190 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_SMALL_P (x))
1196 /* Check a move is not memory to memory. */
1199 lm32_move_ok (enum machine_mode mode, rtx operands[2]) {
1200 if (memory_operand (operands[0], mode))
1201 return register_or_zero_operand (operands[1], mode);
1205 /* Implement LEGITIMATE_CONSTANT_P. */
1208 lm32_legitimate_constant_p (rtx x)
1210 /* 32-bit addresses require multiple instructions. */
1211 if (!flag_pic && reloc_operand (x, GET_MODE (x)))