1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
53 /* Machine-specific symbol_ref flags. */
54 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
57 static bool s390_assemble_integer (rtx, unsigned int, int);
58 static void s390_select_rtx_section (enum machine_mode, rtx,
59 unsigned HOST_WIDE_INT);
60 static void s390_encode_section_info (tree, rtx, int);
61 static bool s390_cannot_force_const_mem (rtx);
62 static rtx s390_delegitimize_address (rtx);
63 static void s390_init_builtins (void);
64 static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
65 static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
67 static enum attr_type s390_safe_attr_type (rtx);
69 static int s390_adjust_cost (rtx, rtx, rtx, int);
70 static int s390_adjust_priority (rtx, int);
71 static int s390_issue_rate (void);
72 static int s390_use_dfa_pipeline_interface (void);
73 static int s390_first_cycle_multipass_dfa_lookahead (void);
74 static int s390_sched_reorder2 (FILE *, int, rtx *, int *, int);
75 static bool s390_rtx_costs (rtx, int, int, int *);
76 static int s390_address_cost (rtx);
77 static void s390_reorg (void);
78 static bool s390_valid_pointer_mode (enum machine_mode);
80 #undef TARGET_ASM_ALIGNED_HI_OP
81 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
82 #undef TARGET_ASM_ALIGNED_DI_OP
83 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
84 #undef TARGET_ASM_INTEGER
85 #define TARGET_ASM_INTEGER s390_assemble_integer
87 #undef TARGET_ASM_OPEN_PAREN
88 #define TARGET_ASM_OPEN_PAREN ""
90 #undef TARGET_ASM_CLOSE_PAREN
91 #define TARGET_ASM_CLOSE_PAREN ""
93 #undef TARGET_ASM_SELECT_RTX_SECTION
94 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
96 #undef TARGET_ENCODE_SECTION_INFO
97 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
100 #undef TARGET_HAVE_TLS
101 #define TARGET_HAVE_TLS true
103 #undef TARGET_CANNOT_FORCE_CONST_MEM
104 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
106 #undef TARGET_DELEGITIMIZE_ADDRESS
107 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
109 #undef TARGET_INIT_BUILTINS
110 #define TARGET_INIT_BUILTINS s390_init_builtins
111 #undef TARGET_EXPAND_BUILTIN
112 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
114 #undef TARGET_ASM_OUTPUT_MI_THUNK
115 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
116 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
117 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
119 #undef TARGET_SCHED_ADJUST_COST
120 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
121 #undef TARGET_SCHED_ADJUST_PRIORITY
122 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
123 #undef TARGET_SCHED_ISSUE_RATE
124 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
125 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
126 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
127 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
128 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
129 #undef TARGET_SCHED_REORDER2
130 #define TARGET_SCHED_REORDER2 s390_sched_reorder2
132 #undef TARGET_RTX_COSTS
133 #define TARGET_RTX_COSTS s390_rtx_costs
134 #undef TARGET_ADDRESS_COST
135 #define TARGET_ADDRESS_COST s390_address_cost
137 #undef TARGET_MACHINE_DEPENDENT_REORG
138 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
140 #undef TARGET_VALID_POINTER_MODE
141 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
143 struct gcc_target targetm = TARGET_INITIALIZER;
145 extern int reload_completed;
147 /* The alias set for prologue/epilogue register save/restore. */
148 static int s390_sr_alias_set = 0;
150 /* Save information from a "cmpxx" operation until the branch or scc is
152 rtx s390_compare_op0, s390_compare_op1;
154 /* Structure used to hold the components of a S/390 memory
155 address. A legitimate address on S/390 is of the general
157 base + index + displacement
158 where any of the components is optional.
160 base and index are registers of the class ADDR_REGS,
161 displacement is an unsigned 12-bit immediate constant. */
171 /* Which cpu are we tuning for. */
172 enum processor_type s390_tune;
173 enum processor_flags s390_tune_flags;
174 /* Which instruction set architecture to use. */
175 enum processor_type s390_arch;
176 enum processor_flags s390_arch_flags;
178 /* Strings to hold which cpu and instruction set architecture to use. */
179 const char *s390_tune_string; /* for -mtune=<xxx> */
180 const char *s390_arch_string; /* for -march=<xxx> */
182 /* Define the structure for the machine field in struct function. */
184 struct machine_function GTY(())
186 /* Label of start of initial literal pool. */
187 rtx literal_pool_label;
189 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
192 /* Set if return address needs to be saved because the current
193 function uses __builtin_return_addr (0). */
194 bool save_return_addr_p;
196 /* Number of first and last gpr to be saved, restored. */
198 int first_restore_gpr;
201 /* Size of stack frame. */
202 HOST_WIDE_INT frame_size;
204 /* Some local-dynamic TLS symbol name. */
205 const char *some_ld_name;
208 static int s390_match_ccmode_set (rtx, enum machine_mode);
209 static int s390_branch_condition_mask (rtx);
210 static const char *s390_branch_condition_mnemonic (rtx, int);
211 static int check_mode (rtx, enum machine_mode *);
212 static int general_s_operand (rtx, enum machine_mode, int);
213 static int s390_short_displacement (rtx);
214 static int s390_decompose_address (rtx, struct s390_address *);
215 static rtx get_thread_pointer (void);
216 static rtx legitimize_tls_address (rtx, rtx);
217 static const char *get_some_local_dynamic_name (void);
218 static int get_some_local_dynamic_name_1 (rtx *, void *);
219 static int reg_used_in_mem_p (int, rtx);
220 static int addr_generation_dependency_p (rtx, rtx);
221 static int s390_split_branches (rtx, bool *);
222 static void find_constant_pool_ref (rtx, rtx *);
223 static void replace_constant_pool_ref (rtx *, rtx, rtx);
224 static rtx find_ltrel_base (rtx);
225 static void replace_ltrel_base (rtx *, rtx);
226 static void s390_optimize_prolog (int);
227 static int find_unused_clobbered_reg (void);
228 static void s390_frame_info (void);
229 static rtx save_fpr (rtx, int, int);
230 static rtx restore_fpr (rtx, int, int);
231 static rtx save_gprs (rtx, int, int, int);
232 static rtx restore_gprs (rtx, int, int, int);
233 static int s390_function_arg_size (enum machine_mode, tree);
234 static bool s390_function_arg_float (enum machine_mode, tree);
235 static struct machine_function * s390_init_machine_status (void);
237 /* Check whether integer displacement is in range. */
238 #define DISP_IN_RANGE(d) \
239 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
240 : ((d) >= 0 && (d) <= 4095))
242 /* Return true if SET either doesn't set the CC register, or else
243 the source and destination have matching CC modes and that
244 CC mode is at least as constrained as REQ_MODE. */
247 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
249 enum machine_mode set_mode;
251 if (GET_CODE (set) != SET)
254 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
257 set_mode = GET_MODE (SET_DEST (set));
270 if (req_mode != set_mode)
275 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
276 && req_mode != CCSRmode && req_mode != CCURmode)
282 if (req_mode != CCAmode)
290 return (GET_MODE (SET_SRC (set)) == set_mode);
293 /* Return true if every SET in INSN that sets the CC register
294 has source and destination with matching CC modes and that
295 CC mode is at least as constrained as REQ_MODE.
296 If REQ_MODE is VOIDmode, always return false. */
299 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
303 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
304 if (req_mode == VOIDmode)
307 if (GET_CODE (PATTERN (insn)) == SET)
308 return s390_match_ccmode_set (PATTERN (insn), req_mode);
310 if (GET_CODE (PATTERN (insn)) == PARALLEL)
311 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
313 rtx set = XVECEXP (PATTERN (insn), 0, i);
314 if (GET_CODE (set) == SET)
315 if (!s390_match_ccmode_set (set, req_mode))
322 /* If a test-under-mask instruction can be used to implement
323 (compare (and ... OP1) OP2), return the CC mode required
324 to do that. Otherwise, return VOIDmode.
325 MIXED is true if the instruction can distinguish between
326 CC1 and CC2 for mixed selected bits (TMxx), it is false
327 if the instruction cannot (TM). */
330 s390_tm_ccmode (rtx op1, rtx op2, int mixed)
334 /* ??? Fixme: should work on CONST_DOUBLE as well. */
335 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
338 /* Selected bits all zero: CC0. */
339 if (INTVAL (op2) == 0)
342 /* Selected bits all one: CC3. */
343 if (INTVAL (op2) == INTVAL (op1))
346 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
349 bit1 = exact_log2 (INTVAL (op2));
350 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
351 if (bit0 != -1 && bit1 != -1)
352 return bit0 > bit1 ? CCT1mode : CCT2mode;
358 /* Given a comparison code OP (EQ, NE, etc.) and the operands
359 OP0 and OP1 of a COMPARE, return the mode to be used for the
363 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
369 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
370 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
372 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
373 || GET_CODE (op1) == NEG)
374 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
377 if (GET_CODE (op0) == AND)
379 /* Check whether we can potentially do it via TM. */
380 enum machine_mode ccmode;
381 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
382 if (ccmode != VOIDmode)
384 /* Relax CCTmode to CCZmode to allow fall-back to AND
385 if that turns out to be beneficial. */
386 return ccmode == CCTmode ? CCZmode : ccmode;
390 if (register_operand (op0, HImode)
391 && GET_CODE (op1) == CONST_INT
392 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
394 if (register_operand (op0, QImode)
395 && GET_CODE (op1) == CONST_INT
396 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
405 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
406 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
408 if (INTVAL (XEXP((op0), 1)) < 0)
421 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
422 && GET_CODE (op1) != CONST_INT)
428 if (GET_CODE (op0) == PLUS
429 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
432 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
433 && GET_CODE (op1) != CONST_INT)
439 if (GET_CODE (op0) == MINUS
440 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
443 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
444 && GET_CODE (op1) != CONST_INT)
453 /* Return branch condition mask to implement a branch
454 specified by CODE. */
457 s390_branch_condition_mask (rtx code)
459 const int CC0 = 1 << 3;
460 const int CC1 = 1 << 2;
461 const int CC2 = 1 << 1;
462 const int CC3 = 1 << 0;
464 if (GET_CODE (XEXP (code, 0)) != REG
465 || REGNO (XEXP (code, 0)) != CC_REGNUM
466 || XEXP (code, 1) != const0_rtx)
469 switch (GET_MODE (XEXP (code, 0)))
472 switch (GET_CODE (code))
475 case NE: return CC1 | CC2 | CC3;
482 switch (GET_CODE (code))
485 case NE: return CC0 | CC2 | CC3;
492 switch (GET_CODE (code))
495 case NE: return CC0 | CC1 | CC3;
502 switch (GET_CODE (code))
505 case NE: return CC0 | CC1 | CC2;
512 switch (GET_CODE (code))
514 case EQ: return CC0 | CC2;
515 case NE: return CC1 | CC3;
522 switch (GET_CODE (code))
524 case LTU: return CC2 | CC3; /* carry */
525 case GEU: return CC0 | CC1; /* no carry */
532 switch (GET_CODE (code))
534 case GTU: return CC0 | CC1; /* borrow */
535 case LEU: return CC2 | CC3; /* no borrow */
542 switch (GET_CODE (code))
545 case NE: return CC1 | CC2 | CC3;
546 case LTU: return CC1;
547 case GTU: return CC2;
548 case LEU: return CC0 | CC1;
549 case GEU: return CC0 | CC2;
556 switch (GET_CODE (code))
559 case NE: return CC2 | CC1 | CC3;
560 case LTU: return CC2;
561 case GTU: return CC1;
562 case LEU: return CC0 | CC2;
563 case GEU: return CC0 | CC1;
570 switch (GET_CODE (code))
573 case NE: return CC1 | CC2 | CC3;
574 case LT: return CC1 | CC3;
576 case LE: return CC0 | CC1 | CC3;
577 case GE: return CC0 | CC2;
584 switch (GET_CODE (code))
587 case NE: return CC1 | CC2 | CC3;
589 case GT: return CC2 | CC3;
590 case LE: return CC0 | CC1;
591 case GE: return CC0 | CC2 | CC3;
598 switch (GET_CODE (code))
601 case NE: return CC1 | CC2 | CC3;
604 case LE: return CC0 | CC1;
605 case GE: return CC0 | CC2;
606 case UNORDERED: return CC3;
607 case ORDERED: return CC0 | CC1 | CC2;
608 case UNEQ: return CC0 | CC3;
609 case UNLT: return CC1 | CC3;
610 case UNGT: return CC2 | CC3;
611 case UNLE: return CC0 | CC1 | CC3;
612 case UNGE: return CC0 | CC2 | CC3;
613 case LTGT: return CC1 | CC2;
620 switch (GET_CODE (code))
623 case NE: return CC2 | CC1 | CC3;
626 case LE: return CC0 | CC2;
627 case GE: return CC0 | CC1;
628 case UNORDERED: return CC3;
629 case ORDERED: return CC0 | CC2 | CC1;
630 case UNEQ: return CC0 | CC3;
631 case UNLT: return CC2 | CC3;
632 case UNGT: return CC1 | CC3;
633 case UNLE: return CC0 | CC2 | CC3;
634 case UNGE: return CC0 | CC1 | CC3;
635 case LTGT: return CC2 | CC1;
646 /* If INV is false, return assembler mnemonic string to implement
647 a branch specified by CODE. If INV is true, return mnemonic
648 for the corresponding inverted branch. */
651 s390_branch_condition_mnemonic (rtx code, int inv)
653 static const char *const mnemonic[16] =
655 NULL, "o", "h", "nle",
656 "l", "nhe", "lh", "ne",
657 "e", "nlh", "he", "nl",
658 "le", "nh", "no", NULL
661 int mask = s390_branch_condition_mask (code);
666 if (mask < 1 || mask > 14)
669 return mnemonic[mask];
672 /* If OP is an integer constant of mode MODE with exactly one
673 HImode subpart unequal to DEF, return the number of that
674 subpart. As a special case, all HImode subparts of OP are
675 equal to DEF, return zero. Otherwise, return -1. */
678 s390_single_hi (rtx op, enum machine_mode mode, int def)
680 if (GET_CODE (op) == CONST_INT)
682 unsigned HOST_WIDE_INT value = 0;
683 int n_parts = GET_MODE_SIZE (mode) / 2;
686 for (i = 0; i < n_parts; i++)
689 value = (unsigned HOST_WIDE_INT) INTVAL (op);
693 if ((value & 0xffff) != (unsigned)(def & 0xffff))
702 return part == -1 ? 0 : (n_parts - 1 - part);
705 else if (GET_CODE (op) == CONST_DOUBLE
706 && GET_MODE (op) == VOIDmode)
708 unsigned HOST_WIDE_INT value = 0;
709 int n_parts = GET_MODE_SIZE (mode) / 2;
712 for (i = 0; i < n_parts; i++)
715 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
716 else if (i == HOST_BITS_PER_WIDE_INT / 16)
717 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
721 if ((value & 0xffff) != (unsigned)(def & 0xffff))
730 return part == -1 ? 0 : (n_parts - 1 - part);
736 /* Extract the HImode part number PART from integer
737 constant OP of mode MODE. */
740 s390_extract_hi (rtx op, enum machine_mode mode, int part)
742 int n_parts = GET_MODE_SIZE (mode) / 2;
743 if (part < 0 || part >= n_parts)
746 part = n_parts - 1 - part;
748 if (GET_CODE (op) == CONST_INT)
750 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
751 return ((value >> (16 * part)) & 0xffff);
753 else if (GET_CODE (op) == CONST_DOUBLE
754 && GET_MODE (op) == VOIDmode)
756 unsigned HOST_WIDE_INT value;
757 if (part < HOST_BITS_PER_WIDE_INT / 16)
758 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
760 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
761 part -= HOST_BITS_PER_WIDE_INT / 16;
763 return ((value >> (16 * part)) & 0xffff);
769 /* If OP is an integer constant of mode MODE with exactly one
770 QImode subpart unequal to DEF, return the number of that
771 subpart. As a special case, all QImode subparts of OP are
772 equal to DEF, return zero. Otherwise, return -1. */
775 s390_single_qi (rtx op, enum machine_mode mode, int def)
777 if (GET_CODE (op) == CONST_INT)
779 unsigned HOST_WIDE_INT value = 0;
780 int n_parts = GET_MODE_SIZE (mode);
783 for (i = 0; i < n_parts; i++)
786 value = (unsigned HOST_WIDE_INT) INTVAL (op);
790 if ((value & 0xff) != (unsigned)(def & 0xff))
799 return part == -1 ? 0 : (n_parts - 1 - part);
802 else if (GET_CODE (op) == CONST_DOUBLE
803 && GET_MODE (op) == VOIDmode)
805 unsigned HOST_WIDE_INT value = 0;
806 int n_parts = GET_MODE_SIZE (mode);
809 for (i = 0; i < n_parts; i++)
812 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
813 else if (i == HOST_BITS_PER_WIDE_INT / 8)
814 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
818 if ((value & 0xff) != (unsigned)(def & 0xff))
827 return part == -1 ? 0 : (n_parts - 1 - part);
833 /* Extract the QImode part number PART from integer
834 constant OP of mode MODE. */
837 s390_extract_qi (rtx op, enum machine_mode mode, int part)
839 int n_parts = GET_MODE_SIZE (mode);
840 if (part < 0 || part >= n_parts)
843 part = n_parts - 1 - part;
845 if (GET_CODE (op) == CONST_INT)
847 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
848 return ((value >> (8 * part)) & 0xff);
850 else if (GET_CODE (op) == CONST_DOUBLE
851 && GET_MODE (op) == VOIDmode)
853 unsigned HOST_WIDE_INT value;
854 if (part < HOST_BITS_PER_WIDE_INT / 8)
855 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
857 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
858 part -= HOST_BITS_PER_WIDE_INT / 8;
860 return ((value >> (8 * part)) & 0xff);
866 /* Check whether we can (and want to) split a double-word
867 move in mode MODE from SRC to DST into two single-word
868 moves, moving the subword FIRST_SUBWORD first. */
871 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
873 /* Floating point registers cannot be split. */
874 if (FP_REG_P (src) || FP_REG_P (dst))
877 /* We don't need to split if operands are directly accessible. */
878 if (s_operand (src, mode) || s_operand (dst, mode))
881 /* Non-offsettable memory references cannot be split. */
882 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
883 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
886 /* Moving the first subword must not clobber a register
887 needed to move the second subword. */
888 if (register_operand (dst, mode))
890 rtx subreg = operand_subword (dst, first_subword, 0, mode);
891 if (reg_overlap_mentioned_p (subreg, src))
899 /* Change optimizations to be performed, depending on the
902 LEVEL is the optimization level specified; 2 if `-O2' is
903 specified, 1 if `-O' is specified, and 0 if neither is specified.
905 SIZE is nonzero if `-Os' is specified and zero otherwise. */
908 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
910 /* ??? There are apparently still problems with -fcaller-saves. */
911 flag_caller_saves = 0;
913 /* By default, always emit DWARF-2 unwind info. This allows debugging
914 without maintaining a stack frame back-chain. */
915 flag_asynchronous_unwind_tables = 1;
919 override_options (void)
924 const char *const name; /* processor name or nickname. */
925 const enum processor_type processor;
926 const enum processor_flags flags;
928 const processor_alias_table[] =
930 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
931 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
932 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
933 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
934 | PF_LONG_DISPLACEMENT},
937 int const pta_size = ARRAY_SIZE (processor_alias_table);
939 /* Acquire a unique set number for our register saves and restores. */
940 s390_sr_alias_set = new_alias_set ();
942 /* Set up function hooks. */
943 init_machine_status = s390_init_machine_status;
945 /* Architecture mode defaults according to ABI. */
946 if (!(target_flags_explicit & MASK_ZARCH))
949 target_flags |= MASK_ZARCH;
951 target_flags &= ~MASK_ZARCH;
954 /* Determine processor architectural level. */
955 if (!s390_arch_string)
956 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
958 for (i = 0; i < pta_size; i++)
959 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
961 s390_arch = processor_alias_table[i].processor;
962 s390_arch_flags = processor_alias_table[i].flags;
966 error ("Unknown cpu used in -march=%s.", s390_arch_string);
968 /* Determine processor to tune for. */
969 if (!s390_tune_string)
971 s390_tune = s390_arch;
972 s390_tune_flags = s390_arch_flags;
973 s390_tune_string = s390_arch_string;
977 for (i = 0; i < pta_size; i++)
978 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
980 s390_tune = processor_alias_table[i].processor;
981 s390_tune_flags = processor_alias_table[i].flags;
985 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
989 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
990 error ("z/Architecture mode not supported on %s.", s390_arch_string);
991 if (TARGET_64BIT && !TARGET_ZARCH)
992 error ("64-bit ABI not supported in ESA/390 mode.");
995 /* Map for smallest class containing reg regno. */
997 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
998 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
999 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1000 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1001 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1002 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1003 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1004 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1005 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1006 ADDR_REGS, NO_REGS, ADDR_REGS
1009 /* Return attribute type of insn. */
1011 static enum attr_type
1012 s390_safe_attr_type (rtx insn)
1014 if (recog_memoized (insn) >= 0)
1015 return get_attr_type (insn);
1020 /* Return true if OP a (const_int 0) operand.
1021 OP is the current operation.
1022 MODE is the current operation mode. */
1025 const0_operand (register rtx op, enum machine_mode mode)
1027 return op == CONST0_RTX (mode);
1030 /* Return true if OP is constant.
1031 OP is the current operation.
1032 MODE is the current operation mode. */
1035 consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1037 return CONSTANT_P (op);
1040 /* Return true if the mode of operand OP matches MODE.
1041 If MODE is set to VOIDmode, set it to the mode of OP. */
1044 check_mode (register rtx op, enum machine_mode *mode)
1046 if (*mode == VOIDmode)
1047 *mode = GET_MODE (op);
1050 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1056 /* Return true if OP a valid operand for the LARL instruction.
1057 OP is the current operation.
1058 MODE is the current operation mode. */
1061 larl_operand (register rtx op, enum machine_mode mode)
1063 if (! check_mode (op, &mode))
1066 /* Allow labels and local symbols. */
1067 if (GET_CODE (op) == LABEL_REF)
1069 if (GET_CODE (op) == SYMBOL_REF)
1070 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1071 && SYMBOL_REF_TLS_MODEL (op) == 0
1072 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1074 /* Everything else must have a CONST, so strip it. */
1075 if (GET_CODE (op) != CONST)
1079 /* Allow adding *even* in-range constants. */
1080 if (GET_CODE (op) == PLUS)
1082 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1083 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1085 #if HOST_BITS_PER_WIDE_INT > 32
1086 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1087 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1093 /* Labels and local symbols allowed here as well. */
1094 if (GET_CODE (op) == LABEL_REF)
1096 if (GET_CODE (op) == SYMBOL_REF)
1097 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1098 && SYMBOL_REF_TLS_MODEL (op) == 0
1099 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1101 /* Now we must have a @GOTENT offset or @PLT stub
1102 or an @INDNTPOFF TLS offset. */
1103 if (GET_CODE (op) == UNSPEC
1104 && XINT (op, 1) == UNSPEC_GOTENT)
1106 if (GET_CODE (op) == UNSPEC
1107 && XINT (op, 1) == UNSPEC_PLT)
1109 if (GET_CODE (op) == UNSPEC
1110 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1116 /* Helper routine to implement s_operand and s_imm_operand.
1117 OP is the current operation.
1118 MODE is the current operation mode.
1119 ALLOW_IMMEDIATE specifies whether immediate operands should
1120 be accepted or not. */
1123 general_s_operand (register rtx op, enum machine_mode mode,
1124 int allow_immediate)
1126 struct s390_address addr;
1128 /* Call general_operand first, so that we don't have to
1129 check for many special cases. */
1130 if (!general_operand (op, mode))
1133 /* Just like memory_operand, allow (subreg (mem ...))
1135 if (reload_completed
1136 && GET_CODE (op) == SUBREG
1137 && GET_CODE (SUBREG_REG (op)) == MEM)
1138 op = SUBREG_REG (op);
1140 switch (GET_CODE (op))
1142 /* Constants that we are sure will be forced to the
1143 literal pool in reload are OK as s-operand. Note
1144 that we cannot call s390_preferred_reload_class here
1145 because it might not be known yet at this point
1146 whether the current function is a leaf or not. */
1149 if (!allow_immediate || reload_completed)
1151 if (!legitimate_reload_constant_p (op))
1157 /* Memory operands are OK unless they already use an
1160 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1162 if (s390_decompose_address (XEXP (op, 0), &addr)
1174 /* Return true if OP is a valid S-type operand.
1175 OP is the current operation.
1176 MODE is the current operation mode. */
1179 s_operand (register rtx op, enum machine_mode mode)
1181 return general_s_operand (op, mode, 0);
1184 /* Return true if OP is a valid S-type operand or an immediate
1185 operand that can be addressed as S-type operand by forcing
1186 it into the literal pool.
1187 OP is the current operation.
1188 MODE is the current operation mode. */
1191 s_imm_operand (register rtx op, enum machine_mode mode)
1193 return general_s_operand (op, mode, 1);
1196 /* Return true if DISP is a valid short displacement. */
1199 s390_short_displacement (rtx disp)
1201 /* No displacement is OK. */
1205 /* Integer displacement in range. */
1206 if (GET_CODE (disp) == CONST_INT)
1207 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1209 /* GOT offset is not OK, the GOT can be large. */
1210 if (GET_CODE (disp) == CONST
1211 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1212 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1215 /* All other symbolic constants are literal pool references,
1216 which are OK as the literal pool must be small. */
1217 if (GET_CODE (disp) == CONST)
1223 /* Return true if OP is a valid operand for a C constraint. */
1226 s390_extra_constraint (rtx op, int c)
1228 struct s390_address addr;
1233 if (GET_CODE (op) != MEM)
1235 if (!s390_decompose_address (XEXP (op, 0), &addr))
1240 if (TARGET_LONG_DISPLACEMENT)
1242 if (!s390_short_displacement (addr.disp))
1248 if (GET_CODE (op) != MEM)
1251 if (TARGET_LONG_DISPLACEMENT)
1253 if (!s390_decompose_address (XEXP (op, 0), &addr))
1255 if (!s390_short_displacement (addr.disp))
1261 if (!TARGET_LONG_DISPLACEMENT)
1263 if (GET_CODE (op) != MEM)
1265 if (!s390_decompose_address (XEXP (op, 0), &addr))
1269 if (s390_short_displacement (addr.disp))
1274 if (!TARGET_LONG_DISPLACEMENT)
1276 if (GET_CODE (op) != MEM)
1278 /* Any invalid address here will be fixed up by reload,
1279 so accept it for the most generic constraint. */
1280 if (s390_decompose_address (XEXP (op, 0), &addr)
1281 && s390_short_displacement (addr.disp))
1286 if (TARGET_LONG_DISPLACEMENT)
1288 if (!s390_decompose_address (op, &addr))
1290 if (!s390_short_displacement (addr.disp))
1296 if (!TARGET_LONG_DISPLACEMENT)
1298 /* Any invalid address here will be fixed up by reload,
1299 so accept it for the most generic constraint. */
1300 if (s390_decompose_address (op, &addr)
1301 && s390_short_displacement (addr.disp))
1312 /* Compute a (partial) cost for rtx X. Return true if the complete
1313 cost has been computed, and false if subexpressions should be
1314 scanned. In either case, *TOTAL contains the cost result. */
1317 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
1322 if (GET_CODE (XEXP (x, 0)) == MINUS
1323 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1330 /* Force_const_mem does not work out of reload, because the
1331 saveable_obstack is set to reload_obstack, which does not
1332 live long enough. Because of this we cannot use force_const_mem
1333 in addsi3. This leads to problems with gen_add2_insn with a
1334 constant greater than a short. Because of that we give an
1335 addition of greater constants a cost of 3 (reload1.c 10096). */
1336 /* ??? saveable_obstack no longer exists. */
1337 if (outer_code == PLUS
1338 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1339 *total = COSTS_N_INSNS (3);
1360 *total = COSTS_N_INSNS (1);
1364 if (GET_MODE (XEXP (x, 0)) == DImode)
1365 *total = COSTS_N_INSNS (40);
1367 *total = COSTS_N_INSNS (7);
1374 *total = COSTS_N_INSNS (33);
1382 /* Return the cost of an address rtx ADDR. */
1385 s390_address_cost (rtx addr)
1387 struct s390_address ad;
1388 if (!s390_decompose_address (addr, &ad))
1391 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1394 /* Return true if OP is a valid operand for the BRAS instruction.
1395 OP is the current operation.
1396 MODE is the current operation mode. */
1399 bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1401 register enum rtx_code code = GET_CODE (op);
1403 /* Allow SYMBOL_REFs. */
1404 if (code == SYMBOL_REF)
1407 /* Allow @PLT stubs. */
1409 && GET_CODE (XEXP (op, 0)) == UNSPEC
1410 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1415 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1416 otherwise return 0. */
1419 tls_symbolic_operand (register rtx op)
1421 if (GET_CODE (op) != SYMBOL_REF)
1423 return SYMBOL_REF_TLS_MODEL (op);
1426 /* Return true if OP is a load multiple operation. It is known to be a
1427 PARALLEL and the first section will be tested.
1428 OP is the current operation.
1429 MODE is the current operation mode. */
1432 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1434 int count = XVECLEN (op, 0);
1435 unsigned int dest_regno;
1440 /* Perform a quick check so we don't blow up below. */
1442 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1443 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1444 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1447 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1448 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1450 /* Check, is base, or base + displacement. */
1452 if (GET_CODE (src_addr) == REG)
1454 else if (GET_CODE (src_addr) == PLUS
1455 && GET_CODE (XEXP (src_addr, 0)) == REG
1456 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1458 off = INTVAL (XEXP (src_addr, 1));
1459 src_addr = XEXP (src_addr, 0);
1464 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
1467 for (i = 1; i < count; i++)
1469 rtx elt = XVECEXP (op, 0, i);
1471 if (GET_CODE (elt) != SET
1472 || GET_CODE (SET_DEST (elt)) != REG
1473 || GET_MODE (SET_DEST (elt)) != Pmode
1474 || REGNO (SET_DEST (elt)) != dest_regno + i
1475 || GET_CODE (SET_SRC (elt)) != MEM
1476 || GET_MODE (SET_SRC (elt)) != Pmode
1477 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1478 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1479 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1480 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1481 != off + i * UNITS_PER_WORD)
1488 /* Return true if OP is a store multiple operation. It is known to be a
1489 PARALLEL and the first section will be tested.
1490 OP is the current operation.
1491 MODE is the current operation mode. */
1494 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1496 int count = XVECLEN (op, 0);
1497 unsigned int src_regno;
1501 /* Perform a quick check so we don't blow up below. */
1503 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1504 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1505 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1508 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1509 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1511 /* Check, is base, or base + displacement. */
1513 if (GET_CODE (dest_addr) == REG)
1515 else if (GET_CODE (dest_addr) == PLUS
1516 && GET_CODE (XEXP (dest_addr, 0)) == REG
1517 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1519 off = INTVAL (XEXP (dest_addr, 1));
1520 dest_addr = XEXP (dest_addr, 0);
1525 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
1528 for (i = 1; i < count; i++)
1530 rtx elt = XVECEXP (op, 0, i);
1532 if (GET_CODE (elt) != SET
1533 || GET_CODE (SET_SRC (elt)) != REG
1534 || GET_MODE (SET_SRC (elt)) != Pmode
1535 || REGNO (SET_SRC (elt)) != src_regno + i
1536 || GET_CODE (SET_DEST (elt)) != MEM
1537 || GET_MODE (SET_DEST (elt)) != Pmode
1538 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1539 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1540 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1541 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1542 != off + i * UNITS_PER_WORD)
1549 /* Return true if OP contains a symbol reference */
1552 symbolic_reference_mentioned_p (rtx op)
1554 register const char *fmt;
1557 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1560 fmt = GET_RTX_FORMAT (GET_CODE (op));
1561 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1567 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1568 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1572 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1579 /* Return true if OP contains a reference to a thread-local symbol. */
1582 tls_symbolic_reference_mentioned_p (rtx op)
1584 register const char *fmt;
1587 if (GET_CODE (op) == SYMBOL_REF)
1588 return tls_symbolic_operand (op);
1590 fmt = GET_RTX_FORMAT (GET_CODE (op));
1591 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1597 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1598 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1602 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1610 /* Return true if OP is a legitimate general operand when
1611 generating PIC code. It is given that flag_pic is on
1612 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1615 legitimate_pic_operand_p (register rtx op)
1617 /* Accept all non-symbolic constants. */
1618 if (!SYMBOLIC_CONST (op))
1621 /* Reject everything else; must be handled
1622 via emit_symbolic_move. */
1626 /* Returns true if the constant value OP is a legitimate general operand.
1627 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1630 legitimate_constant_p (register rtx op)
1632 /* Accept all non-symbolic constants. */
1633 if (!SYMBOLIC_CONST (op))
1636 /* Accept immediate LARL operands. */
1637 if (TARGET_64BIT && larl_operand (op, VOIDmode))
1640 /* Thread-local symbols are never legal constants. This is
1641 so that emit_call knows that computing such addresses
1642 might require a function call. */
1643 if (TLS_SYMBOLIC_CONST (op))
1646 /* In the PIC case, symbolic constants must *not* be
1647 forced into the literal pool. We accept them here,
1648 so that they will be handled by emit_symbolic_move. */
1652 /* All remaining non-PIC symbolic constants are
1653 forced into the literal pool. */
1657 /* Determine if it's legal to put X into the constant pool. This
1658 is not possible if X contains the address of a symbol that is
1659 not constant (TLS) or not known at final link time (PIC). */
1662 s390_cannot_force_const_mem (rtx x)
1664 switch (GET_CODE (x))
1668 /* Accept all non-symbolic constants. */
1672 /* Labels are OK iff we are non-PIC. */
1673 return flag_pic != 0;
1676 /* 'Naked' TLS symbol references are never OK,
1677 non-TLS symbols are OK iff we are non-PIC. */
1678 if (tls_symbolic_operand (x))
1681 return flag_pic != 0;
1684 return s390_cannot_force_const_mem (XEXP (x, 0));
1687 return s390_cannot_force_const_mem (XEXP (x, 0))
1688 || s390_cannot_force_const_mem (XEXP (x, 1));
1691 switch (XINT (x, 1))
1693 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1694 case UNSPEC_LTREL_OFFSET:
1702 case UNSPEC_GOTNTPOFF:
1703 case UNSPEC_INDNTPOFF:
1716 /* Returns true if the constant value OP is a legitimate general
1717 operand during and after reload. The difference to
1718 legitimate_constant_p is that this function will not accept
1719 a constant that would need to be forced to the literal pool
1720 before it can be used as operand. */
1723 legitimate_reload_constant_p (register rtx op)
1725 /* Accept la(y) operands. */
1726 if (GET_CODE (op) == CONST_INT
1727 && DISP_IN_RANGE (INTVAL (op)))
1730 /* Accept l(g)hi operands. */
1731 if (GET_CODE (op) == CONST_INT
1732 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1735 /* Accept lliXX operands. */
1737 && s390_single_hi (op, DImode, 0) >= 0)
1740 /* Accept larl operands. */
1742 && larl_operand (op, VOIDmode))
1745 /* Everything else cannot be handled without reload. */
1749 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1750 return the class of reg to actually use. */
1753 s390_preferred_reload_class (rtx op, enum reg_class class)
1755 /* This can happen if a floating point constant is being
1756 reloaded into an integer register. Leave well alone. */
1757 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1758 && class != FP_REGS)
1761 switch (GET_CODE (op))
1763 /* Constants we cannot reload must be forced into the
1768 if (legitimate_reload_constant_p (op))
1773 /* If a symbolic constant or a PLUS is reloaded,
1774 it is most likely being used as an address, so
1775 prefer ADDR_REGS. If 'class' is not a superset
1776 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1781 if (reg_class_subset_p (ADDR_REGS, class))
1793 /* Return the register class of a scratch register needed to
1794 load IN into a register of class CLASS in MODE.
1796 We need a temporary when loading a PLUS expression which
1797 is not a legitimate operand of the LOAD ADDRESS instruction. */
1800 s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
1801 enum machine_mode mode, rtx in)
1803 if (s390_plus_operand (in, mode))
1809 /* Return the register class of a scratch register needed to
1810 store a register of class CLASS in MODE into OUT:
1812 We need a temporary when storing a double-word to a
1813 non-offsettable memory address. */
1816 s390_secondary_output_reload_class (enum reg_class class,
1817 enum machine_mode mode, rtx out)
1819 if ((TARGET_64BIT ? mode == TImode
1820 : (mode == DImode || mode == DFmode))
1821 && reg_classes_intersect_p (GENERAL_REGS, class)
1822 && GET_CODE (out) == MEM
1823 && !offsettable_memref_p (out)
1824 && !s_operand (out, VOIDmode))
1830 /* Return true if OP is a PLUS that is not a legitimate
1831 operand for the LA instruction.
1832 OP is the current operation.
1833 MODE is the current operation mode. */
1836 s390_plus_operand (register rtx op, enum machine_mode mode)
1838 if (!check_mode (op, &mode) || mode != Pmode)
1841 if (GET_CODE (op) != PLUS)
1844 if (legitimate_la_operand_p (op))
1850 /* Generate code to load SRC, which is PLUS that is not a
1851 legitimate operand for the LA instruction, into TARGET.
1852 SCRATCH may be used as scratch register. */
1855 s390_expand_plus_operand (register rtx target, register rtx src,
1856 register rtx scratch)
1859 struct s390_address ad;
1861 /* src must be a PLUS; get its two operands. */
1862 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1865 /* Check if any of the two operands is already scheduled
1866 for replacement by reload. This can happen e.g. when
1867 float registers occur in an address. */
1868 sum1 = find_replacement (&XEXP (src, 0));
1869 sum2 = find_replacement (&XEXP (src, 1));
1870 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1872 /* If the address is already strictly valid, there's nothing to do. */
1873 if (!s390_decompose_address (src, &ad)
1874 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1875 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1877 /* Otherwise, one of the operands cannot be an address register;
1878 we reload its value into the scratch register. */
1879 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1881 emit_move_insn (scratch, sum1);
1884 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1886 emit_move_insn (scratch, sum2);
1890 /* According to the way these invalid addresses are generated
1891 in reload.c, it should never happen (at least on s390) that
1892 *neither* of the PLUS components, after find_replacements
1893 was applied, is an address register. */
1894 if (sum1 == scratch && sum2 == scratch)
1900 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1903 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1904 is only ever performed on addresses, so we can mark the
1905 sum as legitimate for LA in any case. */
1906 s390_load_address (target, src);
1910 /* Decompose a RTL expression ADDR for a memory address into
1911 its components, returned in OUT.
1913 Returns 0 if ADDR is not a valid memory address, nonzero
1914 otherwise. If OUT is NULL, don't return the components,
1915 but check for validity only.
1917 Note: Only addresses in canonical form are recognized.
1918 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1919 canonical form so that they will be recognized. */
1922 s390_decompose_address (register rtx addr, struct s390_address *out)
1924 rtx base = NULL_RTX;
1925 rtx indx = NULL_RTX;
1926 rtx disp = NULL_RTX;
1927 int pointer = FALSE;
1928 int base_ptr = FALSE;
1929 int indx_ptr = FALSE;
1931 /* Decompose address into base + index + displacement. */
1933 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1936 else if (GET_CODE (addr) == PLUS)
1938 rtx op0 = XEXP (addr, 0);
1939 rtx op1 = XEXP (addr, 1);
1940 enum rtx_code code0 = GET_CODE (op0);
1941 enum rtx_code code1 = GET_CODE (op1);
1943 if (code0 == REG || code0 == UNSPEC)
1945 if (code1 == REG || code1 == UNSPEC)
1947 indx = op0; /* index + base */
1953 base = op0; /* base + displacement */
1958 else if (code0 == PLUS)
1960 indx = XEXP (op0, 0); /* index + base + disp */
1961 base = XEXP (op0, 1);
1972 disp = addr; /* displacement */
1975 /* Validate base register. */
1978 if (GET_CODE (base) == UNSPEC)
1980 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != UNSPEC_LTREL_BASE)
1982 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1985 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
1988 if (REGNO (base) == BASE_REGISTER
1989 || REGNO (base) == STACK_POINTER_REGNUM
1990 || REGNO (base) == FRAME_POINTER_REGNUM
1991 || ((reload_completed || reload_in_progress)
1992 && frame_pointer_needed
1993 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1994 || REGNO (base) == ARG_POINTER_REGNUM
1995 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1996 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1998 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1999 pointer = base_ptr = TRUE;
2002 /* Validate index register. */
2005 if (GET_CODE (indx) == UNSPEC)
2007 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != UNSPEC_LTREL_BASE)
2009 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2012 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2015 if (REGNO (indx) == BASE_REGISTER
2016 || REGNO (indx) == STACK_POINTER_REGNUM
2017 || REGNO (indx) == FRAME_POINTER_REGNUM
2018 || ((reload_completed || reload_in_progress)
2019 && frame_pointer_needed
2020 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2021 || REGNO (indx) == ARG_POINTER_REGNUM
2022 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
2023 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
2025 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2026 pointer = indx_ptr = TRUE;
2029 /* Prefer to use pointer as base, not index. */
2030 if (base && indx && !base_ptr
2031 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2038 /* Validate displacement. */
2041 /* Allow integer constant in range. */
2042 if (GET_CODE (disp) == CONST_INT)
2044 /* If the argument pointer is involved, the displacement will change
2045 later anyway as the argument pointer gets eliminated. This could
2046 make a valid displacement invalid, but it is more likely to make
2047 an invalid displacement valid, because we sometimes access the
2048 register save area via negative offsets to the arg pointer.
2049 Thus we don't check the displacement for validity here. If after
2050 elimination the displacement turns out to be invalid after all,
2051 this is fixed up by reload in any case. */
2052 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
2054 if (!DISP_IN_RANGE (INTVAL (disp)))
2059 /* In the small-PIC case, the linker converts @GOT
2060 and @GOTNTPOFF offsets to possible displacements. */
2061 else if (GET_CODE (disp) == CONST
2062 && GET_CODE (XEXP (disp, 0)) == UNSPEC
2063 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
2064 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
2072 /* Accept chunkfied literal pool symbol references. */
2073 else if (GET_CODE (disp) == CONST
2074 && GET_CODE (XEXP (disp, 0)) == MINUS
2075 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
2076 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
2081 /* Likewise if a constant offset is present. */
2082 else if (GET_CODE (disp) == CONST
2083 && GET_CODE (XEXP (disp, 0)) == PLUS
2084 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
2085 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
2086 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
2087 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
2092 /* We can convert literal pool addresses to
2093 displacements by basing them off the base register. */
2096 /* In some cases, we can accept an additional
2097 small constant offset. Split these off here. */
2099 unsigned int offset = 0;
2101 if (GET_CODE (disp) == CONST
2102 && GET_CODE (XEXP (disp, 0)) == PLUS
2103 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2105 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2106 disp = XEXP (XEXP (disp, 0), 0);
2109 /* Now we must have a literal pool address. */
2110 if (GET_CODE (disp) != SYMBOL_REF
2111 || !CONSTANT_POOL_ADDRESS_P (disp))
2114 /* If we have an offset, make sure it does not
2115 exceed the size of the constant pool entry. */
2116 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
2119 /* Either base or index must be free to
2120 hold the base register. */
2124 /* Convert the address. */
2126 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2128 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2130 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2131 UNSPEC_LTREL_OFFSET);
2132 disp = gen_rtx_CONST (Pmode, disp);
2135 disp = plus_constant (disp, offset);
2149 out->pointer = pointer;
2155 /* Return nonzero if ADDR is a valid memory address.
2156 STRICT specifies whether strict register checking applies. */
2159 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2160 register rtx addr, int strict)
2162 struct s390_address ad;
2163 if (!s390_decompose_address (addr, &ad))
2168 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2170 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2175 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2177 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2184 /* Return 1 if OP is a valid operand for the LA instruction.
2185 In 31-bit, we need to prove that the result is used as an
2186 address, as LA performs only a 31-bit addition. */
2189 legitimate_la_operand_p (register rtx op)
2191 struct s390_address addr;
2192 if (!s390_decompose_address (op, &addr))
2195 if (TARGET_64BIT || addr.pointer)
2201 /* Return 1 if OP is a valid operand for the LA instruction,
2202 and we prefer to use LA over addition to compute it. */
2205 preferred_la_operand_p (register rtx op)
2207 struct s390_address addr;
2208 if (!s390_decompose_address (op, &addr))
2211 if (!TARGET_64BIT && !addr.pointer)
2217 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2218 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2224 /* Emit a forced load-address operation to load SRC into DST.
2225 This will use the LOAD ADDRESS instruction even in situations
2226 where legitimate_la_operand_p (SRC) returns false. */
2229 s390_load_address (rtx dst, rtx src)
2232 emit_move_insn (dst, src);
2234 emit_insn (gen_force_la_31 (dst, src));
2237 /* Return a legitimate reference for ORIG (an address) using the
2238 register REG. If REG is 0, a new pseudo is generated.
2240 There are two types of references that must be handled:
2242 1. Global data references must load the address from the GOT, via
2243 the PIC reg. An insn is emitted to do this load, and the reg is
2246 2. Static data references, constant pool addresses, and code labels
2247 compute the address as an offset from the GOT, whose base is in
2248 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2249 differentiate them from global data objects. The returned
2250 address is the PIC reg + an unspec constant.
2252 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2253 reg also appears in the address. */
2256 legitimize_pic_address (rtx orig, rtx reg)
2262 if (GET_CODE (addr) == LABEL_REF
2263 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2265 /* This is a local symbol. */
2266 if (TARGET_64BIT && larl_operand (addr, VOIDmode))
2268 /* Access local symbols PC-relative via LARL.
2269 This is the same as in the non-PIC case, so it is
2270 handled automatically ... */
2274 /* Access local symbols relative to the GOT. */
2276 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2278 if (reload_in_progress || reload_completed)
2279 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2281 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2282 addr = gen_rtx_CONST (Pmode, addr);
2283 addr = force_const_mem (Pmode, addr);
2284 emit_move_insn (temp, addr);
2286 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2289 emit_move_insn (reg, new);
2294 else if (GET_CODE (addr) == SYMBOL_REF)
2297 reg = gen_reg_rtx (Pmode);
2301 /* Assume GOT offset < 4k. This is handled the same way
2302 in both 31- and 64-bit code (@GOT). */
2304 if (reload_in_progress || reload_completed)
2305 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2307 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2308 new = gen_rtx_CONST (Pmode, new);
2309 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2310 new = gen_rtx_MEM (Pmode, new);
2311 RTX_UNCHANGING_P (new) = 1;
2312 emit_move_insn (reg, new);
2315 else if (TARGET_64BIT)
2317 /* If the GOT offset might be >= 4k, we determine the position
2318 of the GOT entry via a PC-relative LARL (@GOTENT). */
2320 rtx temp = gen_reg_rtx (Pmode);
2322 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2323 new = gen_rtx_CONST (Pmode, new);
2324 emit_move_insn (temp, new);
2326 new = gen_rtx_MEM (Pmode, temp);
2327 RTX_UNCHANGING_P (new) = 1;
2328 emit_move_insn (reg, new);
2333 /* If the GOT offset might be >= 4k, we have to load it
2334 from the literal pool (@GOT). */
2336 rtx temp = gen_reg_rtx (Pmode);
2338 if (reload_in_progress || reload_completed)
2339 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2341 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2342 addr = gen_rtx_CONST (Pmode, addr);
2343 addr = force_const_mem (Pmode, addr);
2344 emit_move_insn (temp, addr);
2346 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2347 new = gen_rtx_MEM (Pmode, new);
2348 RTX_UNCHANGING_P (new) = 1;
2349 emit_move_insn (reg, new);
2355 if (GET_CODE (addr) == CONST)
2357 addr = XEXP (addr, 0);
2358 if (GET_CODE (addr) == UNSPEC)
2360 if (XVECLEN (addr, 0) != 1)
2362 switch (XINT (addr, 1))
2364 /* If someone moved a GOT-relative UNSPEC
2365 out of the literal pool, force them back in. */
2368 new = force_const_mem (Pmode, orig);
2371 /* @GOT is OK as is if small. */
2374 new = force_const_mem (Pmode, orig);
2377 /* @GOTENT is OK as is. */
2381 /* @PLT is OK as is on 64-bit, must be converted to
2382 GOT-relative @PLTOFF on 31-bit. */
2386 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2388 if (reload_in_progress || reload_completed)
2389 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2391 addr = XVECEXP (addr, 0, 0);
2392 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2394 addr = gen_rtx_CONST (Pmode, addr);
2395 addr = force_const_mem (Pmode, addr);
2396 emit_move_insn (temp, addr);
2398 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2401 emit_move_insn (reg, new);
2407 /* Everything else cannot happen. */
2412 else if (GET_CODE (addr) != PLUS)
2415 if (GET_CODE (addr) == PLUS)
2417 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2418 /* Check first to see if this is a constant offset
2419 from a local symbol reference. */
2420 if ((GET_CODE (op0) == LABEL_REF
2421 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2422 && GET_CODE (op1) == CONST_INT)
2424 if (TARGET_64BIT && larl_operand (op0, VOIDmode))
2426 if (INTVAL (op1) & 1)
2428 /* LARL can't handle odd offsets, so emit a
2429 pair of LARL and LA. */
2430 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2432 if (!DISP_IN_RANGE (INTVAL (op1)))
2434 int even = INTVAL (op1) - 1;
2435 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2436 op0 = gen_rtx_CONST (Pmode, op0);
2440 emit_move_insn (temp, op0);
2441 new = gen_rtx_PLUS (Pmode, temp, op1);
2445 emit_move_insn (reg, new);
2451 /* If the offset is even, we can just use LARL.
2452 This will happen automatically. */
2457 /* Access local symbols relative to the GOT. */
2459 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2461 if (reload_in_progress || reload_completed)
2462 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2464 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2466 addr = gen_rtx_PLUS (Pmode, addr, op1);
2467 addr = gen_rtx_CONST (Pmode, addr);
2468 addr = force_const_mem (Pmode, addr);
2469 emit_move_insn (temp, addr);
2471 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2474 emit_move_insn (reg, new);
2480 /* Now, check whether it is a GOT relative symbol plus offset
2481 that was pulled out of the literal pool. Force it back in. */
2483 else if (GET_CODE (op0) == UNSPEC
2484 && GET_CODE (op1) == CONST_INT)
2486 if (XVECLEN (op0, 0) != 1)
2488 if (XINT (op0, 1) != UNSPEC_GOTOFF)
2491 new = force_const_mem (Pmode, orig);
2494 /* Otherwise, compute the sum. */
2497 base = legitimize_pic_address (XEXP (addr, 0), reg);
2498 new = legitimize_pic_address (XEXP (addr, 1),
2499 base == reg ? NULL_RTX : reg);
2500 if (GET_CODE (new) == CONST_INT)
2501 new = plus_constant (base, INTVAL (new));
2504 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2506 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2507 new = XEXP (new, 1);
2509 new = gen_rtx_PLUS (Pmode, base, new);
2512 if (GET_CODE (new) == CONST)
2513 new = XEXP (new, 0);
2514 new = force_operand (new, 0);
2521 /* Load the thread pointer into a register. */
2524 get_thread_pointer (void)
2528 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2529 tp = force_reg (Pmode, tp);
2530 mark_reg_pointer (tp, BITS_PER_WORD);
2535 /* Construct the SYMBOL_REF for the tls_get_offset function. */
2537 static GTY(()) rtx s390_tls_symbol;
2539 s390_tls_get_offset (void)
2541 if (!s390_tls_symbol)
2542 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2544 return s390_tls_symbol;
2547 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2548 this (thread-local) address. REG may be used as temporary. */
2551 legitimize_tls_address (rtx addr, rtx reg)
2553 rtx new, tls_call, temp, base, r2, insn;
2555 if (GET_CODE (addr) == SYMBOL_REF)
2556 switch (tls_symbolic_operand (addr))
2558 case TLS_MODEL_GLOBAL_DYNAMIC:
2560 r2 = gen_rtx_REG (Pmode, 2);
2561 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
2562 new = gen_rtx_CONST (Pmode, tls_call);
2563 new = force_const_mem (Pmode, new);
2564 emit_move_insn (r2, new);
2565 emit_call_insn (gen_call_value_tls (r2, tls_call));
2566 insn = get_insns ();
2569 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2570 temp = gen_reg_rtx (Pmode);
2571 emit_libcall_block (insn, temp, r2, new);
2573 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2576 s390_load_address (reg, new);
2581 case TLS_MODEL_LOCAL_DYNAMIC:
2583 r2 = gen_rtx_REG (Pmode, 2);
2584 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
2585 new = gen_rtx_CONST (Pmode, tls_call);
2586 new = force_const_mem (Pmode, new);
2587 emit_move_insn (r2, new);
2588 emit_call_insn (gen_call_value_tls (r2, tls_call));
2589 insn = get_insns ();
2592 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
2593 temp = gen_reg_rtx (Pmode);
2594 emit_libcall_block (insn, temp, r2, new);
2596 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2597 base = gen_reg_rtx (Pmode);
2598 s390_load_address (base, new);
2600 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
2601 new = gen_rtx_CONST (Pmode, new);
2602 new = force_const_mem (Pmode, new);
2603 temp = gen_reg_rtx (Pmode);
2604 emit_move_insn (temp, new);
2606 new = gen_rtx_PLUS (Pmode, base, temp);
2609 s390_load_address (reg, new);
2614 case TLS_MODEL_INITIAL_EXEC:
2617 /* Assume GOT offset < 4k. This is handled the same way
2618 in both 31- and 64-bit code. */
2620 if (reload_in_progress || reload_completed)
2621 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2623 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2624 new = gen_rtx_CONST (Pmode, new);
2625 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2626 new = gen_rtx_MEM (Pmode, new);
2627 RTX_UNCHANGING_P (new) = 1;
2628 temp = gen_reg_rtx (Pmode);
2629 emit_move_insn (temp, new);
2631 else if (TARGET_64BIT)
2633 /* If the GOT offset might be >= 4k, we determine the position
2634 of the GOT entry via a PC-relative LARL. */
2636 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2637 new = gen_rtx_CONST (Pmode, new);
2638 temp = gen_reg_rtx (Pmode);
2639 emit_move_insn (temp, new);
2641 new = gen_rtx_MEM (Pmode, temp);
2642 RTX_UNCHANGING_P (new) = 1;
2643 temp = gen_reg_rtx (Pmode);
2644 emit_move_insn (temp, new);
2648 /* If the GOT offset might be >= 4k, we have to load it
2649 from the literal pool. */
2651 if (reload_in_progress || reload_completed)
2652 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2654 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2655 new = gen_rtx_CONST (Pmode, new);
2656 new = force_const_mem (Pmode, new);
2657 temp = gen_reg_rtx (Pmode);
2658 emit_move_insn (temp, new);
2660 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2661 new = gen_rtx_MEM (Pmode, new);
2662 RTX_UNCHANGING_P (new) = 1;
2664 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2665 temp = gen_reg_rtx (Pmode);
2666 emit_insn (gen_rtx_SET (Pmode, temp, new));
2670 /* In position-dependent code, load the absolute address of
2671 the GOT entry from the literal pool. */
2673 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2674 new = gen_rtx_CONST (Pmode, new);
2675 new = force_const_mem (Pmode, new);
2676 temp = gen_reg_rtx (Pmode);
2677 emit_move_insn (temp, new);
2680 new = gen_rtx_MEM (Pmode, new);
2681 RTX_UNCHANGING_P (new) = 1;
2683 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2684 temp = gen_reg_rtx (Pmode);
2685 emit_insn (gen_rtx_SET (Pmode, temp, new));
2688 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2691 s390_load_address (reg, new);
2696 case TLS_MODEL_LOCAL_EXEC:
2697 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2698 new = gen_rtx_CONST (Pmode, new);
2699 new = force_const_mem (Pmode, new);
2700 temp = gen_reg_rtx (Pmode);
2701 emit_move_insn (temp, new);
2703 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2706 s390_load_address (reg, new);
2715 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2717 switch (XINT (XEXP (addr, 0), 1))
2719 case UNSPEC_INDNTPOFF:
2732 abort (); /* for now ... */
2737 /* Emit insns to move operands[1] into operands[0]. */
2740 emit_symbolic_move (rtx *operands)
2742 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2744 if (GET_CODE (operands[0]) == MEM)
2745 operands[1] = force_reg (Pmode, operands[1]);
2746 else if (TLS_SYMBOLIC_CONST (operands[1]))
2747 operands[1] = legitimize_tls_address (operands[1], temp);
2749 operands[1] = legitimize_pic_address (operands[1], temp);
2752 /* Try machine-dependent ways of modifying an illegitimate address X
2753 to be legitimate. If we find one, return the new, valid address.
2755 OLDX is the address as it was before break_out_memory_refs was called.
2756 In some cases it is useful to look at this to decide what needs to be done.
2758 MODE is the mode of the operand pointed to by X.
2760 When -fpic is used, special handling is needed for symbolic references.
2761 See comments by legitimize_pic_address for details. */
2764 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
2765 enum machine_mode mode ATTRIBUTE_UNUSED)
2767 rtx constant_term = const0_rtx;
2769 if (TLS_SYMBOLIC_CONST (x))
2771 x = legitimize_tls_address (x, 0);
2773 if (legitimate_address_p (mode, x, FALSE))
2778 if (SYMBOLIC_CONST (x)
2779 || (GET_CODE (x) == PLUS
2780 && (SYMBOLIC_CONST (XEXP (x, 0))
2781 || SYMBOLIC_CONST (XEXP (x, 1)))))
2782 x = legitimize_pic_address (x, 0);
2784 if (legitimate_address_p (mode, x, FALSE))
2788 x = eliminate_constant_term (x, &constant_term);
2790 /* Optimize loading of large displacements by splitting them
2791 into the multiple of 4K and the rest; this allows the
2792 former to be CSE'd if possible.
2794 Don't do this if the displacement is added to a register
2795 pointing into the stack frame, as the offsets will
2796 change later anyway. */
2798 if (GET_CODE (constant_term) == CONST_INT
2799 && !TARGET_LONG_DISPLACEMENT
2800 && !DISP_IN_RANGE (INTVAL (constant_term))
2801 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
2803 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2804 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2806 rtx temp = gen_reg_rtx (Pmode);
2807 rtx val = force_operand (GEN_INT (upper), temp);
2809 emit_move_insn (temp, val);
2811 x = gen_rtx_PLUS (Pmode, x, temp);
2812 constant_term = GEN_INT (lower);
2815 if (GET_CODE (x) == PLUS)
2817 if (GET_CODE (XEXP (x, 0)) == REG)
2819 register rtx temp = gen_reg_rtx (Pmode);
2820 register rtx val = force_operand (XEXP (x, 1), temp);
2822 emit_move_insn (temp, val);
2824 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2827 else if (GET_CODE (XEXP (x, 1)) == REG)
2829 register rtx temp = gen_reg_rtx (Pmode);
2830 register rtx val = force_operand (XEXP (x, 0), temp);
2832 emit_move_insn (temp, val);
2834 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2838 if (constant_term != const0_rtx)
2839 x = gen_rtx_PLUS (Pmode, x, constant_term);
2844 /* Emit code to move LEN bytes from DST to SRC. */
2847 s390_expand_movstr (rtx dst, rtx src, rtx len)
2849 rtx (*gen_short) (rtx, rtx, rtx) =
2850 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2851 rtx (*gen_long) (rtx, rtx, rtx, rtx) =
2852 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
2855 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2857 if (INTVAL (len) > 0)
2858 emit_insn (gen_short (dst, src, GEN_INT (INTVAL (len) - 1)));
2861 else if (TARGET_MVCLE)
2863 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2864 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2865 rtx reg0 = gen_reg_rtx (double_mode);
2866 rtx reg1 = gen_reg_rtx (double_mode);
2868 emit_move_insn (gen_highpart (single_mode, reg0),
2869 force_operand (XEXP (dst, 0), NULL_RTX));
2870 emit_move_insn (gen_highpart (single_mode, reg1),
2871 force_operand (XEXP (src, 0), NULL_RTX));
2873 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2874 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2876 emit_insn (gen_long (reg0, reg1, reg0, reg1));
2881 rtx dst_addr, src_addr, count, blocks, temp;
2882 rtx end_label = gen_label_rtx ();
2883 enum machine_mode mode;
2886 mode = GET_MODE (len);
2887 if (mode == VOIDmode)
2890 type = lang_hooks.types.type_for_mode (mode, 1);
2894 dst_addr = gen_reg_rtx (Pmode);
2895 src_addr = gen_reg_rtx (Pmode);
2896 count = gen_reg_rtx (mode);
2897 blocks = gen_reg_rtx (mode);
2899 convert_move (count, len, 1);
2900 emit_cmp_and_jump_insns (count, const0_rtx,
2901 EQ, NULL_RTX, mode, 1, end_label);
2903 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2904 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2905 dst = change_address (dst, VOIDmode, dst_addr);
2906 src = change_address (src, VOIDmode, src_addr);
2908 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2910 emit_move_insn (count, temp);
2912 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2914 emit_move_insn (blocks, temp);
2916 expand_start_loop (1);
2917 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2918 make_tree (type, blocks),
2919 make_tree (type, const0_rtx)));
2921 emit_insn (gen_short (dst, src, GEN_INT (255)));
2922 s390_load_address (dst_addr,
2923 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2924 s390_load_address (src_addr,
2925 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2927 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2929 emit_move_insn (blocks, temp);
2933 emit_insn (gen_short (dst, src, convert_to_mode (word_mode, count, 1)));
2934 emit_label (end_label);
2938 /* Emit code to clear LEN bytes at DST. */
2941 s390_expand_clrstr (rtx dst, rtx len)
2943 rtx (*gen_short) (rtx, rtx) =
2944 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2945 rtx (*gen_long) (rtx, rtx, rtx) =
2946 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2949 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2951 if (INTVAL (len) > 0)
2952 emit_insn (gen_short (dst, GEN_INT (INTVAL (len) - 1)));
2955 else if (TARGET_MVCLE)
2957 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2958 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2959 rtx reg0 = gen_reg_rtx (double_mode);
2960 rtx reg1 = gen_reg_rtx (double_mode);
2962 emit_move_insn (gen_highpart (single_mode, reg0),
2963 force_operand (XEXP (dst, 0), NULL_RTX));
2964 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2966 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2967 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2969 emit_insn (gen_long (reg0, reg1, reg0));
2974 rtx dst_addr, src_addr, count, blocks, temp;
2975 rtx end_label = gen_label_rtx ();
2976 enum machine_mode mode;
2979 mode = GET_MODE (len);
2980 if (mode == VOIDmode)
2983 type = lang_hooks.types.type_for_mode (mode, 1);
2987 dst_addr = gen_reg_rtx (Pmode);
2988 src_addr = gen_reg_rtx (Pmode);
2989 count = gen_reg_rtx (mode);
2990 blocks = gen_reg_rtx (mode);
2992 convert_move (count, len, 1);
2993 emit_cmp_and_jump_insns (count, const0_rtx,
2994 EQ, NULL_RTX, mode, 1, end_label);
2996 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2997 dst = change_address (dst, VOIDmode, dst_addr);
2999 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3001 emit_move_insn (count, temp);
3003 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3005 emit_move_insn (blocks, temp);
3007 expand_start_loop (1);
3008 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3009 make_tree (type, blocks),
3010 make_tree (type, const0_rtx)));
3012 emit_insn (gen_short (dst, GEN_INT (255)));
3013 s390_load_address (dst_addr,
3014 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3016 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3018 emit_move_insn (blocks, temp);
3022 emit_insn (gen_short (dst, convert_to_mode (word_mode, count, 1)));
3023 emit_label (end_label);
3027 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3028 and return the result in TARGET. */
3031 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3033 rtx (*gen_short) (rtx, rtx, rtx) =
3034 TARGET_64BIT ? gen_cmpmem_short_64 : gen_cmpmem_short_31;
3035 rtx (*gen_long) (rtx, rtx, rtx, rtx) =
3036 TARGET_64BIT ? gen_cmpmem_long_64 : gen_cmpmem_long_31;
3037 rtx (*gen_result) (rtx) =
3038 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
3040 op0 = protect_from_queue (op0, 0);
3041 op1 = protect_from_queue (op1, 0);
3042 len = protect_from_queue (len, 0);
3044 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3046 if (INTVAL (len) > 0)
3048 emit_insn (gen_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3049 emit_insn (gen_result (target));
3052 emit_move_insn (target, const0_rtx);
3055 else /* if (TARGET_MVCLE) */
3057 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3058 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3059 rtx reg0 = gen_reg_rtx (double_mode);
3060 rtx reg1 = gen_reg_rtx (double_mode);
3062 emit_move_insn (gen_highpart (single_mode, reg0),
3063 force_operand (XEXP (op0, 0), NULL_RTX));
3064 emit_move_insn (gen_highpart (single_mode, reg1),
3065 force_operand (XEXP (op1, 0), NULL_RTX));
3067 convert_move (gen_lowpart (single_mode, reg0), len, 1);
3068 convert_move (gen_lowpart (single_mode, reg1), len, 1);
3070 emit_insn (gen_long (reg0, reg1, reg0, reg1));
3071 emit_insn (gen_result (target));
3075 /* Deactivate for now as profile code cannot cope with
3076 CC being live across basic block boundaries. */
3079 rtx addr0, addr1, count, blocks, temp;
3080 rtx end_label = gen_label_rtx ();
3081 enum machine_mode mode;
3084 mode = GET_MODE (len);
3085 if (mode == VOIDmode)
3088 type = lang_hooks.types.type_for_mode (mode, 1);
3092 addr0 = gen_reg_rtx (Pmode);
3093 addr1 = gen_reg_rtx (Pmode);
3094 count = gen_reg_rtx (mode);
3095 blocks = gen_reg_rtx (mode);
3097 convert_move (count, len, 1);
3098 emit_cmp_and_jump_insns (count, const0_rtx,
3099 EQ, NULL_RTX, mode, 1, end_label);
3101 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3102 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3103 op0 = change_address (op0, VOIDmode, addr0);
3104 op1 = change_address (op1, VOIDmode, addr1);
3106 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3108 emit_move_insn (count, temp);
3110 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3112 emit_move_insn (blocks, temp);
3114 expand_start_loop (1);
3115 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3116 make_tree (type, blocks),
3117 make_tree (type, const0_rtx)));
3119 emit_insn (gen_short (op0, op1, GEN_INT (255)));
3120 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3121 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3122 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3123 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3124 emit_jump_insn (temp);
3126 s390_load_address (addr0,
3127 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3128 s390_load_address (addr1,
3129 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3131 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3133 emit_move_insn (blocks, temp);
3137 emit_insn (gen_short (op0, op1, convert_to_mode (word_mode, count, 1)));
3138 emit_label (end_label);
3140 emit_insn (gen_result (target));
3145 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3146 We need to emit DTP-relative relocations. */
3149 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3154 fputs ("\t.long\t", file);
3157 fputs ("\t.quad\t", file);
3162 output_addr_const (file, x);
3163 fputs ("@DTPOFF", file);
3166 /* In the name of slightly smaller debug output, and to cater to
3167 general assembler losage, recognize various UNSPEC sequences
3168 and turn them back into a direct symbol reference. */
3171 s390_delegitimize_address (rtx orig_x)
3175 if (GET_CODE (x) != MEM)
3179 if (GET_CODE (x) == PLUS
3180 && GET_CODE (XEXP (x, 1)) == CONST
3181 && GET_CODE (XEXP (x, 0)) == REG
3182 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3184 y = XEXP (XEXP (x, 1), 0);
3185 if (GET_CODE (y) == UNSPEC
3186 && XINT (y, 1) == UNSPEC_GOT)
3187 return XVECEXP (y, 0, 0);
3191 if (GET_CODE (x) == CONST)
3194 if (GET_CODE (y) == UNSPEC
3195 && XINT (y, 1) == UNSPEC_GOTENT)
3196 return XVECEXP (y, 0, 0);
3203 /* Locate some local-dynamic symbol still in use by this function
3204 so that we can print its name in local-dynamic base patterns. */
3207 get_some_local_dynamic_name (void)
3211 if (cfun->machine->some_ld_name)
3212 return cfun->machine->some_ld_name;
3214 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3216 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3217 return cfun->machine->some_ld_name;
3223 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3227 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3229 x = get_pool_constant (x);
3230 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3233 if (GET_CODE (x) == SYMBOL_REF
3234 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3236 cfun->machine->some_ld_name = XSTR (x, 0);
3243 /* Output symbolic constant X in assembler syntax to
3244 stdio stream FILE. */
3247 s390_output_symbolic_const (FILE *file, rtx x)
3249 switch (GET_CODE (x))
3254 s390_output_symbolic_const (file, XEXP (x, 0));
3258 s390_output_symbolic_const (file, XEXP (x, 0));
3259 fprintf (file, "+");
3260 s390_output_symbolic_const (file, XEXP (x, 1));
3264 s390_output_symbolic_const (file, XEXP (x, 0));
3265 fprintf (file, "-");
3266 s390_output_symbolic_const (file, XEXP (x, 1));
3273 output_addr_const (file, x);
3277 if (XVECLEN (x, 0) != 1)
3278 output_operand_lossage ("invalid UNSPEC as operand (1)");
3279 switch (XINT (x, 1))
3281 case UNSPEC_LTREL_OFFSET:
3282 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3283 fprintf (file, "-");
3284 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3287 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3288 fprintf (file, "@GOTENT");
3291 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3292 fprintf (file, "@GOT");
3295 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3296 fprintf (file, "@GOTOFF");
3299 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3300 fprintf (file, "@PLT");
3303 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3304 fprintf (file, "@PLTOFF");
3307 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3308 fprintf (file, "@TLSGD");
3311 assemble_name (file, get_some_local_dynamic_name ());
3312 fprintf (file, "@TLSLDM");
3315 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3316 fprintf (file, "@DTPOFF");
3319 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3320 fprintf (file, "@NTPOFF");
3322 case UNSPEC_GOTNTPOFF:
3323 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3324 fprintf (file, "@GOTNTPOFF");
3326 case UNSPEC_INDNTPOFF:
3327 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3328 fprintf (file, "@INDNTPOFF");
3331 output_operand_lossage ("invalid UNSPEC as operand (2)");
3337 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
3342 /* Output address operand ADDR in assembler syntax to
3343 stdio stream FILE. */
3346 print_operand_address (FILE *file, rtx addr)
3348 struct s390_address ad;
3350 if (!s390_decompose_address (addr, &ad)
3351 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3352 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3353 output_operand_lossage ("Cannot decompose address.");
3356 s390_output_symbolic_const (file, ad.disp);
3358 fprintf (file, "0");
3360 if (ad.base && ad.indx)
3361 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3362 reg_names[REGNO (ad.base)]);
3364 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3367 /* Output operand X in assembler syntax to stdio stream FILE.
3368 CODE specified the format flag. The following format flags
3371 'C': print opcode suffix for branch condition.
3372 'D': print opcode suffix for inverse branch condition.
3373 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3374 'O': print only the displacement of a memory reference.
3375 'R': print only the base register of a memory reference.
3376 'N': print the second word of a DImode operand.
3377 'M': print the second word of a TImode operand.
3379 'b': print integer X as if it's an unsigned byte.
3380 'x': print integer X as if it's an unsigned word.
3381 'h': print integer X as if it's a signed word. */
3384 print_operand (FILE *file, rtx x, int code)
3389 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3393 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
3397 if (GET_CODE (x) == SYMBOL_REF)
3399 fprintf (file, "%s", ":tls_load:");
3400 output_addr_const (file, x);
3402 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3404 fprintf (file, "%s", ":tls_gdcall:");
3405 output_addr_const (file, XVECEXP (x, 0, 0));
3407 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3409 fprintf (file, "%s", ":tls_ldcall:");
3410 assemble_name (file, get_some_local_dynamic_name ());
3418 struct s390_address ad;
3420 if (GET_CODE (x) != MEM
3421 || !s390_decompose_address (XEXP (x, 0), &ad)
3422 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3427 s390_output_symbolic_const (file, ad.disp);
3429 fprintf (file, "0");
3435 struct s390_address ad;
3437 if (GET_CODE (x) != MEM
3438 || !s390_decompose_address (XEXP (x, 0), &ad)
3439 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3444 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3446 fprintf (file, "0");
3451 if (GET_CODE (x) == REG)
3452 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3453 else if (GET_CODE (x) == MEM)
3454 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
3460 if (GET_CODE (x) == REG)
3461 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3462 else if (GET_CODE (x) == MEM)
3463 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3469 switch (GET_CODE (x))
3472 fprintf (file, "%s", reg_names[REGNO (x)]);
3476 output_address (XEXP (x, 0));
3483 s390_output_symbolic_const (file, x);
3488 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3489 else if (code == 'x')
3490 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3491 else if (code == 'h')
3492 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3494 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3498 if (GET_MODE (x) != VOIDmode)
3501 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3502 else if (code == 'x')
3503 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3504 else if (code == 'h')
3505 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3511 fatal_insn ("UNKNOWN in print_operand !?", x);
3516 /* Target hook for assembling integer objects. We need to define it
3517 here to work a round a bug in some versions of GAS, which couldn't
3518 handle values smaller than INT_MIN when printed in decimal. */
3521 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
3523 if (size == 8 && aligned_p
3524 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3526 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
3530 return default_assemble_integer (x, size, aligned_p);
3533 /* Returns true if register REGNO is used for forming
3534 a memory address in expression X. */
3537 reg_used_in_mem_p (int regno, rtx x)
3539 enum rtx_code code = GET_CODE (x);
3545 if (refers_to_regno_p (regno, regno+1,
3549 else if (code == SET
3550 && GET_CODE (SET_DEST (x)) == PC)
3552 if (refers_to_regno_p (regno, regno+1,
3557 fmt = GET_RTX_FORMAT (code);
3558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3561 && reg_used_in_mem_p (regno, XEXP (x, i)))
3564 else if (fmt[i] == 'E')
3565 for (j = 0; j < XVECLEN (x, i); j++)
3566 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3572 /* Returns true if expression DEP_RTX sets an address register
3573 used by instruction INSN to address memory. */
3576 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
3580 if (GET_CODE (dep_rtx) == INSN)
3581 dep_rtx = PATTERN (dep_rtx);
3583 if (GET_CODE (dep_rtx) == SET)
3585 target = SET_DEST (dep_rtx);
3586 if (GET_CODE (target) == STRICT_LOW_PART)
3587 target = XEXP (target, 0);
3588 while (GET_CODE (target) == SUBREG)
3589 target = SUBREG_REG (target);
3591 if (GET_CODE (target) == REG)
3593 int regno = REGNO (target);
3595 if (s390_safe_attr_type (insn) == TYPE_LA)
3597 pat = PATTERN (insn);
3598 if (GET_CODE (pat) == PARALLEL)
3600 if (XVECLEN (pat, 0) != 2)
3602 pat = XVECEXP (pat, 0, 0);
3604 if (GET_CODE (pat) == SET)
3605 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
3609 else if (get_attr_atype (insn) == ATYPE_AGEN)
3610 return reg_used_in_mem_p (regno, PATTERN (insn));
3616 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
3619 s390_agen_dep_p (rtx dep_insn, rtx insn)
3621 rtx dep_rtx = PATTERN (dep_insn);
3624 if (GET_CODE (dep_rtx) == SET
3625 && addr_generation_dependency_p (dep_rtx, insn))
3627 else if (GET_CODE (dep_rtx) == PARALLEL)
3629 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3631 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3638 /* Return the modified cost of the dependency of instruction INSN
3639 on instruction DEP_INSN through the link LINK. COST is the
3640 default cost of that dependency.
3642 Data dependencies are all handled without delay. However, if a
3643 register is modified and subsequently used as base or index
3644 register of a memory reference, at least 4 cycles need to pass
3645 between setting and using the register to avoid pipeline stalls.
3646 An exception is the LA instruction. An address generated by LA can
3647 be used by introducing only a one cycle stall on the pipeline. */
3650 s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3655 /* If the dependence is an anti-dependence, there is no cost. For an
3656 output dependence, there is sometimes a cost, but it doesn't seem
3657 worth handling those few cases. */
3659 if (REG_NOTE_KIND (link) != 0)
3662 /* If we can't recognize the insns, we can't really do anything. */
3663 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3666 /* DFA based scheduling checks address dependency in md file. */
3667 if (s390_use_dfa_pipeline_interface ())
3669 /* Operand forward in case of lr, load and la. */
3670 if (s390_tune == PROCESSOR_2084_Z990
3672 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3673 || s390_safe_attr_type (dep_insn) == TYPE_LR
3674 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
3679 dep_rtx = PATTERN (dep_insn);
3681 if (GET_CODE (dep_rtx) == SET
3682 && addr_generation_dependency_p (dep_rtx, insn))
3683 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3684 else if (GET_CODE (dep_rtx) == PARALLEL)
3686 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3688 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3689 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3695 /* A C statement (sans semicolon) to update the integer scheduling priority
3696 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3697 reduce the priority to execute INSN later. Do not define this macro if
3698 you do not need to adjust the scheduling priorities of insns.
3700 A STD instruction should be scheduled earlier,
3701 in order to use the bypass. */
3704 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
3706 if (! INSN_P (insn))
3709 if (s390_tune != PROCESSOR_2084_Z990)
3712 switch (s390_safe_attr_type (insn))
3716 priority = priority << 3;
3719 priority = priority << 1;
3727 /* The number of instructions that can be issued per cycle. */
3730 s390_issue_rate (void)
3732 if (s390_tune == PROCESSOR_2084_Z990)
3737 /* If the following function returns TRUE, we will use the the DFA
3741 s390_use_dfa_pipeline_interface (void)
3743 if (s390_tune == PROCESSOR_2064_Z900
3744 || s390_tune == PROCESSOR_2084_Z990)
3751 s390_first_cycle_multipass_dfa_lookahead (void)
3753 return s390_use_dfa_pipeline_interface () ? 4 : 0;
3756 /* Called after issuing each insn.
3757 Triggers default sort algorithm to better slot instructions. */
3760 s390_sched_reorder2 (FILE *dump ATTRIBUTE_UNUSED,
3761 int sched_verbose ATTRIBUTE_UNUSED,
3762 rtx *ready ATTRIBUTE_UNUSED,
3763 int *pn_ready ATTRIBUTE_UNUSED,
3764 int clock_var ATTRIBUTE_UNUSED)
3766 return s390_issue_rate();
3770 /* Split all branches that exceed the maximum distance.
3771 Returns true if this created a new literal pool entry.
3773 Code generated by this routine is allowed to use
3774 TEMP_REG as temporary scratch register. If this is
3775 done, TEMP_USED is set to true. */
3778 s390_split_branches (rtx temp_reg, bool *temp_used)
3780 int new_literal = 0;
3781 rtx insn, pat, tmp, target;
3784 /* We need correct insn addresses. */
3786 shorten_branches (get_insns ());
3788 /* Find all branches that exceed 64KB, and split them. */
3790 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3792 if (GET_CODE (insn) != JUMP_INSN)
3795 pat = PATTERN (insn);
3796 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3797 pat = XVECEXP (pat, 0, 0);
3798 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
3801 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3803 label = &SET_SRC (pat);
3805 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3807 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3808 label = &XEXP (SET_SRC (pat), 1);
3809 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3810 label = &XEXP (SET_SRC (pat), 2);
3817 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
3824 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3825 INSN_ADDRESSES_NEW (tmp, -1);
3832 tmp = force_const_mem (Pmode, *label);
3833 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3834 INSN_ADDRESSES_NEW (tmp, -1);
3841 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
3842 UNSPEC_LTREL_OFFSET);
3843 target = gen_rtx_CONST (Pmode, target);
3844 target = force_const_mem (Pmode, target);
3845 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
3846 INSN_ADDRESSES_NEW (tmp, -1);
3848 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (target, 0)),
3850 target = gen_rtx_PLUS (Pmode, temp_reg, target);
3853 if (!validate_change (insn, label, target, 0))
3861 /* Find a literal pool symbol referenced in RTX X, and store
3862 it at REF. Will abort if X contains references to more than
3863 one such pool symbol; multiple references to the same symbol
3864 are allowed, however.
3866 The rtx pointed to by REF must be initialized to NULL_RTX
3867 by the caller before calling this routine. */
3870 find_constant_pool_ref (rtx x, rtx *ref)
3875 /* Ignore LTREL_BASE references. */
3876 if (GET_CODE (x) == UNSPEC
3877 && XINT (x, 1) == UNSPEC_LTREL_BASE)
3880 if (GET_CODE (x) == SYMBOL_REF
3881 && CONSTANT_POOL_ADDRESS_P (x))
3883 if (*ref == NULL_RTX)
3889 fmt = GET_RTX_FORMAT (GET_CODE (x));
3890 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3894 find_constant_pool_ref (XEXP (x, i), ref);
3896 else if (fmt[i] == 'E')
3898 for (j = 0; j < XVECLEN (x, i); j++)
3899 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3904 /* Replace every reference to the literal pool symbol REF
3905 in X by the address ADDR. Fix up MEMs as required. */
3908 replace_constant_pool_ref (rtx *x, rtx ref, rtx addr)
3916 /* Literal pool references can only occur inside a MEM ... */
3917 if (GET_CODE (*x) == MEM)
3919 rtx memref = XEXP (*x, 0);
3923 *x = replace_equiv_address (*x, addr);
3927 if (GET_CODE (memref) == CONST
3928 && GET_CODE (XEXP (memref, 0)) == PLUS
3929 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3930 && XEXP (XEXP (memref, 0), 0) == ref)
3932 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3933 *x = replace_equiv_address (*x, plus_constant (addr, off));
3938 /* ... or a load-address type pattern. */
3939 if (GET_CODE (*x) == SET)
3941 rtx addrref = SET_SRC (*x);
3945 SET_SRC (*x) = addr;
3949 if (GET_CODE (addrref) == CONST
3950 && GET_CODE (XEXP (addrref, 0)) == PLUS
3951 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3952 && XEXP (XEXP (addrref, 0), 0) == ref)
3954 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3955 SET_SRC (*x) = plus_constant (addr, off);
3960 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3961 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3965 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3967 else if (fmt[i] == 'E')
3969 for (j = 0; j < XVECLEN (*x, i); j++)
3970 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3975 /* Check whether X contains an UNSPEC_LTREL_BASE.
3976 Return its constant pool symbol if found, NULL_RTX otherwise. */
3979 find_ltrel_base (rtx x)
3984 if (GET_CODE (x) == UNSPEC
3985 && XINT (x, 1) == UNSPEC_LTREL_BASE)
3986 return XVECEXP (x, 0, 0);
3988 fmt = GET_RTX_FORMAT (GET_CODE (x));
3989 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3993 rtx fnd = find_ltrel_base (XEXP (x, i));
3997 else if (fmt[i] == 'E')
3999 for (j = 0; j < XVECLEN (x, i); j++)
4001 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4011 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE. */
4014 replace_ltrel_base (rtx *x, rtx base)
4019 if (GET_CODE (*x) == UNSPEC
4020 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4026 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4027 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4031 replace_ltrel_base (&XEXP (*x, i), base);
4033 else if (fmt[i] == 'E')
4035 for (j = 0; j < XVECLEN (*x, i); j++)
4036 replace_ltrel_base (&XVECEXP (*x, i, j), base);
4042 /* We keep a list of constants which we have to add to internal
4043 constant tables in the middle of large functions. */
4045 #define NR_C_MODES 7
4046 enum machine_mode constant_modes[NR_C_MODES] =
4057 struct constant *next;
4062 struct constant_pool
4064 struct constant_pool *next;
4069 struct constant *constants[NR_C_MODES];
4074 static struct constant_pool * s390_chunkify_start (void);
4075 static void s390_chunkify_finish (struct constant_pool *);
4076 static void s390_chunkify_cancel (struct constant_pool *);
4078 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4079 static void s390_end_pool (struct constant_pool *, rtx);
4080 static void s390_add_pool_insn (struct constant_pool *, rtx);
4081 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4082 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4083 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4084 static rtx s390_dump_pool (struct constant_pool *);
4085 static void s390_free_pool (struct constant_pool *);
4087 /* Create new constant pool covering instructions starting at INSN
4088 and chain it to the end of POOL_LIST. */
4090 static struct constant_pool *
4091 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4093 struct constant_pool *pool, **prev;
4096 pool = (struct constant_pool *) xmalloc (sizeof *pool);
4098 for (i = 0; i < NR_C_MODES; i++)
4099 pool->constants[i] = NULL;
4101 pool->label = gen_label_rtx ();
4102 pool->first_insn = insn;
4103 pool->pool_insn = NULL_RTX;
4104 pool->insns = BITMAP_XMALLOC ();
4107 for (prev = pool_list; *prev; prev = &(*prev)->next)
4114 /* End range of instructions covered by POOL at INSN and emit
4115 placeholder insn representing the pool. */
4118 s390_end_pool (struct constant_pool *pool, rtx insn)
4120 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4123 insn = get_last_insn ();
4125 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4126 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4129 /* Add INSN to the list of insns covered by POOL. */
4132 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4134 bitmap_set_bit (pool->insns, INSN_UID (insn));
4137 /* Return pool out of POOL_LIST that covers INSN. */
4139 static struct constant_pool *
4140 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4142 struct constant_pool *pool;
4144 for (pool = pool_list; pool; pool = pool->next)
4145 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4151 /* Add constant VAL of mode MODE to the constant pool POOL. */
4154 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4159 for (i = 0; i < NR_C_MODES; i++)
4160 if (constant_modes[i] == mode)
4162 if (i == NR_C_MODES)
4165 for (c = pool->constants[i]; c != NULL; c = c->next)
4166 if (rtx_equal_p (val, c->value))
4171 c = (struct constant *) xmalloc (sizeof *c);
4173 c->label = gen_label_rtx ();
4174 c->next = pool->constants[i];
4175 pool->constants[i] = c;
4176 pool->size += GET_MODE_SIZE (mode);
4180 /* Find constant VAL of mode MODE in the constant pool POOL.
4181 Return an RTX describing the distance from the start of
4182 the pool to the location of the new constant. */
4185 s390_find_constant (struct constant_pool *pool, rtx val,
4186 enum machine_mode mode)
4192 for (i = 0; i < NR_C_MODES; i++)
4193 if (constant_modes[i] == mode)
4195 if (i == NR_C_MODES)
4198 for (c = pool->constants[i]; c != NULL; c = c->next)
4199 if (rtx_equal_p (val, c->value))
4205 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4206 gen_rtx_LABEL_REF (Pmode, pool->label));
4207 offset = gen_rtx_CONST (Pmode, offset);
4211 /* Dump out the constants in POOL. */
4214 s390_dump_pool (struct constant_pool *pool)
4220 /* Pool start insn switches to proper section
4221 and guarantees necessary alignment. */
4223 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4225 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4226 INSN_ADDRESSES_NEW (insn, -1);
4228 insn = emit_label_after (pool->label, insn);
4229 INSN_ADDRESSES_NEW (insn, -1);
4231 /* Dump constants in descending alignment requirement order,
4232 ensuring proper alignment for every constant. */
4233 for (i = 0; i < NR_C_MODES; i++)
4234 for (c = pool->constants[i]; c; c = c->next)
4236 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4237 rtx value = c->value;
4238 if (GET_CODE (value) == CONST
4239 && GET_CODE (XEXP (value, 0)) == UNSPEC
4240 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
4241 && XVECLEN (XEXP (value, 0), 0) == 1)
4243 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4244 gen_rtx_LABEL_REF (VOIDmode, pool->label));
4245 value = gen_rtx_CONST (VOIDmode, value);
4248 insn = emit_label_after (c->label, insn);
4249 INSN_ADDRESSES_NEW (insn, -1);
4251 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
4252 gen_rtvec (1, value),
4253 UNSPECV_POOL_ENTRY);
4254 insn = emit_insn_after (value, insn);
4255 INSN_ADDRESSES_NEW (insn, -1);
4258 /* Pool end insn switches back to previous section
4259 and guarantees necessary alignment. */
4261 insn = emit_insn_after (gen_pool_end_64 (), insn);
4263 insn = emit_insn_after (gen_pool_end_31 (), insn);
4264 INSN_ADDRESSES_NEW (insn, -1);
4266 insn = emit_barrier_after (insn);
4267 INSN_ADDRESSES_NEW (insn, -1);
4269 /* Remove placeholder insn. */
4270 remove_insn (pool->pool_insn);
4275 /* Free all memory used by POOL. */
4278 s390_free_pool (struct constant_pool *pool)
4282 for (i = 0; i < NR_C_MODES; i++)
4284 struct constant *c = pool->constants[i];
4287 struct constant *next = c->next;
4293 BITMAP_XFREE (pool->insns);
4298 /* Chunkify the literal pool if required. */
4300 #define S390_POOL_CHUNK_MIN 0xc00
4301 #define S390_POOL_CHUNK_MAX 0xe00
4303 static struct constant_pool *
4304 s390_chunkify_start (void)
4306 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4308 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
4311 rtx pending_ltrel = NULL_RTX;
4314 rtx (*gen_reload_base) (rtx, rtx) =
4315 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
4318 /* Do we need to chunkify the literal pool? */
4320 if (get_pool_size () < S390_POOL_CHUNK_MAX)
4323 /* We need correct insn addresses. */
4325 shorten_branches (get_insns ());
4327 /* Scan all insns and move literals to pool chunks. */
4329 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4331 /* Check for pending LTREL_BASE. */
4334 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
4337 if (ltrel_base == pending_ltrel)
4338 pending_ltrel = NULL_RTX;
4344 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4346 rtx pool_ref = NULL_RTX;
4347 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4350 rtx constant = get_pool_constant (pool_ref);
4351 enum machine_mode mode = get_pool_mode (pool_ref);
4354 curr_pool = s390_start_pool (&pool_list, insn);
4356 s390_add_constant (curr_pool, constant, mode);
4357 s390_add_pool_insn (curr_pool, insn);
4359 /* Don't split the pool chunk between a LTREL_OFFSET load
4360 and the corresponding LTREL_BASE. */
4361 if (GET_CODE (constant) == CONST
4362 && GET_CODE (XEXP (constant, 0)) == UNSPEC
4363 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
4367 pending_ltrel = pool_ref;
4372 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4375 s390_add_pool_insn (curr_pool, insn);
4376 /* An LTREL_BASE must follow within the same basic block. */
4382 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4383 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4388 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4391 s390_end_pool (curr_pool, NULL_RTX);
4396 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4397 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4400 /* We will later have to insert base register reload insns.
4401 Those will have an effect on code size, which we need to
4402 consider here. This calculation makes rather pessimistic
4403 worst-case assumptions. */
4404 if (GET_CODE (insn) == CODE_LABEL)
4407 if (chunk_size < S390_POOL_CHUNK_MIN
4408 && curr_pool->size < S390_POOL_CHUNK_MIN)
4411 /* Pool chunks can only be inserted after BARRIERs ... */
4412 if (GET_CODE (insn) == BARRIER)
4414 s390_end_pool (curr_pool, insn);
4419 /* ... so if we don't find one in time, create one. */
4420 else if ((chunk_size > S390_POOL_CHUNK_MAX
4421 || curr_pool->size > S390_POOL_CHUNK_MAX))
4423 rtx label, jump, barrier;
4425 /* We can insert the barrier only after a 'real' insn. */
4426 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4428 if (get_attr_length (insn) == 0)
4431 /* Don't separate LTREL_BASE from the corresponding
4432 LTREL_OFFSET load. */
4436 label = gen_label_rtx ();
4437 jump = emit_jump_insn_after (gen_jump (label), insn);
4438 barrier = emit_barrier_after (jump);
4439 insn = emit_label_after (label, barrier);
4440 JUMP_LABEL (jump) = label;
4441 LABEL_NUSES (label) = 1;
4443 INSN_ADDRESSES_NEW (jump, -1);
4444 INSN_ADDRESSES_NEW (barrier, -1);
4445 INSN_ADDRESSES_NEW (insn, -1);
4447 s390_end_pool (curr_pool, barrier);
4455 s390_end_pool (curr_pool, NULL_RTX);
4460 /* Find all labels that are branched into
4461 from an insn belonging to a different chunk. */
4463 far_labels = BITMAP_XMALLOC ();
4465 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4467 /* Labels marked with LABEL_PRESERVE_P can be target
4468 of non-local jumps, so we have to mark them.
4469 The same holds for named labels.
4471 Don't do that, however, if it is the label before
4474 if (GET_CODE (insn) == CODE_LABEL
4475 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
4477 rtx vec_insn = next_real_insn (insn);
4478 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4479 PATTERN (vec_insn) : NULL_RTX;
4481 || !(GET_CODE (vec_pat) == ADDR_VEC
4482 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4483 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
4486 /* If we have a direct jump (conditional or unconditional)
4487 or a casesi jump, check all potential targets. */
4488 else if (GET_CODE (insn) == JUMP_INSN)
4490 rtx pat = PATTERN (insn);
4491 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4492 pat = XVECEXP (pat, 0, 0);
4494 if (GET_CODE (pat) == SET)
4496 rtx label = JUMP_LABEL (insn);
4499 if (s390_find_pool (pool_list, label)
4500 != s390_find_pool (pool_list, insn))
4501 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4504 else if (GET_CODE (pat) == PARALLEL
4505 && XVECLEN (pat, 0) == 2
4506 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
4507 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
4508 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
4510 /* Find the jump table used by this casesi jump. */
4511 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
4512 rtx vec_insn = next_real_insn (vec_label);
4513 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4514 PATTERN (vec_insn) : NULL_RTX;
4516 && (GET_CODE (vec_pat) == ADDR_VEC
4517 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4519 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
4521 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
4523 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
4525 if (s390_find_pool (pool_list, label)
4526 != s390_find_pool (pool_list, insn))
4527 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4534 /* Insert base register reload insns before every pool. */
4536 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4538 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
4539 rtx insn = curr_pool->first_insn;
4540 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
4543 /* Insert base register reload insns at every far label. */
4545 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4546 if (GET_CODE (insn) == CODE_LABEL
4547 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
4549 struct constant_pool *pool = s390_find_pool (pool_list, insn);
4552 rtx new_insn = gen_reload_base (base_reg, pool->label);
4553 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4558 BITMAP_XFREE (far_labels);
4561 /* Recompute insn addresses. */
4563 init_insn_lengths ();
4564 shorten_branches (get_insns ());
4569 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4570 After we have decided to use this list, finish implementing
4571 all changes to the current function as required. */
4574 s390_chunkify_finish (struct constant_pool *pool_list)
4576 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4577 struct constant_pool *curr_pool = NULL;
4581 /* Replace all literal pool references. */
4583 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4586 replace_ltrel_base (&PATTERN (insn), base_reg);
4588 curr_pool = s390_find_pool (pool_list, insn);
4592 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4594 rtx addr, pool_ref = NULL_RTX;
4595 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4598 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4599 get_pool_mode (pool_ref));
4600 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4601 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4602 INSN_CODE (insn) = -1;
4607 /* Dump out all literal pools. */
4609 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4610 s390_dump_pool (curr_pool);
4612 /* Free pool list. */
4616 struct constant_pool *next = pool_list->next;
4617 s390_free_pool (pool_list);
4622 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4623 We have decided we cannot use this list, so revert all changes
4624 to the current function that were done by s390_chunkify_start. */
4627 s390_chunkify_cancel (struct constant_pool *pool_list)
4629 struct constant_pool *curr_pool = NULL;
4632 /* Remove all pool placeholder insns. */
4634 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4636 /* Did we insert an extra barrier? Remove it. */
4637 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4638 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4639 rtx label = NEXT_INSN (curr_pool->pool_insn);
4641 if (jump && GET_CODE (jump) == JUMP_INSN
4642 && barrier && GET_CODE (barrier) == BARRIER
4643 && label && GET_CODE (label) == CODE_LABEL
4644 && GET_CODE (PATTERN (jump)) == SET
4645 && SET_DEST (PATTERN (jump)) == pc_rtx
4646 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4647 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4650 remove_insn (barrier);
4651 remove_insn (label);
4654 remove_insn (curr_pool->pool_insn);
4657 /* Remove all base register reload insns. */
4659 for (insn = get_insns (); insn; )
4661 rtx next_insn = NEXT_INSN (insn);
4663 if (GET_CODE (insn) == INSN
4664 && GET_CODE (PATTERN (insn)) == SET
4665 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4666 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
4672 /* Free pool list. */
4676 struct constant_pool *next = pool_list->next;
4677 s390_free_pool (pool_list);
4683 /* Index of constant pool chunk that is currently being processed.
4684 Set to -1 before function output has started. */
4685 int s390_pool_count = -1;
4687 /* Number of elements of current constant pool. */
4688 int s390_nr_constants;
4690 /* Output main constant pool to stdio stream FILE. */
4693 s390_output_constant_pool (rtx start_label, rtx end_label)
4697 readonly_data_section ();
4698 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4699 targetm.asm_out.internal_label (asm_out_file, "L",
4700 CODE_LABEL_NUMBER (start_label));
4704 targetm.asm_out.internal_label (asm_out_file, "L",
4705 CODE_LABEL_NUMBER (start_label));
4706 ASM_OUTPUT_ALIGN (asm_out_file, 2);
4709 s390_pool_count = 0;
4710 output_constant_pool (current_function_name, current_function_decl);
4711 s390_pool_count = -1;
4713 function_section (current_function_decl);
4716 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4717 targetm.asm_out.internal_label (asm_out_file, "L",
4718 CODE_LABEL_NUMBER (end_label));
4722 /* Output to FILE the constant pool entry EXP in mode MODE
4723 with alignment ALIGN. */
4726 s390_output_pool_entry (FILE *file, rtx exp, enum machine_mode mode,
4731 switch (GET_MODE_CLASS (mode))
4734 if (GET_CODE (exp) != CONST_DOUBLE)
4737 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
4738 assemble_real (r, mode, align);
4742 if (GET_CODE (exp) == CONST
4743 || GET_CODE (exp) == SYMBOL_REF
4744 || GET_CODE (exp) == LABEL_REF)
4746 fputs (integer_asm_op (GET_MODE_SIZE (mode), TRUE), file);
4747 s390_output_symbolic_const (file, exp);
4752 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
4762 /* Rework the prolog/epilog to avoid saving/restoring
4763 registers unnecessarily. If TEMP_REGNO is nonnegative,
4764 it specifies the number of a caller-saved register used
4765 as temporary scratch register by code emitted during
4766 machine dependent reorg. */
4769 s390_optimize_prolog (int temp_regno)
4771 int save_first, save_last, restore_first, restore_last;
4773 rtx insn, new_insn, next_insn;
4775 /* Recompute regs_ever_live data for special registers. */
4776 regs_ever_live[BASE_REGISTER] = 0;
4777 regs_ever_live[RETURN_REGNUM] = 0;
4778 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4780 /* If there is (possibly) any pool entry, we need to
4781 load the base register.
4782 ??? FIXME: this should be more precise. */
4783 if (get_pool_size ())
4784 regs_ever_live[BASE_REGISTER] = 1;
4786 /* In non-leaf functions, the prolog/epilog code relies
4787 on RETURN_REGNUM being saved in any case. We also need
4788 to save the return register if __builtin_return_address (0)
4789 was used in the current function. */
4790 if (!current_function_is_leaf
4791 || cfun->machine->save_return_addr_p)
4792 regs_ever_live[RETURN_REGNUM] = 1;
4794 /* We need to save/restore the temporary register. */
4795 if (temp_regno >= 0)
4796 regs_ever_live[temp_regno] = 1;
4799 /* Find first and last gpr to be saved. */
4801 for (i = 6; i < 16; i++)
4802 if (regs_ever_live[i])
4804 || i == STACK_POINTER_REGNUM
4805 || i == RETURN_REGNUM
4806 || i == BASE_REGISTER
4807 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
4810 for (j = 15; j > i; j--)
4811 if (regs_ever_live[j])
4813 || j == STACK_POINTER_REGNUM
4814 || j == RETURN_REGNUM
4815 || j == BASE_REGISTER
4816 || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
4821 /* Nothing to save/restore. */
4822 save_first = restore_first = -1;
4823 save_last = restore_last = -1;
4827 /* Save/restore from i to j. */
4828 save_first = restore_first = i;
4829 save_last = restore_last = j;
4832 /* Varargs functions need to save gprs 2 to 6. */
4833 if (current_function_stdarg)
4841 /* If all special registers are in fact used, there's nothing we
4842 can do, so no point in walking the insn list. */
4843 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4844 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4848 /* Search for prolog/epilog insns and replace them. */
4850 for (insn = get_insns (); insn; insn = next_insn)
4852 int first, last, off;
4853 rtx set, base, offset;
4855 next_insn = NEXT_INSN (insn);
4857 if (GET_CODE (insn) != INSN)
4859 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4862 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4864 set = XVECEXP (PATTERN (insn), 0, 0);
4865 first = REGNO (SET_SRC (set));
4866 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4867 offset = const0_rtx;
4868 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4869 off = INTVAL (offset) - first * UNITS_PER_WORD;
4871 if (GET_CODE (base) != REG || off < 0)
4873 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4875 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4878 if (save_first != -1)
4880 new_insn = save_gprs (base, off, save_first, save_last);
4881 new_insn = emit_insn_before (new_insn, insn);
4882 INSN_ADDRESSES_NEW (new_insn, -1);
4888 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4890 set = XVECEXP (PATTERN (insn), 0, 0);
4891 first = REGNO (SET_DEST (set));
4892 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4893 offset = const0_rtx;
4894 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4895 off = INTVAL (offset) - first * UNITS_PER_WORD;
4897 if (GET_CODE (base) != REG || off < 0)
4899 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4901 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4904 if (restore_first != -1)
4906 new_insn = restore_gprs (base, off, restore_first, restore_last);
4907 new_insn = emit_insn_before (new_insn, insn);
4908 INSN_ADDRESSES_NEW (new_insn, -1);
4916 /* Perform machine-dependent processing. */
4921 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4924 /* Make sure all splits have been performed; splits after
4925 machine_dependent_reorg might confuse insn length counts. */
4926 split_all_insns_noflow ();
4929 /* There are two problematic situations we need to correct:
4931 - the literal pool might be > 4096 bytes in size, so that
4932 some of its elements cannot be directly accessed
4934 - a branch target might be > 64K away from the branch, so that
4935 it is not possible to use a PC-relative instruction.
4937 To fix those, we split the single literal pool into multiple
4938 pool chunks, reloading the pool base register at various
4939 points throughout the function to ensure it always points to
4940 the pool chunk the following code expects, and / or replace
4941 PC-relative branches by absolute branches.
4943 However, the two problems are interdependent: splitting the
4944 literal pool can move a branch further away from its target,
4945 causing the 64K limit to overflow, and on the other hand,
4946 replacing a PC-relative branch by an absolute branch means
4947 we need to put the branch target address into the literal
4948 pool, possibly causing it to overflow.
4950 So, we loop trying to fix up both problems until we manage
4951 to satisfy both conditions at the same time. Note that the
4952 loop is guaranteed to terminate as every pass of the loop
4953 strictly decreases the total number of PC-relative branches
4954 in the function. (This is not completely true as there
4955 might be branch-over-pool insns introduced by chunkify_start.
4956 Those never need to be split however.) */
4960 struct constant_pool *pool_list;
4962 /* Try to chunkify the literal pool. */
4963 pool_list = s390_chunkify_start ();
4965 /* Split out-of-range branches. If this has created new
4966 literal pool entries, cancel current chunk list and
4968 if (s390_split_branches (temp_reg, &temp_used))
4971 s390_chunkify_cancel (pool_list);
4976 /* If we made it up to here, both conditions are satisfied.
4977 Finish up pool chunkification if required. */
4979 s390_chunkify_finish (pool_list);
4984 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
4988 /* Return an RTL expression representing the value of the return address
4989 for the frame COUNT steps up from the current frame. FRAME is the
4990 frame pointer of that frame. */
4993 s390_return_addr_rtx (int count, rtx frame)
4997 /* For the current frame, we need to make sure the initial
4998 value of RETURN_REGNUM is actually saved. */
5001 cfun->machine->save_return_addr_p = true;
5003 /* To retrieve the return address we read the stack slot where the
5004 corresponding RETURN_REGNUM value was saved. */
5006 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5007 addr = memory_address (Pmode, addr);
5008 return gen_rtx_MEM (Pmode, addr);
5011 /* Find first call clobbered register unsused in a function.
5012 This could be used as base register in a leaf function
5013 or for holding the return address before epilogue. */
5016 find_unused_clobbered_reg (void)
5019 for (i = 0; i < 6; i++)
5020 if (!regs_ever_live[i])
5025 /* Fill FRAME with info about frame of current function. */
5028 s390_frame_info (void)
5030 char gprs_ever_live[16];
5032 HOST_WIDE_INT fsize = get_frame_size ();
5034 if (fsize > 0x7fff0000)
5035 fatal_error ("Total size of local variables exceeds architecture limit.");
5037 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5038 cfun->machine->save_fprs_p = 0;
5040 for (i = 24; i < 32; i++)
5041 if (regs_ever_live[i] && !global_regs[i])
5043 cfun->machine->save_fprs_p = 1;
5047 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
5049 /* Does function need to setup frame and save area. */
5051 if (! current_function_is_leaf
5052 || cfun->machine->frame_size > 0
5053 || current_function_calls_alloca
5054 || current_function_stdarg)
5055 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5057 /* Find first and last gpr to be saved. Note that at this point,
5058 we assume the return register and the base register always
5059 need to be saved. This is done because the usage of these
5060 register might change even after the prolog was emitted.
5061 If it turns out later that we really don't need them, the
5062 prolog/epilog code is modified again. */
5064 for (i = 0; i < 16; i++)
5065 gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];
5068 gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
5069 regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
5070 gprs_ever_live[BASE_REGISTER] = 1;
5071 gprs_ever_live[RETURN_REGNUM] = 1;
5072 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5074 for (i = 6; i < 16; i++)
5075 if (gprs_ever_live[i])
5078 for (j = 15; j > i; j--)
5079 if (gprs_ever_live[j])
5083 /* Save / Restore from gpr i to j. */
5084 cfun->machine->first_save_gpr = i;
5085 cfun->machine->first_restore_gpr = i;
5086 cfun->machine->last_save_gpr = j;
5088 /* Varargs functions need to save gprs 2 to 6. */
5089 if (current_function_stdarg)
5090 cfun->machine->first_save_gpr = 2;
5093 /* Return offset between argument pointer and frame pointer
5094 initially after prologue. */
5097 s390_arg_frame_offset (void)
5099 HOST_WIDE_INT fsize = get_frame_size ();
5102 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5105 for (i = 24; i < 32; i++)
5106 if (regs_ever_live[i] && !global_regs[i])
5112 fsize = fsize + save_fprs_p * 64;
5114 /* Does function need to setup frame and save area. */
5116 if (! current_function_is_leaf
5118 || current_function_calls_alloca
5119 || current_function_stdarg)
5120 fsize += STARTING_FRAME_OFFSET;
5121 return fsize + STACK_POINTER_OFFSET;
5124 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5125 to register BASE. Return generated insn. */
5128 save_fpr (rtx base, int offset, int regnum)
5131 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5132 set_mem_alias_set (addr, s390_sr_alias_set);
5134 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5137 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5138 to register BASE. Return generated insn. */
5141 restore_fpr (rtx base, int offset, int regnum)
5144 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5145 set_mem_alias_set (addr, s390_sr_alias_set);
5147 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5150 /* Generate insn to save registers FIRST to LAST into
5151 the register save area located at offset OFFSET
5152 relative to register BASE. */
5155 save_gprs (rtx base, int offset, int first, int last)
5157 rtx addr, insn, note;
5160 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5161 addr = gen_rtx_MEM (Pmode, addr);
5162 set_mem_alias_set (addr, s390_sr_alias_set);
5164 /* Special-case single register. */
5168 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
5170 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
5172 RTX_FRAME_RELATED_P (insn) = 1;
5177 insn = gen_store_multiple (addr,
5178 gen_rtx_REG (Pmode, first),
5179 GEN_INT (last - first + 1));
5182 /* We need to set the FRAME_RELATED flag on all SETs
5183 inside the store-multiple pattern.
5185 However, we must not emit DWARF records for registers 2..5
5186 if they are stored for use by variable arguments ...
5188 ??? Unfortunately, it is not enough to simply not the the
5189 FRAME_RELATED flags for those SETs, because the first SET
5190 of the PARALLEL is always treated as if it had the flag
5191 set, even if it does not. Therefore we emit a new pattern
5192 without those registers as REG_FRAME_RELATED_EXPR note. */
5196 rtx pat = PATTERN (insn);
5198 for (i = 0; i < XVECLEN (pat, 0); i++)
5199 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5200 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5202 RTX_FRAME_RELATED_P (insn) = 1;
5206 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5207 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5208 gen_rtx_REG (Pmode, 6),
5209 GEN_INT (last - 6 + 1));
5210 note = PATTERN (note);
5213 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5214 note, REG_NOTES (insn));
5216 for (i = 0; i < XVECLEN (note, 0); i++)
5217 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5218 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
5220 RTX_FRAME_RELATED_P (insn) = 1;
5226 /* Generate insn to restore registers FIRST to LAST from
5227 the register save area located at offset OFFSET
5228 relative to register BASE. */
5231 restore_gprs (rtx base, int offset, int first, int last)
5235 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5236 addr = gen_rtx_MEM (Pmode, addr);
5237 set_mem_alias_set (addr, s390_sr_alias_set);
5239 /* Special-case single register. */
5243 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5245 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5250 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5252 GEN_INT (last - first + 1));
5256 /* Emit code to load the GOT register. If MAYBE_DEAD is true,
5257 annotate generated insns with REG_MAYBE_DEAD notes. */
5259 static GTY(()) rtx got_symbol;
5261 s390_load_got (int maybe_dead)
5265 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
5266 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
5271 rtx insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
5273 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5280 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
5281 UNSPEC_LTREL_OFFSET);
5282 offset = gen_rtx_CONST (Pmode, offset);
5283 offset = force_const_mem (Pmode, offset);
5285 insn = emit_move_insn (pic_offset_table_rtx, offset);
5287 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5290 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
5292 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
5294 insn = emit_move_insn (pic_offset_table_rtx, offset);
5296 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5301 /* Expand the prologue into a bunch of separate insns. */
5304 s390_emit_prologue (void)
5308 rtx pool_start_label, pool_end_label;
5311 /* Compute frame_info. */
5315 /* Choose best register to use for temp use within prologue.
5316 See below for why TPF must use the register 1. */
5318 if (!current_function_is_leaf
5320 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5322 temp_reg = gen_rtx_REG (Pmode, 1);
5324 /* Save call saved gprs. */
5326 insn = save_gprs (stack_pointer_rtx, 0,
5327 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
5330 /* Dump constant pool and set constant pool register. */
5332 pool_start_label = gen_label_rtx();
5333 pool_end_label = gen_label_rtx();
5334 cfun->machine->literal_pool_label = pool_start_label;
5337 insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
5338 pool_start_label, pool_end_label));
5340 insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
5341 pool_start_label, pool_end_label));
5343 /* Save fprs for variable args. */
5345 if (current_function_stdarg)
5347 /* Save fpr 0 and 2. */
5349 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
5350 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
5354 /* Save fpr 4 and 6. */
5356 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5357 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5361 /* Save fprs 4 and 6 if used (31 bit ABI). */
5365 /* Save fpr 4 and 6. */
5366 if (regs_ever_live[18] && !global_regs[18])
5368 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5369 RTX_FRAME_RELATED_P (insn) = 1;
5371 if (regs_ever_live[19] && !global_regs[19])
5373 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5374 RTX_FRAME_RELATED_P (insn) = 1;
5378 /* Decrement stack pointer. */
5380 if (cfun->machine->frame_size > 0)
5382 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5384 /* Save incoming stack pointer into temp reg. */
5386 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
5388 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
5391 /* Subtract frame size from stack pointer. */
5393 if (DISP_IN_RANGE (INTVAL (frame_off)))
5395 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5396 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5398 insn = emit_insn (insn);
5402 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5403 frame_off = force_const_mem (Pmode, frame_off);
5405 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
5408 RTX_FRAME_RELATED_P (insn) = 1;
5410 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5411 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5412 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5413 GEN_INT (-cfun->machine->frame_size))),
5416 /* Set backchain. */
5418 if (TARGET_BACKCHAIN)
5420 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
5421 set_mem_alias_set (addr, s390_sr_alias_set);
5422 insn = emit_insn (gen_move_insn (addr, temp_reg));
5425 /* If we support asynchronous exceptions (e.g. for Java),
5426 we need to make sure the backchain pointer is set up
5427 before any possibly trapping memory access. */
5429 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
5431 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
5432 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
5436 /* Save fprs 8 - 15 (64 bit ABI). */
5438 if (cfun->machine->save_fprs_p)
5440 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
5442 for (i = 24; i < 32; i++)
5443 if (regs_ever_live[i] && !global_regs[i])
5445 rtx addr = plus_constant (stack_pointer_rtx,
5446 cfun->machine->frame_size - 64 + (i-24)*8);
5448 insn = save_fpr (temp_reg, (i-24)*8, i);
5449 RTX_FRAME_RELATED_P (insn) = 1;
5451 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5452 gen_rtx_SET (VOIDmode,
5453 gen_rtx_MEM (DFmode, addr),
5454 gen_rtx_REG (DFmode, i)),
5459 /* Set frame pointer, if needed. */
5461 if (frame_pointer_needed)
5463 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
5464 RTX_FRAME_RELATED_P (insn) = 1;
5467 /* Set up got pointer, if needed. */
5469 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5470 s390_load_got(true);
5474 /* Generate a BAS instruction to serve as a function
5475 entry intercept to facilitate the use of tracing
5476 algorithms located at the branch target.
5478 This must use register 1. */
5483 addr = GEN_INT (0xfe0);
5484 unkn = CONST0_RTX (SImode);
5485 link = gen_rtx_REG (Pmode, 1);
5487 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
5489 /* Emit a blockage here so that all code
5490 lies between the profiling mechanisms. */
5491 emit_insn (gen_blockage ());
5495 /* Expand the epilogue into a bunch of separate insns. */
5498 s390_emit_epilogue (void)
5500 rtx frame_pointer, return_reg;
5501 int area_bottom, area_top, offset = 0;
5507 /* Generate a BAS instruction to serve as a function
5508 entry intercept to facilitate the use of tracing
5509 algorithms located at the branch target.
5511 This must use register 1. */
5517 addr = GEN_INT (0xfe6);
5518 unkn = CONST0_RTX (SImode);
5519 link = gen_rtx_REG (Pmode, 1);
5521 /* Emit a blockage here so that all code
5522 lies between the profiling mechanisms. */
5523 emit_insn (gen_blockage ());
5525 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
5528 /* Check whether to use frame or stack pointer for restore. */
5530 frame_pointer = frame_pointer_needed ?
5531 hard_frame_pointer_rtx : stack_pointer_rtx;
5533 /* Compute which parts of the save area we need to access. */
5535 if (cfun->machine->first_restore_gpr != -1)
5537 area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
5538 area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
5542 area_bottom = INT_MAX;
5548 if (cfun->machine->save_fprs_p)
5550 if (area_bottom > -64)
5558 if (regs_ever_live[18] && !global_regs[18])
5560 if (area_bottom > STACK_POINTER_OFFSET - 16)
5561 area_bottom = STACK_POINTER_OFFSET - 16;
5562 if (area_top < STACK_POINTER_OFFSET - 8)
5563 area_top = STACK_POINTER_OFFSET - 8;
5565 if (regs_ever_live[19] && !global_regs[19])
5567 if (area_bottom > STACK_POINTER_OFFSET - 8)
5568 area_bottom = STACK_POINTER_OFFSET - 8;
5569 if (area_top < STACK_POINTER_OFFSET)
5570 area_top = STACK_POINTER_OFFSET;
5574 /* Check whether we can access the register save area.
5575 If not, increment the frame pointer as required. */
5577 if (area_top <= area_bottom)
5579 /* Nothing to restore. */
5581 else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
5582 && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
5584 /* Area is in range. */
5585 offset = cfun->machine->frame_size;
5589 rtx insn, frame_off;
5591 offset = area_bottom < 0 ? -area_bottom : 0;
5592 frame_off = GEN_INT (cfun->machine->frame_size - offset);
5594 if (DISP_IN_RANGE (INTVAL (frame_off)))
5596 insn = gen_rtx_SET (VOIDmode, frame_pointer,
5597 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
5598 insn = emit_insn (insn);
5602 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5603 frame_off = force_const_mem (Pmode, frame_off);
5605 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
5609 /* Restore call saved fprs. */
5615 if (cfun->machine->save_fprs_p)
5616 for (i = 24; i < 32; i++)
5617 if (regs_ever_live[i] && !global_regs[i])
5618 restore_fpr (frame_pointer,
5619 offset - 64 + (i-24) * 8, i);
5623 if (regs_ever_live[18] && !global_regs[18])
5624 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5625 if (regs_ever_live[19] && !global_regs[19])
5626 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
5629 /* Return register. */
5631 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5633 /* Restore call saved gprs. */
5635 if (cfun->machine->first_restore_gpr != -1)
5640 /* Check for global register and save them
5641 to stack location from where they get restored. */
5643 for (i = cfun->machine->first_restore_gpr;
5644 i <= cfun->machine->last_save_gpr;
5647 /* These registers are special and need to be
5648 restored in any case. */
5649 if (i == STACK_POINTER_REGNUM
5650 || i == RETURN_REGNUM
5651 || i == BASE_REGISTER
5652 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
5657 addr = plus_constant (frame_pointer,
5658 offset + i * UNITS_PER_WORD);
5659 addr = gen_rtx_MEM (Pmode, addr);
5660 set_mem_alias_set (addr, s390_sr_alias_set);
5661 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
5665 /* Fetch return address from stack before load multiple,
5666 this will do good for scheduling. */
5668 if (!current_function_is_leaf)
5670 int return_regnum = find_unused_clobbered_reg();
5673 return_reg = gen_rtx_REG (Pmode, return_regnum);
5675 addr = plus_constant (frame_pointer,
5676 offset + RETURN_REGNUM * UNITS_PER_WORD);
5677 addr = gen_rtx_MEM (Pmode, addr);
5678 set_mem_alias_set (addr, s390_sr_alias_set);
5679 emit_move_insn (return_reg, addr);
5682 /* ??? As references to the base register are not made
5683 explicit in insn RTX code, we have to add a barrier here
5684 to prevent incorrect scheduling. */
5686 emit_insn (gen_blockage());
5688 insn = restore_gprs (frame_pointer, offset,
5689 cfun->machine->first_restore_gpr,
5690 cfun->machine->last_save_gpr);
5694 /* Return to caller. */
5696 p = rtvec_alloc (2);
5698 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5699 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
5700 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
5704 /* Return the size in bytes of a function argument of
5705 type TYPE and/or mode MODE. At least one of TYPE or
5706 MODE must be specified. */
5709 s390_function_arg_size (enum machine_mode mode, tree type)
5712 return int_size_in_bytes (type);
5714 /* No type info available for some library calls ... */
5715 if (mode != BLKmode)
5716 return GET_MODE_SIZE (mode);
5718 /* If we have neither type nor mode, abort */
5722 /* Return true if a function argument of type TYPE and mode MODE
5723 is to be passed in a floating-point register, if available. */
5726 s390_function_arg_float (enum machine_mode mode, tree type)
5728 /* Soft-float changes the ABI: no floating-point registers are used. */
5729 if (TARGET_SOFT_FLOAT)
5732 /* No type info available for some library calls ... */
5734 return mode == SFmode || mode == DFmode;
5736 /* The ABI says that record types with a single member are treated
5737 just like that member would be. */
5738 while (TREE_CODE (type) == RECORD_TYPE)
5740 tree field, single = NULL_TREE;
5742 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5744 if (TREE_CODE (field) != FIELD_DECL)
5747 if (single == NULL_TREE)
5748 single = TREE_TYPE (field);
5753 if (single == NULL_TREE)
5759 return TREE_CODE (type) == REAL_TYPE;
5762 /* Return 1 if a function argument of type TYPE and mode MODE
5763 is to be passed by reference. The ABI specifies that only
5764 structures of size 1, 2, 4, or 8 bytes are passed by value,
5765 all other structures (and complex numbers) are passed by
5769 s390_function_arg_pass_by_reference (enum machine_mode mode, tree type)
5771 int size = s390_function_arg_size (mode, type);
5775 if (AGGREGATE_TYPE_P (type) &&
5776 size != 1 && size != 2 && size != 4 && size != 8
5777 && !s390_function_arg_float (mode, type))
5780 if (TREE_CODE (type) == COMPLEX_TYPE)
5787 /* Update the data in CUM to advance over an argument of mode MODE and
5788 data type TYPE. (TYPE is null for libcalls where that information
5789 may not be available.). The boolean NAMED specifies whether the
5790 argument is a named argument (as opposed to an unnamed argument
5791 matching an ellipsis). */
5794 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
5795 tree type, int named ATTRIBUTE_UNUSED)
5797 if (s390_function_arg_pass_by_reference (mode, type))
5801 else if (s390_function_arg_float (mode, type))
5807 int size = s390_function_arg_size (mode, type);
5808 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5812 /* Define where to put the arguments to a function.
5813 Value is zero to push the argument on the stack,
5814 or a hard register in which to store the argument.
5816 MODE is the argument's machine mode.
5817 TYPE is the data type of the argument (as a tree).
5818 This is null for libcalls where that information may
5820 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5821 the preceding args and about the function being called.
5822 NAMED is nonzero if this argument is a named parameter
5823 (otherwise it is an extra parameter matching an ellipsis).
5825 On S/390, we use general purpose registers 2 through 6 to
5826 pass integer, pointer, and certain structure arguments, and
5827 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5828 to pass floating point arguments. All remaining arguments
5829 are pushed to the stack. */
5832 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
5833 int named ATTRIBUTE_UNUSED)
5835 if (s390_function_arg_pass_by_reference (mode, type))
5838 if (s390_function_arg_float (mode, type))
5840 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5843 return gen_rtx (REG, mode, cum->fprs + 16);
5847 int size = s390_function_arg_size (mode, type);
5848 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
5850 if (cum->gprs + n_gprs > 5)
5853 return gen_rtx (REG, mode, cum->gprs + 2);
5858 /* Create and return the va_list datatype.
5860 On S/390, va_list is an array type equivalent to
5862 typedef struct __va_list_tag
5866 void *__overflow_arg_area;
5867 void *__reg_save_area;
5871 where __gpr and __fpr hold the number of general purpose
5872 or floating point arguments used up to now, respectively,
5873 __overflow_arg_area points to the stack location of the
5874 next argument passed on the stack, and __reg_save_area
5875 always points to the start of the register area in the
5876 call frame of the current function. The function prologue
5877 saves all registers used for argument passing into this
5878 area if the function uses variable arguments. */
5881 s390_build_va_list (void)
5883 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5885 record = lang_hooks.types.make_type (RECORD_TYPE);
5888 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5890 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5891 long_integer_type_node);
5892 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5893 long_integer_type_node);
5894 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5896 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
5899 DECL_FIELD_CONTEXT (f_gpr) = record;
5900 DECL_FIELD_CONTEXT (f_fpr) = record;
5901 DECL_FIELD_CONTEXT (f_ovf) = record;
5902 DECL_FIELD_CONTEXT (f_sav) = record;
5904 TREE_CHAIN (record) = type_decl;
5905 TYPE_NAME (record) = type_decl;
5906 TYPE_FIELDS (record) = f_gpr;
5907 TREE_CHAIN (f_gpr) = f_fpr;
5908 TREE_CHAIN (f_fpr) = f_ovf;
5909 TREE_CHAIN (f_ovf) = f_sav;
5911 layout_type (record);
5913 /* The correct type is an array type of one element. */
5914 return build_array_type (record, build_index_type (size_zero_node));
5917 /* Implement va_start by filling the va_list structure VALIST.
5918 STDARG_P is always true, and ignored.
5919 NEXTARG points to the first anonymous stack argument.
5921 The following global variables are used to initialize
5922 the va_list structure:
5924 current_function_args_info:
5925 holds number of gprs and fprs used for named arguments.
5926 current_function_arg_offset_rtx:
5927 holds the offset of the first anonymous stack argument
5928 (relative to the virtual arg pointer). */
5931 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
5933 HOST_WIDE_INT n_gpr, n_fpr;
5935 tree f_gpr, f_fpr, f_ovf, f_sav;
5936 tree gpr, fpr, ovf, sav, t;
5938 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5939 f_fpr = TREE_CHAIN (f_gpr);
5940 f_ovf = TREE_CHAIN (f_fpr);
5941 f_sav = TREE_CHAIN (f_ovf);
5943 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5944 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5945 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5946 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5947 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5949 /* Count number of gp and fp argument registers used. */
5951 n_gpr = current_function_args_info.gprs;
5952 n_fpr = current_function_args_info.fprs;
5954 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5955 TREE_SIDE_EFFECTS (t) = 1;
5956 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5958 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5959 TREE_SIDE_EFFECTS (t) = 1;
5960 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5962 /* Find the overflow area. */
5963 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5965 off = INTVAL (current_function_arg_offset_rtx);
5966 off = off < 0 ? 0 : off;
5967 if (TARGET_DEBUG_ARG)
5968 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5969 (int)n_gpr, (int)n_fpr, off);
5971 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5973 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5974 TREE_SIDE_EFFECTS (t) = 1;
5975 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5977 /* Find the register save area. */
5978 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5979 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5980 build_int_2 (-STACK_POINTER_OFFSET, -1));
5981 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5982 TREE_SIDE_EFFECTS (t) = 1;
5983 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5986 /* Implement va_arg by updating the va_list structure
5987 VALIST as required to retrieve an argument of type
5988 TYPE, and returning that argument.
5990 Generates code equivalent to:
5992 if (integral value) {
5993 if (size <= 4 && args.gpr < 5 ||
5994 size > 4 && args.gpr < 4 )
5995 ret = args.reg_save_area[args.gpr+8]
5997 ret = *args.overflow_arg_area++;
5998 } else if (float value) {
6000 ret = args.reg_save_area[args.fpr+64]
6002 ret = *args.overflow_arg_area++;
6003 } else if (aggregate value) {
6005 ret = *args.reg_save_area[args.gpr]
6007 ret = **args.overflow_arg_area++;
6011 s390_va_arg (tree valist, tree type)
6013 tree f_gpr, f_fpr, f_ovf, f_sav;
6014 tree gpr, fpr, ovf, sav, reg, t, u;
6015 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
6016 rtx lab_false, lab_over, addr_rtx, r;
6018 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6019 f_fpr = TREE_CHAIN (f_gpr);
6020 f_ovf = TREE_CHAIN (f_fpr);
6021 f_sav = TREE_CHAIN (f_ovf);
6023 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6024 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6025 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6026 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6027 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6029 size = int_size_in_bytes (type);
6031 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
6033 if (TARGET_DEBUG_ARG)
6035 fprintf (stderr, "va_arg: aggregate type");
6039 /* Aggregates are passed by reference. */
6043 sav_ofs = 2 * UNITS_PER_WORD;
6044 sav_scale = UNITS_PER_WORD;
6045 size = UNITS_PER_WORD;
6048 else if (s390_function_arg_float (TYPE_MODE (type), type))
6050 if (TARGET_DEBUG_ARG)
6052 fprintf (stderr, "va_arg: float type");
6056 /* FP args go in FP registers, if present. */
6060 sav_ofs = 16 * UNITS_PER_WORD;
6062 /* TARGET_64BIT has up to 4 parameter in fprs */
6063 max_reg = TARGET_64BIT ? 3 : 1;
6067 if (TARGET_DEBUG_ARG)
6069 fprintf (stderr, "va_arg: other type");
6073 /* Otherwise into GP registers. */
6076 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6077 sav_ofs = 2 * UNITS_PER_WORD;
6079 if (size < UNITS_PER_WORD)
6080 sav_ofs += UNITS_PER_WORD - size;
6082 sav_scale = UNITS_PER_WORD;
6089 /* Pull the value out of the saved registers ... */
6091 lab_false = gen_label_rtx ();
6092 lab_over = gen_label_rtx ();
6093 addr_rtx = gen_reg_rtx (Pmode);
6095 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
6097 GT, const1_rtx, Pmode, 0, lab_false);
6100 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
6104 u = build (MULT_EXPR, long_integer_type_node,
6105 reg, build_int_2 (sav_scale, 0));
6106 TREE_SIDE_EFFECTS (u) = 1;
6108 t = build (PLUS_EXPR, ptr_type_node, t, u);
6109 TREE_SIDE_EFFECTS (t) = 1;
6111 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6113 emit_move_insn (addr_rtx, r);
6116 emit_jump_insn (gen_jump (lab_over));
6118 emit_label (lab_false);
6120 /* ... Otherwise out of the overflow area. */
6122 t = save_expr (ovf);
6125 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
6126 if (size < UNITS_PER_WORD)
6128 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
6129 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6130 TREE_SIDE_EFFECTS (t) = 1;
6131 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6133 t = save_expr (ovf);
6136 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6138 emit_move_insn (addr_rtx, r);
6140 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
6141 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6142 TREE_SIDE_EFFECTS (t) = 1;
6143 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6145 emit_label (lab_over);
6147 /* If less than max_regs a registers are retrieved out
6148 of register save area, increment. */
6150 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6151 build_int_2 (n_reg, 0));
6152 TREE_SIDE_EFFECTS (u) = 1;
6153 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
6157 r = gen_rtx_MEM (Pmode, addr_rtx);
6158 set_mem_alias_set (r, get_varargs_alias_set ());
6159 emit_move_insn (addr_rtx, r);
6171 S390_BUILTIN_THREAD_POINTER,
6172 S390_BUILTIN_SET_THREAD_POINTER,
6177 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
6182 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
6188 s390_init_builtins (void)
6192 ftype = build_function_type (ptr_type_node, void_list_node);
6193 builtin_function ("__builtin_thread_pointer", ftype,
6194 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6197 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6198 builtin_function ("__builtin_set_thread_pointer", ftype,
6199 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6203 /* Expand an expression EXP that calls a built-in function,
6204 with result going to TARGET if that's convenient
6205 (and in mode MODE if that's convenient).
6206 SUBTARGET may be used as the target for computing one of EXP's operands.
6207 IGNORE is nonzero if the value is to be ignored. */
6210 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6211 enum machine_mode mode ATTRIBUTE_UNUSED,
6212 int ignore ATTRIBUTE_UNUSED)
6216 unsigned int const *code_for_builtin =
6217 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6219 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6220 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6221 tree arglist = TREE_OPERAND (exp, 1);
6222 enum insn_code icode;
6223 rtx op[MAX_ARGS], pat;
6227 if (fcode >= S390_BUILTIN_max)
6228 internal_error ("bad builtin fcode");
6229 icode = code_for_builtin[fcode];
6231 internal_error ("bad builtin fcode");
6233 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6235 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6237 arglist = TREE_CHAIN (arglist), arity++)
6239 const struct insn_operand_data *insn_op;
6241 tree arg = TREE_VALUE (arglist);
6242 if (arg == error_mark_node)
6244 if (arity > MAX_ARGS)
6247 insn_op = &insn_data[icode].operand[arity + nonvoid];
6249 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6251 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6252 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6257 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6259 || GET_MODE (target) != tmode
6260 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6261 target = gen_reg_rtx (tmode);
6267 pat = GEN_FCN (icode) (target);
6271 pat = GEN_FCN (icode) (target, op[0]);
6273 pat = GEN_FCN (icode) (op[0]);
6276 pat = GEN_FCN (icode) (target, op[0], op[1]);
6292 /* Output assembly code for the trampoline template to
6295 On S/390, we use gpr 1 internally in the trampoline code;
6296 gpr 0 is used to hold the static chain. */
6299 s390_trampoline_template (FILE *file)
6303 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6304 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6305 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6306 fprintf (file, "br\t%s\n", reg_names[1]);
6307 fprintf (file, "0:\t.quad\t0\n");
6308 fprintf (file, ".quad\t0\n");
6312 fprintf (file, "basr\t%s,0\n", reg_names[1]);
6313 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6314 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6315 fprintf (file, "br\t%s\n", reg_names[1]);
6316 fprintf (file, ".long\t0\n");
6317 fprintf (file, ".long\t0\n");
6321 /* Emit RTL insns to initialize the variable parts of a trampoline.
6322 FNADDR is an RTX for the address of the function's pure code.
6323 CXT is an RTX for the static chain value for the function. */
6326 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
6328 emit_move_insn (gen_rtx
6330 memory_address (Pmode,
6331 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
6332 emit_move_insn (gen_rtx
6334 memory_address (Pmode,
6335 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6338 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6339 LOW and HIGH, independent of the host word size. */
6342 s390_gen_rtx_const_DI (int high, int low)
6344 #if HOST_BITS_PER_WIDE_INT >= 64
6346 val = (HOST_WIDE_INT)high;
6348 val |= (HOST_WIDE_INT)low;
6350 return GEN_INT (val);
6352 #if HOST_BITS_PER_WIDE_INT >= 32
6353 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6360 /* Output assembler code to FILE to increment profiler label # LABELNO
6361 for profiling a function entry. */
6364 s390_function_profiler (FILE *file, int labelno)
6369 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6371 fprintf (file, "# function profiler \n");
6373 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6374 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6375 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6377 op[2] = gen_rtx_REG (Pmode, 1);
6378 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6379 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
6381 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
6384 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
6385 op[4] = gen_rtx_CONST (Pmode, op[4]);
6390 output_asm_insn ("stg\t%0,%1", op);
6391 output_asm_insn ("larl\t%2,%3", op);
6392 output_asm_insn ("brasl\t%0,%4", op);
6393 output_asm_insn ("lg\t%0,%1", op);
6397 op[6] = gen_label_rtx ();
6399 output_asm_insn ("st\t%0,%1", op);
6400 output_asm_insn ("bras\t%2,%l6", op);
6401 output_asm_insn (".long\t%4", op);
6402 output_asm_insn (".long\t%3", op);
6403 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
6404 output_asm_insn ("l\t%0,0(%2)", op);
6405 output_asm_insn ("l\t%2,4(%2)", op);
6406 output_asm_insn ("basr\t%0,%0", op);
6407 output_asm_insn ("l\t%0,%1", op);
6411 op[5] = gen_label_rtx ();
6412 op[6] = gen_label_rtx ();
6414 output_asm_insn ("st\t%0,%1", op);
6415 output_asm_insn ("bras\t%2,%l6", op);
6416 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
6417 output_asm_insn (".long\t%4-%l5", op);
6418 output_asm_insn (".long\t%3-%l5", op);
6419 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
6420 output_asm_insn ("lr\t%0,%2", op);
6421 output_asm_insn ("a\t%0,0(%2)", op);
6422 output_asm_insn ("a\t%2,4(%2)", op);
6423 output_asm_insn ("basr\t%0,%0", op);
6424 output_asm_insn ("l\t%0,%1", op);
6428 /* Select section for constant in constant pool. In 32-bit mode,
6429 constants go in the function section; in 64-bit mode in .rodata. */
6432 s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
6433 rtx x ATTRIBUTE_UNUSED,
6434 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6437 readonly_data_section ();
6439 function_section (current_function_decl);
6442 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6443 into its SYMBOL_REF_FLAGS. */
6446 s390_encode_section_info (tree decl, rtx rtl, int first)
6448 default_encode_section_info (decl, rtl, first);
6450 /* If a variable has a forced alignment to < 2 bytes, mark it with
6451 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
6452 if (TREE_CODE (decl) == VAR_DECL
6453 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
6454 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
6457 /* Output thunk to FILE that implements a C++ virtual function call (with
6458 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
6459 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6460 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6461 relative to the resulting this pointer. */
6464 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
6465 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
6471 /* Operand 0 is the target function. */
6472 op[0] = XEXP (DECL_RTL (function), 0);
6473 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
6476 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
6477 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
6478 op[0] = gen_rtx_CONST (Pmode, op[0]);
6481 /* Operand 1 is the 'this' pointer. */
6482 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6483 op[1] = gen_rtx_REG (Pmode, 3);
6485 op[1] = gen_rtx_REG (Pmode, 2);
6487 /* Operand 2 is the delta. */
6488 op[2] = GEN_INT (delta);
6490 /* Operand 3 is the vcall_offset. */
6491 op[3] = GEN_INT (vcall_offset);
6493 /* Operand 4 is the temporary register. */
6494 op[4] = gen_rtx_REG (Pmode, 1);
6496 /* Operands 5 to 8 can be used as labels. */
6502 /* Operand 9 can be used for temporary register. */
6505 /* Generate code. */
6508 /* Setup literal pool pointer if required. */
6509 if ((!DISP_IN_RANGE (delta)
6510 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6511 || (!DISP_IN_RANGE (vcall_offset)
6512 && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6514 op[5] = gen_label_rtx ();
6515 output_asm_insn ("larl\t%4,%5", op);
6518 /* Add DELTA to this pointer. */
6521 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6522 output_asm_insn ("la\t%1,%2(%1)", op);
6523 else if (DISP_IN_RANGE (delta))
6524 output_asm_insn ("lay\t%1,%2(%1)", op);
6525 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6526 output_asm_insn ("aghi\t%1,%2", op);
6529 op[6] = gen_label_rtx ();
6530 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6534 /* Perform vcall adjustment. */
6537 if (DISP_IN_RANGE (vcall_offset))
6539 output_asm_insn ("lg\t%4,0(%1)", op);
6540 output_asm_insn ("ag\t%1,%3(%4)", op);
6542 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6544 output_asm_insn ("lghi\t%4,%3", op);
6545 output_asm_insn ("ag\t%4,0(%1)", op);
6546 output_asm_insn ("ag\t%1,0(%4)", op);
6550 op[7] = gen_label_rtx ();
6551 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6552 output_asm_insn ("ag\t%4,0(%1)", op);
6553 output_asm_insn ("ag\t%1,0(%4)", op);
6557 /* Jump to target. */
6558 output_asm_insn ("jg\t%0", op);
6560 /* Output literal pool if required. */
6563 output_asm_insn (".align\t4", op);
6564 targetm.asm_out.internal_label (file, "L",
6565 CODE_LABEL_NUMBER (op[5]));
6569 targetm.asm_out.internal_label (file, "L",
6570 CODE_LABEL_NUMBER (op[6]));
6571 output_asm_insn (".long\t%2", op);
6575 targetm.asm_out.internal_label (file, "L",
6576 CODE_LABEL_NUMBER (op[7]));
6577 output_asm_insn (".long\t%3", op);
6582 /* Setup base pointer if required. */
6584 || (!DISP_IN_RANGE (delta)
6585 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6586 || (!DISP_IN_RANGE (delta)
6587 && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6589 op[5] = gen_label_rtx ();
6590 output_asm_insn ("basr\t%4,0", op);
6591 targetm.asm_out.internal_label (file, "L",
6592 CODE_LABEL_NUMBER (op[5]));
6595 /* Add DELTA to this pointer. */
6598 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6599 output_asm_insn ("la\t%1,%2(%1)", op);
6600 else if (DISP_IN_RANGE (delta))
6601 output_asm_insn ("lay\t%1,%2(%1)", op);
6602 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6603 output_asm_insn ("ahi\t%1,%2", op);
6606 op[6] = gen_label_rtx ();
6607 output_asm_insn ("a\t%1,%6-%5(%4)", op);
6611 /* Perform vcall adjustment. */
6614 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6616 output_asm_insn ("lg\t%4,0(%1)", op);
6617 output_asm_insn ("a\t%1,%3(%4)", op);
6619 else if (DISP_IN_RANGE (vcall_offset))
6621 output_asm_insn ("lg\t%4,0(%1)", op);
6622 output_asm_insn ("ay\t%1,%3(%4)", op);
6624 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6626 output_asm_insn ("lhi\t%4,%3", op);
6627 output_asm_insn ("a\t%4,0(%1)", op);
6628 output_asm_insn ("a\t%1,0(%4)", op);
6632 op[7] = gen_label_rtx ();
6633 output_asm_insn ("l\t%4,%7-%5(%4)", op);
6634 output_asm_insn ("a\t%4,0(%1)", op);
6635 output_asm_insn ("a\t%1,0(%4)", op);
6638 /* We had to clobber the base pointer register.
6639 Re-setup the base pointer (with a different base). */
6640 op[5] = gen_label_rtx ();
6641 output_asm_insn ("basr\t%4,0", op);
6642 targetm.asm_out.internal_label (file, "L",
6643 CODE_LABEL_NUMBER (op[5]));
6646 /* Jump to target. */
6647 op[8] = gen_label_rtx ();
6650 output_asm_insn ("l\t%4,%8-%5(%4)", op);
6652 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6653 /* We cannot call through .plt, since .plt requires %r12 loaded. */
6654 else if (flag_pic == 1)
6656 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6657 output_asm_insn ("l\t%4,%0(%4)", op);
6659 else if (flag_pic == 2)
6661 op[9] = gen_rtx_REG (Pmode, 0);
6662 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
6663 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6664 output_asm_insn ("ar\t%4,%9", op);
6665 output_asm_insn ("l\t%4,0(%4)", op);
6668 output_asm_insn ("br\t%4", op);
6670 /* Output literal pool. */
6671 output_asm_insn (".align\t4", op);
6673 if (nonlocal && flag_pic == 2)
6674 output_asm_insn (".long\t%0", op);
6677 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6678 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
6681 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
6683 output_asm_insn (".long\t%0", op);
6685 output_asm_insn (".long\t%0-%5", op);
6689 targetm.asm_out.internal_label (file, "L",
6690 CODE_LABEL_NUMBER (op[6]));
6691 output_asm_insn (".long\t%2", op);
6695 targetm.asm_out.internal_label (file, "L",
6696 CODE_LABEL_NUMBER (op[7]));
6697 output_asm_insn (".long\t%3", op);
6703 s390_valid_pointer_mode (enum machine_mode mode)
6705 return (mode == SImode || (TARGET_64BIT && mode == DImode));
6708 /* How to allocate a 'struct machine_function'. */
6710 static struct machine_function *
6711 s390_init_machine_status (void)
6713 return ggc_alloc_cleared (sizeof (struct machine_function));
6716 #include "gt-s390.h"