1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
53 /* Machine-specific symbol_ref flags. */
/* First free machine-dependent SYMBOL_REF flag bit: set when a symbol may
   not be 2-byte aligned, so it cannot be addressed by LARL (see larl_operand). */
54 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
/* Forward declarations for the target-hook implementations installed below.
   NOTE(review): this listing is elided -- some prototype lines are missing;
   the surviving lines are kept byte-identical. */
57 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
58 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
59 unsigned HOST_WIDE_INT));
60 static void s390_encode_section_info PARAMS ((tree, rtx, int));
61 static bool s390_cannot_force_const_mem PARAMS ((rtx));
62 static rtx s390_delegitimize_address PARAMS ((rtx));
63 static void s390_init_builtins PARAMS ((void));
64 static rtx s390_expand_builtin PARAMS ((tree, rtx, rtx,
65 enum machine_mode, int));
66 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
67 HOST_WIDE_INT, tree));
68 static enum attr_type s390_safe_attr_type PARAMS ((rtx));
70 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
71 static int s390_adjust_priority PARAMS ((rtx, int));
72 static int s390_issue_rate PARAMS ((void));
73 static int s390_use_dfa_pipeline_interface PARAMS ((void));
74 static int s390_first_cycle_multipass_dfa_lookahead PARAMS ((void));
75 static int s390_sched_reorder2 PARAMS ((FILE *, int, rtx *, int *, int));
76 static bool s390_rtx_costs PARAMS ((rtx, int, int, int *));
77 static int s390_address_cost PARAMS ((rtx));
78 static void s390_reorg PARAMS ((void));
/* Target hook vector initialization: each #undef/#define pair overrides a
   default from target-def.h before TARGET_INITIALIZER picks them up. */
81 #undef TARGET_ASM_ALIGNED_HI_OP
82 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
83 #undef TARGET_ASM_ALIGNED_DI_OP
84 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
85 #undef TARGET_ASM_INTEGER
86 #define TARGET_ASM_INTEGER s390_assemble_integer
/* S/390 assembler syntax uses no parentheses around grouped expressions. */
88 #undef TARGET_ASM_OPEN_PAREN
89 #define TARGET_ASM_OPEN_PAREN ""
91 #undef TARGET_ASM_CLOSE_PAREN
92 #define TARGET_ASM_CLOSE_PAREN ""
94 #undef TARGET_ASM_SELECT_RTX_SECTION
95 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
97 #undef TARGET_ENCODE_SECTION_INFO
98 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
101 #undef TARGET_HAVE_TLS
102 #define TARGET_HAVE_TLS true
104 #undef TARGET_CANNOT_FORCE_CONST_MEM
105 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
107 #undef TARGET_DELEGITIMIZE_ADDRESS
108 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
110 #undef TARGET_INIT_BUILTINS
111 #define TARGET_INIT_BUILTINS s390_init_builtins
112 #undef TARGET_EXPAND_BUILTIN
113 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
115 #undef TARGET_ASM_OUTPUT_MI_THUNK
116 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
117 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
118 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
/* Scheduler hooks. */
120 #undef TARGET_SCHED_ADJUST_COST
121 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
122 #undef TARGET_SCHED_ADJUST_PRIORITY
123 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
124 #undef TARGET_SCHED_ISSUE_RATE
125 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
126 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
127 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
128 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
129 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
130 #undef TARGET_SCHED_REORDER2
131 #define TARGET_SCHED_REORDER2 s390_sched_reorder2
133 #undef TARGET_RTX_COSTS
134 #define TARGET_RTX_COSTS s390_rtx_costs
135 #undef TARGET_ADDRESS_COST
136 #define TARGET_ADDRESS_COST s390_address_cost
138 #undef TARGET_MACHINE_DEPENDENT_REORG
139 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
/* Instantiate the target hook vector from the macros defined above. */
141 struct gcc_target targetm = TARGET_INITIALIZER;
143 extern int reload_completed;
145 /* The alias set for prologue/epilogue register save/restore. */
/* Initialized by override_options via new_alias_set (see below). */
146 static int s390_sr_alias_set = 0;
148 /* Save information from a "cmpxx" operation until the branch or scc is
/* NOTE(review): the tail of this comment (original line 149) is elided in
   this listing; presumably "... emitted" -- confirm against full source. */
150 rtx s390_compare_op0, s390_compare_op1;
152 /* Structure used to hold the components of a S/390 memory
153 address. A legitimate address on S/390 is of the general
155 base + index + displacement
156 where any of the components is optional.
158 base and index are registers of the class ADDR_REGS,
159 displacement is an unsigned 12-bit immediate constant. */
169 /* Which cpu are we tuning for. */
170 enum processor_type s390_tune;
171 enum processor_flags s390_tune_flags;
172 /* Which instruction set architecture to use. */
173 enum processor_type s390_arch;
174 enum processor_flags s390_arch_flags;
176 /* Strings to hold which cpu and instruction set architecture to use. */
177 const char *s390_tune_string; /* for -mtune=<xxx> */
178 const char *s390_arch_string; /* for -march=<xxx> */
180 /* Define the structure for the machine field in struct function. */
/* NOTE(review): several field declarations of this struct are elided in
   this listing (e.g. the fpr-save flag and first_save_gpr). */
182 struct machine_function GTY(())
184 /* Label of start of initial literal pool. */
185 rtx literal_pool_label;
187 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
190 /* Number of first and last gpr to be saved, restored. */
192 int first_restore_gpr;
195 /* Size of stack frame. */
196 HOST_WIDE_INT frame_size;
198 /* Some local-dynamic TLS symbol name. */
199 const char *some_ld_name;
/* Forward declarations for file-local helpers defined later in the file. */
202 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
203 static int s390_branch_condition_mask PARAMS ((rtx));
204 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
205 static int check_mode PARAMS ((rtx, enum machine_mode *));
206 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
207 static int s390_short_displacement PARAMS ((rtx));
208 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
209 static rtx get_thread_pointer PARAMS ((void));
210 static rtx legitimize_tls_address PARAMS ((rtx, rtx));
211 static const char *get_some_local_dynamic_name PARAMS ((void));
212 static int get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
213 static int reg_used_in_mem_p PARAMS ((int, rtx));
214 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
215 static int s390_split_branches PARAMS ((rtx, bool *));
216 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
217 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
218 static int find_base_register_in_addr PARAMS ((struct s390_address *));
219 static bool find_base_register_ref PARAMS ((rtx));
220 static void replace_base_register_ref PARAMS ((rtx *, rtx));
221 static void s390_optimize_prolog PARAMS ((int));
222 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
223 static int find_unused_clobbered_reg PARAMS ((void));
224 static void s390_frame_info PARAMS ((void));
225 static rtx save_fpr PARAMS ((rtx, int, int));
226 static rtx restore_fpr PARAMS ((rtx, int, int));
227 static rtx save_gprs PARAMS ((rtx, int, int, int));
228 static rtx restore_gprs PARAMS ((rtx, int, int, int));
229 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
230 static bool s390_function_arg_float PARAMS ((enum machine_mode, tree));
231 static struct machine_function * s390_init_machine_status PARAMS ((void));
233 /* Check whether integer displacement is in range. */
/* With long displacements (z990 LY/STY etc.) the range is a signed 20-bit
   value; classic instructions take an unsigned 12-bit displacement. */
234 #define DISP_IN_RANGE(d) \
235 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
236 : ((d) >= 0 && (d) <= 4095))
238 /* Return true if SET either doesn't set the CC register, or else
239 the source and destination have matching CC modes and that
240 CC mode is at least as constrained as REQ_MODE. */
/* NOTE(review): interior lines of this function (early returns, the
   switch over set_mode, closing braces) are elided in this listing;
   the surviving lines are kept byte-identical. */
243 s390_match_ccmode_set (set, req_mode)
245 enum machine_mode req_mode;
247 enum machine_mode set_mode;
/* Only plain SETs are considered. */
249 if (GET_CODE (set) != SET)
/* A SET that does not target the CC register trivially matches. */
252 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
255 set_mode = GET_MODE (SET_DEST (set));
268 if (req_mode != set_mode)
273 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
274 && req_mode != CCSRmode && req_mode != CCURmode)
280 if (req_mode != CCAmode)
/* Source and destination must agree on the CC mode. */
288 return (GET_MODE (SET_SRC (set)) == set_mode);
291 /* Return true if every SET in INSN that sets the CC register
292 has source and destination with matching CC modes and that
293 CC mode is at least as constrained as REQ_MODE.
294 If REQ_MODE is VOIDmode, always return false. */
/* NOTE(review): declarations, early return and final return are elided
   in this listing; code kept byte-identical. */
297 s390_match_ccmode (insn, req_mode)
299 enum machine_mode req_mode;
303 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
304 if (req_mode == VOIDmode)
/* Single SET: delegate directly. */
307 if (GET_CODE (PATTERN (insn)) == SET)
308 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL: every member SET must match. */
310 if (GET_CODE (PATTERN (insn)) == PARALLEL)
311 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
313 rtx set = XVECEXP (PATTERN (insn), 0, i);
314 if (GET_CODE (set) == SET)
315 if (!s390_match_ccmode_set (set, req_mode))
322 /* If a test-under-mask instruction can be used to implement
323 (compare (and ... OP1) OP2), return the CC mode required
324 to do that. Otherwise, return VOIDmode.
325 MIXED is true if the instruction can distinguish between
326 CC1 and CC2 for mixed selected bits (TMxx), it is false
327 if the instruction cannot (TM). */
/* NOTE(review): declarations, several returns and the closing brace are
   elided in this listing; code kept byte-identical. */
330 s390_tm_ccmode (op1, op2, mixed)
337 /* ??? Fixme: should work on CONST_DOUBLE as well. */
338 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
341 /* Selected bits all zero: CC0. */
342 if (INTVAL (op2) == 0)
345 /* Selected bits all one: CC3. */
346 if (INTVAL (op2) == INTVAL (op1))
349 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
352 bit1 = exact_log2 (INTVAL (op2));
353 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
354 if (bit0 != -1 && bit1 != -1)
355 return bit0 > bit1 ? CCT1mode : CCT2mode;
361 /* Given a comparison code OP (EQ, NE, etc.) and the operands
362 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* NOTE(review): this function is a large switch over the comparison code;
   the case labels, returned mode names and closing braces are elided in
   this listing.  Surviving lines kept byte-identical. */
366 s390_select_ccmode (code, op0, op1)
/* 'K' constraint: constant usable as an add-immediate; the compare can
   then reuse the CC set by the addition. */
375 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
376 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
378 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
379 || GET_CODE (op1) == NEG)
382 if (GET_CODE (op0) == AND)
384 /* Check whether we can potentially do it via TM. */
385 enum machine_mode ccmode;
386 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
387 if (ccmode != VOIDmode)
389 /* Relax CCTmode to CCZmode to allow fall-back to AND
390 if that turns out to be beneficial. */
391 return ccmode == CCTmode ? CCZmode : ccmode;
/* -1/65535 (HImode) and -1/255 (QImode) compares: all-ones patterns. */
395 if (register_operand (op0, HImode)
396 && GET_CODE (op1) == CONST_INT
397 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
399 if (register_operand (op0, QImode)
400 && GET_CODE (op1) == CONST_INT
401 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
410 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
411 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
413 if (INTVAL (XEXP((op0), 1)) < 0)
/* Sign/zero-extended operands compared against a register. */
426 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
427 && GET_CODE (op1) != CONST_INT)
433 if (GET_CODE (op0) == PLUS)
436 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
437 && GET_CODE (op1) != CONST_INT)
443 if (GET_CODE (op0) == MINUS)
446 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
447 && GET_CODE (op1) != CONST_INT)
456 /* Return branch condition mask to implement a branch
457 specified by CODE. */
/* The returned 4-bit mask is the S/390 branch-on-condition mask: bit 3
   (value 8) = CC0 ... bit 0 (value 1) = CC3.  NOTE(review): the outer
   switch's mode case labels (CCZmode, CCAmode, ...), several comparison
   cases, default aborts and closing braces are elided in this listing;
   surviving lines kept byte-identical. */
460 s390_branch_condition_mask (code)
463 const int CC0 = 1 << 3;
464 const int CC1 = 1 << 2;
465 const int CC2 = 1 << 1;
466 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero. */
468 if (GET_CODE (XEXP (code, 0)) != REG
469 || REGNO (XEXP (code, 0)) != CC_REGNUM
470 || XEXP (code, 1) != const0_rtx)
473 switch (GET_MODE (XEXP (code, 0)))
476 switch (GET_CODE (code))
479 case NE: return CC1 | CC2 | CC3;
486 switch (GET_CODE (code))
489 case NE: return CC0 | CC2 | CC3;
496 switch (GET_CODE (code))
499 case NE: return CC0 | CC1 | CC3;
506 switch (GET_CODE (code))
509 case NE: return CC0 | CC1 | CC2;
516 switch (GET_CODE (code))
518 case EQ: return CC0 | CC2;
519 case NE: return CC1 | CC3;
526 switch (GET_CODE (code))
528 case LTU: return CC2 | CC3; /* carry */
529 case GEU: return CC0 | CC1; /* no carry */
536 switch (GET_CODE (code))
538 case GTU: return CC0 | CC1; /* borrow */
539 case LEU: return CC2 | CC3; /* no borrow */
546 switch (GET_CODE (code))
549 case NE: return CC1 | CC2 | CC3;
550 case LTU: return CC1;
551 case GTU: return CC2;
552 case LEU: return CC0 | CC1;
553 case GEU: return CC0 | CC2;
/* Reversed-unsigned variant: CC1/CC2 roles swapped relative to above. */
560 switch (GET_CODE (code))
563 case NE: return CC2 | CC1 | CC3;
564 case LTU: return CC2;
565 case GTU: return CC1;
566 case LEU: return CC0 | CC2;
567 case GEU: return CC0 | CC1;
574 switch (GET_CODE (code))
577 case NE: return CC1 | CC2 | CC3;
578 case LT: return CC1 | CC3;
580 case LE: return CC0 | CC1 | CC3;
581 case GE: return CC0 | CC2;
588 switch (GET_CODE (code))
591 case NE: return CC1 | CC2 | CC3;
593 case GT: return CC2 | CC3;
594 case LE: return CC0 | CC1;
595 case GE: return CC0 | CC2 | CC3;
/* Signed/FP compare: CC3 is the unordered condition. */
602 switch (GET_CODE (code))
605 case NE: return CC1 | CC2 | CC3;
608 case LE: return CC0 | CC1;
609 case GE: return CC0 | CC2;
610 case UNORDERED: return CC3;
611 case ORDERED: return CC0 | CC1 | CC2;
612 case UNEQ: return CC0 | CC3;
613 case UNLT: return CC1 | CC3;
614 case UNGT: return CC2 | CC3;
615 case UNLE: return CC0 | CC1 | CC3;
616 case UNGE: return CC0 | CC2 | CC3;
617 case LTGT: return CC1 | CC2;
/* Reversed compare: CC1/CC2 swapped. */
624 switch (GET_CODE (code))
627 case NE: return CC2 | CC1 | CC3;
630 case LE: return CC0 | CC2;
631 case GE: return CC0 | CC1;
632 case UNORDERED: return CC3;
633 case ORDERED: return CC0 | CC2 | CC1;
634 case UNEQ: return CC0 | CC3;
635 case UNLT: return CC2 | CC3;
636 case UNGT: return CC1 | CC3;
637 case UNLE: return CC0 | CC2 | CC3;
638 case UNGE: return CC0 | CC1 | CC3;
639 case LTGT: return CC2 | CC1;
650 /* If INV is false, return assembler mnemonic string to implement
651 a branch specified by CODE. If INV is true, return mnemonic
652 for the corresponding inverted branch. */
/* The table is indexed by the 4-bit condition mask; entries 0 and 15
   (branch never / always) are NULL.  NOTE(review): the mask inversion
   for INV and the abort on an out-of-range mask are elided here. */
655 s390_branch_condition_mnemonic (code, inv)
659 static const char *const mnemonic[16] =
661 NULL, "o", "h", "nle",
662 "l", "nhe", "lh", "ne",
663 "e", "nlh", "he", "nl",
664 "le", "nh", "no", NULL
667 int mask = s390_branch_condition_mask (code);
672 if (mask < 1 || mask > 14)
675 return mnemonic[mask];
678 /* If OP is an integer constant of mode MODE with exactly one
679 HImode subpart unequal to DEF, return the number of that
680 subpart. As a special case, all HImode subparts of OP are
681 equal to DEF, return zero. Otherwise, return -1. */
/* NOTE(review): loop bodies (the shift of VALUE, the tracking of PART
   and the -1 return on a second mismatch) are elided in this listing;
   surviving lines kept byte-identical. */
684 s390_single_hi (op, mode, def)
686 enum machine_mode mode;
689 if (GET_CODE (op) == CONST_INT)
691 unsigned HOST_WIDE_INT value = 0;
692 int n_parts = GET_MODE_SIZE (mode) / 2;
695 for (i = 0; i < n_parts; i++)
698 value = (unsigned HOST_WIDE_INT) INTVAL (op);
702 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* PART counts from the low end; convert to big-endian part numbering. */
711 return part == -1 ? 0 : (n_parts - 1 - part);
714 else if (GET_CODE (op) == CONST_DOUBLE
715 && GET_MODE (op) == VOIDmode)
717 unsigned HOST_WIDE_INT value = 0;
718 int n_parts = GET_MODE_SIZE (mode) / 2;
721 for (i = 0; i < n_parts; i++)
/* CONST_DOUBLE with VOIDmode is a two-word integer constant: low word
   first, high word once I passes HOST_BITS_PER_WIDE_INT/16 parts. */
724 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
725 else if (i == HOST_BITS_PER_WIDE_INT / 16)
726 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
730 if ((value & 0xffff) != (unsigned)(def & 0xffff))
739 return part == -1 ? 0 : (n_parts - 1 - part);
745 /* Extract the HImode part number PART from integer
746 constant OP of mode MODE. */
/* NOTE(review): the abort on out-of-range PART and on unexpected OP
   codes is elided in this listing; code kept byte-identical. */
749 s390_extract_hi (op, mode, part)
751 enum machine_mode mode;
754 int n_parts = GET_MODE_SIZE (mode) / 2;
755 if (part < 0 || part >= n_parts)
/* Convert big-endian part numbering to a shift count from the low end. */
758 part = n_parts - 1 - part;
760 if (GET_CODE (op) == CONST_INT)
762 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
763 return ((value >> (16 * part)) & 0xffff);
765 else if (GET_CODE (op) == CONST_DOUBLE
766 && GET_MODE (op) == VOIDmode)
768 unsigned HOST_WIDE_INT value;
769 if (part < HOST_BITS_PER_WIDE_INT / 16)
770 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
/* Comma expression: select the high word and rebase PART into it. */
772 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
773 part -= HOST_BITS_PER_WIDE_INT / 16;
775 return ((value >> (16 * part)) & 0xffff);
781 /* If OP is an integer constant of mode MODE with exactly one
782 QImode subpart unequal to DEF, return the number of that
783 subpart. As a special case, all QImode subparts of OP are
784 equal to DEF, return zero. Otherwise, return -1. */
/* QImode analogue of s390_single_hi above (8-bit parts, 0xff masks).
   NOTE(review): loop bodies are elided in this listing. */
787 s390_single_qi (op, mode, def)
789 enum machine_mode mode;
792 if (GET_CODE (op) == CONST_INT)
794 unsigned HOST_WIDE_INT value = 0;
795 int n_parts = GET_MODE_SIZE (mode);
798 for (i = 0; i < n_parts; i++)
801 value = (unsigned HOST_WIDE_INT) INTVAL (op);
805 if ((value & 0xff) != (unsigned)(def & 0xff))
814 return part == -1 ? 0 : (n_parts - 1 - part);
817 else if (GET_CODE (op) == CONST_DOUBLE
818 && GET_MODE (op) == VOIDmode)
820 unsigned HOST_WIDE_INT value = 0;
821 int n_parts = GET_MODE_SIZE (mode);
824 for (i = 0; i < n_parts; i++)
827 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
828 else if (i == HOST_BITS_PER_WIDE_INT / 8)
829 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
833 if ((value & 0xff) != (unsigned)(def & 0xff))
842 return part == -1 ? 0 : (n_parts - 1 - part);
848 /* Extract the QImode part number PART from integer
849 constant OP of mode MODE. */
/* QImode analogue of s390_extract_hi above (8-bit parts). */
852 s390_extract_qi (op, mode, part)
854 enum machine_mode mode;
857 int n_parts = GET_MODE_SIZE (mode);
858 if (part < 0 || part >= n_parts)
861 part = n_parts - 1 - part;
863 if (GET_CODE (op) == CONST_INT)
865 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
866 return ((value >> (8 * part)) & 0xff);
868 else if (GET_CODE (op) == CONST_DOUBLE
869 && GET_MODE (op) == VOIDmode)
871 unsigned HOST_WIDE_INT value;
872 if (part < HOST_BITS_PER_WIDE_INT / 8)
873 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
/* Comma expression: select the high word and rebase PART into it. */
875 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
876 part -= HOST_BITS_PER_WIDE_INT / 8;
878 return ((value >> (8 * part)) & 0xff);
884 /* Check whether we can (and want to) split a double-word
885 move in mode MODE from SRC to DST into two single-word
886 moves, moving the subword FIRST_SUBWORD first. */
/* NOTE(review): the early "return false" statements and the final
   "return true" are elided in this listing; kept byte-identical. */
889 s390_split_ok_p (dst, src, mode, first_subword)
892 enum machine_mode mode;
895 /* Floating point registers cannot be split. */
896 if (FP_REG_P (src) || FP_REG_P (dst))
899 /* We don't need to split if operands are directly accessable. */
900 if (s_operand (src, mode) || s_operand (dst, mode))
903 /* Non-offsettable memory references cannot be split. */
904 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
905 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
908 /* Moving the first subword must not clobber a register
909 needed to move the second subword. */
910 if (register_operand (dst, mode))
912 rtx subreg = operand_subword (dst, first_subword, 0, mode);
913 if (reg_overlap_mentioned_p (subreg, src))
921 /* Change optimizations to be performed, depending on the
924 LEVEL is the optimization level specified; 2 if `-O2' is
925 specified, 1 if `-O' is specified, and 0 if neither is specified.
927 SIZE is nonzero if `-Os' is specified and zero otherwise. */
/* Both parameters are currently unused: the adjustments below are
   applied unconditionally. */
930 optimization_options (level, size)
931 int level ATTRIBUTE_UNUSED;
932 int size ATTRIBUTE_UNUSED;
934 /* ??? There are apparently still problems with -fcaller-saves. */
935 flag_caller_saves = 0;
937 /* By default, always emit DWARF-2 unwind info. This allows debugging
938 without maintaining a stack frame back-chain. */
939 flag_asynchronous_unwind_tables = 1;
/* Fragment of override_options: processor alias table plus -march/-mtune
   processing.  NOTE(review): the enclosing function header, the struct pta
   declaration opener, loop braces and several statements are elided in
   this listing; surviving lines kept byte-identical. */
948 const char *const name; /* processor name or nickname. */
949 const enum processor_type processor;
950 const enum processor_flags flags;
952 const processor_alias_table[] =
954 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
955 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
956 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
957 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
958 | PF_LONG_DISPLACEMENT},
961 int const pta_size = ARRAY_SIZE (processor_alias_table);
963 /* Acquire a unique set number for our register saves and restores. */
964 s390_sr_alias_set = new_alias_set ();
966 /* Set up function hooks. */
967 init_machine_status = s390_init_machine_status;
969 /* Architecture mode defaults according to ABI. */
970 if (!(target_flags_explicit & MASK_ZARCH))
973 target_flags |= MASK_ZARCH;
975 target_flags &= ~MASK_ZARCH;
978 /* Determine processor architectural level. */
979 if (!s390_arch_string)
980 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
982 for (i = 0; i < pta_size; i++)
983 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
985 s390_arch = processor_alias_table[i].processor;
986 s390_arch_flags = processor_alias_table[i].flags;
990 error ("Unknown cpu used in -march=%s.", s390_arch_string);
992 /* Determine processor to tune for. */
993 if (!s390_tune_string)
/* Default: tune for the selected architecture. */
995 s390_tune = s390_arch;
996 s390_tune_flags = s390_arch_flags;
997 s390_tune_string = s390_arch_string;
1001 for (i = 0; i < pta_size; i++)
1002 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1004 s390_tune = processor_alias_table[i].processor;
1005 s390_tune_flags = processor_alias_table[i].flags;
1009 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1012 /* Sanity checks. */
1013 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1014 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1015 if (TARGET_64BIT && !TARGET_ZARCH)
1016 error ("64-bit ABI not supported in ESA/390 mode.");
1019 /* Map for smallest class containing reg regno. */
/* Order: 16 GPRs (r0 is GENERAL_REGS, r1-r15 ADDR_REGS since r0 cannot
   serve as an address base), 16 FPRs, then AP / CC / return-address
   registers.  NOTE(review): the closing "};" is elided in this listing. */
1021 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1022 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1023 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1024 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1025 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1026 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1027 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1028 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1029 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1030 ADDR_REGS, NO_REGS, ADDR_REGS
1033 /* Return attribute type of insn. */
/* "Safe" variant: falls back (elided here -- presumably to TYPE_NONE)
   when the insn cannot be recognized. */
1035 static enum attr_type
1036 s390_safe_attr_type (insn)
1039 if (recog_memoized (insn) >= 0)
1040 return get_attr_type (insn);
1045 /* Return true if OP a (const_int 0) operand.
1046 OP is the current operation.
1047 MODE is the current operation mode. */
1050 const0_operand (op, mode)
1052 enum machine_mode mode;
/* CONST0_RTX covers integer, FP and vector zero for MODE. */
1054 return op == CONST0_RTX (mode);
1057 /* Return true if OP is constant.
1058 OP is the current operation.
1059 MODE is the current operation mode. */
1062 consttable_operand (op, mode)
1064 enum machine_mode mode ATTRIBUTE_UNUSED;
1066 return CONSTANT_P (op);
1069 /* Return true if the mode of operand OP matches MODE.
1070 If MODE is set to VOIDmode, set it to the mode of OP. */
1073 check_mode (op, mode)
1075 enum machine_mode *mode;
/* MODE is in-out: VOIDmode means "infer from OP". */
1077 if (*mode == VOIDmode)
1078 *mode = GET_MODE (op);
/* VOIDmode operands (e.g. CONST_INT) are accepted in any mode. */
1081 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1087 /* Return true if OP a valid operand for the LARL instruction.
1088 OP is the current operation.
1089 MODE is the current operation mode. */
/* LARL computes a PC-relative address in halfword units, hence the
   2-byte alignment and even-offset requirements below.
   NOTE(review): several returns and the final "return 0" are elided. */
1092 larl_operand (op, mode)
1094 enum machine_mode mode;
1096 if (! check_mode (op, &mode))
1099 /* Allow labels and local symbols. */
1100 if (GET_CODE (op) == LABEL_REF)
1102 if (GET_CODE (op) == SYMBOL_REF)
/* Symbol must be at least 2-byte aligned, not TLS, and (under PIC)
   locally binding. */
1103 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1104 && SYMBOL_REF_TLS_MODEL (op) == 0
1105 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1107 /* Everything else must have a CONST, so strip it. */
1108 if (GET_CODE (op) != CONST)
1112 /* Allow adding *even* in-range constants. */
1113 if (GET_CODE (op) == PLUS)
1115 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1116 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1118 #if HOST_BITS_PER_WIDE_INT > 32
/* On 64-bit hosts, also reject offsets outside the signed 33-bit range. */
1119 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1120 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1126 /* Labels and local symbols allowed here as well. */
1127 if (GET_CODE (op) == LABEL_REF)
1129 if (GET_CODE (op) == SYMBOL_REF)
1130 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1131 && SYMBOL_REF_TLS_MODEL (op) == 0
1132 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1134 /* Now we must have a @GOTENT offset or @PLT stub
1135 or an @INDNTPOFF TLS offset. */
/* NOTE(review): 111 and 113 are magic UNSPEC numbers (presumably GOTENT
   and PLT) -- the named constants are not visible in this listing. */
1136 if (GET_CODE (op) == UNSPEC
1137 && XINT (op, 1) == 111)
1139 if (GET_CODE (op) == UNSPEC
1140 && XINT (op, 1) == 113)
1142 if (GET_CODE (op) == UNSPEC
1143 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1149 /* Helper routine to implement s_operand and s_imm_operand.
1150 OP is the current operation.
1151 MODE is the current operation mode.
1152 ALLOW_IMMEDIATE specifies whether immediate operands should
1153 be accepted or not. */
/* An "S-type" operand is a base+displacement memory reference without
   index register.  NOTE(review): case labels, several returns and the
   closing braces are elided in this listing. */
1156 general_s_operand (op, mode, allow_immediate)
1158 enum machine_mode mode;
1159 int allow_immediate;
1161 struct s390_address addr;
1163 /* Call general_operand first, so that we don't have to
1164 check for many special cases. */
1165 if (!general_operand (op, mode))
1168 /* Just like memory_operand, allow (subreg (mem ...))
1170 if (reload_completed
1171 && GET_CODE (op) == SUBREG
1172 && GET_CODE (SUBREG_REG (op)) == MEM)
1173 op = SUBREG_REG (op);
1175 switch (GET_CODE (op))
1177 /* Constants that we are sure will be forced to the
1178 literal pool in reload are OK as s-operand. Note
1179 that we cannot call s390_preferred_reload_class here
1180 because it might not be known yet at this point
1181 whether the current function is a leaf or not. */
1184 if (!allow_immediate || reload_completed)
1186 if (!legitimate_reload_constant_p (op))
1192 /* Memory operands are OK unless they already use an
1195 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1197 if (s390_decompose_address (XEXP (op, 0), &addr)
1209 /* Return true if OP is a valid S-type operand.
1210 OP is the current operation.
1211 MODE is the current operation mode. */
1214 s_operand (op, mode)
1216 enum machine_mode mode;
/* Thin wrapper: immediates not allowed. */
1218 return general_s_operand (op, mode, 0);
1221 /* Return true if OP is a valid S-type operand or an immediate
1222 operand that can be addressed as S-type operand by forcing
1223 it into the literal pool.
1224 OP is the current operation.
1225 MODE is the current operation mode. */
1228 s_imm_operand (op, mode)
1230 enum machine_mode mode;
/* Thin wrapper: immediates allowed. */
1232 return general_s_operand (op, mode, 1);
1235 /* Return true if DISP is a valid short displacement. */
/* "Short" means representable in the classic unsigned 12-bit
   displacement field.  NOTE(review): several return statements are
   elided in this listing. */
1238 s390_short_displacement (disp)
1241 /* No displacement is OK. */
1245 /* Integer displacement in range. */
1246 if (GET_CODE (disp) == CONST_INT)
1247 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1249 /* GOT offset is not OK, the GOT can be large. */
/* NOTE(review): 110 is a magic UNSPEC number (presumably the GOT
   unspec) -- named constant not visible in this listing. */
1250 if (GET_CODE (disp) == CONST
1251 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1252 && XINT (XEXP (disp, 0), 1) == 110)
1255 /* All other symbolic constants are literal pool references,
1256 which are OK as the literal pool must be small. */
1257 if (GET_CODE (disp) == CONST
1263 /* Return true if OP is a valid operand for a C constraint. */
/* Dispatches on the constraint letter C; each arm checks whether OP is
   a memory reference/address with or without a short displacement.
   NOTE(review): the switch statement, its case labels and the return
   statements are elided in this listing; kept byte-identical. */
1266 s390_extra_constraint (op, c)
1270 struct s390_address addr;
1275 if (GET_CODE (op) != MEM)
1277 if (!s390_decompose_address (XEXP (op, 0), &addr))
1282 if (TARGET_LONG_DISPLACEMENT)
1284 if (!s390_short_displacement (addr.disp))
1290 if (GET_CODE (op) != MEM)
1293 if (TARGET_LONG_DISPLACEMENT)
1295 if (!s390_decompose_address (XEXP (op, 0), &addr))
1297 if (!s390_short_displacement (addr.disp))
1303 if (!TARGET_LONG_DISPLACEMENT)
1305 if (GET_CODE (op) != MEM)
1307 if (!s390_decompose_address (XEXP (op, 0), &addr))
1311 if (s390_short_displacement (addr.disp))
1316 if (!TARGET_LONG_DISPLACEMENT)
1318 if (GET_CODE (op) != MEM)
1320 /* Any invalid address here will be fixed up by reload,
1321 so accept it for the most generic constraint. */
1322 if (s390_decompose_address (XEXP (op, 0), &addr)
1323 && s390_short_displacement (addr.disp))
1328 if (TARGET_LONG_DISPLACEMENT)
1330 if (!s390_decompose_address (op, &addr))
1332 if (!s390_short_displacement (addr.disp))
1338 if (!TARGET_LONG_DISPLACEMENT)
1340 /* Any invalid address here will be fixed up by reload,
1341 so accept it for the most generic constraint. */
1342 if (s390_decompose_address (op, &addr)
1343 && s390_short_displacement (addr.disp))
1354 /* Compute a (partial) cost for rtx X. Return true if the complete
1355 cost has been computed, and false if subexpressions should be
1356 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): the switch over CODE, its case labels and most cost
   assignments are elided in this listing; kept byte-identical. */
1359 s390_rtx_costs (x, code, outer_code, total)
1361 int code, outer_code;
1367 if (GET_CODE (XEXP (x, 0)) == MINUS
1368 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1375 /* Force_const_mem does not work out of reload, because the
1376 saveable_obstack is set to reload_obstack, which does not
1377 live long enough. Because of this we cannot use force_const_mem
1378 in addsi3. This leads to problems with gen_add2_insn with a
1379 constant greater than a short. Because of that we give an
1380 addition of greater constants a cost of 3 (reload1.c 10096). */
1381 /* ??? saveable_obstack no longer exists. */
1382 if (outer_code == PLUS
1383 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1384 *total = COSTS_N_INSNS (3);
1405 *total = COSTS_N_INSNS (1);
/* DImode division/etc. is far more expensive than the narrower case. */
1409 if (GET_MODE (XEXP (x, 0)) == DImode)
1410 *total = COSTS_N_INSNS (40);
1412 *total = COSTS_N_INSNS (7);
1419 *total = COSTS_N_INSNS (33);
1427 /* Return the cost of an address rtx ADDR. */
1430 s390_address_cost (addr)
1433 struct s390_address ad;
1434 if (!s390_decompose_address (addr, &ad))
/* Addresses using an index register are slightly more expensive. */
1437 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1440 /* Return true if OP is a valid operand for the BRAS instruction.
1441 OP is the current operation.
1442 MODE is the current operation mode. */
1445 bras_sym_operand (op, mode)
1447 enum machine_mode mode ATTRIBUTE_UNUSED;
1449 register enum rtx_code code = GET_CODE (op);
1451 /* Allow SYMBOL_REFs. */
1452 if (code == SYMBOL_REF)
1455 /* Allow @PLT stubs. */
/* NOTE(review): 113 is a magic UNSPEC number (presumably PLT) -- named
   constant not visible in this listing.  The CONST check preceding this
   condition is elided. */
1457 && GET_CODE (XEXP (op, 0)) == UNSPEC
1458 && XINT (XEXP (op, 0), 1) == 113)
1463 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1464 otherwise return 0. */
1467 tls_symbolic_operand (op)
/* Non-symbols are never TLS (the "return 0" branch is elided here). */
1470 if (GET_CODE (op) != SYMBOL_REF)
1472 return SYMBOL_REF_TLS_MODEL (op);
1475 /* Return true if OP is a load multiple operation. It is known to be a
1476 PARALLEL and the first section will be tested.
1477 OP is the current operation.
1478 MODE is the current operation mode. */
/* Recognizes the PARALLEL shape emitted for LM/LMG: consecutive
   registers loaded from consecutive memory words off one base address.
   NOTE(review): declarations, several returns and the final "return 1"
   are elided in this listing; kept byte-identical. */
1481 load_multiple_operation (op, mode)
1483 enum machine_mode mode ATTRIBUTE_UNUSED;
1485 int count = XVECLEN (op, 0);
1486 unsigned int dest_regno;
1491 /* Perform a quick check so we don't blow up below. */
1493 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1494 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1495 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1498 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1499 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1501 /* Check, is base, or base + displacement. */
1503 if (GET_CODE (src_addr) == REG)
1505 else if (GET_CODE (src_addr) == PLUS
1506 && GET_CODE (XEXP (src_addr, 0)) == REG
1507 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1509 off = INTVAL (XEXP (src_addr, 1));
1510 src_addr = XEXP (src_addr, 0);
/* Frame/arg pointer bases are rejected: their final offsets are not
   known yet, so the consecutive-address check below would be unsafe. */
1515 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
1518 for (i = 1; i < count; i++)
1520 rtx elt = XVECEXP (op, 0, i);
/* Element i must be (set (reg dest+i) (mem (plus base (off+i*UPW)))). */
1522 if (GET_CODE (elt) != SET
1523 || GET_CODE (SET_DEST (elt)) != REG
1524 || GET_MODE (SET_DEST (elt)) != Pmode
1525 || REGNO (SET_DEST (elt)) != dest_regno + i
1526 || GET_CODE (SET_SRC (elt)) != MEM
1527 || GET_MODE (SET_SRC (elt)) != Pmode
1528 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1529 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1530 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1531 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1532 != off + i * UNITS_PER_WORD)
1539 /* Return true if OP is a store multiple operation. It is known to be a
1540 PARALLEL and the first section will be tested.
1541 OP is the current operation.
1542 MODE is the current operation mode.
 
   This is the mirror image of load_multiple_operation: it recognizes a
   PARALLEL of SETs storing consecutive registers to consecutive words
   of memory (STM/STMG). */
1545 store_multiple_operation (op, mode)
1547 enum machine_mode mode ATTRIBUTE_UNUSED;
1549 int count = XVECLEN (op, 0);
1550 unsigned int src_regno;
1554 /* Perform a quick check so we don't blow up below. */
/* The first element must be a SET storing a REG into a MEM.  */
1556 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1557 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1558 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG
/* Remember the first source register and the destination address of
   the first store; later elements must be consecutive to these.  */
1561 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1562 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1564 /* Check, is base, or base + displacement. */
1566 if (GET_CODE (dest_addr) == REG)
1568 else if (GET_CODE (dest_addr) == PLUS
1569 && GET_CODE (XEXP (dest_addr, 0)) == REG
1570 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
/* Split base + displacement: keep the base reg in DEST_ADDR, the
   constant displacement in OFF.  */
1572 off = INTVAL (XEXP (dest_addr, 1));
1573 dest_addr = XEXP (dest_addr, 0);
/* NOTE(review): frame/arg pointer based addresses are handled
   specially here; the action taken is on lines not visible -- confirm
   against the full source.  */
1578 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
/* Every further element must store register SRC_REGNO + I to
   DEST_ADDR + OFF + I * UNITS_PER_WORD.  */
1581 for (i = 1; i < count; i++)
1583 rtx elt = XVECEXP (op, 0, i);
1585 if (GET_CODE (elt) != SET
1586 || GET_CODE (SET_SRC (elt)) != REG
1587 || GET_MODE (SET_SRC (elt)) != Pmode
1588 || REGNO (SET_SRC (elt)) != src_regno + i
1589 || GET_CODE (SET_DEST (elt)) != MEM
1590 || GET_MODE (SET_DEST (elt)) != Pmode
1591 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1592 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1593 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1594 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1595 != off + i * UNITS_PER_WORD)
1602 /* Return true if OP contains a symbol reference */
1605 symbolic_reference_mentioned_p (op)
1608 register const char *fmt;
/* A SYMBOL_REF or LABEL_REF at the top level is itself a symbol
   reference.  */
1611 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Otherwise walk all sub-rtxes recursively, using the rtx format
   string: 'E' is a vector of rtxes, 'e' a single rtx operand.  */
1614 fmt = GET_RTX_FORMAT (GET_CODE (op));
1615 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1621 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1622 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1626 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1633 /* Return true if OP contains a reference to a thread-local symbol. */
1636 tls_symbolic_reference_mentioned_p (op)
1639 register const char *fmt;
/* A SYMBOL_REF is thread-local iff its TLS model is nonzero.  */
1642 if (GET_CODE (op) == SYMBOL_REF)
1643 return tls_symbolic_operand (op);
/* Otherwise walk all sub-rtxes recursively, as in
   symbolic_reference_mentioned_p above.  */
1645 fmt = GET_RTX_FORMAT (GET_CODE (op));
1646 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1652 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1653 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1657 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1665 /* Return true if OP is a legitimate general operand when
1666 generating PIC code. It is given that flag_pic is on
1667 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1670 legitimate_pic_operand_p (op)
1673 /* Accept all non-symbolic constants. */
1674 if (!SYMBOLIC_CONST (op))
1677 /* Reject everything else; must be handled
1678 via emit_symbolic_move. */
1682 /* Returns true if the constant value OP is a legitimate general operand.
1683 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1686 legitimate_constant_p (op)
1689 /* Accept all non-symbolic constants. */
1690 if (!SYMBOLIC_CONST (op))
1693 /* Accept immediate LARL operands. */
/* LARL (load address relative long) is only available on 64-bit
   (z/Architecture) targets in this code path.  */
1694 if (TARGET_64BIT && larl_operand (op, VOIDmode))
1697 /* Thread-local symbols are never legal constants. This is
1698 so that emit_call knows that computing such addresses
1699 might require a function call. */
1700 if (TLS_SYMBOLIC_CONST (op))
1703 /* In the PIC case, symbolic constants must *not* be
1704 forced into the literal pool. We accept them here,
1705 so that they will be handled by emit_symbolic_move. */
1709 /* All remaining non-PIC symbolic constants are
1710 forced into the literal pool. */
1714 /* Determine if it's legal to put X into the constant pool. This
1715 is not possible if X contains the address of a symbol that is
1716 not constant (TLS) or not known at final link time (PIC).
 
   Implements the TARGET_CANNOT_FORCE_CONST_MEM hook; returns true when
   X must NOT go into the literal pool. */
1719 s390_cannot_force_const_mem (x)
1722 switch (GET_CODE (x))
1726 /* Accept all non-symbolic constants. */
1730 /* Labels are OK iff we are non-PIC. */
1731 return flag_pic != 0;
1734 /* 'Naked' TLS symbol references are never OK,
1735 non-TLS symbols are OK iff we are non-PIC. */
1736 if (tls_symbolic_operand (x))
1739 return flag_pic != 0;
/* CONST and PLUS: recurse into the operands -- the whole expression
   is rejected if any sub-expression is.  */
1742 return s390_cannot_force_const_mem (XEXP (x, 0));
1745 return s390_cannot_force_const_mem (XEXP (x, 0))
1746 || s390_cannot_force_const_mem (XEXP (x, 1));
1749 switch (XINT (x, 1))
1751 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1760 case UNSPEC_GOTNTPOFF:
1761 case UNSPEC_INDNTPOFF:
1774 /* Returns true if the constant value OP is a legitimate general
1775 operand during and after reload. The difference to
1776 legitimate_constant_p is that this function will not accept
1777 a constant that would need to be forced to the literal pool
1778 before it can be used as operand. */
1781 legitimate_reload_constant_p (op)
1784 /* Accept la(y) operands. */
/* I.e. integers that fit the LA / LAY displacement range.  */
1785 if (GET_CODE (op) == CONST_INT
1786 && DISP_IN_RANGE (INTVAL (op)))
1789 /* Accept l(g)hi operands. */
/* Constraint 'K': signed 16-bit immediates for LHI / LGHI.  */
1790 if (GET_CODE (op) == CONST_INT
1791 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1794 /* Accept lliXX operands. */
/* A single nonzero halfword, loadable via LLIHH/LLIHL/LLILH/LLILL.  */
1796 && s390_single_hi (op, DImode, 0) >= 0)
1799 /* Accept larl operands. */
1801 && larl_operand (op, VOIDmode))
1804 /* Everything else cannot be handled without reload. */
1808 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1809 return the class of reg to actually use. */
1812 s390_preferred_reload_class (op, class)
1814 enum reg_class class;
1816 /* This can happen if a floating point constant is being
1817 reloaded into an integer register. Leave well alone. */
1818 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1819 && class != FP_REGS)
1822 switch (GET_CODE (op))
1824 /* Constants we cannot reload must be forced into the
/* ... literal pool (continuation; the middle of this comment is on
   lines not visible here).  */
1829 if (legitimate_reload_constant_p (op))
1834 /* If a symbolic constant or a PLUS is reloaded,
1835 it is most likely being used as an address, so
1836 prefer ADDR_REGS. If 'class' is not a superset
1837 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1842 if (reg_class_subset_p (ADDR_REGS, class))
1854 /* Return the register class of a scratch register needed to
1855 load IN into a register of class CLASS in MODE.
1857 We need a temporary when loading a PLUS expression which
1858 is not a legitimate operand of the LOAD ADDRESS instruction. */
1861 s390_secondary_input_reload_class (class, mode, in)
1862 enum reg_class class ATTRIBUTE_UNUSED;
1863 enum machine_mode mode;
/* Non-LA-able PLUS expressions are fixed up by
   s390_expand_plus_operand, which needs an address register scratch.  */
1866 if (s390_plus_operand (in, mode))
1872 /* Return the register class of a scratch register needed to
1873 store a register of class CLASS in MODE into OUT:
1875 We need a temporary when storing a double-word to a
1876 non-offsettable memory address. */
1879 s390_secondary_output_reload_class (class, mode, out)
1880 enum reg_class class;
1881 enum machine_mode mode;
/* Double-word means TImode on 64-bit targets, DImode/DFmode on
   31-bit targets.  The scratch is only needed for GPR stores to a
   memory address that cannot be offset (the store is split into two
   word-sized stores).  */
1884 if ((TARGET_64BIT ? mode == TImode
1885 : (mode == DImode || mode == DFmode))
1886 && reg_classes_intersect_p (GENERAL_REGS, class)
1887 && GET_CODE (out) == MEM
1888 && !offsettable_memref_p (out)
1889 && !s_operand (out, VOIDmode))
1895 /* Return true if OP is a PLUS that is not a legitimate
1896 operand for the LA instruction.
1897 OP is the current operation.
1898 MODE is the current operation mode. */
1901 s390_plus_operand (op, mode)
1903 enum machine_mode mode;
/* Only pointer-mode operands can be addresses at all.  */
1905 if (!check_mode (op, &mode) || mode != Pmode)
1908 if (GET_CODE (op) != PLUS)
/* A PLUS that LA can already handle needs no fixup.  */
1911 if (legitimate_la_operand_p (op))
1917 /* Generate code to load SRC, which is PLUS that is not a
1918 legitimate operand for the LA instruction, into TARGET.
1919 SCRATCH may be used as scratch register. */
1922 s390_expand_plus_operand (target, src, scratch)
1923 register rtx target;
1925 register rtx scratch;
1928 struct s390_address ad;
1930 /* src must be a PLUS; get its two operands. */
1931 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1934 /* Check if any of the two operands is already scheduled
1935 for replacement by reload. This can happen e.g. when
1936 float registers occur in an address. */
1937 sum1 = find_replacement (&XEXP (src, 0));
1938 sum2 = find_replacement (&XEXP (src, 1));
1939 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1941 /* If the address is already strictly valid, there's nothing to do. */
1942 if (!s390_decompose_address (src, &ad)
1943 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1944 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1946 /* Otherwise, one of the operands cannot be an address register;
1947 we reload its value into the scratch register. */
/* Hard regs 1..15 are the usable address registers (r0 cannot serve
   as a base/index).  Anything outside that range goes via SCRATCH.  */
1948 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1950 emit_move_insn (scratch, sum1);
1953 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1955 emit_move_insn (scratch, sum2);
1959 /* According to the way these invalid addresses are generated
1960 in reload.c, it should never happen (at least on s390) that
1961 *neither* of the PLUS components, after find_replacements
1962 was applied, is an address register. */
1963 if (sum1 == scratch && sum2 == scratch)
1969 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1972 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1973 is only ever performed on addresses, so we can mark the
1974 sum as legitimate for LA in any case. */
1975 s390_load_address (target, src);
1979 /* Decompose a RTL expression ADDR for a memory address into
1980 its components, returned in OUT.
1982 Returns 0 if ADDR is not a valid memory address, nonzero
1983 otherwise. If OUT is NULL, don't return the components,
1984 but check for validity only.
1986 Note: Only addresses in canonical form are recognized.
1987 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1988 canonical form so that they will be recognized. */
1991 s390_decompose_address (addr, out)
1993 struct s390_address *out;
1995 rtx base = NULL_RTX;
1996 rtx indx = NULL_RTX;
1997 rtx disp = NULL_RTX;
/* Set when we can prove the decomposed address is a pointer (needed
   for LA legitimacy on 31-bit, see legitimate_la_operand_p).  */
1998 int pointer = FALSE;
2000 /* Decompose address into base + index + displacement. */
2002 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2005 else if (GET_CODE (addr) == PLUS)
2007 rtx op0 = XEXP (addr, 0);
2008 rtx op1 = XEXP (addr, 1);
2009 enum rtx_code code0 = GET_CODE (op0);
2010 enum rtx_code code1 = GET_CODE (op1);
2012 if (code0 == REG || code0 == UNSPEC)
2014 if (code1 == REG || code1 == UNSPEC)
2016 indx = op0; /* index + base */
2022 base = op0; /* base + displacement */
2027 else if (code0 == PLUS)
2029 indx = XEXP (op0, 0); /* index + base + disp */
2030 base = XEXP (op0, 1);
2041 disp = addr; /* displacement */
2044 /* Prefer to use pointer as base, not index. */
2047 int base_ptr = GET_CODE (base) == UNSPEC
2048 || (REG_P (base) && REG_POINTER (base));
2049 int indx_ptr = GET_CODE (indx) == UNSPEC
2050 || (REG_P (indx) && REG_POINTER (indx));
/* Swap base and index if only the index is a pointer (the swap
   itself is on lines not visible here).  */
2052 if (!base_ptr && indx_ptr)
2060 /* Validate base register. */
/* NOTE(review): UNSPEC 101 appears to wrap the literal-pool base
   register (see its use in legitimize_pic_address) -- confirm against
   the machine description.  */
2063 if (GET_CODE (base) == UNSPEC)
2065 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
2067 base = XVECEXP (base, 0, 0)
2071 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* These registers are known to hold pointers (stack, frame, arg
   pointer, literal-pool base, GOT pointer, virtual regs).  */
2074 if (REGNO (base) == BASE_REGISTER
2075 || REGNO (base) == STACK_POINTER_REGNUM
2076 || REGNO (base) == FRAME_POINTER_REGNUM
2077 || ((reload_completed || reload_in_progress)
2078 && frame_pointer_needed
2079 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2080 || REGNO (base) == ARG_POINTER_REGNUM
2081 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
2082 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
2084 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2088 /* Validate index register. */
/* Mirrors the base-register validation above.  */
2091 if (GET_CODE (indx) == UNSPEC)
2093 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
2095 indx = XVECEXP (indx, 0, 0)
2099 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2102 if (REGNO (indx) == BASE_REGISTER
2103 || REGNO (indx) == STACK_POINTER_REGNUM
2104 || REGNO (indx) == FRAME_POINTER_REGNUM
2105 || ((reload_completed || reload_in_progress)
2106 && frame_pointer_needed
2107 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2108 || REGNO (indx) == ARG_POINTER_REGNUM
2109 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
2110 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
2112 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2116 /* Validate displacement. */
2119 /* Allow integer constant in range. */
2120 if (GET_CODE (disp) == CONST_INT)
2122 /* If the argument pointer is involved, the displacement will change
2123 later anyway as the argument pointer gets eliminated. This could
2124 make a valid displacement invalid, but it is more likely to make
2125 an invalid displacement valid, because we sometimes access the
2126 register save area via negative offsets to the arg pointer.
2127 Thus we don't check the displacement for validity here. If after
2128 elimination the displacement turns out to be invalid after all,
2129 this is fixed up by reload in any case. */
2130 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
2132 if (!DISP_IN_RANGE (INTVAL (disp)))
2137 /* In the small-PIC case, the linker converts @GOT12
2138 and @GOTNTPOFF offsets to possible displacements. */
/* NOTE(review): 110 is presumably the UNSPEC number for @GOT12 (see
   legitimize_pic_address) -- confirm against the machine description.  */
2139 else if (GET_CODE (disp) == CONST
2140 && GET_CODE (XEXP (disp, 0)) == UNSPEC
2141 && (XINT (XEXP (disp, 0), 1) == 110
2142 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
2150 /* Accept chunkfied literal pool symbol references. */
2151 else if (GET_CODE (disp) == CONST
2152 && GET_CODE (XEXP (disp, 0)) == MINUS
2153 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
2154 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
2159 /* Likewise if a constant offset is present. */
2160 else if (GET_CODE (disp) == CONST
2161 && GET_CODE (XEXP (disp, 0)) == PLUS
2162 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
2163 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
2164 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
2165 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
2170 /* We can convert literal pool addresses to
2171 displacements by basing them off the base register. */
2174 /* In some cases, we can accept an additional
2175 small constant offset. Split these off here. */
2177 unsigned int offset = 0;
2179 if (GET_CODE (disp) == CONST
2180 && GET_CODE (XEXP (disp, 0)) == PLUS
2181 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2183 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2184 disp = XEXP (XEXP (disp, 0), 0);
2187 /* Now we must have a literal pool address. */
2188 if (GET_CODE (disp) != SYMBOL_REF
2189 || !CONSTANT_POOL_ADDRESS_P (disp))
2192 /* If we have an offset, make sure it does not
2193 exceed the size of the constant pool entry. */
2194 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
2197 /* Either base or index must be free to
2198 hold the base register. */
2202 /* Convert the address. */
2204 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2206 base = gen_rtx_REG (Pmode, BASE_REGISTER);
/* Wrap the pool symbol in UNSPEC 100 (lt-relative offset) and re-add
   any split-off constant offset.  */
2208 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
2209 disp = gen_rtx_CONST (Pmode, disp);
2212 disp = plus_constant (disp, offset);
2226 out->pointer = pointer;
2232 /* Return nonzero if ADDR is a valid memory address.
2233 STRICT specifies whether strict register checking applies. */
2236 legitimate_address_p (mode, addr, strict)
2237 enum machine_mode mode ATTRIBUTE_UNUSED;
2241 struct s390_address ad;
/* First the address must decompose at all ...  */
2242 if (!s390_decompose_address (addr, &ad))
/* ... then base and index must satisfy the strict (hard-reg) or
   non-strict (pseudo-allowed) register predicates.  */
2247 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2249 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2254 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2256 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2263 /* Return 1 if OP is a valid operand for the LA instruction.
2264 In 31-bit, we need to prove that the result is used as an
2265 address, as LA performs only a 31-bit addition. */
2268 legitimate_la_operand_p (op)
2271 struct s390_address addr;
2272 if (!s390_decompose_address (op, &addr))
/* On 64-bit LA is a full addition; on 31-bit the address must be a
   proven pointer so truncating bit 0 is harmless.  */
2275 if (TARGET_64BIT || addr.pointer)
2281 /* Return 1 if OP is a valid operand for the LA instruction,
2282 and we prefer to use LA over addition to compute it. */
2285 preferred_la_operand_p (op)
2288 struct s390_address addr;
2289 if (!s390_decompose_address (op, &addr))
/* Must first be a legal LA operand at all (cf.
   legitimate_la_operand_p).  */
2292 if (!TARGET_64BIT && !addr.pointer)
/* Prefer LA when at least one component is a known pointer
   register.  */
2298 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2299 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2305 /* Emit a forced load-address operation to load SRC into DST.
2306 This will use the LOAD ADDRESS instruction even in situations
2307 where legitimate_la_operand_p (SRC) returns false. */
2310 s390_load_address (dst, src)
/* On 64-bit a plain move suffices; on 31-bit use the special
   force_la_31 pattern that emits LA unconditionally.  */
2315 emit_move_insn (dst, src);
2317 emit_insn (gen_force_la_31 (dst, src));
2320 /* Return a legitimate reference for ORIG (an address) using the
2321 register REG. If REG is 0, a new pseudo is generated.
2323 There are two types of references that must be handled:
2325 1. Global data references must load the address from the GOT, via
2326 the PIC reg. An insn is emitted to do this load, and the reg is
2329 2. Static data references, constant pool addresses, and code labels
2330 compute the address as an offset from the GOT, whose base is in
2331 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2332 differentiate them from global data objects. The returned
2333 address is the PIC reg + an unspec constant.
2335 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2336 reg also appears in the address.
 
   NOTE(review): the bare UNSPEC numbers used below (100 = lt-relative,
   101 = lt-base wrapper, 110 = @GOT12, 111 = @GOTENT, 112 = @GOT,
   114 = lt-relative @PLT) are inferred from the surrounding comments;
   confirm against the machine description. */
2339 legitimize_pic_address (orig, reg)
2347 if (GET_CODE (addr) == LABEL_REF
2348 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2350 /* This is a local symbol. */
2351 if (TARGET_64BIT && larl_operand (addr, VOIDmode))
2353 /* Access local symbols PC-relative via LARL.
2354 This is the same as in the non-PIC case, so it is
2355 handled automatically ... */
2359 /* Access local symbols relative to the literal pool. */
2361 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Force the lt-relative offset into the literal pool and add the
   literal-pool base register to it.  */
2363 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 100);
2364 addr = gen_rtx_CONST (Pmode, addr);
2365 addr = force_const_mem (Pmode, addr);
2366 emit_move_insn (temp, addr);
2368 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2369 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2370 new = gen_rtx_PLUS (Pmode, base, temp);
2374 emit_move_insn (reg, new);
2379 else if (GET_CODE (addr) == SYMBOL_REF)
2382 reg = gen_reg_rtx (Pmode);
2386 /* Assume GOT offset < 4k. This is handled the same way
2387 in both 31- and 64-bit code (@GOT12). */
2389 if (reload_in_progress || reload_completed)
2390 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* Load from GOT-pointer + @GOT12 offset; the GOT slot is marked
   unchanging so it can be CSE'd.  */
2392 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
2393 new = gen_rtx_CONST (Pmode, new);
2394 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2395 new = gen_rtx_MEM (Pmode, new);
2396 RTX_UNCHANGING_P (new) = 1;
2397 emit_move_insn (reg, new);
2400 else if (TARGET_64BIT)
2402 /* If the GOT offset might be >= 4k, we determine the position
2403 of the GOT entry via a PC-relative LARL (@GOTENT). */
2405 rtx temp = gen_reg_rtx (Pmode);
2407 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
2408 new = gen_rtx_CONST (Pmode, new);
2409 emit_move_insn (temp, new);
2411 new = gen_rtx_MEM (Pmode, temp);
2412 RTX_UNCHANGING_P (new) = 1;
2413 emit_move_insn (reg, new);
2418 /* If the GOT offset might be >= 4k, we have to load it
2419 from the literal pool (@GOT). */
2421 rtx temp = gen_reg_rtx (Pmode);
2423 if (reload_in_progress || reload_completed)
2424 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2426 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 112);
2427 addr = gen_rtx_CONST (Pmode, addr);
2428 addr = force_const_mem (Pmode, addr);
2429 emit_move_insn (temp, addr);
2431 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2432 new = gen_rtx_MEM (Pmode, new);
2433 RTX_UNCHANGING_P (new) = 1;
2434 emit_move_insn (reg, new);
2440 if (GET_CODE (addr) == CONST)
2442 addr = XEXP (addr, 0);
2443 if (GET_CODE (addr) == UNSPEC)
2445 if (XVECLEN (addr, 0) != 1)
2447 switch (XINT (addr, 1))
2449 /* If someone moved an @GOT or lt-relative UNSPEC
2450 out of the literal pool, force them back in. */
2454 new = force_const_mem (Pmode, orig);
2457 /* @GOTENT is OK as is. */
2461 /* @PLT is OK as is on 64-bit, must be converted to
2462 lt-relative PLT on 31-bit. */
2466 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2468 addr = XVECEXP (addr, 0, 0);
2469 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 114);
2470 addr = gen_rtx_CONST (Pmode, addr);
2471 addr = force_const_mem (Pmode, addr);
2472 emit_move_insn (temp, addr);
2474 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2475 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2476 new = gen_rtx_PLUS (Pmode, base, temp);
2480 emit_move_insn (reg, new);
2486 /* Everything else cannot happen. */
2491 else if (GET_CODE (addr) != PLUS)
2494 if (GET_CODE (addr) == PLUS)
2496 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2497 /* Check first to see if this is a constant offset
2498 from a local symbol reference. */
2499 if ((GET_CODE (op0) == LABEL_REF
2500 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2501 && GET_CODE (op1) == CONST_INT)
2503 if (TARGET_64BIT && larl_operand (op0, VOIDmode))
2505 if (INTVAL (op1) & 1)
2507 /* LARL can't handle odd offsets, so emit a
2508 pair of LARL and LA. */
2509 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* If the odd offset is also out of LA's displacement range,
   fold the even part into the LARL target instead.  */
2511 if (!DISP_IN_RANGE (INTVAL (op1)))
2513 int even = INTVAL (op1) - 1;
2514 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2515 op0 = gen_rtx_CONST (Pmode, op0);
2519 emit_move_insn (temp, op0);
2520 new = gen_rtx_PLUS (Pmode, temp, op1);
2524 emit_move_insn (reg, new);
2530 /* If the offset is even, we can just use LARL.
2531 This will happen automatically. */
2536 /* Access local symbols relative to the literal pool. */
2538 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2540 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0), 100);
2541 addr = gen_rtx_PLUS (Pmode, addr, op1);
2542 addr = gen_rtx_CONST (Pmode, addr);
2543 addr = force_const_mem (Pmode, addr);
2544 emit_move_insn (temp, addr);
2546 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2547 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2548 new = gen_rtx_PLUS (Pmode, base, temp);
2552 emit_move_insn (reg, new);
2558 /* Now, check whether it is an LT-relative symbol plus offset
2559 that was pulled out of the literal pool. Force it back in. */
2561 else if (GET_CODE (op0) == UNSPEC
2562 && GET_CODE (op1) == CONST_INT)
2564 if (XVECLEN (op0, 0) != 1)
2566 if (XINT (op0, 1) != 100)
2569 new = force_const_mem (Pmode, orig);
2572 /* Otherwise, compute the sum. */
2575 base = legitimize_pic_address (XEXP (addr, 0), reg);
2576 new = legitimize_pic_address (XEXP (addr, 1),
2577 base == reg ? NULL_RTX : reg);
2578 if (GET_CODE (new) == CONST_INT)
2579 new = plus_constant (base, INTVAL (new));
/* Reassociate so the constant part stays outermost.  */
2582 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2584 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2585 new = XEXP (new, 1);
2587 new = gen_rtx_PLUS (Pmode, base, new);
2590 if (GET_CODE (new) == CONST)
2591 new = XEXP (new, 0);
2592 new = force_operand (new, 0);
2599 /* Load the thread pointer into a register. */
2602 get_thread_pointer ()
/* UNSPEC_TP expands to the instruction(s) that read the thread
   pointer; mark the result as a pointer for the optimizers.  */
2606 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2607 tp = force_reg (Pmode, tp);
2608 mark_reg_pointer (tp, BITS_PER_WORD);
2613 /* Construct the SYMBOL_REF for the tls_get_offset function. */
/* Cached across calls; GTY(()) keeps it alive across garbage
   collection of the RTL.  */
2615 static GTY(()) rtx s390_tls_symbol;
2617 s390_tls_get_offset ()
2619 if (!s390_tls_symbol)
2620 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2622 return s390_tls_symbol;
2625 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2626 this (thread-local) address. REG may be used as temporary.
 
   Dispatches on the symbol's TLS model (global-dynamic, local-dynamic,
   initial-exec, local-exec) as returned by tls_symbolic_operand. */
2629 legitimize_tls_address (addr, reg)
2633 rtx new, tls_call, temp, base, r2, insn;
2635 if (GET_CODE (addr) == SYMBOL_REF)
2636 switch (tls_symbolic_operand (addr))
2638 case TLS_MODEL_GLOBAL_DYNAMIC:
/* Call __tls_get_offset with the @TLSGD constant in r2; wrap the
   call in a libcall block equated to the @NTPOFF value.  */
2640 r2 = gen_rtx_REG (Pmode, 2);
2641 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
2642 new = gen_rtx_CONST (Pmode, tls_call);
2643 new = force_const_mem (Pmode, new);
2644 emit_move_insn (r2, new);
2645 emit_call_insn (gen_call_value_tls (r2, tls_call));
2646 insn = get_insns ();
2649 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2650 temp = gen_reg_rtx (Pmode);
2651 emit_libcall_block (insn, temp, r2, new);
/* Final address = thread pointer + offset.  */
2653 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2656 s390_load_address (reg, new);
2661 case TLS_MODEL_LOCAL_DYNAMIC:
/* One __tls_get_offset call yields the module base; each symbol then
   adds its @DTPOFF offset to that shared base.  */
2663 r2 = gen_rtx_REG (Pmode, 2);
2664 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
2665 new = gen_rtx_CONST (Pmode, tls_call);
2666 new = force_const_mem (Pmode, new);
2667 emit_move_insn (r2, new);
2668 emit_call_insn (gen_call_value_tls (r2, tls_call));
2669 insn = get_insns ();
2672 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
2673 temp = gen_reg_rtx (Pmode);
2674 emit_libcall_block (insn, temp, r2, new);
2676 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2677 base = gen_reg_rtx (Pmode);
2678 s390_load_address (base, new);
2680 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
2681 new = gen_rtx_CONST (Pmode, new);
2682 new = force_const_mem (Pmode, new);
2683 temp = gen_reg_rtx (Pmode);
2684 emit_move_insn (temp, new);
2686 new = gen_rtx_PLUS (Pmode, base, temp);
2689 s390_load_address (reg, new);
2694 case TLS_MODEL_INITIAL_EXEC:
2697 /* Assume GOT offset < 4k. This is handled the same way
2698 in both 31- and 64-bit code. */
2700 if (reload_in_progress || reload_completed)
2701 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2703 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2704 new = gen_rtx_CONST (Pmode, new);
2705 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2706 new = gen_rtx_MEM (Pmode, new);
2707 RTX_UNCHANGING_P (new) = 1;
2708 temp = gen_reg_rtx (Pmode);
2709 emit_move_insn (temp, new);
2711 else if (TARGET_64BIT)
2713 /* If the GOT offset might be >= 4k, we determine the position
2714 of the GOT entry via a PC-relative LARL. */
2716 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2717 new = gen_rtx_CONST (Pmode, new);
2718 temp = gen_reg_rtx (Pmode);
2719 emit_move_insn (temp, new);
2721 new = gen_rtx_MEM (Pmode, temp);
2722 RTX_UNCHANGING_P (new) = 1;
2723 temp = gen_reg_rtx (Pmode);
2724 emit_move_insn (temp, new);
2728 /* If the GOT offset might be >= 4k, we have to load it
2729 from the literal pool. */
2731 if (reload_in_progress || reload_completed)
2732 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2734 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2735 new = gen_rtx_CONST (Pmode, new);
2736 new = force_const_mem (Pmode, new);
2737 temp = gen_reg_rtx (Pmode);
2738 emit_move_insn (temp, new);
2740 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2741 new = gen_rtx_MEM (Pmode, new);
2742 RTX_UNCHANGING_P (new) = 1;
/* UNSPEC_TLS_LOAD ties the loaded value to ADDR so the scheduler and
   CSE treat it as symbol-specific.  */
2744 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2745 temp = gen_reg_rtx (Pmode);
2746 emit_insn (gen_rtx_SET (Pmode, temp, new));
2750 /* In position-dependent code, load the absolute address of
2751 the GOT entry from the literal pool. */
2753 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2754 new = gen_rtx_CONST (Pmode, new);
2755 new = force_const_mem (Pmode, new);
2756 temp = gen_reg_rtx (Pmode);
2757 emit_move_insn (temp, new);
2760 new = gen_rtx_MEM (Pmode, new);
2761 RTX_UNCHANGING_P (new) = 1;
2763 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2764 temp = gen_reg_rtx (Pmode);
2765 emit_insn (gen_rtx_SET (Pmode, temp, new));
2768 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2771 s390_load_address (reg, new);
2776 case TLS_MODEL_LOCAL_EXEC:
/* Offset is known at link time: thread pointer + @NTPOFF constant
   loaded from the literal pool.  */
2777 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2778 new = gen_rtx_CONST (Pmode, new);
2779 new = force_const_mem (Pmode, new);
2780 temp = gen_reg_rtx (Pmode);
2781 emit_move_insn (temp, new);
2783 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2786 s390_load_address (reg, new);
2795 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2797 switch (XINT (XEXP (addr, 0), 1))
2799 case UNSPEC_INDNTPOFF:
/* Other CONST/UNSPEC forms are not handled yet.  */
2812 abort (); /* for now ... */
2817 /* Emit insns to move operands[1] into operands[0].
 
   Used for symbolic constants that legitimate_pic_operand_p rejected;
   routes TLS symbols to legitimize_tls_address and other symbols to
   legitimize_pic_address. */
2820 emit_symbolic_move (operands)
/* During/after reload no new pseudos may be created; reuse the
   destination as the temporary instead.  */
2823 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2825 if (GET_CODE (operands[0]) == MEM)
2826 operands[1] = force_reg (Pmode, operands[1]);
2827 else if (TLS_SYMBOLIC_CONST (operands[1]))
2828 operands[1] = legitimize_tls_address (operands[1], temp);
2830 operands[1] = legitimize_pic_address (operands[1], temp);
2833 /* Try machine-dependent ways of modifying an illegitimate address X
2834 to be legitimate. If we find one, return the new, valid address.
2836 OLDX is the address as it was before break_out_memory_refs was called.
2837 In some cases it is useful to look at this to decide what needs to be done.
2839 MODE is the mode of the operand pointed to by X.
2841 When -fpic is used, special handling is needed for symbolic references.
2842 See comments by legitimize_pic_address for details. */
2845 legitimize_address (x, oldx, mode)
2847 register rtx oldx ATTRIBUTE_UNUSED;
2848 enum machine_mode mode ATTRIBUTE_UNUSED;
2850 rtx constant_term = const0_rtx;
/* TLS symbols first: they always need special sequences.  */
2852 if (TLS_SYMBOLIC_CONST (x))
2854 x = legitimize_tls_address (x, 0);
2856 if (legitimate_address_p (mode, x, FALSE))
/* Then PIC symbolic references (plain or inside a PLUS).  */
2861 if (SYMBOLIC_CONST (x)
2862 || (GET_CODE (x) == PLUS
2863 && (SYMBOLIC_CONST (XEXP (x, 0))
2864 || SYMBOLIC_CONST (XEXP (x, 1)))))
2865 x = legitimize_pic_address (x, 0);
2867 if (legitimate_address_p (mode, x, FALSE))
2871 x = eliminate_constant_term (x, &constant_term);
2873 /* Optimize loading of large displacements by splitting them
2874 into the multiple of 4K and the rest; this allows the
2875 former to be CSE'd if possible.
2877 Don't do this if the displacement is added to a register
2878 pointing into the stack frame, as the offsets will
2879 change later anyway. */
2881 if (GET_CODE (constant_term) == CONST_INT
2882 && !TARGET_LONG_DISPLACEMENT
2883 && !DISP_IN_RANGE (INTVAL (constant_term))
2884 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* LOWER keeps the in-range 12-bit part; UPPER (a multiple of 4K) is
   materialized in a register and folded into the base.  */
2886 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2887 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2889 rtx temp = gen_reg_rtx (Pmode);
2890 rtx val = force_operand (GEN_INT (upper), temp);
2892 emit_move_insn (temp, val);
2894 x = gen_rtx_PLUS (Pmode, x, temp);
2895 constant_term = GEN_INT (lower);
/* Force the non-register side of a PLUS into a register so the
   result is a valid base + index form.  */
2898 if (GET_CODE (x) == PLUS)
2900 if (GET_CODE (XEXP (x, 0)) == REG)
2902 register rtx temp = gen_reg_rtx (Pmode);
2903 register rtx val = force_operand (XEXP (x, 1), temp);
2905 emit_move_insn (temp, val);
2907 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2910 else if (GET_CODE (XEXP (x, 1)) == REG)
2912 register rtx temp = gen_reg_rtx (Pmode);
2913 register rtx val = force_operand (XEXP (x, 0), temp);
2915 emit_move_insn (temp, val);
2917 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
/* Re-attach any remaining constant displacement.  */
2921 if (constant_term != const0_rtx)
2922 x = gen_rtx_PLUS (Pmode, x, constant_term);
2927 /* Emit code to move LEN bytes from DST to SRC. */
/* Expand a block copy of LEN bytes from memory SRC to memory DST
   (memcpy semantics).  Three strategies:
   (1) small compile-time constant LEN: one short-move pattern whose
       length operand is LEN-1 (presumably MVC -- confirm in s390.md);
   (2) TARGET_MVCLE: the MVCLE instruction via two register pairs;
   (3) otherwise: loop over full 256-byte blocks, then the remainder.  */
2930 s390_expand_movstr (dst, src, len)
/* Select the 31-bit or 64-bit variants of the expander patterns.  */
2935 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2936 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2937 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2938 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Case 1: constant length 0..256; LEN == 0 emits nothing at all.  */
2941 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2943 if (INTVAL (len) > 0)
2944 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE.  Addresses go in the high parts and lengths in the
   low parts of two double-width pseudo registers.  */
2947 else if (TARGET_MVCLE)
2949 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2950 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2951 rtx reg0 = gen_reg_rtx (double_mode);
2952 rtx reg1 = gen_reg_rtx (double_mode);
2954 emit_move_insn (gen_highpart (single_mode, reg0),
2955 force_operand (XEXP (dst, 0), NULL_RTX));
2956 emit_move_insn (gen_highpart (single_mode, reg1),
2957 force_operand (XEXP (src, 0), NULL_RTX));
2959 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2960 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2962 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* Case 3: runtime length.  COUNT = LEN; skip everything when zero.
   After COUNT -= 1, BLOCKS = COUNT >> 8 full 256-byte blocks are
   copied in a loop; the tail uses COUNT's low byte as length code.  */
2967 rtx dst_addr, src_addr, count, blocks, temp;
2968 rtx end_label = gen_label_rtx ();
2969 enum machine_mode mode;
2972 mode = GET_MODE (len);
2973 if (mode == VOIDmode)
2976 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2980 dst_addr = gen_reg_rtx (Pmode);
2981 src_addr = gen_reg_rtx (Pmode);
2982 count = gen_reg_rtx (mode);
2983 blocks = gen_reg_rtx (mode);
2985 convert_move (count, len, 1);
2986 emit_cmp_and_jump_insns (count, const0_rtx,
2987 EQ, NULL_RTX, mode, 1, end_label);
2989 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2990 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2991 dst = change_address (dst, VOIDmode, dst_addr);
2992 src = change_address (src, VOIDmode, src_addr);
/* count = count - 1; blocks = count >> 8.  */
2994 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2996 emit_move_insn (count, temp);
2998 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3000 emit_move_insn (blocks, temp);
/* Loop: one full 256-byte block per iteration (length code 255),
   advancing both addresses, until BLOCKS reaches zero.  */
3002 expand_start_loop (1);
3003 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3004 make_tree (type, blocks),
3005 make_tree (type, const0_rtx)));
3007 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
3008 s390_load_address (dst_addr,
3009 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3010 s390_load_address (src_addr,
3011 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3013 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3015 emit_move_insn (blocks, temp);
/* Tail: move the remaining bytes; COUNT already holds length-1.  */
3019 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
3020 emit_label (end_label);
3024 /* Emit code to clear LEN bytes at DST. */
/* Expand code to clear (zero) LEN bytes at DST.  Structure parallels
   s390_expand_movstr: constant short clear, MVCLE with a zero-length
   source pair, or a 256-byte-block loop plus remainder.  */
3027 s390_expand_clrstr (dst, len)
/* Select the 31-bit or 64-bit variants of the expander patterns.  */
3031 rtx (*gen_short) PARAMS ((rtx, rtx)) =
3032 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
3033 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
3034 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
/* Case 1: constant length 0..256; LEN == 0 emits nothing.  */
3037 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3039 if (INTVAL (len) > 0)
3040 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE with source length zero, which pads the destination
   with the (zero) pad byte.  */
3043 else if (TARGET_MVCLE)
3045 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3046 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3047 rtx reg0 = gen_reg_rtx (double_mode);
3048 rtx reg1 = gen_reg_rtx (double_mode);
3050 emit_move_insn (gen_highpart (single_mode, reg0),
3051 force_operand (XEXP (dst, 0), NULL_RTX));
3052 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* Source operand pair is all zero: empty source, zero pad.  */
3054 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
3055 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
3057 emit_insn ((*gen_long) (reg0, reg1, reg0));
/* Case 3: runtime length loop, same block structure as movstr.
   NOTE(review): src_addr is declared but never used here.  */
3062 rtx dst_addr, src_addr, count, blocks, temp;
3063 rtx end_label = gen_label_rtx ();
3064 enum machine_mode mode;
3067 mode = GET_MODE (len);
3068 if (mode == VOIDmode)
3071 type = (*lang_hooks.types.type_for_mode) (mode, 1);
3075 dst_addr = gen_reg_rtx (Pmode);
3076 src_addr = gen_reg_rtx (Pmode);
3077 count = gen_reg_rtx (mode);
3078 blocks = gen_reg_rtx (mode);
3080 convert_move (count, len, 1);
3081 emit_cmp_and_jump_insns (count, const0_rtx,
3082 EQ, NULL_RTX, mode, 1, end_label);
3084 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3085 dst = change_address (dst, VOIDmode, dst_addr);
/* count = count - 1; blocks = count >> 8.  */
3087 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3089 emit_move_insn (count, temp);
3091 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3093 emit_move_insn (blocks, temp);
/* Loop: clear one 256-byte block per iteration until BLOCKS is zero.  */
3095 expand_start_loop (1);
3096 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3097 make_tree (type, blocks),
3098 make_tree (type, const0_rtx)));
3100 emit_insn ((*gen_short) (dst, GEN_INT (255)));
3101 s390_load_address (dst_addr,
3102 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3104 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3106 emit_move_insn (blocks, temp);
/* Tail: clear the remaining bytes.  */
3110 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
3111 emit_label (end_label);
3115 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3116 and return the result in TARGET. */
/* Expand a comparison of LEN bytes at OP0 against OP1, leaving a
   signed three-way result in TARGET (via the cmpint patterns, which
   convert the condition code into an integer).  Strategies mirror
   s390_expand_movstr: short constant compare, CLCLE-style long
   compare, or a 256-byte-block loop with early exit on mismatch.  */
3119 s390_expand_cmpstr (target, op0, op1, len)
/* Select pattern variants; result converter depends on TARGET mode.  */
3125 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
3126 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
3127 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
3128 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
3129 rtx (*gen_result) PARAMS ((rtx)) =
3130 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
/* Flush any pending queued (post-increment style) side effects.  */
3132 op0 = protect_from_queue (op0, 0);
3133 op1 = protect_from_queue (op1, 0);
3134 len = protect_from_queue (len, 0);
/* Case 1: constant length; LEN == 0 compares equal by definition.  */
3136 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3138 if (INTVAL (len) > 0)
3140 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
3141 emit_insn ((*gen_result) (target));
3144 emit_move_insn (target, const0_rtx);
/* Case 2: long compare via register pairs (addresses in high parts,
   lengths in low parts), then convert CC to an integer result.  */
3147 else /* if (TARGET_MVCLE) */
3149 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3150 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3151 rtx reg0 = gen_reg_rtx (double_mode);
3152 rtx reg1 = gen_reg_rtx (double_mode);
3154 emit_move_insn (gen_highpart (single_mode, reg0),
3155 force_operand (XEXP (op0, 0), NULL_RTX));
3156 emit_move_insn (gen_highpart (single_mode, reg1),
3157 force_operand (XEXP (op1, 0), NULL_RTX));
3159 convert_move (gen_lowpart (single_mode, reg0), len, 1);
3160 convert_move (gen_lowpart (single_mode, reg1), len, 1);
3162 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
3163 emit_insn ((*gen_result) (target));
/* Case 3 (loop version) -- currently disabled, see note below.  */
3167 /* Deactivate for now as profile code cannot cope with
3168 CC being live across basic block boundaries. */
3171 rtx addr0, addr1, count, blocks, temp;
3172 rtx end_label = gen_label_rtx ();
3173 enum machine_mode mode;
3176 mode = GET_MODE (len);
3177 if (mode == VOIDmode)
3180 type = (*lang_hooks.types.type_for_mode) (mode, 1);
3184 addr0 = gen_reg_rtx (Pmode);
3185 addr1 = gen_reg_rtx (Pmode);
3186 count = gen_reg_rtx (mode);
3187 blocks = gen_reg_rtx (mode);
3189 convert_move (count, len, 1);
3190 emit_cmp_and_jump_insns (count, const0_rtx,
3191 EQ, NULL_RTX, mode, 1, end_label);
3193 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3194 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3195 op0 = change_address (op0, VOIDmode, addr0);
3196 op1 = change_address (op1, VOIDmode, addr1);
/* count = count - 1; blocks = count >> 8.  */
3198 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3200 emit_move_insn (count, temp);
3202 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3204 emit_move_insn (blocks, temp);
3206 expand_start_loop (1);
3207 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3208 make_tree (type, blocks),
3209 make_tree (type, const0_rtx)));
3211 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump to END_LABEL if CC (hard reg 33 in
   CCSmode) signals a mismatch in this block.  */
3212 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3213 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3214 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3215 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3216 emit_jump_insn (temp);
3218 s390_load_address (addr0,
3219 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3220 s390_load_address (addr1,
3221 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3223 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3225 emit_move_insn (blocks, temp);
/* Tail compare, then convert the final CC into TARGET.  */
3229 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
3230 emit_label (end_label);
3232 emit_insn ((*gen_result) (target));
3237 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3238 We need to emit DTP-relative relocations. */
/* Emit a DTP-relative (thread-local, dynamic TLS) relocation for X of
   SIZE bytes into FILE: a .long or .quad directive (the SIZE dispatch
   lines are elided in this listing) followed by the symbol and the
   @DTPOFF assembler suffix.  Called via ASM_OUTPUT_DWARF_DTPREL.  */
3241 s390_output_dwarf_dtprel (file, size, x)
3249 fputs ("\t.long\t", file);
3252 fputs ("\t.quad\t", file);
3257 output_addr_const (file, x);
3258 fputs ("@DTPOFF", file);
3261 /* In the name of slightly smaller debug output, and to cater to
3262 general assembler lossage, recognize various UNSPEC sequences
3263 and turn them back into a direct symbol reference. */
/* Undo legitimization for debug output: turn PIC/GOT UNSPEC wrappers
   back into the plain symbol they reference.  Handles
   (mem (plus pic_reg (const (unspec [sym] 110)))) and
   (mem (const (unspec [sym] 111))); returns the original rtx for
   anything else (return paths elided in this listing).
   NOTE(review): 110/111 are literal UNSPEC codes -- presumably
   GOT12/GOTENT-style references; confirm against s390.md.  */
3266 s390_delegitimize_address (orig_x)
3271 if (GET_CODE (x) != MEM)
/* Form 1: GOT-register-relative UNSPEC 110.  */
3275 if (GET_CODE (x) == PLUS
3276 && GET_CODE (XEXP (x, 1)) == CONST
3277 && GET_CODE (XEXP (x, 0)) == REG
3278 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3280 y = XEXP (XEXP (x, 1), 0);
3281 if (GET_CODE (y) == UNSPEC
3282 && XINT (y, 1) == 110)
3283 return XVECEXP (y, 0, 0);
/* Form 2: bare CONST wrapping UNSPEC 111.  */
3287 if (GET_CODE (x) == CONST)
3290 if (GET_CODE (y) == UNSPEC
3291 && XINT (y, 1) == 111)
3292 return XVECEXP (y, 0, 0);
3299 /* Locate some local-dynamic symbol still in use by this function
3300 so that we can print its name in local-dynamic base patterns. */
/* Return the name of some local-dynamic TLS symbol used in the current
   function, caching it in cfun->machine->some_ld_name.  Scans every
   insn pattern via get_some_local_dynamic_name_1 until one is found
   (the not-found fallthrough is elided in this listing).  */
3303 get_some_local_dynamic_name ()
/* Fast path: already found one earlier in this function.  */
3307 if (cfun->machine->some_ld_name)
3308 return cfun->machine->some_ld_name;
3310 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3312 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3313 return cfun->machine->some_ld_name;
/* for_each_rtx callback for get_some_local_dynamic_name: if *PX is a
   local-dynamic TLS SYMBOL_REF, record its name and stop the walk.
   Constant-pool symbols are followed into their pool constant.  */
3319 get_some_local_dynamic_name_1 (px, data)
3321 void *data ATTRIBUTE_UNUSED;
/* Recurse through constant pool entries.  */
3325 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3327 x = get_pool_constant (x);
3328 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3331 if (GET_CODE (x) == SYMBOL_REF
3332 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3334 cfun->machine->some_ld_name = XSTR (x, 0);
3341 /* Output symbolic constant X in assembler syntax to
3342 stdio stream FILE. */
/* Recursively print symbolic constant X to FILE in assembler syntax.
   PLUS/MINUS recurse on both operands; UNSPECs with one element are
   printed as their operand plus the matching assembler relocation
   suffix (@GOT12, @GOTENT, @GOT, @PLT, @TLSGD, @TLSLDM, @DTPOFF,
   @NTPOFF, @GOTNTPOFF, @INDNTPOFF) or as a literal-pool-relative
   difference.  NOTE(review): most case labels are elided in this
   listing, so the suffix printed by each arm cannot be matched to its
   UNSPEC code here -- consult s390.md for the mapping.  */
3345 s390_output_symbolic_const (file, x)
3349 switch (GET_CODE (x))
3354 s390_output_symbolic_const (file, XEXP (x, 0));
/* PLUS: operand0 "+" operand1.  */
3358 s390_output_symbolic_const (file, XEXP (x, 0));
3359 fprintf (file, "+");
3360 s390_output_symbolic_const (file, XEXP (x, 1));
/* MINUS: operand0 "-" operand1.  */
3364 s390_output_symbolic_const (file, XEXP (x, 0));
3365 fprintf (file, "-");
3366 s390_output_symbolic_const (file, XEXP (x, 1));
/* Simple terms (symbols, labels, integers) go straight out.  */
3373 output_addr_const (file, x);
/* UNSPEC: exactly one operand expected.  */
3377 if (XVECLEN (x, 0) != 1)
3378 output_operand_lossage ("invalid UNSPEC as operand (1)");
3379 switch (XINT (x, 1))
/* Literal-pool-relative differences (both directions).  */
3383 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3384 fprintf (file, "-");
3385 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3388 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3389 fprintf (file, "-");
3390 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
/* GOT/PLT/TLS relocation suffixes.  */
3393 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3394 fprintf (file, "@GOT12");
3397 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3398 fprintf (file, "@GOTENT");
3401 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3402 fprintf (file, "@GOT");
3405 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3406 fprintf (file, "@PLT");
3409 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3410 fprintf (file, "@PLT-");
3411 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3414 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3415 fprintf (file, "@TLSGD");
/* TLSLDM names the module, not a particular symbol.  */
3418 assemble_name (file, get_some_local_dynamic_name ());
3419 fprintf (file, "@TLSLDM");
3422 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3423 fprintf (file, "@DTPOFF");
3426 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3427 fprintf (file, "@NTPOFF");
3429 case UNSPEC_GOTNTPOFF:
3430 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3431 fprintf (file, "@GOTNTPOFF");
3433 case UNSPEC_INDNTPOFF:
3434 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3435 fprintf (file, "@INDNTPOFF");
3438 output_operand_lossage ("invalid UNSPEC as operand (2)");
3444 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
3449 /* Output address operand ADDR in assembler syntax to
3450 stdio stream FILE. */
/* Print address operand ADDR to FILE as "disp(index,base)" /
   "disp(base)" / "disp", after decomposing it into base, index and
   displacement and strict-checking the registers.  A missing
   displacement prints as "0".  */
3453 print_operand_address (file, addr)
3457 struct s390_address ad;
3459 if (!s390_decompose_address (addr, &ad)
3460 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3461 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3462 output_operand_lossage ("Cannot decompose address.");
3465 s390_output_symbolic_const (file, ad.disp);
3467 fprintf (file, "0");
3469 if (ad.base && ad.indx)
3470 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3471 reg_names[REGNO (ad.base)]);
3473 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3476 /* Output operand X in assembler syntax to stdio stream FILE.
3477 CODE specified the format flag. The following format flags
3480 'C': print opcode suffix for branch condition.
3481 'D': print opcode suffix for inverse branch condition.
3482 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3483 'O': print only the displacement of a memory reference.
3484 'R': print only the base register of a memory reference.
3485 'N': print the second word of a DImode operand.
3486 'M': print the second word of a TImode operand.
3488 'b': print integer X as if it's an unsigned byte.
3489 'x': print integer X as if it's an unsigned word.
3490 'h': print integer X as if it's a signed word. */
/* Print operand X with format flag CODE to FILE; the flag semantics
   are documented in the comment block preceding this function.
   NOTE(review): the switch/case labels for CODE are elided in this
   listing; the arms below appear in the order C, D, J, O, R, N, M,
   then the no-flag operand cases.  */
3493 print_operand (file, x, code)
/* 'C' / 'D': branch condition mnemonic (normal / inverted).  */
3501 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3505 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'J': TLS call/load annotation depending on operand shape.  */
3509 if (GET_CODE (x) == SYMBOL_REF)
3511 fprintf (file, "%s", ":tls_load:");
3512 output_addr_const (file, x);
3514 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3516 fprintf (file, "%s", ":tls_gdcall:");
3517 output_addr_const (file, XVECEXP (x, 0, 0));
3519 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3521 fprintf (file, "%s", ":tls_ldcall:");
3522 assemble_name (file, get_some_local_dynamic_name ());
/* 'O': only the displacement of a memory reference ("0" if none).  */
3530 struct s390_address ad;
3532 if (GET_CODE (x) != MEM
3533 || !s390_decompose_address (XEXP (x, 0), &ad)
3534 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3539 s390_output_symbolic_const (file, ad.disp);
3541 fprintf (file, "0");
/* 'R': only the base register of a memory reference ("0" if none).  */
3547 struct s390_address ad;
3549 if (GET_CODE (x) != MEM
3550 || !s390_decompose_address (XEXP (x, 0), &ad)
3551 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3556 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3558 fprintf (file, "0");
/* 'N': second word of a DImode operand (reg+1 or mem+4).  */
3563 if (GET_CODE (x) == REG)
3564 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3565 else if (GET_CODE (x) == MEM)
3566 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand (reg+1 or mem+8).  */
3572 if (GET_CODE (x) == REG)
3573 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3574 else if (GET_CODE (x) == MEM)
3575 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
/* Plain operand: dispatch on its rtx code.  */
3581 switch (GET_CODE (x))
3584 fprintf (file, "%s", reg_names[REGNO (x)]);
3588 output_address (XEXP (x, 0));
3595 s390_output_symbolic_const (file, x);
/* CONST_INT with 'b'/'x'/'h' truncation flags or as-is.  */
3600 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3601 else if (code == 'x')
3602 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3603 else if (code == 'h')
/* Sign-extend the low 16 bits: (v ^ 0x8000) - 0x8000.  */
3604 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3606 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE: only VOIDmode (integer) values are supported.  */
3610 if (GET_MODE (x) != VOIDmode)
3613 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3614 else if (code == 'x')
3615 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3616 else if (code == 'h')
3617 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3623 fatal_insn ("UNKNOWN in print_operand !?", x);
3628 /* Target hook for assembling integer objects. We need to define it
3629 here to work around a bug in some versions of GAS, which couldn't
3630 handle values smaller than INT_MIN when printed in decimal. */
/* TARGET_ASM_INTEGER hook.  Aligned 8-byte integers below INT_MIN are
   printed in hexadecimal (buggy GAS versions mishandled such values in
   decimal); everything else defers to the default implementation.  */
3633 s390_assemble_integer (x, size, aligned_p)
3638 if (size == 8 && aligned_p
3639 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3641 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
3645 return default_assemble_integer (x, size, aligned_p);
3648 /* Returns true if register REGNO is used for forming
3649 a memory address in expression X. */
/* Return nonzero if hard register REGNO is used to form a memory
   address anywhere inside X: directly inside a MEM address, inside a
   branch target (SET whose destination is PC), or recursively in any
   subexpression.  */
3652 reg_used_in_mem_p (regno, x)
3656 enum rtx_code code = GET_CODE (x);
/* MEM: does the address mention REGNO?  */
3662 if (refers_to_regno_p (regno, regno+1,
/* Computed branch: the jump target acts as an address too.  */
3666 else if (code == SET
3667 && GET_CODE (SET_DEST (x)) == PC)
3669 if (refers_to_regno_p (regno, regno+1,
/* Generic recursion over all operands and vectors.  */
3674 fmt = GET_RTX_FORMAT (code);
3675 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3678 && reg_used_in_mem_p (regno, XEXP (x, i)))
3681 else if (fmt[i] == 'E')
3682 for (j = 0; j < XVECLEN (x, i); j++)
3683 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3689 /* Returns true if expression DEP_RTX sets an address register
3690 used by instruction INSN to address memory. */
/* Return nonzero if DEP_RTX (an insn or SET) writes a register that
   INSN subsequently uses for address generation.  For LA-type insns
   only the address-computation source is inspected; for other
   address-generation insns the whole pattern is searched.  */
3693 addr_generation_dependency_p (dep_rtx, insn)
3699 if (GET_CODE (dep_rtx) == INSN)
3700 dep_rtx = PATTERN (dep_rtx);
3702 if (GET_CODE (dep_rtx) == SET)
/* Strip STRICT_LOW_PART / SUBREG wrappers to reach the real dest.  */
3704 target = SET_DEST (dep_rtx);
3705 if (GET_CODE (target) == STRICT_LOW_PART)
3706 target = XEXP (target, 0);
3707 while (GET_CODE (target) == SUBREG)
3708 target = SUBREG_REG (target);
3710 if (GET_CODE (target) == REG)
3712 int regno = REGNO (target);
/* LA: only a dependency if the address expression reads REGNO.  */
3714 if (s390_safe_attr_type (insn) == TYPE_LA)
3716 pat = PATTERN (insn);
3717 if (GET_CODE (pat) == PARALLEL)
3719 if (XVECLEN (pat, 0) != 2)
3721 pat = XVECEXP (pat, 0, 0);
3723 if (GET_CODE (pat) == SET)
3724 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* Other agen-type insns: search the whole pattern for address uses.  */
3728 else if (get_attr_atype (insn) == ATYPE_AGEN)
3729 return reg_used_in_mem_p (regno, PATTERN (insn));
3735 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
/* Return 1 if DEP_INSN sets a register that INSN uses in its address
   generation unit; handles both single SETs and PARALLELs.  */
3738 s390_agen_dep_p(dep_insn, insn)
3742 rtx dep_rtx = PATTERN (dep_insn);
3745 if (GET_CODE (dep_rtx) == SET
3746 && addr_generation_dependency_p (dep_rtx, insn))
3748 else if (GET_CODE (dep_rtx) == PARALLEL)
/* Any element of the PARALLEL may establish the dependency.  */
3750 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3752 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3759 /* Return the modified cost of the dependency of instruction INSN
3760 on instruction DEP_INSN through the link LINK. COST is the
3761 default cost of that dependency.
3763 Data dependencies are all handled without delay. However, if a
3764 register is modified and subsequently used as base or index
3765 register of a memory reference, at least 4 cycles need to pass
3766 between setting and using the register to avoid pipeline stalls.
3767 An exception is the LA instruction. An address generated by LA can
3768 be used by introducing only a one cycle stall on the pipeline. */
/* TARGET_SCHED_ADJUST_COST hook; see the comment block above for the
   pipeline rationale.  Penalizes address-generation dependencies by
   +4 cycles (+1 for LA), except under the DFA scheduler where the md
   file models this; z990 additionally forwards lr/load/la results.  */
3771 s390_adjust_cost (insn, link, dep_insn, cost)
3780 /* If the dependence is an anti-dependence, there is no cost. For an
3781 output dependence, there is sometimes a cost, but it doesn't seem
3782 worth handling those few cases. */
3784 if (REG_NOTE_KIND (link) != 0)
3787 /* If we can't recognize the insns, we can't really do anything. */
3788 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3791 /* DFA based scheduling checks address dependency in md file. */
3792 if (s390_use_dfa_pipeline_interface ())
3794 /* Operand forward in case of lr, load and la. */
3795 if (s390_tune == PROCESSOR_2084_Z990
3797 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3798 || s390_safe_attr_type (dep_insn) == TYPE_LR
3799 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
/* Non-DFA path: add the address-generation stall manually.  */
3804 dep_rtx = PATTERN (dep_insn);
3806 if (GET_CODE (dep_rtx) == SET
3807 && addr_generation_dependency_p (dep_rtx, insn))
3808 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3809 else if (GET_CODE (dep_rtx) == PARALLEL)
3811 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3813 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3814 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3820 /* A C statement (sans semicolon) to update the integer scheduling priority
3821 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3822 reduce the priority to execute INSN later. Do not define this macro if
3823 you do not need to adjust the scheduling priorities of insns.
3825 A STD instruction should be scheduled earlier,
3826 in order to use the bypass. */
/* TARGET_SCHED_ADJUST_PRIORITY hook (rationale in the comment block
   above).  Only active for z990; boosts priority by shifting
   (presumably <<3 for store-type and <<1 for other favored insn types
   -- the case labels are elided in this listing).  */
3829 s390_adjust_priority (insn, priority)
3830 rtx insn ATTRIBUTE_UNUSED;
3833 if (! INSN_P (insn))
3836 if (s390_tune != PROCESSOR_2084_Z990)
3839 switch (s390_safe_attr_type (insn))
3843 priority = priority << 3;
3846 priority = priority << 1;
3854 /* The number of instructions that can be issued per cycle. */
3859 if (s390_tune == PROCESSOR_2084_Z990)
3864 /* If the following function returns TRUE, we will use the DFA
/* Return nonzero when the DFA pipeline description should be used:
   only for the z900 and z990 processor models.  */
3868 s390_use_dfa_pipeline_interface ()
3870 if (s390_tune == PROCESSOR_2064_Z900
3871 || s390_tune == PROCESSOR_2084_Z990)
/* TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD hook: look ahead
   4 insns when the DFA scheduler is in use, otherwise disable.  */
3878 s390_first_cycle_multipass_dfa_lookahead ()
3880 return s390_use_dfa_pipeline_interface () ? 4 : 0;
3883 /* Called after issuing each insn.
3884 Triggers default sort algorithm to better slot instructions. */
/* TARGET_SCHED_REORDER2 hook, called after each insn is issued.  All
   parameters are ignored; returning the issue rate triggers the
   scheduler's default sorting to better slot instructions.  */
3887 s390_sched_reorder2 (dump, sched_verbose, ready, pn_ready, clock_var)
3888 FILE *dump ATTRIBUTE_UNUSED;
3889 int sched_verbose ATTRIBUTE_UNUSED;
3890 rtx *ready ATTRIBUTE_UNUSED;
3891 int *pn_ready ATTRIBUTE_UNUSED;
3892 int clock_var ATTRIBUTE_UNUSED;
3894 return s390_issue_rate();
3898 /* Split all branches that exceed the maximum distance.
3899 Returns true if this created a new literal pool entry.
3901 Code generated by this routine is allowed to use
3902 TEMP_REG as temporary scratch register. If this is
3903 done, TEMP_USED is set to true. */
/* Split conditional branches whose target lies out of relative-branch
   range (see the comment block above).  Rewrites the branch target to
   an indirect form through TEMP_REG, loading the label address either
   directly, from the literal pool, or (31-bit PIC) as a pool-relative
   UNSPEC 104 offset added to the base register.  Returns nonzero when
   a new literal pool entry was created.  */
3906 s390_split_branches (temp_reg, temp_used)
3910 int new_literal = 0;
3911 rtx insn, pat, tmp, target;
3914 /* We need correct insn addresses. */
3916 shorten_branches (get_insns ());
3918 /* Find all branches that exceed 64KB, and split them. */
3920 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3922 if (GET_CODE (insn) != JUMP_INSN)
/* Locate the (possibly parallel) pc-setting SET of this jump.  */
3925 pat = PATTERN (insn);
3926 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3927 pat = XVECEXP (pat, 0, 0);
3928 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* LABEL may be a direct target or either arm of an IF_THEN_ELSE.  */
3931 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3933 label = &SET_SRC (pat);
3935 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3937 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3938 label = &XEXP (SET_SRC (pat), 1);
3939 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3940 label = &XEXP (SET_SRC (pat), 2);
/* Short branches (within range) need no splitting.  */
3947 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
/* Variant 1: load the label address directly into TEMP_REG.  */
3954 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3955 INSN_ADDRESSES_NEW (tmp, -1);
/* Variant 2: load the label address from the literal pool.  */
3962 tmp = force_const_mem (Pmode, *label);
3963 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3964 INSN_ADDRESSES_NEW (tmp, -1);
/* Variant 3: pool-relative offset (UNSPEC 104) plus base register.  */
3971 tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3972 tmp = gen_rtx_CONST (SImode, tmp);
3973 tmp = force_const_mem (SImode, tmp);
3974 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3975 INSN_ADDRESSES_NEW (tmp, -1);
3977 target = gen_rtx_REG (Pmode, BASE_REGISTER);
3978 target = gen_rtx_PLUS (Pmode, target, temp_reg);
/* Replace the label in the jump with the indirect target.  */
3981 if (!validate_change (insn, label, target, 0))
3989 /* Find a literal pool symbol referenced in RTX X, and store
3990 it at REF. Will abort if X contains references to more than
3991 one such pool symbol; multiple references to the same symbol
3992 are allowed, however.
3994 The rtx pointed to by REF must be initialized to NULL_RTX
3995 by the caller before calling this routine. */
/* Recursively search X for a literal-pool SYMBOL_REF and store it at
   *REF; see the contract in the comment block above (aborts on two
   distinct pool symbols, *REF must start out NULL_RTX).  */
3998 find_constant_pool_ref (x, ref)
4005 if (GET_CODE (x) == SYMBOL_REF
4006 && CONSTANT_POOL_ADDRESS_P (x))
/* First pool symbol found: record it (duplicate check elided).  */
4008 if (*ref == NULL_RTX)
/* Generic recursion over operands and operand vectors.  */
4014 fmt = GET_RTX_FORMAT (GET_CODE (x));
4015 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4019 find_constant_pool_ref (XEXP (x, i), ref);
4021 else if (fmt[i] == 'E')
4023 for (j = 0; j < XVECLEN (x, i); j++)
4024 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4029 /* Replace every reference to the literal pool symbol REF
4030 in X by the address ADDR. Fix up MEMs as required. */
/* Replace every reference to pool symbol REF inside *X with address
   ADDR.  Pool references occur only as MEM addresses or load-address
   SET sources, either bare (REF itself) or as (const (plus REF off));
   the offset is preserved via plus_constant.  */
4033 replace_constant_pool_ref (x, ref, addr)
4044 /* Literal pool references can only occur inside a MEM ... */
4045 if (GET_CODE (*x) == MEM)
4047 rtx memref = XEXP (*x, 0);
/* Bare REF as the MEM address (equality test elided above).  */
4051 *x = replace_equiv_address (*x, addr);
/* REF plus a constant offset.  */
4055 if (GET_CODE (memref) == CONST
4056 && GET_CODE (XEXP (memref, 0)) == PLUS
4057 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4058 && XEXP (XEXP (memref, 0), 0) == ref)
4060 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4061 *x = replace_equiv_address (*x, plus_constant (addr, off));
4066 /* ... or a load-address type pattern. */
4067 if (GET_CODE (*x) == SET)
4069 rtx addrref = SET_SRC (*x);
4073 SET_SRC (*x) = addr;
4077 if (GET_CODE (addrref) == CONST
4078 && GET_CODE (XEXP (addrref, 0)) == PLUS
4079 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4080 && XEXP (XEXP (addrref, 0), 0) == ref)
4082 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4083 SET_SRC (*x) = plus_constant (addr, off);
/* Generic recursion over operands and operand vectors.  */
4088 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4089 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4093 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
4095 else if (fmt[i] == 'E')
4097 for (j = 0; j < XVECLEN (*x, i); j++)
4098 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
4103 /* Check whether ADDR is an address that uses the base register,
4104 without actually constituting a literal pool access. (This happens
4105 in 31-bit PIC mode, where the base register is used as anchor for
4106 relative addressing of local symbols.)
4108 Returns 1 if the base register occupies the base slot,
4109 returns 2 if the base register occupies the index slot,
4110 returns 0 if the address is not of this form. */
/* Classify a decomposed address: 1 if BASE_REGISTER sits in the base
   slot, 2 if in the index slot, 0 otherwise.  A non-CONST_INT
   displacement may be a literal pool reference and disqualifies the
   address (see the comment block above).  */
4113 find_base_register_in_addr (addr)
4114 struct s390_address *addr;
4116 /* If DISP is complex, we might have a literal pool reference. */
4117 if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
4120 if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
4123 if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
4129 /* Return true if X contains an address that uses the base register,
4130 without actually constituting a literal pool access. */
/* Return true if X anywhere contains an address using BASE_REGISTER
   without being a literal pool access: inside MEM addresses, inside
   load-address SET sources, or recursively in subexpressions.  */
4133 find_base_register_ref (x)
4137 struct s390_address addr;
4141 /* Addresses can only occur inside a MEM ... */
4142 if (GET_CODE (x) == MEM)
4144 if (s390_decompose_address (XEXP (x, 0), &addr)
4145 && find_base_register_in_addr (&addr))
4149 /* ... or a load-address type pattern. */
4150 if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
4152 if (s390_decompose_address (SET_SRC (x), &addr)
4153 && find_base_register_in_addr (&addr))
/* Generic recursion, OR-ing results from all subexpressions.  */
4157 fmt = GET_RTX_FORMAT (GET_CODE (x));
4158 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4162 retv |= find_base_register_ref (XEXP (x, i));
4164 else if (fmt[i] == 'E')
4166 for (j = 0; j < XVECLEN (x, i); j++)
4167 retv |= find_base_register_ref (XVECEXP (x, i, j));
4174 /* If X contains an address that uses the base register,
4175 without actually constituting a literal pool access,
4176 replace the base register with REPL in all such cases.
4178 Handles both MEMs and load address patterns. */
/* Rewrite every base-register-anchored (non-literal-pool) address in
   *X to use REPL instead of BASE_REGISTER, rebuilding the address as
   base [+ index] [+ disp] with REPL substituted in the slot reported
   by find_base_register_in_addr (slot selection elided here).  */
4181 replace_base_register_ref (x, repl)
4185 struct s390_address addr;
4190 /* Addresses can only occur inside a MEM ... */
4191 if (GET_CODE (*x) == MEM)
4193 if (s390_decompose_address (XEXP (*x, 0), &addr)
4194 && (pos = find_base_register_in_addr (&addr)))
/* Rebuild the full address expression around the replacement.  */
4201 new_addr = addr.base;
4203 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
4205 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
4207 *x = replace_equiv_address (*x, new_addr);
4212 /* ... or a load-address type pattern. */
4213 if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
4215 if (s390_decompose_address (SET_SRC (*x), &addr)
4216 && (pos = find_base_register_in_addr (&addr)))
4223 new_addr = addr.base;
4225 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
4227 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
4229 SET_SRC (*x) = new_addr;
/* Generic recursion over operands and operand vectors.  */
4234 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4235 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4239 replace_base_register_ref (&XEXP (*x, i), repl);
4241 else if (fmt[i] == 'E')
4243 for (j = 0; j < XVECLEN (*x, i); j++)
4244 replace_base_register_ref (&XVECEXP (*x, i, j), repl);
4250 /* We keep a list of constants which we have to add to internal
4251 constant tables in the middle of large functions. */
/* Machinery for per-function literal pools: the modes a pool entry
   may have, the pattern generators that emit each entry, the pool
   data structures, and forward declarations for the pool functions
   defined below.  */
4253 #define NR_C_MODES 6
/* Entry modes, kept in decreasing alignment order (the initializer
   lines listing all six modes are elided in this listing).  */
4254 enum machine_mode constant_modes[NR_C_MODES] =
/* One consttable pattern generator per entry in constant_modes.  */
4262 rtx (*gen_consttable[NR_C_MODES])(rtx) =
4264 gen_consttable_df, gen_consttable_di,
4265 gen_consttable_sf, gen_consttable_si,
/* struct constant: one pool entry; singly linked per mode.  */
4272 struct constant *next;
/* struct constant_pool: one pool chunk covering a range of insns.  */
4277 struct constant_pool
4279 struct constant_pool *next;
/* Per-mode lists of constants contained in this pool.  */
4284 struct constant *constants[NR_C_MODES];
4290 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
4291 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
4292 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
4294 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
4295 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
4296 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
4297 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
4298 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4299 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4300 static void s390_add_anchor PARAMS ((struct constant_pool *));
4301 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
4302 static void s390_free_pool PARAMS ((struct constant_pool *));
4304 /* Create new constant pool covering instructions starting at INSN
4305 and chain it to the end of POOL_LIST. */
/* Allocate and initialize a new, empty constant pool whose coverage
   starts at INSN, append it to *POOL_LIST, and return it.  */
4307 static struct constant_pool *
4308 s390_start_pool (pool_list, insn)
4309 struct constant_pool **pool_list;
4312 struct constant_pool *pool, **prev;
4315 pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* No constants of any mode yet.  */
4317 for (i = 0; i < NR_C_MODES; i++)
4318 pool->constants[i] = NULL;
4320 pool->label = gen_label_rtx ();
4321 pool->first_insn = insn;
4322 pool->pool_insn = NULL_RTX;
4323 pool->insns = BITMAP_XMALLOC ();
4325 pool->anchor = FALSE;
/* Walk to the end of the list and chain the new pool there.  */
4327 for (prev = pool_list; *prev; prev = &(*prev)->next)
4334 /* End range of instructions covered by POOL at INSN and emit
4335 placeholder insn representing the pool. */
/* Close POOL's insn range at INSN (or at the last insn when INSN is
   absent) and emit a placeholder "pool" insn of the pool's size plus
   8 bytes of alignment slop after it.  */
4338 s390_end_pool (pool, insn)
4339 struct constant_pool *pool;
4342 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4345 insn = get_last_insn ();
4347 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4348 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4351 /* Add INSN to the list of insns covered by POOL. */
/* Record INSN as covered by POOL (membership tracked in a bitmap
   keyed by insn UID).  */
4354 s390_add_pool_insn (pool, insn)
4355 struct constant_pool *pool;
4358 bitmap_set_bit (pool->insns, INSN_UID (insn));
4361 /* Return pool out of POOL_LIST that covers INSN. */
/* Return the pool in POOL_LIST whose insn bitmap contains INSN, or
   the fall-through value (elided) when none covers it.  */
4363 static struct constant_pool *
4364 s390_find_pool (pool_list, insn)
4365 struct constant_pool *pool_list;
4368 struct constant_pool *pool;
4370 for (pool = pool_list; pool; pool = pool->next)
4371 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4377 /* Add constant VAL of mode MODE to the constant pool POOL. */
/* Add constant VAL of mode MODE to POOL unless an equal value is
   already present; allocates a label for the new entry and grows the
   pool size accordingly.  */
4380 s390_add_constant (pool, val, mode)
4381 struct constant_pool *pool;
4383 enum machine_mode mode;
/* Map MODE to its slot in constant_modes; unknown modes are an
   error (abort elided).  */
4388 for (i = 0; i < NR_C_MODES; i++)
4389 if (constant_modes[i] == mode)
4391 if (i == NR_C_MODES)
/* Deduplicate: an rtx-equal value needs no new entry.  */
4394 for (c = pool->constants[i]; c != NULL; c = c->next)
4395 if (rtx_equal_p (val, c->value))
/* Prepend a fresh entry with its own label.  */
4400 c = (struct constant *) xmalloc (sizeof *c);
4402 c->label = gen_label_rtx ();
4403 c->next = pool->constants[i];
4404 pool->constants[i] = c;
4405 pool->size += GET_MODE_SIZE (mode);
4409 /* Find constant VAL of mode MODE in the constant pool POOL.
4410 Return an RTX describing the distance from the start of
4411 the pool to the location of the new constant. */
4414 s390_find_constant (pool, val, mode)
4415 struct constant_pool *pool;
4417 enum machine_mode mode;
/* Locate the mode slot, then the matching entry (both lookups mirror
   s390_add_constant; failure handling is elided). */
4423 for (i = 0; i < NR_C_MODES; i++)
4424 if (constant_modes[i] == mode)
4426 if (i == NR_C_MODES)
4429 for (c = pool->constants[i]; c != NULL; c = c->next)
4430 if (rtx_equal_p (val, c->value))
/* Express the offset symbolically as (const (minus entry-label
   pool-base-label)) so it survives until addresses are final. */
4436 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4437 gen_rtx_LABEL_REF (Pmode, pool->label));
4438 offset = gen_rtx_CONST (Pmode, offset);
4442 /* Set 'anchor' flag in POOL.  When set, s390_dump_pool emits an
   anchor word at the start of the pool (see the 105-unspec there). */
4445 s390_add_anchor (pool)
4446 struct constant_pool *pool;
4450 pool->anchor = TRUE;
4455 /* Dump out the constants in POOL.  Replaces the pool placeholder
   insn with section-switch insns, the pool base label, an optional
   anchor word, and one label+data insn per constant. */
4458 s390_dump_pool (pool)
4459 struct constant_pool *pool;
4465 /* Pool start insn switches to proper section
4466 and guarantees necessary alignment. */
/* NOTE(review): the TARGET_64BIT selection between the _64 and _31
   variants is elided from this listing -- confirm in the full source. */
4468 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4470 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4471 INSN_ADDRESSES_NEW (insn, -1);
/* The pool base label: all constant offsets are relative to it. */
4473 insn = emit_label_after (pool->label, insn);
4474 INSN_ADDRESSES_NEW (insn, -1);
4476 /* Emit anchor if we need one. */
4479 rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
4480 anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
4481 anchor = gen_rtx_CONST (VOIDmode, anchor);
4482 insn = emit_insn_after (gen_consttable_si (anchor), insn);
4483 INSN_ADDRESSES_NEW (insn, -1);
4486 /* Dump constants in descending alignment requirement order,
4487 ensuring proper alignment for every constant. */
4488 for (i = 0; i < NR_C_MODES; i++)
4489 for (c = pool->constants[i]; c; c = c->next)
4491 /* Convert 104 unspecs to pool-relative references. */
4492 rtx value = c->value;
4493 if (GET_CODE (value) == CONST
4494 && GET_CODE (XEXP (value, 0)) == UNSPEC
4495 && XINT (XEXP (value, 0), 1) == 104
4496 && XVECLEN (XEXP (value, 0), 0) == 1)
4498 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4499 gen_rtx_LABEL_REF (VOIDmode, pool->label));
4500 value = gen_rtx_CONST (VOIDmode, value);
/* Each constant gets its own label (the target of s390_find_constant)
   followed by the mode-specific consttable data insn. */
4503 insn = emit_label_after (c->label, insn);
4504 INSN_ADDRESSES_NEW (insn, -1);
4505 insn = emit_insn_after (gen_consttable[i] (value), insn);
4506 INSN_ADDRESSES_NEW (insn, -1);
4509 /* Pool end insn switches back to previous section
4510 and guarantees necessary alignment. */
4512 insn = emit_insn_after (gen_pool_end_64 (), insn);
4514 insn = emit_insn_after (gen_pool_end_31 (), insn);
4515 INSN_ADDRESSES_NEW (insn, -1);
/* Terminate the pool with a barrier so no fall-through into data. */
4517 insn = emit_barrier_after (insn);
4518 INSN_ADDRESSES_NEW (insn, -1);
4520 /* Remove placeholder insn. */
4521 remove_insn (pool->pool_insn);
4526 /* Free all memory used by POOL.  Walks every per-mode constant list
   (freeing of the individual entries and the pool itself is elided
   from this listing) and releases the insn-coverage bitmap. */
4529 s390_free_pool (pool)
4530 struct constant_pool *pool;
4534 for (i = 0; i < NR_C_MODES; i++)
4536 struct constant *c = pool->constants[i];
/* Save the link before the entry is freed (free call elided). */
4539 struct constant *next = c->next;
4545 BITMAP_XFREE (pool->insns);
4550 /* Chunkify the literal pool if required.
4552 Code generated by this routine is allowed to use
4553 TEMP_REG as temporary scratch register. If this is
4554 done, TEMP_USED is set to true.
 
   Returns the chunk list (NULL when no chunkifying was needed --
   return statements are elided from this listing). */
 
/* Chunk size thresholds in bytes: start looking for a place to split
   at MIN, force a split before exceeding MAX (4096 is the hardware
   displacement limit; MAX leaves slack below it). */
4556 #define S390_POOL_CHUNK_MIN 0xc00
4557 #define S390_POOL_CHUNK_MAX 0xe00
4559 static struct constant_pool *
4560 s390_chunkify_start (temp_reg, temp_used)
4564 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4566 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
/* Pick the pool-base reload pattern matching the ABI word size. */
4571 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
4572 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
4575 /* Do we need to chunkify the literal pool? */
4577 if (get_pool_size () < S390_POOL_CHUNK_MAX)
4580 /* We need correct insn addresses. */
4582 shorten_branches (get_insns ());
4584 /* Scan all insns and move literals to pool chunks.
4585 Also, emit anchor reload insns before every insn that uses
4586 the literal pool base register as anchor pointer. */
4588 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4590 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4592 rtx pool_ref = NULL_RTX;
4593 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Start a new chunk lazily at the first pool-using insn. */
4597 curr_pool = s390_start_pool (&pool_list, insn);
4599 s390_add_constant (curr_pool, get_pool_constant (pool_ref),
4600 get_pool_mode (pool_ref));
4601 s390_add_pool_insn (curr_pool, insn);
/* 31-bit PIC: an insn referencing the base register as anchor needs a
   reload of the anchor value into TEMP_REG just before it. */
4604 else if (!TARGET_64BIT && flag_pic
4605 && find_base_register_ref (PATTERN (insn)))
4607 rtx new = gen_reload_anchor (temp_reg, base_reg);
4608 new = emit_insn_before (new, insn);
4609 INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
4614 curr_pool = s390_start_pool (&pool_list, new);
4616 s390_add_anchor (curr_pool);
4617 s390_add_pool_insn (curr_pool, insn);
4621 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4623 s390_add_pool_insn (curr_pool, insn);
/* Skip insns without a valid recorded address. */
4626 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4627 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4632 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4635 s390_end_pool (curr_pool, NULL_RTX);
/* Distance covered by the current chunk so far. */
4640 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4641 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4644 /* We will later have to insert base register reload insns.
4645 Those will have an effect on code size, which we need to
4646 consider here. This calculation makes rather pessimistic
4647 worst-case assumptions. */
4648 if (GET_CODE (insn) == CODE_LABEL)
4651 if (chunk_size < S390_POOL_CHUNK_MIN
4652 && curr_pool->size < S390_POOL_CHUNK_MIN)
4655 /* Pool chunks can only be inserted after BARRIERs ... */
4656 if (GET_CODE (insn) == BARRIER)
4658 s390_end_pool (curr_pool, insn);
4663 /* ... so if we don't find one in time, create one. */
4664 else if ((chunk_size > S390_POOL_CHUNK_MAX
4665 || curr_pool->size > S390_POOL_CHUNK_MAX))
4667 rtx label, jump, barrier;
4669 /* We can insert the barrier only after a 'real' insn. */
4670 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4672 if (get_attr_length (insn) == 0)
4675 /* Don't separate insns created by s390_split_branches. */
4676 if (GET_CODE (insn) == INSN
4677 && GET_CODE (PATTERN (insn)) == SET
4678 && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg)
/* Synthesize jump-around-pool: label, jump, barrier, landing label. */
4681 label = gen_label_rtx ();
4682 jump = emit_jump_insn_after (gen_jump (label), insn);
4683 barrier = emit_barrier_after (jump);
4684 insn = emit_label_after (label, barrier);
4685 JUMP_LABEL (jump) = label;
4686 LABEL_NUSES (label) = 1;
4688 INSN_ADDRESSES_NEW (jump, -1);
4689 INSN_ADDRESSES_NEW (barrier, -1);
4690 INSN_ADDRESSES_NEW (insn, -1);
4692 s390_end_pool (curr_pool, barrier);
/* Close any still-open chunk at end of function. */
4700 s390_end_pool (curr_pool, NULL_RTX);
4703 /* Find all labels that are branched into
4704 from an insn belonging to a different chunk. */
4706 far_labels = BITMAP_XMALLOC ();
4708 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4710 /* Labels marked with LABEL_PRESERVE_P can be target
4711 of non-local jumps, so we have to mark them.
4712 The same holds for named labels.
4714 Don't do that, however, if it is the label before
4717 if (GET_CODE (insn) == CODE_LABEL
4718 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
4720 rtx vec_insn = next_real_insn (insn);
4721 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4722 PATTERN (vec_insn) : NULL_RTX;
4724 || !(GET_CODE (vec_pat) == ADDR_VEC
4725 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4726 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
4729 /* If we have a direct jump (conditional or unconditional)
4730 or a casesi jump, check all potential targets. */
4731 else if (GET_CODE (insn) == JUMP_INSN)
4733 rtx pat = PATTERN (insn);
4734 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4735 pat = XVECEXP (pat, 0, 0);
4737 if (GET_CODE (pat) == SET)
4739 rtx label = JUMP_LABEL (insn);
/* Cross-chunk branch target: mark the label as far. */
4742 if (s390_find_pool (pool_list, label)
4743 != s390_find_pool (pool_list, insn))
4744 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4747 else if (GET_CODE (pat) == PARALLEL
4748 && XVECLEN (pat, 0) == 2
4749 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
4750 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
4751 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
4753 /* Find the jump table used by this casesi jump. */
4754 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
4755 rtx vec_insn = next_real_insn (vec_label);
4756 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4757 PATTERN (vec_insn) : NULL_RTX;
4759 && (GET_CODE (vec_pat) == ADDR_VEC
4760 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4762 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
/* Every table entry is a potential cross-chunk target. */
4764 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
4766 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
4768 if (s390_find_pool (pool_list, label)
4769 != s390_find_pool (pool_list, insn))
4770 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4777 /* Insert base register reload insns before every pool. */
4779 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4781 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
4782 rtx insn = curr_pool->first_insn;
4783 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
4786 /* Insert base register reload insns at every far label. */
4788 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4789 if (GET_CODE (insn) == CODE_LABEL
4790 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
4792 struct constant_pool *pool = s390_find_pool (pool_list, insn);
4795 rtx new_insn = gen_reload_base (base_reg, pool->label);
4796 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4801 BITMAP_XFREE (far_labels);
4804 /* Recompute insn addresses. */
4806 init_insn_lengths ();
4807 shorten_branches (get_insns ());
4812 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4813 After we have decided to use this list, finish implementing
4814 all changes to the current function as required.
4816 Code generated by this routine is allowed to use
4817 TEMP_REG as temporary scratch register. */
4820 s390_chunkify_finish (pool_list, temp_reg)
4821 struct constant_pool *pool_list;
4824 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4825 struct constant_pool *curr_pool = NULL;
4829 /* Replace all literal pool references. */
4831 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4833 curr_pool = s390_find_pool (pool_list, insn);
4837 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4839 rtx addr, pool_ref = NULL_RTX;
4840 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Rewrite (pool constant) into base_reg + chunk-relative offset and
   force re-recognition of the modified insn. */
4843 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4844 get_pool_mode (pool_ref));
4845 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4846 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4847 INSN_CODE (insn) = -1;
/* 31-bit PIC anchor uses: redirect to the reloaded TEMP_REG. */
4850 else if (!TARGET_64BIT && flag_pic
4851 && find_base_register_ref (PATTERN (insn)))
4853 replace_base_register_ref (&PATTERN (insn), temp_reg);
4858 /* Dump out all literal pools. */
4860 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4861 s390_dump_pool (curr_pool);
4863 /* Free pool list. */
4867 struct constant_pool *next = pool_list->next;
4868 s390_free_pool (pool_list);
4873 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4874 We have decided we cannot use this list, so revert all changes
4875 to the current function that were done by s390_chunkify_start. */
4878 s390_chunkify_cancel (pool_list)
4879 struct constant_pool *pool_list;
4881 struct constant_pool *curr_pool = NULL;
4884 /* Remove all pool placeholder insns. */
4886 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4888 /* Did we insert an extra barrier? Remove it. */
4889 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4890 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4891 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Recognize exactly the jump/barrier/label triple synthesized by
   s390_chunkify_start before undoing it. */
4893 if (jump && GET_CODE (jump) == JUMP_INSN
4894 && barrier && GET_CODE (barrier) == BARRIER
4895 && label && GET_CODE (label) == CODE_LABEL
4896 && GET_CODE (PATTERN (jump)) == SET
4897 && SET_DEST (PATTERN (jump)) == pc_rtx
4898 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4899 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4902 remove_insn (barrier);
4903 remove_insn (label);
4906 remove_insn (curr_pool->pool_insn);
4909 /* Remove all base/anchor register reload insns. */
4911 for (insn = get_insns (); insn; )
4913 rtx next_insn = NEXT_INSN (insn);
/* Unspecs 210/211 mark reload_base / reload_anchor insns; removal of
   the matched insn is elided from this listing. */
4915 if (GET_CODE (insn) == INSN
4916 && GET_CODE (PATTERN (insn)) == SET
4917 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4918 && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4919 || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4925 /* Free pool list. */
4929 struct constant_pool *next = pool_list->next;
4930 s390_free_pool (pool_list);
4936 /* Index of constant pool chunk that is currently being processed.
4937 Set to -1 before function output has started. */
4938 int s390_pool_count = -1;
4940 /* Number of elements of current constant pool. */
4941 int s390_nr_constants;
4943 /* Output main constant pool to stdio stream FILE, bracketed by
   START_LABEL and END_LABEL.  NOTE(review): the 64-bit/31-bit branch
   selecting section, alignment (8 vs 4 bytes) and label placement is
   partly elided here -- confirm against the full source. */
4946 s390_output_constant_pool (start_label, end_label)
/* 64-bit: pool lives in the read-only data section, 8-byte aligned. */
4952 readonly_data_section ();
4953 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4954 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4955 CODE_LABEL_NUMBER (start_label));
/* 31-bit: pool is emitted in-line, 4-byte aligned. */
4959 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4960 CODE_LABEL_NUMBER (start_label));
4961 ASM_OUTPUT_ALIGN (asm_out_file, 2);
/* s390_pool_count != -1 signals pool output in progress to other
   output routines. */
4964 s390_pool_count = 0;
4965 output_constant_pool (current_function_name, current_function_decl);
4966 s390_pool_count = -1;
/* Return to the function's text section afterwards. */
4968 function_section (current_function_decl);
4971 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4972 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4973 CODE_LABEL_NUMBER (end_label));
4977 /* Rework the prolog/epilog to avoid saving/restoring
4978 registers unnecessarily. If TEMP_REGNO is nonnegative,
4979 it specifies the number of a caller-saved register used
4980 as temporary scratch register by code emitted during
4981 machine dependent reorg. */
4984 s390_optimize_prolog (temp_regno)
4987 int save_first, save_last, restore_first, restore_last;
4989 rtx insn, new_insn, next_insn;
4991 /* Recompute regs_ever_live data for special registers. */
4992 regs_ever_live[BASE_REGISTER] = 0;
4993 regs_ever_live[RETURN_REGNUM] = 0;
4994 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4996 /* If there is (possibly) any pool entry, we need to
4997 load the base register.
4998 ??? FIXME: this should be more precise. */
4999 if (get_pool_size ())
5000 regs_ever_live[BASE_REGISTER] = 1;
5002 /* In non-leaf functions, the prolog/epilog code relies
5003 on RETURN_REGNUM being saved in any case. */
5004 if (!current_function_is_leaf)
5005 regs_ever_live[RETURN_REGNUM] = 1;
5007 /* We need to save/restore the temporary register. */
5008 if (temp_regno >= 0)
5009 regs_ever_live[temp_regno] = 1;
5012 /* Find first and last gpr to be saved. */
/* GPRs 6..15 are the call-saved range on S/390. */
5014 for (i = 6; i < 16; i++)
5015 if (regs_ever_live[i])
5017 || i == STACK_POINTER_REGNUM
5018 || i == RETURN_REGNUM
5019 || i == BASE_REGISTER
5020 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
5023 for (j = 15; j > i; j--)
5024 if (regs_ever_live[j])
5026 || j == STACK_POINTER_REGNUM
5027 || j == RETURN_REGNUM
5028 || j == BASE_REGISTER
5029 || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
5034 /* Nothing to save/restore. */
5035 save_first = restore_first = -1;
5036 save_last = restore_last = -1;
5040 /* Save/restore from i to j. */
5041 save_first = restore_first = i;
5042 save_last = restore_last = j;
5045 /* Varargs functions need to save gprs 2 to 6. */
5046 if (current_function_stdarg)
5054 /* If all special registers are in fact used, there's nothing we
5055 can do, so no point in walking the insn list. */
5056 if (i <= BASE_REGISTER && j >= BASE_REGISTER
5057 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
5061 /* Search for prolog/epilog insns and replace them. */
5063 for (insn = get_insns (); insn; insn = next_insn)
5065 int first, last, off;
5066 rtx set, base, offset;
5068 next_insn = NEXT_INSN (insn);
5070 if (GET_CODE (insn) != INSN)
5072 if (GET_CODE (PATTERN (insn)) != PARALLEL)
/* Prologue store-multiple: decode the saved range and rewrite it with
   the narrower [save_first, save_last] range computed above. */
5075 if (store_multiple_operation (PATTERN (insn), VOIDmode))
5077 set = XVECEXP (PATTERN (insn), 0, 0);
5078 first = REGNO (SET_SRC (set));
5079 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5080 offset = const0_rtx;
5081 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5082 off = INTVAL (offset) - first * UNITS_PER_WORD;
5084 if (GET_CODE (base) != REG || off < 0)
5086 if (first > BASE_REGISTER && first > RETURN_REGNUM)
5088 if (last < BASE_REGISTER && last < RETURN_REGNUM)
5091 if (save_first != -1)
5093 new_insn = save_gprs (base, off, save_first, save_last);
5094 new_insn = emit_insn_before (new_insn, insn);
5095 INSN_ADDRESSES_NEW (new_insn, -1);
/* Epilogue load-multiple: same treatment for the restore range. */
5101 if (load_multiple_operation (PATTERN (insn), VOIDmode))
5103 set = XVECEXP (PATTERN (insn), 0, 0);
5104 first = REGNO (SET_DEST (set));
5105 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5106 offset = const0_rtx;
5107 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5108 off = INTVAL (offset) - first * UNITS_PER_WORD;
5110 if (GET_CODE (base) != REG || off < 0)
5112 if (first > BASE_REGISTER && first > RETURN_REGNUM)
5114 if (last < BASE_REGISTER && last < RETURN_REGNUM)
5117 if (restore_first != -1)
5119 new_insn = restore_gprs (base, off, restore_first, restore_last);
5120 new_insn = emit_insn_before (new_insn, insn);
5121 INSN_ADDRESSES_NEW (new_insn, -1);
5129 /* Check whether any insn in the function makes use of the original
5130 value of RETURN_REG (e.g. for __builtin_return_address).
5131 If so, insert an insn reloading that value.
5133 Return true if any such insn was found. */
5136 s390_fixup_clobbered_return_reg (return_reg)
5139 bool replacement_done = 0;
5142 /* If we never called __builtin_return_address, register 14
5143 might have been used as temp during the prolog; we do
5144 not want to touch those uses. */
5145 if (!has_hard_reg_initial_val (Pmode, REGNO (return_reg)))
5148 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5150 rtx reg, off, new_insn;
5152 if (GET_CODE (insn) != INSN)
5154 if (!reg_referenced_p (return_reg, PATTERN (insn)))
/* Skip the prologue's own store-multiple of the register. */
5156 if (GET_CODE (PATTERN (insn)) == PARALLEL
5157 && store_multiple_operation (PATTERN (insn), VOIDmode))
/* Reload the saved value from its slot in the register save area,
   addressed off the frame or stack pointer as appropriate. */
5160 if (frame_pointer_needed)
5161 reg = hard_frame_pointer_rtx;
5163 reg = stack_pointer_rtx;
5165 off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
/* Large frames: the displacement won't fit the instruction format, so
   materialize the offset via the literal pool first. */
5166 if (!DISP_IN_RANGE (INTVAL (off)))
5168 off = force_const_mem (Pmode, off);
5169 new_insn = gen_rtx_SET (Pmode, return_reg, off);
5170 new_insn = emit_insn_before (new_insn, insn);
5171 INSN_ADDRESSES_NEW (new_insn, -1);
5175 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
5176 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
5177 new_insn = emit_insn_before (new_insn, insn);
5178 INSN_ADDRESSES_NEW (new_insn, -1);
5180 replacement_done = 1;
5183 return replacement_done;
5186 /* Perform machine-dependent processing.
   NOTE(review): the function signature line is elided from this
   listing; presumably this is the target's machine-dependent reorg
   entry point -- confirm against the full source. */
5191 bool fixed_up_clobbered_return_reg = 0;
5192 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5195 /* Make sure all splits have been performed; splits after
5196 machine_dependent_reorg might confuse insn length counts. */
5197 split_all_insns_noflow ();
5200 /* There are two problematic situations we need to correct:
5202 - the literal pool might be > 4096 bytes in size, so that
5203 some of its elements cannot be directly accessed
5205 - a branch target might be > 64K away from the branch, so that
5206 it is not possible to use a PC-relative instruction.
5208 To fix those, we split the single literal pool into multiple
5209 pool chunks, reloading the pool base register at various
5210 points throughout the function to ensure it always points to
5211 the pool chunk the following code expects, and / or replace
5212 PC-relative branches by absolute branches.
5214 However, the two problems are interdependent: splitting the
5215 literal pool can move a branch further away from its target,
5216 causing the 64K limit to overflow, and on the other hand,
5217 replacing a PC-relative branch by an absolute branch means
5218 we need to put the branch target address into the literal
5219 pool, possibly causing it to overflow.
5221 So, we loop trying to fix up both problems until we manage
5222 to satisfy both conditions at the same time. Note that the
5223 loop is guaranteed to terminate as every pass of the loop
5224 strictly decreases the total number of PC-relative branches
5225 in the function. (This is not completely true as there
5226 might be branch-over-pool insns introduced by chunkify_start.
5227 Those never need to be split however.) */
5231 struct constant_pool *pool_list;
5233 /* Try to chunkify the literal pool. */
5234 pool_list = s390_chunkify_start (temp_reg, &temp_used);
5236 /* Split out-of-range branches. If this has created new
5237 literal pool entries, cancel current chunk list and
5239 if (s390_split_branches (temp_reg, &temp_used))
5242 s390_chunkify_cancel (pool_list);
5247 /* Check whether we have clobbered a use of the return
5248 register (e.g. for __builtin_return_address). If so,
5249 add insns reloading the register where necessary. */
5250 if (temp_used && !fixed_up_clobbered_return_reg
5251 && s390_fixup_clobbered_return_reg (temp_reg))
5253 fixed_up_clobbered_return_reg = 1;
5255 /* The fixup insns might have caused a jump to overflow. */
/* Restart the loop with a fresh chunk list in that case. */
5257 s390_chunkify_cancel (pool_list);
5262 /* If we made it up to here, both conditions are satisfied.
5263 Finish up pool chunkification if required. */
5265 s390_chunkify_finish (pool_list, temp_reg);
/* Finally trim unnecessary register saves; the scratch register must
   still be saved if the reorg code used it. */
5270 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
5274 /* Return an RTL expression representing the value of the return address
5275 for the frame COUNT steps up from the current frame. FRAME is the
5276 frame pointer of that frame. */
5279 s390_return_addr_rtx (count, frame)
5285 /* For the current frame, we use the initial value of RETURN_REGNUM.
5286 This works both in leaf and non-leaf functions. */
5289 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
5291 /* For frames farther back, we read the stack slot where the
5292 corresponding RETURN_REGNUM value was saved. */
5294 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5295 addr = memory_address (Pmode, addr);
5296 return gen_rtx_MEM (Pmode, addr);
5299 /* Find first call clobbered register unused in a function.
5300 This could be used as base register in a leaf function
5301 or for holding the return address before epilogue. */
5304 find_unused_clobbered_reg ()
/* GPRs 0..5 are the call-clobbered registers on S/390; the fallback
   return value when all are live is elided from this listing. */
5307 for (i = 0; i < 6; i++)
5308 if (!regs_ever_live[i])
5313 /* Fill FRAME with info about frame of current function.
   NOTE(review): the signature line is elided; the body writes to
   cfun->machine, so this is presumably the frame-layout routine --
   confirm the exact name/signature against the full source. */
5318 char gprs_ever_live[16];
5320 HOST_WIDE_INT fsize = get_frame_size ();
/* Guard against frames beyond what the 31/64-bit ABI can address. */
5322 if (fsize > 0x7fff0000)
5323 fatal_error ("Total size of local variables exceeds architecture limit.");
5325 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5326 cfun->machine->save_fprs_p = 0;
/* Hard regs 24..31 are FPRs 8..15 in this backend's numbering. */
5328 for (i = 24; i < 32; i++)
5329 if (regs_ever_live[i] && !global_regs[i])
5331 cfun->machine->save_fprs_p = 1;
/* Reserve 64 bytes (8 FPRs x 8 bytes) when FPR saves are needed. */
5335 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
5337 /* Does function need to setup frame and save area. */
5339 if (! current_function_is_leaf
5340 || cfun->machine->frame_size > 0
5341 || current_function_calls_alloca
5342 || current_function_stdarg)
5343 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5345 /* Find first and last gpr to be saved. Note that at this point,
5346 we assume the return register and the base register always
5347 need to be saved. This is done because the usage of these
5348 register might change even after the prolog was emitted.
5349 If it turns out later that we really don't need them, the
5350 prolog/epilog code is modified again. */
5352 for (i = 0; i < 16; i++)
5353 gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];
5356 gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
5357 regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
5358 gprs_ever_live[BASE_REGISTER] = 1;
5359 gprs_ever_live[RETURN_REGNUM] = 1;
5360 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5362 for (i = 6; i < 16; i++)
5363 if (gprs_ever_live[i])
5366 for (j = 15; j > i; j--)
5367 if (gprs_ever_live[j])
5371 /* Save / Restore from gpr i to j. */
5372 cfun->machine->first_save_gpr = i;
5373 cfun->machine->first_restore_gpr = i;
5374 cfun->machine->last_save_gpr = j;
5376 /* Varargs functions need to save gprs 2 to 6. */
5377 if (current_function_stdarg)
5378 cfun->machine->first_save_gpr = 2;
5381 /* Return offset between argument pointer and frame pointer
5382 initially after prologue.  Mirrors the frame-size computation in
   the frame-info routine above, without touching cfun->machine. */
5385 s390_arg_frame_offset ()
5387 HOST_WIDE_INT fsize = get_frame_size ();
5390 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5393 for (i = 24; i < 32; i++)
5394 if (regs_ever_live[i] && !global_regs[i])
/* 64 bytes for the FPR save area when any of FPR 8..15 is live. */
5400 fsize = fsize + save_fprs_p * 64;
5402 /* Does function need to setup frame and save area. */
5404 if (! current_function_is_leaf
5406 || current_function_calls_alloca
5407 || current_function_stdarg)
5408 fsize += STARTING_FRAME_OFFSET;
5409 return fsize + STACK_POINTER_OFFSET;
5412 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5413 to register BASE. Return generated insn. */
5416 save_fpr (base, offset, regnum)
5422 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
/* Tag the slot with the save-area alias set so it can't conflict with
   user memory accesses. */
5423 set_mem_alias_set (addr, s390_sr_alias_set);
5425 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5428 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5429 to register BASE. Return generated insn.  Inverse of save_fpr. */
5432 restore_fpr (base, offset, regnum)
5438 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5439 set_mem_alias_set (addr, s390_sr_alias_set);
5441 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5444 /* Generate insn to save registers FIRST to LAST into
5445 the register save area located at offset OFFSET
5446 relative to register BASE. */
5449 save_gprs (base, offset, first, last)
5455 rtx addr, insn, note;
5458 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5459 addr = gen_rtx_MEM (Pmode, addr);
5460 set_mem_alias_set (addr, s390_sr_alias_set);
5462 /* Special-case single register. */
/* NOTE(review): the TARGET_64BIT test choosing movdi vs movsi is
   elided between these two lines. */
5466 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
5468 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
5470 RTX_FRAME_RELATED_P (insn) = 1;
5475 insn = gen_store_multiple (addr,
5476 gen_rtx_REG (Pmode, first),
5477 GEN_INT (last - first + 1));
5480 /* We need to set the FRAME_RELATED flag on all SETs
5481 inside the store-multiple pattern.
5483 However, we must not emit DWARF records for registers 2..5
5484 if they are stored for use by variable arguments ...
5486 ??? Unfortunately, it is not enough to simply not set the
5487 FRAME_RELATED flags for those SETs, because the first SET
5488 of the PARALLEL is always treated as if it had the flag
5489 set, even if it does not. Therefore we emit a new pattern
5490 without those registers as REG_FRAME_RELATED_EXPR note. */
5494 rtx pat = PATTERN (insn);
5496 for (i = 0; i < XVECLEN (pat, 0); i++)
5497 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5498 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5500 RTX_FRAME_RELATED_P (insn) = 1;
/* Varargs case: build a DWARF-only pattern starting at GPR 6 so the
   vararg slots (2..5) get no frame-related records. */
5504 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5505 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5506 gen_rtx_REG (Pmode, 6),
5507 GEN_INT (last - 6 + 1));
5508 note = PATTERN (note);
5511 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5512 note, REG_NOTES (insn));
5514 for (i = 0; i < XVECLEN (note, 0); i++)
5515 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5516 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
5518 RTX_FRAME_RELATED_P (insn) = 1;
5524 /* Generate insn to restore registers FIRST to LAST from
5525 the register save area located at offset OFFSET
5526 relative to register BASE.  Inverse of save_gprs; no DWARF
   annotations are needed on the restore path. */
5529 restore_gprs (base, offset, first, last)
5537 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5538 addr = gen_rtx_MEM (Pmode, addr);
5539 set_mem_alias_set (addr, s390_sr_alias_set);
5541 /* Special-case single register. */
5545 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5547 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5552 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5554 GEN_INT (last - first + 1));
5558 /* Expand the prologue into a bunch of separate insns. */
5561 s390_emit_prologue ()
5565 rtx pool_start_label, pool_end_label;
5568 /* Compute frame_info. */
5572 /* Choose best register to use for temp use within prologue. */
/* Prefer RETURN_REGNUM as scratch when it is free; otherwise fall
   back to GPR 1. */
5574 if (!current_function_is_leaf
5575 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
5576 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
5577 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM)
5579 temp_reg = gen_rtx_REG (Pmode, 1);
5581 /* Save call saved gprs. */
5583 insn = save_gprs (stack_pointer_rtx, 0,
5584 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
5587 /* Dump constant pool and set constant pool register. */
5589 pool_start_label = gen_label_rtx();
5590 pool_end_label = gen_label_rtx();
5591 cfun->machine->literal_pool_label = pool_start_label;
5594 insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
5595 pool_start_label, pool_end_label));
5597 insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
5598 pool_start_label, pool_end_label));
5600 /* Save fprs for variable args. */
5602 if (current_function_stdarg)
5604 /* Save fpr 0 and 2. */
/* Hard regs 16..19 are FPRs 0/2/4/6 in this backend's numbering. */
5606 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
5607 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
5611 /* Save fpr 4 and 6. */
5613 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5614 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5618 /* Save fprs 4 and 6 if used (31 bit ABI). */
5622 /* Save fpr 4 and 6. */
5623 if (regs_ever_live[18] && !global_regs[18])
5625 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5626 RTX_FRAME_RELATED_P (insn) = 1;
5628 if (regs_ever_live[19] && !global_regs[19])
5630 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5631 RTX_FRAME_RELATED_P (insn) = 1;
5635 /* Decrement stack pointer. */
5637 if (cfun->machine->frame_size > 0)
5639 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5641 /* Save incoming stack pointer into temp reg. */
5643 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
5645 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
5648 /* Subtract frame size from stack pointer. */
5650 if (DISP_IN_RANGE (INTVAL (frame_off)))
5652 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5653 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5655 insn = emit_insn (insn);
/* Offset too large for a displacement: materialize it via the
   literal pool unless it fits the add-immediate constraint. */
5659 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5660 frame_off = force_const_mem (Pmode, frame_off);
5662 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
/* Attach a canonical frame-related note describing the SP adjust. */
5665 RTX_FRAME_RELATED_P (insn) = 1;
5667 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5668 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5669 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5670 GEN_INT (-cfun->machine->frame_size))),
5673 /* Set backchain. */
5675 if (TARGET_BACKCHAIN)
5677 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
5678 set_mem_alias_set (addr, s390_sr_alias_set);
5679 insn = emit_insn (gen_move_insn (addr, temp_reg));
5682 /* If we support asynchronous exceptions (e.g. for Java),
5683 we need to make sure the backchain pointer is set up
5684 before any possibly trapping memory access. */
5686 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
5688 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
5689 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
5693 /* Save fprs 8 - 15 (64 bit ABI). */
5695 if (cfun->machine->save_fprs_p)
/* temp_reg still holds the incoming SP; point it at the FPR area. */
5697 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
5699 for (i = 24; i < 32; i++)
5700 if (regs_ever_live[i] && !global_regs[i])
5702 rtx addr = plus_constant (stack_pointer_rtx,
5703 cfun->machine->frame_size - 64 + (i-24)*8);
5705 insn = save_fpr (temp_reg, (i-24)*8, i);
5706 RTX_FRAME_RELATED_P (insn) = 1;
5708 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5709 gen_rtx_SET (VOIDmode,
5710 gen_rtx_MEM (DFmode, addr),
5711 gen_rtx_REG (DFmode, i)),
5716 /* Set frame pointer, if needed. */
5718 if (frame_pointer_needed)
5720 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
5721 RTX_FRAME_RELATED_P (insn) = 1;
5724 /* Set up got pointer, if needed. */
5726 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5728 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
5729 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
5733 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
5736 /* It can happen that the GOT pointer isn't really needed ... */
5737 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit path: load the GOT offset from the literal pool (unspec 100)
   and add the pool base (unspec 101). */
5742 got_symbol = gen_rtx_UNSPEC (VOIDmode,
5743 gen_rtvec (1, got_symbol), 100);
5744 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
5745 got_symbol = force_const_mem (Pmode, got_symbol);
5746 insn = emit_move_insn (pic_offset_table_rtx,
5748 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5751 got_symbol = gen_rtx_REG (Pmode, BASE_REGISTER);
5752 got_symbol = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol), 101);
5753 got_symbol = gen_rtx_PLUS (Pmode, got_symbol, pic_offset_table_rtx);
5754 insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
5755 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5761 /* Expand the epilogue into a bunch of separate insns. */
/* NOTE(review): this extraction is damaged — every line carries a stray
   numeric prefix from the original file, and braces / blank lines /
   some declarations (`int i', `rtx p', the function's return type) are
   missing.  Comments added below describe only the visible code;
   restore the full body from upstream GCC s390.c before compiling.  */
5764 s390_emit_epilogue ()
5766   rtx frame_pointer, return_reg;
5767   int area_bottom, area_top, offset = 0;
5770   /* Check whether to use frame or stack pointer for restore. */
5772   frame_pointer = frame_pointer_needed ?
5773     hard_frame_pointer_rtx : stack_pointer_rtx;
5775   /* Compute which parts of the save area we need to access. */
5777   if (cfun->machine->first_restore_gpr != -1)
5779       area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
5780       area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
     /* Missing `else' arm: with no GPRs to restore, the bounds are set
        so that area_top <= area_bottom (nothing to access).  The
        matching `area_top = INT_MIN'-style line is not visible here. */
5784       area_bottom = INT_MAX;
     /* Widen the window to cover saved FPRs (64-bit ABI saves f8-f15
        below the GPR area; 31-bit ABI saves f4/f6 — register numbers
        18/19 — near the stack-pointer offset).  */
5790   if (cfun->machine->save_fprs_p)
5792       if (area_bottom > -64)
5800       if (regs_ever_live[18] && !global_regs[18])
5802 	  if (area_bottom > STACK_POINTER_OFFSET - 16)
5803 	    area_bottom = STACK_POINTER_OFFSET - 16;
5804 	  if (area_top < STACK_POINTER_OFFSET - 8)
5805 	    area_top = STACK_POINTER_OFFSET - 8;
5807       if (regs_ever_live[19] && !global_regs[19])
5809 	  if (area_bottom > STACK_POINTER_OFFSET - 8)
5810 	    area_bottom = STACK_POINTER_OFFSET - 8;
5811 	  if (area_top < STACK_POINTER_OFFSET)
5812 	    area_top = STACK_POINTER_OFFSET;
5816   /* Check whether we can access the register save area.
5817      If not, increment the frame pointer as required. */
5819   if (area_top <= area_bottom)
5821       /* Nothing to restore. */
5823   else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
5824            && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
5826       /* Area is in range. */
5827       offset = cfun->machine->frame_size;
     /* Else: save area out of displacement range — advance the frame
        pointer so the area becomes addressable with short offsets.  */
5831       rtx insn, frame_off;
5833       offset = area_bottom < 0 ? -area_bottom : 0;
5834       frame_off = GEN_INT (cfun->machine->frame_size - offset);
5836       if (DISP_IN_RANGE (INTVAL (frame_off)))
5838 	  insn = gen_rtx_SET (VOIDmode, frame_pointer,
5839 			      gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
5840 	  insn = emit_insn (insn);
     /* Non-displacement constants: force to the literal pool unless
        they fit the 'K' (16-bit signed immediate) constraint.  */
5844 	  if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5845 	    frame_off = force_const_mem (Pmode, frame_off);
5847 	  insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
5851   /* Restore call saved fprs. */
5857       if (cfun->machine->save_fprs_p)
5858 	for (i = 24; i < 32; i++)
5859 	  if (regs_ever_live[i] && !global_regs[i])
5860 	    restore_fpr (frame_pointer,
5861 			 offset - 64 + (i-24) * 8, i);
     /* 31-bit ABI: restore f4 (regno 18) and f6 (regno 19).  The
        `if (TARGET_64BIT) ... else' guard lines are missing here.  */
5865       if (regs_ever_live[18] && !global_regs[18])
5866 	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5867       if (regs_ever_live[19] && !global_regs[19])
5868 	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
5871   /* Return register. */
5873   return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5875   /* Restore call saved gprs. */
5877   if (cfun->machine->first_restore_gpr != -1)
5882       /* Check for global register and save them
5883 	 to stack location from where they get restored. */
5885       for (i = cfun->machine->first_restore_gpr;
5886 	   i <= cfun->machine->last_save_gpr;
5889 	  /* These registers are special and need to be
5890 	     restored in any case. */
5891 	  if (i == STACK_POINTER_REGNUM
5892               || i == RETURN_REGNUM
5893               || i == BASE_REGISTER
5894               || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
     /* For genuine global regs, re-save the current value into the
        save slot so the upcoming load-multiple restores it unchanged. */
5899 	      addr = plus_constant (frame_pointer,
5900 				    offset + i * UNITS_PER_WORD);
5901 	      addr = gen_rtx_MEM (Pmode, addr);
5902 	      set_mem_alias_set (addr, s390_sr_alias_set);
5903 	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
5907       /* Fetch return address from stack before load multiple,
5908 	 this will do good for scheduling. */
5910       if (!current_function_is_leaf)
5912 	  int return_regnum = find_unused_clobbered_reg();
5915 	  return_reg = gen_rtx_REG (Pmode, return_regnum);
5917 	  addr = plus_constant (frame_pointer,
5918 				offset + RETURN_REGNUM * UNITS_PER_WORD);
5919 	  addr = gen_rtx_MEM (Pmode, addr);
5920 	  set_mem_alias_set (addr, s390_sr_alias_set);
5921 	  emit_move_insn (return_reg, addr);
5924       /* ??? As references to the base register are not made
5925 	 explicit in insn RTX code, we have to add a barrier here
5926 	 to prevent incorrect scheduling. */
5928       emit_insn (gen_blockage());
5930       insn = restore_gprs (frame_pointer, offset,
5931 			   cfun->machine->first_restore_gpr,
5932 			   cfun->machine->last_save_gpr);
5936   /* Return to caller. */
5938   p = rtvec_alloc (2);
5940   RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5941   RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
5942   emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
5946 /* Return the size in bytes of a function argument of
5947    type TYPE and/or mode MODE.  At least one of TYPE or
5948    MODE must be specified. */
/* NOTE(review): the return-type line, the `tree type;' parameter decl,
   the `if (type)' guard before the first return, and the trailing
   abort() are missing from this extraction.  */
5951 s390_function_arg_size (mode, type)
5952      enum machine_mode mode;
5956     return int_size_in_bytes (type);
5958   /* No type info available for some library calls ... */
5959   if (mode != BLKmode)
5960     return GET_MODE_SIZE (mode);
5962   /* If we have neither type nor mode, abort */
5966 /* Return true if a function argument of type TYPE and mode MODE
5967    is to be passed in a floating-point register, if available. */
/* NOTE(review): return-type line, parameter decl for TYPE, braces and
   several statements (e.g. the `return 0' after the soft-float check,
   the `if (!type)' guard, loop break/abandon paths, and the
   `type = single;' step of the single-member-record unwrapping) are
   missing from this extraction.  */
5970 s390_function_arg_float (mode, type)
5971      enum machine_mode mode;
5974   /* Soft-float changes the ABI: no floating-point registers are used. */
5975   if (TARGET_SOFT_FLOAT)
5978   /* No type info available for some library calls ... */
5980     return mode == SFmode || mode == DFmode;
5982   /* The ABI says that record types with a single member are treated
5983      just like that member would be. */
5984   while (TREE_CODE (type) == RECORD_TYPE)
5986       tree field, single = NULL_TREE;
5988       for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5990 	  if (TREE_CODE (field) != FIELD_DECL)
5993 	  if (single == NULL_TREE)
5994 	    single = TREE_TYPE (field);
     /* Presumably: a second FIELD_DECL makes the record non-single and
        the loop/while is abandoned — TODO confirm against upstream.  */
5999       if (single == NULL_TREE)
6005   return TREE_CODE (type) == REAL_TYPE;
6008 /* Return 1 if a function argument of type TYPE and mode MODE
6009    is to be passed by reference.  The ABI specifies that only
6010    structures of size 1, 2, 4, or 8 bytes are passed by value,
6011    all other structures (and complex numbers) are passed by
/* NOTE(review): the tail of the comment ("reference."), the return
   type, parameter decls, the `if (size <= 0)' guard and the final
   `return 0;' are missing from this extraction.  */
6015 s390_function_arg_pass_by_reference (mode, type)
6016      enum machine_mode mode;
6019   int size = s390_function_arg_size (mode, type);
6023       if (AGGREGATE_TYPE_P (type) &&
6024           size != 1 && size != 2 && size != 4 && size != 8
6025           && !s390_function_arg_float (mode, type))
6028       if (TREE_CODE (type) == COMPLEX_TYPE)
6035 /* Update the data in CUM to advance over an argument of mode MODE and
6036    data type TYPE.  (TYPE is null for libcalls where that information
6037    may not be available.).  The boolean NAMED specifies whether the
6038    argument is a named argument (as opposed to an unnamed argument
6039    matching an ellipsis). */
/* NOTE(review): return type, `tree type;' decl and the bodies of the
   first two arms (presumably `cum->gprs += 1;' for by-reference and
   `cum->fprs += 1;' for float — TODO confirm) are missing here.  */
6042 s390_function_arg_advance (cum, mode, type, named)
6043      CUMULATIVE_ARGS *cum;
6044      enum machine_mode mode;
6046      int named ATTRIBUTE_UNUSED;
6048   if (s390_function_arg_pass_by_reference (mode, type))
6052   else if (s390_function_arg_float (mode, type))
6058       int size = s390_function_arg_size (mode, type);
6059       cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
6063 /* Define where to put the arguments to a function.
6064    Value is zero to push the argument on the stack,
6065    or a hard register in which to store the argument.
6067    MODE is the argument's machine mode.
6068    TYPE is the data type of the argument (as a tree).
6069    This is null for libcalls where that information may
6071    CUM is a variable of type CUMULATIVE_ARGS which gives info about
6072    the preceding args and about the function being called.
6073    NAMED is nonzero if this argument is a named parameter
6074    (otherwise it is an extra parameter matching an ellipsis).
6076    On S/390, we use general purpose registers 2 through 6 to
6077    pass integer, pointer, and certain structure arguments, and
6078    floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6079    to pass floating point arguments.  All remaining arguments
6080    are pushed to the stack. */
/* NOTE(review): return type, `tree type;' decl, the by-reference
   arm's body, and the `return 0' (pass on stack) lines are missing
   from this extraction.  */
6083 s390_function_arg (cum, mode, type, named)
6084      CUMULATIVE_ARGS *cum;
6085      enum machine_mode mode;
6087      int named ATTRIBUTE_UNUSED;
6089   if (s390_function_arg_pass_by_reference (mode, type))
6092   if (s390_function_arg_float (mode, type))
     /* FPR capacity: 4 argument FPRs on 64-bit, 2 on 31-bit; FPRs are
        hard registers 16.. so `cum->fprs + 16' selects the next one.  */
6094       if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
6097 	return gen_rtx (REG, mode, cum->fprs + 16);
6101       int size = s390_function_arg_size (mode, type);
6102       int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
     /* GPR arguments occupy r2..r6; overflowing that range goes to
        the stack (return 0 path not visible here).  */
6104       if (cum->gprs + n_gprs > 5)
6107 	return gen_rtx (REG, mode, cum->gprs + 2);
6112 /* Create and return the va_list datatype.
6114    On S/390, va_list is an array type equivalent to
6116       typedef struct __va_list_tag
6120           void *__overflow_arg_area;
6121           void *__reg_save_area;
6125    where __gpr and __fpr hold the number of general purpose
6126    or floating point arguments used up to now, respectively,
6127    __overflow_arg_area points to the stack location of the
6128    next argument passed on the stack, and __reg_save_area
6129    always points to the start of the register area in the
6130    call frame of the current function.  The function prologue
6131    saves all registers used for argument passing into this
6132    area if the function uses variable arguments. */
/* NOTE(review): return type, the `type_decl =' assignment line head,
   and the pointer-type arguments of the __overflow_arg_area /
   __reg_save_area field decls are missing from this extraction.  */
6135 s390_build_va_list ()
6137   tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
6139   record = (*lang_hooks.types.make_type) (RECORD_TYPE);
6142     build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6144   f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
6145 		      long_integer_type_node);
6146   f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
6147 		      long_integer_type_node);
6148   f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
6150   f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
     /* All four fields belong to the new record type.  */
6153   DECL_FIELD_CONTEXT (f_gpr) = record;
6154   DECL_FIELD_CONTEXT (f_fpr) = record;
6155   DECL_FIELD_CONTEXT (f_ovf) = record;
6156   DECL_FIELD_CONTEXT (f_sav) = record;
6158   TREE_CHAIN (record) = type_decl;
6159   TYPE_NAME (record) = type_decl;
6160   TYPE_FIELDS (record) = f_gpr;
6161   TREE_CHAIN (f_gpr) = f_fpr;
6162   TREE_CHAIN (f_fpr) = f_ovf;
6163   TREE_CHAIN (f_ovf) = f_sav;
6165   layout_type (record);
6167   /* The correct type is an array type of one element. */
6168   return build_array_type (record, build_index_type (size_zero_node));
6171 /* Implement va_start by filling the va_list structure VALIST.
6172    STDARG_P is always true, and ignored.
6173    NEXTARG points to the first anonymous stack argument.
6175    The following global variables are used to initialize
6176    the va_list structure:
6178      current_function_args_info:
6179        holds number of gprs and fprs used for named arguments.
6180      current_function_arg_offset_rtx:
6181        holds the offset of the first anonymous stack argument
6182        (relative to the virtual arg pointer). */
/* NOTE(review): return type, `tree valist;' decl, the `int off;'
   declaration and a few statements are missing from this
   extraction.  */
6185 s390_va_start (valist, nextarg)
6187      rtx nextarg ATTRIBUTE_UNUSED;
6189   HOST_WIDE_INT n_gpr, n_fpr;
6191   tree f_gpr, f_fpr, f_ovf, f_sav;
6192   tree gpr, fpr, ovf, sav, t;
     /* Walk the four fields of __va_list_tag in declaration order.  */
6194   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6195   f_fpr = TREE_CHAIN (f_gpr);
6196   f_ovf = TREE_CHAIN (f_fpr);
6197   f_sav = TREE_CHAIN (f_ovf);
6199   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6200   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6201   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6202   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6203   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6205   /* Count number of gp and fp argument registers used. */
6207   n_gpr = current_function_args_info.gprs;
6208   n_fpr = current_function_args_info.fprs;
6210   t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
6211   TREE_SIDE_EFFECTS (t) = 1;
6212   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6214   t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
6215   TREE_SIDE_EFFECTS (t) = 1;
6216   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6218   /* Find the overflow area. */
6219   t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
     /* Offset of first anonymous stack arg, clamped at zero.  */
6221   off = INTVAL (current_function_arg_offset_rtx);
6222   off = off < 0 ? 0 : off;
6223   if (TARGET_DEBUG_ARG)
6224     fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
6225 	     (int)n_gpr, (int)n_fpr, off);
6227   t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
6229   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6230   TREE_SIDE_EFFECTS (t) = 1;
6231   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6233   /* Find the register save area. */
6234   t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
6235   t = build (PLUS_EXPR, TREE_TYPE (sav), t,
6236 	     build_int_2 (-STACK_POINTER_OFFSET, -1));
6237   t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6238   TREE_SIDE_EFFECTS (t) = 1;
6239   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6242 /* Implement va_arg by updating the va_list structure
6243    VALIST as required to retrieve an argument of type
6244    TYPE, and returning that argument.
6246    Generates code equivalent to:
6248    if (integral value) {
6249      if (size  <= 4 && args.gpr < 5 ||
6250          size  > 4 && args.gpr < 4 )
6251        ret = args.reg_save_area[args.gpr+8]
6253        ret = *args.overflow_arg_area++;
6254    } else if (float value) {
6256        ret = args.reg_save_area[args.fpr+64]
6258        ret = *args.overflow_arg_area++;
6259    } else if (aggregate value) {
6261        ret = *args.reg_save_area[args.gpr]
6263        ret = **args.overflow_arg_area++;
/* NOTE(review): many interior lines of this function are missing
   (return type, `tree valist/type;' decls, several assignments such
   as `indirect_p = ...', `n_reg = ...', `reg = gpr/fpr;',
   `max_reg = ...', the MODIFY_EXPRs completing some statements, and
   the final `return addr_rtx;').  Comments added below cover only the
   visible code.  */
6267 s390_va_arg (valist, type)
6271   tree f_gpr, f_fpr, f_ovf, f_sav;
6272   tree gpr, fpr, ovf, sav, reg, t, u;
6273   int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
6274   rtx lab_false, lab_over, addr_rtx, r;
6276   f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6277   f_fpr = TREE_CHAIN (f_gpr);
6278   f_ovf = TREE_CHAIN (f_fpr);
6279   f_sav = TREE_CHAIN (f_ovf);
6281   valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6282   gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6283   fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6284   ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6285   sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6287   size = int_size_in_bytes (type);
6289   if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
6291       if (TARGET_DEBUG_ARG)
6293 	  fprintf (stderr, "va_arg: aggregate type");
6297       /* Aggregates are passed by reference. */
     /* By-reference case: the slot holds a pointer, so fetch one word
        from the GPR part of the save area (offset 2 words = r2).  */
6301       sav_ofs = 2 * UNITS_PER_WORD;
6302       sav_scale = UNITS_PER_WORD;
6303       size = UNITS_PER_WORD;
6306   else if (s390_function_arg_float (TYPE_MODE (type), type))
6308       if (TARGET_DEBUG_ARG)
6310 	  fprintf (stderr, "va_arg: float type");
6314       /* FP args go in FP registers, if present. */
     /* FPR save slots start 16 words into the register save area.  */
6318       sav_ofs = 16 * UNITS_PER_WORD;
6320       /* TARGET_64BIT has up to 4 parameter in fprs */
6321       max_reg = TARGET_64BIT ? 3 : 1;
6325       if (TARGET_DEBUG_ARG)
6327 	  fprintf (stderr, "va_arg: other type");
6331       /* Otherwise into GP registers. */
6334       n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6335       sav_ofs = 2 * UNITS_PER_WORD;
     /* Small scalars are right-justified within their word slot.  */
6336       if (size < UNITS_PER_WORD)
6337 	sav_ofs += UNITS_PER_WORD - size;
6339       sav_scale = UNITS_PER_WORD;
6346   /* Pull the value out of the saved registers ... */
6348   lab_false = gen_label_rtx ();
6349   lab_over = gen_label_rtx ();
6350   addr_rtx = gen_reg_rtx (Pmode);
     /* If the register counter exceeds max_reg, fall through to the
        overflow (stack) path at lab_false.  */
6352   emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
6354 			   GT, const1_rtx, Pmode, 0, lab_false);
6357   t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
6361   u = build (MULT_EXPR, long_integer_type_node,
6362 	     reg, build_int_2 (sav_scale, 0));
6363   TREE_SIDE_EFFECTS (u) = 1;
6365   t = build (PLUS_EXPR, ptr_type_node, t, u);
6366   TREE_SIDE_EFFECTS (t) = 1;
6368   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6370     emit_move_insn (addr_rtx, r);
6373   emit_jump_insn (gen_jump (lab_over));
6375   emit_label (lab_false);
6377   /* ... Otherwise out of the overflow area. */
6379   t = save_expr (ovf);
6382   /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
6383   if (size < UNITS_PER_WORD)
6385       t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
6386       t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6387       TREE_SIDE_EFFECTS (t) = 1;
6388       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6390       t = save_expr (ovf);
6393   r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6395     emit_move_insn (addr_rtx, r);
     /* Advance __overflow_arg_area past the consumed slot.  */
6397   t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
6398   t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6399   TREE_SIDE_EFFECTS (t) = 1;
6400   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6402   emit_label (lab_over);
6404   /* If less than max_regs a registers are retrieved out
6405      of register save area, increment. */
6407   u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6408 	     build_int_2 (n_reg, 0));
6409   TREE_SIDE_EFFECTS (u) = 1;
6410   expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
     /* Indirect case (presumably `if (indirect_p)' — guard line
        missing): dereference the fetched pointer once more.  */
6414       r = gen_rtx_MEM (Pmode, addr_rtx);
6415       set_mem_alias_set (r, get_varargs_alias_set ());
6416       emit_move_insn (addr_rtx, r);
/* NOTE(review): fragment of the machine-builtin enumeration and the
   two per-ABI insn-code lookup tables; the enum header/terminator and
   the table initializer entries are missing from this extraction.  */
6428   S390_BUILTIN_THREAD_POINTER,
6429   S390_BUILTIN_SET_THREAD_POINTER,
6434 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
6439 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
/* Register the S/390 machine-specific builtins
   (__builtin_thread_pointer / __builtin_set_thread_pointer).
   NOTE(review): the return-type line, `tree ftype;' decl, braces and
   the trailing NULL/attribute arguments of the builtin_function calls
   are missing from this extraction.  */
6445 s390_init_builtins ()
6449   ftype = build_function_type (ptr_type_node, void_list_node);
6450   builtin_function ("__builtin_thread_pointer", ftype,
6451 		    S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6454   ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6455   builtin_function ("__builtin_set_thread_pointer", ftype,
6456 		    S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6460 /* Expand an expression EXP that calls a built-in function,
6461    with result going to TARGET if that's convenient
6462    (and in mode MODE if that's convenient).
6463    SUBTARGET may be used as the target for computing one of EXP's operands.
6464    IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): return type, the MAX_ARGS definition, several decls
   (`int arity; bool nonvoid;'), the `if (icode == 0)' check body, the
   switch header around the GEN_FCN arms, the `if (!pat) return ...'
   handling and the final return are missing from this extraction.  */
6467 s390_expand_builtin (exp, target, subtarget, mode, ignore)
6470      rtx subtarget ATTRIBUTE_UNUSED;
6471      enum machine_mode mode ATTRIBUTE_UNUSED;
6472      int ignore ATTRIBUTE_UNUSED;
     /* Pick the insn-code table matching the current ABI.  */
6476   unsigned int const *code_for_builtin =
6477     TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6479   tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6480   unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6481   tree arglist = TREE_OPERAND (exp, 1);
6482   enum insn_code icode;
6483   rtx op[MAX_ARGS], pat;
6487   if (fcode >= S390_BUILTIN_max)
6488     internal_error ("bad builtin fcode");
6489   icode = code_for_builtin[fcode];
6491     internal_error ("bad builtin fcode");
6493   nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6495   for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6497        arglist = TREE_CHAIN (arglist), arity++)
6499       const struct insn_operand_data *insn_op;
6501       tree arg = TREE_VALUE (arglist);
6502       if (arg == error_mark_node)
6504       if (arity > MAX_ARGS)
     /* Operand 0 of a value-returning pattern is the result, so
        argument operands are shifted by `nonvoid'.  */
6507       insn_op = &insn_data[icode].operand[arity + nonvoid];
6509       op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6511       if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6512 	op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6517       enum machine_mode tmode = insn_data[icode].operand[0].mode;
6519 	  || GET_MODE (target) != tmode
6520 	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6521 	target = gen_reg_rtx (tmode);
     /* Dispatch on arity/voidness (switch header missing here).  */
6527 	pat = GEN_FCN (icode) (target);
6531 	pat = GEN_FCN (icode) (target, op[0]);
6533 	pat = GEN_FCN (icode) (op[0]);
6536       pat = GEN_FCN (icode) (target, op[0], op[1]);
6552 /* Output assembly code for the trampoline template to
6555    On S/390, we use gpr 1 internally in the trampoline code;
6556    gpr 0 is used to hold the static chain. */
/* NOTE(review): return type, `FILE *file;' decl and the
   `if (TARGET_64BIT) ... else' guard lines separating the 64-bit
   (larl/lg/.quad) and 31-bit (basr/l/.long) variants are missing
   from this extraction.  */
6559 s390_trampoline_template (file)
     /* 64-bit variant: load static chain into r0 and target into r1
        from the two .quad slots, then branch.  */
6564       fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6565       fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6566       fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6567       fprintf (file, "br\t%s\n", reg_names[1]);
6568       fprintf (file, "0:\t.quad\t0\n");
6569       fprintf (file, ".quad\t0\n");
     /* 31-bit variant: basr establishes addressability, then load the
        two .long slots at offsets 10 and 14.  */
6573       fprintf (file, "basr\t%s,0\n", reg_names[1]);
6574       fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6575       fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6576       fprintf (file, "br\t%s\n", reg_names[1]);
6577       fprintf (file, ".long\t0\n");
6578       fprintf (file, ".long\t0\n");
6582 /* Emit RTL insns to initialize the variable parts of a trampoline.
6583    FNADDR is an RTX for the address of the function's pure code.
6584    CXT is an RTX for the static chain value for the function. */
/* NOTE(review): return type, parameter decls, and the MEM/Pmode mode
   arguments of the gen_rtx calls are missing from this extraction.
   Offsets 20/12 and 28/16 select the two data slots emitted by
   s390_trampoline_template for 64-/31-bit respectively.  */
6587 s390_initialize_trampoline (addr, fnaddr, cxt)
6592   emit_move_insn (gen_rtx
6594 		   memory_address (Pmode,
6595 		   plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
6596   emit_move_insn (gen_rtx
6598 		   memory_address (Pmode,
6599 		   plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6602 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6603    LOW and HIGH, independent of the host word size. */
/* NOTE(review): return type, parameter decls, the `val <<= 32' /
   masking lines of the wide-host path, and the #else/#error/#endif
   preprocessor structure are missing from this extraction.  */
6606 s390_gen_rtx_const_DI (high, low)
6610 #if HOST_BITS_PER_WIDE_INT >= 64
6612   val = (HOST_WIDE_INT)high;
6614   val |= (HOST_WIDE_INT)low;
6616   return GEN_INT (val);
6618 #if HOST_BITS_PER_WIDE_INT >= 32
6619   return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6626 /* Output assembler code to FILE to increment profiler label # LABELNO
6627    for profiling a function entry. */
/* NOTE(review): return type, parameter decls, the `rtx op[7]; char
   label[128];' declarations and the `if (TARGET_64BIT) ... else if
   (!flag_pic) ... else' guard lines separating the three emission
   variants are missing from this extraction.  */
6630 s390_function_profiler (file, labelno)
6637   ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6639   fprintf (file, "# function profiler \n");
     /* op[0]: return-address register; op[1]: its save slot on the
        stack; op[2]: scratch; op[3]: counter label; op[4]: _mcount.  */
6641   op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6642   op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6643   op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6645   op[2] = gen_rtx_REG (Pmode, 1);
6646   op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6647   SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
6649   op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
     /* PIC: wrap _mcount in a PLT unspec (113) — guard line missing.  */
6652       op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
6653       op[4] = gen_rtx_CONST (Pmode, op[4]);
     /* 64-bit sequence: save r14, load label address, call, restore.  */
6658       output_asm_insn ("stg\t%0,%1", op);
6659       output_asm_insn ("larl\t%2,%3", op);
6660       output_asm_insn ("brasl\t%0,%4", op);
6661       output_asm_insn ("lg\t%0,%1", op);
     /* 31-bit non-PIC sequence: inline literal pool via bras.  */
6665       op[6] = gen_label_rtx ();
6667       output_asm_insn ("st\t%0,%1", op);
6668       output_asm_insn ("bras\t%2,%l6", op);
6669       output_asm_insn (".long\t%4", op);
6670       output_asm_insn (".long\t%3", op);
6671       (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6672       output_asm_insn ("l\t%0,0(%2)", op);
6673       output_asm_insn ("l\t%2,4(%2)", op);
6674       output_asm_insn ("basr\t%0,%0", op);
6675       output_asm_insn ("l\t%0,%1", op);
     /* 31-bit PIC sequence: pc-relative literals (%4-%l5, %3-%l5).  */
6679       op[5] = gen_label_rtx ();
6680       op[6] = gen_label_rtx ();
6682       output_asm_insn ("st\t%0,%1", op);
6683       output_asm_insn ("bras\t%2,%l6", op);
6684       (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[5]));
6685       output_asm_insn (".long\t%4-%l5", op);
6686       output_asm_insn (".long\t%3-%l5", op);
6687       (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6688       output_asm_insn ("lr\t%0,%2", op);
6689       output_asm_insn ("a\t%0,0(%2)", op);
6690       output_asm_insn ("a\t%2,4(%2)", op);
6691       output_asm_insn ("basr\t%0,%0", op);
6692       output_asm_insn ("l\t%0,%1", op);
6696 /* Select section for constant in constant pool.  In 32-bit mode,
6697    constants go in the function section; in 64-bit mode in .rodata. */
/* NOTE(review): the `static void' line and the `if (TARGET_64BIT) ...
   else' guard between the two calls are missing from this
   extraction.  */
6700 s390_select_rtx_section (mode, x, align)
6701      enum machine_mode mode ATTRIBUTE_UNUSED;
6702      rtx x ATTRIBUTE_UNUSED;
6703      unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
6706     readonly_data_section ();
6708     function_section (current_function_decl);
6711 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6712    into its SYMBOL_REF_FLAGS. */
/* NOTE(review): the `static void' line and the parameter decls
   (`tree decl; rtx rtl; int first;') are missing from this
   extraction.  */
6715 s390_encode_section_info (decl, rtl, first)
6720   default_encode_section_info (decl, rtl, first);
6722   /* If a variable has a forced alignment to < 2 bytes, mark it with
6723      SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
6724   if (TREE_CODE (decl) == VAR_DECL
6725       && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
6726     SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
6729 /* Output thunk to FILE that implements a C++ virtual function call (with
6730    multiple inheritance) to FUNCTION.  The thunk adjusts the this pointer
6731    by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6732    stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6733    relative to the resulting this pointer. */
/* NOTE(review): many interior lines are missing from this extraction —
   the `static void' line, `FILE *file;' / `tree function;' decls,
   `rtx op[10]; int nonlocal = 0;' declarations, the top-level
   `if (TARGET_64BIT) ... else' split between the 64-bit and 31-bit
   code paths, and numerous guard lines (`if (delta)',
   `if (vcall_offset)', `if (flag_pic)', label-need checks).  Comments
   added here annotate only the visible code.  */
6736 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
6738      tree thunk ATTRIBUTE_UNUSED;
6739      HOST_WIDE_INT delta;
6740      HOST_WIDE_INT vcall_offset;
6746   /* Operand 0 is the target function. */
6747   op[0] = XEXP (DECL_RTL (function), 0);
6748   if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
     /* Non-local PIC target: wrap in the appropriate reloc unspec
        (113 = 64-bit PLT, 112/110 = 31-bit variants).  */
6751       op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
6752 			      TARGET_64BIT ? 113 : flag_pic == 2 ? 112 : 110);
6753       op[0] = gen_rtx_CONST (Pmode, op[0]);
6756   /* Operand 1 is the 'this' pointer.  */
     /* If the return value is an aggregate returned in memory, r2 holds
        the hidden return slot and `this' moves to r3.  */
6757   if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6758     op[1] = gen_rtx_REG (Pmode, 3);
6760     op[1] = gen_rtx_REG (Pmode, 2);
6762   /* Operand 2 is the delta.  */
6763   op[2] = GEN_INT (delta);
6765   /* Operand 3 is the vcall_offset.  */
6766   op[3] = GEN_INT (vcall_offset);
6768   /* Operand 4 is the temporary register.  */
6769   op[4] = gen_rtx_REG (Pmode, 1);
6771   /* Operands 5 to 8 can be used as labels.  */
6777   /* Operand 9 can be used for temporary register.  */
6780   /* Generate code.  */
     /* --- 64-bit path (TARGET_64BIT guard line missing) --- */
6783       /* Setup literal pool pointer if required.  */
6784       if ((!DISP_IN_RANGE (delta)
6785 	   && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6786 	  || (!DISP_IN_RANGE (vcall_offset)
6787 	      && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6789 	  op[5] = gen_label_rtx ();
6790 	  output_asm_insn ("larl\t%4,%5", op);
6793       /* Add DELTA to this pointer.  */
6796 	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6797 	    output_asm_insn ("la\t%1,%2(%1)", op);
6798 	  else if (DISP_IN_RANGE (delta))
6799 	    output_asm_insn ("lay\t%1,%2(%1)", op);
6800 	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6801 	    output_asm_insn ("aghi\t%1,%2", op);
     /* Fallback: add DELTA from the literal pool via label %6.  */
6804 	      op[6] = gen_label_rtx ();
6805 	      output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6809       /* Perform vcall adjustment.  */
6812 	  if (DISP_IN_RANGE (vcall_offset))
6814 	      output_asm_insn ("lg\t%4,0(%1)", op);
6815 	      output_asm_insn ("ag\t%1,%3(%4)", op);
6817 	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6819 	      output_asm_insn ("lghi\t%4,%3", op);
6820 	      output_asm_insn ("ag\t%4,0(%1)", op);
6821 	      output_asm_insn ("ag\t%1,0(%4)", op);
6825 	      op[7] = gen_label_rtx ();
6826 	      output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6827 	      output_asm_insn ("ag\t%4,0(%1)", op);
6828 	      output_asm_insn ("ag\t%1,0(%4)", op);
6832       /* Jump to target.  */
6833       output_asm_insn ("jg\t%0", op);
6835       /* Output literal pool if required.  */
6838 	  output_asm_insn (".align\t4", op);
6839 	  (*targetm.asm_out.internal_label) (file, "L",
6840 					     CODE_LABEL_NUMBER (op[5]));
6844 	  (*targetm.asm_out.internal_label) (file, "L",
6845 					     CODE_LABEL_NUMBER (op[6]));
6846 	  output_asm_insn (".long\t%2", op);
6850 	  (*targetm.asm_out.internal_label) (file, "L",
6851 					     CODE_LABEL_NUMBER (op[7]));
6852 	  output_asm_insn (".long\t%3", op);
     /* --- 31-bit path (the matching `else' line is missing) --- */
6857       /* Setup base pointer if required.  */
6859 	  || (!DISP_IN_RANGE (delta)
6860 	      && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6861 	  || (!DISP_IN_RANGE (delta)
6862 	      && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6864 	  op[5] = gen_label_rtx ();
6865 	  output_asm_insn ("basr\t%4,0", op);
6866 	  (*targetm.asm_out.internal_label) (file, "L",
6867 					     CODE_LABEL_NUMBER (op[5]));
6870       /* Add DELTA to this pointer.  */
6873 	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6874 	    output_asm_insn ("la\t%1,%2(%1)", op);
6875 	  else if (DISP_IN_RANGE (delta))
6876 	    output_asm_insn ("lay\t%1,%2(%1)", op);
6877 	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6878 	    output_asm_insn ("ahi\t%1,%2", op);
6881 	      op[6] = gen_label_rtx ();
6882 	      output_asm_insn ("a\t%1,%6-%5(%4)", op);
6886       /* Perform vcall adjustment.  */
6889 	  if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6891 	      output_asm_insn ("lg\t%4,0(%1)", op);
6892 	      output_asm_insn ("a\t%1,%3(%4)", op);
6894 	  else if (DISP_IN_RANGE (vcall_offset))
6896 	      output_asm_insn ("lg\t%4,0(%1)", op);
6897 	      output_asm_insn ("ay\t%1,%3(%4)", op);
6899 	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6901 	      output_asm_insn ("lhi\t%4,%3", op);
6902 	      output_asm_insn ("a\t%4,0(%1)", op);
6903 	      output_asm_insn ("a\t%1,0(%4)", op);
6907 	      op[7] = gen_label_rtx ();
6908 	      output_asm_insn ("l\t%4,%7-%5(%4)", op);
6909 	      output_asm_insn ("a\t%4,0(%1)", op);
6910 	      output_asm_insn ("a\t%1,0(%4)", op);
6913 	  /* We had to clobber the base pointer register.
6914 	     Re-setup the base pointer (with a different base).  */
6915 	  op[5] = gen_label_rtx ();
6916 	  output_asm_insn ("basr\t%4,0", op);
6917 	  (*targetm.asm_out.internal_label) (file, "L",
6918 					     CODE_LABEL_NUMBER (op[5]));
6921       /* Jump to target.  */
6922       op[8] = gen_label_rtx ();
6925 	output_asm_insn ("l\t%4,%8-%5(%4)", op);
6927 	output_asm_insn ("a\t%4,%8-%5(%4)", op);
6928       /* We cannot call through .plt, since .plt requires %r12 loaded.  */
6929       else if (flag_pic == 1)
6931 	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
6932 	  output_asm_insn ("l\t%4,%0(%4)", op);
6934       else if (flag_pic == 2)
6936 	  op[9] = gen_rtx_REG (Pmode, 0);
6937 	  output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
6938 	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
6939 	  output_asm_insn ("ar\t%4,%9", op);
6940 	  output_asm_insn ("l\t%4,0(%4)", op);
6943       output_asm_insn ("br\t%4", op);
6945       /* Output literal pool.  */
6946       output_asm_insn (".align\t4", op);
6948       if (nonlocal && flag_pic == 2)
6949 	output_asm_insn (".long\t%0", op);
     /* Emit the GOT base symbol for the PIC==1/2 fallback paths.  */
6952 	  op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6953 	  SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
6956       (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[8]));
6958 	output_asm_insn (".long\t%0", op);
6960 	output_asm_insn (".long\t%0-%5", op);
6964 	  (*targetm.asm_out.internal_label) (file, "L",
6965 					     CODE_LABEL_NUMBER (op[6]));
6966 	  output_asm_insn (".long\t%2", op);
6970 	  (*targetm.asm_out.internal_label) (file, "L",
6971 					     CODE_LABEL_NUMBER (op[7]));
6972 	  output_asm_insn (".long\t%3", op);
6977 /* How to allocate a 'struct machine_function'.  */
/* Allocates a zero-initialized, GC-tracked machine_function record.
   NOTE(review): the opening/closing brace lines are missing from this
   extraction.  */
6979 static struct machine_function *
6980 s390_init_machine_status ()
6982   return ggc_alloc_cleared (sizeof (struct machine_function));
6985 #include "gt-s390.h"