1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
53 /* Machine-specific symbol_ref flags. */
54 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
57 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
58 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
59 unsigned HOST_WIDE_INT));
60 static void s390_encode_section_info PARAMS ((tree, rtx, int));
61 static bool s390_cannot_force_const_mem PARAMS ((rtx));
62 static rtx s390_delegitimize_address PARAMS ((rtx));
63 static void s390_init_builtins PARAMS ((void));
64 static rtx s390_expand_builtin PARAMS ((tree, rtx, rtx,
65 enum machine_mode, int));
66 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
67 HOST_WIDE_INT, tree));
68 static enum attr_type s390_safe_attr_type PARAMS ((rtx));
70 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
71 static int s390_adjust_priority PARAMS ((rtx, int));
72 static int s390_issue_rate PARAMS ((void));
73 static int s390_use_dfa_pipeline_interface PARAMS ((void));
74 static int s390_first_cycle_multipass_dfa_lookahead PARAMS ((void));
75 static int s390_sched_reorder2 PARAMS ((FILE *, int, rtx *, int *, int));
76 static bool s390_rtx_costs PARAMS ((rtx, int, int, int *));
77 static int s390_address_cost PARAMS ((rtx));
78 static void s390_reorg PARAMS ((void));
79 static bool s390_valid_pointer_mode PARAMS ((enum machine_mode));
81 #undef TARGET_ASM_ALIGNED_HI_OP
82 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
83 #undef TARGET_ASM_ALIGNED_DI_OP
84 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
85 #undef TARGET_ASM_INTEGER
86 #define TARGET_ASM_INTEGER s390_assemble_integer
88 #undef TARGET_ASM_OPEN_PAREN
89 #define TARGET_ASM_OPEN_PAREN ""
91 #undef TARGET_ASM_CLOSE_PAREN
92 #define TARGET_ASM_CLOSE_PAREN ""
94 #undef TARGET_ASM_SELECT_RTX_SECTION
95 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
97 #undef TARGET_ENCODE_SECTION_INFO
98 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
101 #undef TARGET_HAVE_TLS
102 #define TARGET_HAVE_TLS true
104 #undef TARGET_CANNOT_FORCE_CONST_MEM
105 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
107 #undef TARGET_DELEGITIMIZE_ADDRESS
108 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
110 #undef TARGET_INIT_BUILTINS
111 #define TARGET_INIT_BUILTINS s390_init_builtins
112 #undef TARGET_EXPAND_BUILTIN
113 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
115 #undef TARGET_ASM_OUTPUT_MI_THUNK
116 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
117 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
118 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
120 #undef TARGET_SCHED_ADJUST_COST
121 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
122 #undef TARGET_SCHED_ADJUST_PRIORITY
123 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
124 #undef TARGET_SCHED_ISSUE_RATE
125 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
126 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
127 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
128 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
129 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
130 #undef TARGET_SCHED_REORDER2
131 #define TARGET_SCHED_REORDER2 s390_sched_reorder2
133 #undef TARGET_RTX_COSTS
134 #define TARGET_RTX_COSTS s390_rtx_costs
135 #undef TARGET_ADDRESS_COST
136 #define TARGET_ADDRESS_COST s390_address_cost
138 #undef TARGET_MACHINE_DEPENDENT_REORG
139 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
141 #undef TARGET_VALID_POINTER_MODE
142 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
144 struct gcc_target targetm = TARGET_INITIALIZER;
146 extern int reload_completed;
148 /* The alias set for prologue/epilogue register save/restore. */
149 static int s390_sr_alias_set = 0;
151 /* Save information from a "cmpxx" operation until the branch or scc is
153 rtx s390_compare_op0, s390_compare_op1;
155 /* Structure used to hold the components of a S/390 memory
156 address. A legitimate address on S/390 is of the general
158 base + index + displacement
159 where any of the components is optional.
161 base and index are registers of the class ADDR_REGS,
162 displacement is an unsigned 12-bit immediate constant. */
172 /* Which cpu are we tuning for. */
173 enum processor_type s390_tune;
174 enum processor_flags s390_tune_flags;
175 /* Which instruction set architecture to use. */
176 enum processor_type s390_arch;
177 enum processor_flags s390_arch_flags;
179 /* Strings to hold which cpu and instruction set architecture to use. */
180 const char *s390_tune_string; /* for -mtune=<xxx> */
181 const char *s390_arch_string; /* for -march=<xxx> */
183 /* Define the structure for the machine field in struct function. */
185 struct machine_function GTY(())
187 /* Label of start of initial literal pool. */
188 rtx literal_pool_label;
190 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
193 /* Number of first and last gpr to be saved, restored. */
195 int first_restore_gpr;
198 /* Size of stack frame. */
199 HOST_WIDE_INT frame_size;
201 /* Some local-dynamic TLS symbol name. */
202 const char *some_ld_name;
205 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
206 static int s390_branch_condition_mask PARAMS ((rtx));
207 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
208 static int check_mode PARAMS ((rtx, enum machine_mode *));
209 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
210 static int s390_short_displacement PARAMS ((rtx));
211 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
212 static rtx get_thread_pointer PARAMS ((void));
213 static rtx legitimize_tls_address PARAMS ((rtx, rtx));
214 static const char *get_some_local_dynamic_name PARAMS ((void));
215 static int get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
216 static int reg_used_in_mem_p PARAMS ((int, rtx));
217 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
218 static int s390_split_branches PARAMS ((rtx, bool *));
219 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
220 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
221 static rtx find_ltrel_base PARAMS ((rtx));
222 static void replace_ltrel_base PARAMS ((rtx *, rtx));
223 static void s390_optimize_prolog PARAMS ((int));
224 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
225 static int find_unused_clobbered_reg PARAMS ((void));
226 static void s390_frame_info PARAMS ((void));
227 static rtx save_fpr PARAMS ((rtx, int, int));
228 static rtx restore_fpr PARAMS ((rtx, int, int));
229 static rtx save_gprs PARAMS ((rtx, int, int, int));
230 static rtx restore_gprs PARAMS ((rtx, int, int, int));
231 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
232 static bool s390_function_arg_float PARAMS ((enum machine_mode, tree));
233 static struct machine_function * s390_init_machine_status PARAMS ((void));
235 /* Check whether integer displacement is in range. */
236 #define DISP_IN_RANGE(d) \
237 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
238 : ((d) >= 0 && (d) <= 4095))
240 /* Return true if SET either doesn't set the CC register, or else
241 the source and destination have matching CC modes and that
242 CC mode is at least as constrained as REQ_MODE. */
245 s390_match_ccmode_set (set, req_mode)
247 enum machine_mode req_mode;
249 enum machine_mode set_mode;
251 if (GET_CODE (set) != SET)
254 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
257 set_mode = GET_MODE (SET_DEST (set));
270 if (req_mode != set_mode)
275 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
276 && req_mode != CCSRmode && req_mode != CCURmode)
282 if (req_mode != CCAmode)
290 return (GET_MODE (SET_SRC (set)) == set_mode);
293 /* Return true if every SET in INSN that sets the CC register
294 has source and destination with matching CC modes and that
295 CC mode is at least as constrained as REQ_MODE.
296 If REQ_MODE is VOIDmode, always return false. */
299 s390_match_ccmode (insn, req_mode)
301 enum machine_mode req_mode;
305 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
306 if (req_mode == VOIDmode)
309 if (GET_CODE (PATTERN (insn)) == SET)
310 return s390_match_ccmode_set (PATTERN (insn), req_mode);
312 if (GET_CODE (PATTERN (insn)) == PARALLEL)
313 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
315 rtx set = XVECEXP (PATTERN (insn), 0, i);
316 if (GET_CODE (set) == SET)
317 if (!s390_match_ccmode_set (set, req_mode))
324 /* If a test-under-mask instruction can be used to implement
325 (compare (and ... OP1) OP2), return the CC mode required
326 to do that. Otherwise, return VOIDmode.
327 MIXED is true if the instruction can distinguish between
328 CC1 and CC2 for mixed selected bits (TMxx), it is false
329 if the instruction cannot (TM). */
332 s390_tm_ccmode (op1, op2, mixed)
339 /* ??? Fixme: should work on CONST_DOUBLE as well. */
340 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
343 /* Selected bits all zero: CC0. */
344 if (INTVAL (op2) == 0)
347 /* Selected bits all one: CC3. */
348 if (INTVAL (op2) == INTVAL (op1))
351 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
354 bit1 = exact_log2 (INTVAL (op2));
355 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
356 if (bit0 != -1 && bit1 != -1)
357 return bit0 > bit1 ? CCT1mode : CCT2mode;
363 /* Given a comparison code OP (EQ, NE, etc.) and the operands
364 OP0 and OP1 of a COMPARE, return the mode to be used for the
368 s390_select_ccmode (code, op0, op1)
377 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
378 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
380 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
381 || GET_CODE (op1) == NEG)
382 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
385 if (GET_CODE (op0) == AND)
387 /* Check whether we can potentially do it via TM. */
388 enum machine_mode ccmode;
389 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
390 if (ccmode != VOIDmode)
392 /* Relax CCTmode to CCZmode to allow fall-back to AND
393 if that turns out to be beneficial. */
394 return ccmode == CCTmode ? CCZmode : ccmode;
398 if (register_operand (op0, HImode)
399 && GET_CODE (op1) == CONST_INT
400 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
402 if (register_operand (op0, QImode)
403 && GET_CODE (op1) == CONST_INT
404 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
413 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
414 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
416 if (INTVAL (XEXP((op0), 1)) < 0)
429 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
430 && GET_CODE (op1) != CONST_INT)
436 if (GET_CODE (op0) == PLUS
437 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
440 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
441 && GET_CODE (op1) != CONST_INT)
447 if (GET_CODE (op0) == MINUS
448 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
451 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
452 && GET_CODE (op1) != CONST_INT)
461 /* Return branch condition mask to implement a branch
462 specified by CODE. */
465 s390_branch_condition_mask (code)
468 const int CC0 = 1 << 3;
469 const int CC1 = 1 << 2;
470 const int CC2 = 1 << 1;
471 const int CC3 = 1 << 0;
473 if (GET_CODE (XEXP (code, 0)) != REG
474 || REGNO (XEXP (code, 0)) != CC_REGNUM
475 || XEXP (code, 1) != const0_rtx)
478 switch (GET_MODE (XEXP (code, 0)))
481 switch (GET_CODE (code))
484 case NE: return CC1 | CC2 | CC3;
491 switch (GET_CODE (code))
494 case NE: return CC0 | CC2 | CC3;
501 switch (GET_CODE (code))
504 case NE: return CC0 | CC1 | CC3;
511 switch (GET_CODE (code))
514 case NE: return CC0 | CC1 | CC2;
521 switch (GET_CODE (code))
523 case EQ: return CC0 | CC2;
524 case NE: return CC1 | CC3;
531 switch (GET_CODE (code))
533 case LTU: return CC2 | CC3; /* carry */
534 case GEU: return CC0 | CC1; /* no carry */
541 switch (GET_CODE (code))
543 case GTU: return CC0 | CC1; /* borrow */
544 case LEU: return CC2 | CC3; /* no borrow */
551 switch (GET_CODE (code))
554 case NE: return CC1 | CC2 | CC3;
555 case LTU: return CC1;
556 case GTU: return CC2;
557 case LEU: return CC0 | CC1;
558 case GEU: return CC0 | CC2;
565 switch (GET_CODE (code))
568 case NE: return CC2 | CC1 | CC3;
569 case LTU: return CC2;
570 case GTU: return CC1;
571 case LEU: return CC0 | CC2;
572 case GEU: return CC0 | CC1;
579 switch (GET_CODE (code))
582 case NE: return CC1 | CC2 | CC3;
583 case LT: return CC1 | CC3;
585 case LE: return CC0 | CC1 | CC3;
586 case GE: return CC0 | CC2;
593 switch (GET_CODE (code))
596 case NE: return CC1 | CC2 | CC3;
598 case GT: return CC2 | CC3;
599 case LE: return CC0 | CC1;
600 case GE: return CC0 | CC2 | CC3;
607 switch (GET_CODE (code))
610 case NE: return CC1 | CC2 | CC3;
613 case LE: return CC0 | CC1;
614 case GE: return CC0 | CC2;
615 case UNORDERED: return CC3;
616 case ORDERED: return CC0 | CC1 | CC2;
617 case UNEQ: return CC0 | CC3;
618 case UNLT: return CC1 | CC3;
619 case UNGT: return CC2 | CC3;
620 case UNLE: return CC0 | CC1 | CC3;
621 case UNGE: return CC0 | CC2 | CC3;
622 case LTGT: return CC1 | CC2;
629 switch (GET_CODE (code))
632 case NE: return CC2 | CC1 | CC3;
635 case LE: return CC0 | CC2;
636 case GE: return CC0 | CC1;
637 case UNORDERED: return CC3;
638 case ORDERED: return CC0 | CC2 | CC1;
639 case UNEQ: return CC0 | CC3;
640 case UNLT: return CC2 | CC3;
641 case UNGT: return CC1 | CC3;
642 case UNLE: return CC0 | CC2 | CC3;
643 case UNGE: return CC0 | CC1 | CC3;
644 case LTGT: return CC2 | CC1;
655 /* If INV is false, return assembler mnemonic string to implement
656 a branch specified by CODE. If INV is true, return mnemonic
657 for the corresponding inverted branch. */
660 s390_branch_condition_mnemonic (code, inv)
664 static const char *const mnemonic[16] =
666 NULL, "o", "h", "nle",
667 "l", "nhe", "lh", "ne",
668 "e", "nlh", "he", "nl",
669 "le", "nh", "no", NULL
672 int mask = s390_branch_condition_mask (code);
677 if (mask < 1 || mask > 14)
680 return mnemonic[mask];
683 /* If OP is an integer constant of mode MODE with exactly one
684 HImode subpart unequal to DEF, return the number of that
685 subpart. As a special case, all HImode subparts of OP are
686 equal to DEF, return zero. Otherwise, return -1. */
689 s390_single_hi (op, mode, def)
691 enum machine_mode mode;
694 if (GET_CODE (op) == CONST_INT)
696 unsigned HOST_WIDE_INT value = 0;
697 int n_parts = GET_MODE_SIZE (mode) / 2;
700 for (i = 0; i < n_parts; i++)
703 value = (unsigned HOST_WIDE_INT) INTVAL (op);
707 if ((value & 0xffff) != (unsigned)(def & 0xffff))
716 return part == -1 ? 0 : (n_parts - 1 - part);
719 else if (GET_CODE (op) == CONST_DOUBLE
720 && GET_MODE (op) == VOIDmode)
722 unsigned HOST_WIDE_INT value = 0;
723 int n_parts = GET_MODE_SIZE (mode) / 2;
726 for (i = 0; i < n_parts; i++)
729 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
730 else if (i == HOST_BITS_PER_WIDE_INT / 16)
731 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
735 if ((value & 0xffff) != (unsigned)(def & 0xffff))
744 return part == -1 ? 0 : (n_parts - 1 - part);
750 /* Extract the HImode part number PART from integer
751 constant OP of mode MODE. */
754 s390_extract_hi (op, mode, part)
756 enum machine_mode mode;
759 int n_parts = GET_MODE_SIZE (mode) / 2;
760 if (part < 0 || part >= n_parts)
763 part = n_parts - 1 - part;
765 if (GET_CODE (op) == CONST_INT)
767 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
768 return ((value >> (16 * part)) & 0xffff);
770 else if (GET_CODE (op) == CONST_DOUBLE
771 && GET_MODE (op) == VOIDmode)
773 unsigned HOST_WIDE_INT value;
774 if (part < HOST_BITS_PER_WIDE_INT / 16)
775 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
777 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
778 part -= HOST_BITS_PER_WIDE_INT / 16;
780 return ((value >> (16 * part)) & 0xffff);
786 /* If OP is an integer constant of mode MODE with exactly one
787 QImode subpart unequal to DEF, return the number of that
788 subpart. As a special case, all QImode subparts of OP are
789 equal to DEF, return zero. Otherwise, return -1. */
792 s390_single_qi (op, mode, def)
794 enum machine_mode mode;
797 if (GET_CODE (op) == CONST_INT)
799 unsigned HOST_WIDE_INT value = 0;
800 int n_parts = GET_MODE_SIZE (mode);
803 for (i = 0; i < n_parts; i++)
806 value = (unsigned HOST_WIDE_INT) INTVAL (op);
810 if ((value & 0xff) != (unsigned)(def & 0xff))
819 return part == -1 ? 0 : (n_parts - 1 - part);
822 else if (GET_CODE (op) == CONST_DOUBLE
823 && GET_MODE (op) == VOIDmode)
825 unsigned HOST_WIDE_INT value = 0;
826 int n_parts = GET_MODE_SIZE (mode);
829 for (i = 0; i < n_parts; i++)
832 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
833 else if (i == HOST_BITS_PER_WIDE_INT / 8)
834 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
838 if ((value & 0xff) != (unsigned)(def & 0xff))
847 return part == -1 ? 0 : (n_parts - 1 - part);
853 /* Extract the QImode part number PART from integer
854 constant OP of mode MODE. */
857 s390_extract_qi (op, mode, part)
859 enum machine_mode mode;
862 int n_parts = GET_MODE_SIZE (mode);
863 if (part < 0 || part >= n_parts)
866 part = n_parts - 1 - part;
868 if (GET_CODE (op) == CONST_INT)
870 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
871 return ((value >> (8 * part)) & 0xff);
873 else if (GET_CODE (op) == CONST_DOUBLE
874 && GET_MODE (op) == VOIDmode)
876 unsigned HOST_WIDE_INT value;
877 if (part < HOST_BITS_PER_WIDE_INT / 8)
878 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
880 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
881 part -= HOST_BITS_PER_WIDE_INT / 8;
883 return ((value >> (8 * part)) & 0xff);
889 /* Check whether we can (and want to) split a double-word
890 move in mode MODE from SRC to DST into two single-word
891 moves, moving the subword FIRST_SUBWORD first. */
894 s390_split_ok_p (dst, src, mode, first_subword)
897 enum machine_mode mode;
900 /* Floating point registers cannot be split. */
901 if (FP_REG_P (src) || FP_REG_P (dst))
904 /* We don't need to split if operands are directly accessible. */
905 if (s_operand (src, mode) || s_operand (dst, mode))
908 /* Non-offsettable memory references cannot be split. */
909 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
910 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
913 /* Moving the first subword must not clobber a register
914 needed to move the second subword. */
915 if (register_operand (dst, mode))
917 rtx subreg = operand_subword (dst, first_subword, 0, mode);
918 if (reg_overlap_mentioned_p (subreg, src))
926 /* Change optimizations to be performed, depending on the
929 LEVEL is the optimization level specified; 2 if `-O2' is
930 specified, 1 if `-O' is specified, and 0 if neither is specified.
932 SIZE is nonzero if `-Os' is specified and zero otherwise. */
935 optimization_options (level, size)
936 int level ATTRIBUTE_UNUSED;
937 int size ATTRIBUTE_UNUSED;
939 /* ??? There are apparently still problems with -fcaller-saves. */
940 flag_caller_saves = 0;
942 /* By default, always emit DWARF-2 unwind info. This allows debugging
943 without maintaining a stack frame back-chain. */
944 flag_asynchronous_unwind_tables = 1;
953 const char *const name; /* processor name or nickname. */
954 const enum processor_type processor;
955 const enum processor_flags flags;
957 const processor_alias_table[] =
959 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
960 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
961 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
962 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
963 | PF_LONG_DISPLACEMENT},
966 int const pta_size = ARRAY_SIZE (processor_alias_table);
968 /* Acquire a unique set number for our register saves and restores. */
969 s390_sr_alias_set = new_alias_set ();
971 /* Set up function hooks. */
972 init_machine_status = s390_init_machine_status;
974 /* Architecture mode defaults according to ABI. */
975 if (!(target_flags_explicit & MASK_ZARCH))
978 target_flags |= MASK_ZARCH;
980 target_flags &= ~MASK_ZARCH;
983 /* Determine processor architectural level. */
984 if (!s390_arch_string)
985 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
987 for (i = 0; i < pta_size; i++)
988 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
990 s390_arch = processor_alias_table[i].processor;
991 s390_arch_flags = processor_alias_table[i].flags;
995 error ("Unknown cpu used in -march=%s.", s390_arch_string);
997 /* Determine processor to tune for. */
998 if (!s390_tune_string)
1000 s390_tune = s390_arch;
1001 s390_tune_flags = s390_arch_flags;
1002 s390_tune_string = s390_arch_string;
1006 for (i = 0; i < pta_size; i++)
1007 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1009 s390_tune = processor_alias_table[i].processor;
1010 s390_tune_flags = processor_alias_table[i].flags;
1014 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1017 /* Sanity checks. */
1018 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1019 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1020 if (TARGET_64BIT && !TARGET_ZARCH)
1021 error ("64-bit ABI not supported in ESA/390 mode.");
1024 /* Map for smallest class containing reg regno. */
1026 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1027 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1028 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1029 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1030 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1031 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1032 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1033 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1034 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1035 ADDR_REGS, NO_REGS, ADDR_REGS
1038 /* Return attribute type of insn. */
1040 static enum attr_type
1041 s390_safe_attr_type (insn)
1044 if (recog_memoized (insn) >= 0)
1045 return get_attr_type (insn);
1050 /* Return true if OP a (const_int 0) operand.
1051 OP is the current operation.
1052 MODE is the current operation mode. */
1055 const0_operand (op, mode)
1057 enum machine_mode mode;
1059 return op == CONST0_RTX (mode);
1062 /* Return true if OP is constant.
1063 OP is the current operation.
1064 MODE is the current operation mode. */
1067 consttable_operand (op, mode)
1069 enum machine_mode mode ATTRIBUTE_UNUSED;
1071 return CONSTANT_P (op);
1074 /* Return true if the mode of operand OP matches MODE.
1075 If MODE is set to VOIDmode, set it to the mode of OP. */
1078 check_mode (op, mode)
1080 enum machine_mode *mode;
1082 if (*mode == VOIDmode)
1083 *mode = GET_MODE (op);
1086 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1092 /* Return true if OP a valid operand for the LARL instruction.
1093 OP is the current operation.
1094 MODE is the current operation mode. */
1097 larl_operand (op, mode)
1099 enum machine_mode mode;
1101 if (! check_mode (op, &mode))
1104 /* Allow labels and local symbols. */
1105 if (GET_CODE (op) == LABEL_REF)
1107 if (GET_CODE (op) == SYMBOL_REF)
1108 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1109 && SYMBOL_REF_TLS_MODEL (op) == 0
1110 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1112 /* Everything else must have a CONST, so strip it. */
1113 if (GET_CODE (op) != CONST)
1117 /* Allow adding *even* in-range constants. */
1118 if (GET_CODE (op) == PLUS)
1120 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1121 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1123 #if HOST_BITS_PER_WIDE_INT > 32
1124 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1125 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1131 /* Labels and local symbols allowed here as well. */
1132 if (GET_CODE (op) == LABEL_REF)
1134 if (GET_CODE (op) == SYMBOL_REF)
1135 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1136 && SYMBOL_REF_TLS_MODEL (op) == 0
1137 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1139 /* Now we must have a @GOTENT offset or @PLT stub
1140 or an @INDNTPOFF TLS offset. */
1141 if (GET_CODE (op) == UNSPEC
1142 && XINT (op, 1) == UNSPEC_GOTENT)
1144 if (GET_CODE (op) == UNSPEC
1145 && XINT (op, 1) == UNSPEC_PLT)
1147 if (GET_CODE (op) == UNSPEC
1148 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1154 /* Helper routine to implement s_operand and s_imm_operand.
1155 OP is the current operation.
1156 MODE is the current operation mode.
1157 ALLOW_IMMEDIATE specifies whether immediate operands should
1158 be accepted or not. */
1161 general_s_operand (op, mode, allow_immediate)
1163 enum machine_mode mode;
1164 int allow_immediate;
1166 struct s390_address addr;
1168 /* Call general_operand first, so that we don't have to
1169 check for many special cases. */
1170 if (!general_operand (op, mode))
1173 /* Just like memory_operand, allow (subreg (mem ...))
1175 if (reload_completed
1176 && GET_CODE (op) == SUBREG
1177 && GET_CODE (SUBREG_REG (op)) == MEM)
1178 op = SUBREG_REG (op);
1180 switch (GET_CODE (op))
1182 /* Constants that we are sure will be forced to the
1183 literal pool in reload are OK as s-operand. Note
1184 that we cannot call s390_preferred_reload_class here
1185 because it might not be known yet at this point
1186 whether the current function is a leaf or not. */
1189 if (!allow_immediate || reload_completed)
1191 if (!legitimate_reload_constant_p (op))
1197 /* Memory operands are OK unless they already use an
1200 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1202 if (s390_decompose_address (XEXP (op, 0), &addr)
1214 /* Return true if OP is a valid S-type operand.
1215 OP is the current operation.
1216 MODE is the current operation mode. */
1219 s_operand (op, mode)
1221 enum machine_mode mode;
1223 return general_s_operand (op, mode, 0);
1226 /* Return true if OP is a valid S-type operand or an immediate
1227 operand that can be addressed as S-type operand by forcing
1228 it into the literal pool.
1229 OP is the current operation.
1230 MODE is the current operation mode. */
1233 s_imm_operand (op, mode)
1235 enum machine_mode mode;
1237 return general_s_operand (op, mode, 1);
1240 /* Return true if DISP is a valid short displacement. */
1243 s390_short_displacement (disp)
1246 /* No displacement is OK. */
1250 /* Integer displacement in range. */
1251 if (GET_CODE (disp) == CONST_INT)
1252 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1254 /* GOT offset is not OK, the GOT can be large. */
1255 if (GET_CODE (disp) == CONST
1256 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1257 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1260 /* All other symbolic constants are literal pool references,
1261 which are OK as the literal pool must be small. */
1262 if (GET_CODE (disp) == CONST)
1268 /* Return true if OP is a valid operand for a C constraint. */
1271 s390_extra_constraint (op, c)
1275 struct s390_address addr;
1280 if (GET_CODE (op) != MEM)
1282 if (!s390_decompose_address (XEXP (op, 0), &addr))
1287 if (TARGET_LONG_DISPLACEMENT)
1289 if (!s390_short_displacement (addr.disp))
1295 if (GET_CODE (op) != MEM)
1298 if (TARGET_LONG_DISPLACEMENT)
1300 if (!s390_decompose_address (XEXP (op, 0), &addr))
1302 if (!s390_short_displacement (addr.disp))
1308 if (!TARGET_LONG_DISPLACEMENT)
1310 if (GET_CODE (op) != MEM)
1312 if (!s390_decompose_address (XEXP (op, 0), &addr))
1316 if (s390_short_displacement (addr.disp))
1321 if (!TARGET_LONG_DISPLACEMENT)
1323 if (GET_CODE (op) != MEM)
1325 /* Any invalid address here will be fixed up by reload,
1326 so accept it for the most generic constraint. */
1327 if (s390_decompose_address (XEXP (op, 0), &addr)
1328 && s390_short_displacement (addr.disp))
1333 if (TARGET_LONG_DISPLACEMENT)
1335 if (!s390_decompose_address (op, &addr))
1337 if (!s390_short_displacement (addr.disp))
1343 if (!TARGET_LONG_DISPLACEMENT)
1345 /* Any invalid address here will be fixed up by reload,
1346 so accept it for the most generic constraint. */
1347 if (s390_decompose_address (op, &addr)
1348 && s390_short_displacement (addr.disp))
1359 /* Compute a (partial) cost for rtx X. Return true if the complete
1360 cost has been computed, and false if subexpressions should be
1361 scanned. In either case, *TOTAL contains the cost result. */
1364 s390_rtx_costs (x, code, outer_code, total)
1366 int code, outer_code;
1372 if (GET_CODE (XEXP (x, 0)) == MINUS
1373 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1380 /* Force_const_mem does not work out of reload, because the
1381 saveable_obstack is set to reload_obstack, which does not
1382 live long enough. Because of this we cannot use force_const_mem
1383 in addsi3. This leads to problems with gen_add2_insn with a
1384 constant greater than a short. Because of that we give an
1385 addition of greater constants a cost of 3 (reload1.c 10096). */
1386 /* ??? saveable_obstack no longer exists. */
1387 if (outer_code == PLUS
1388 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1389 *total = COSTS_N_INSNS (3);
1410 *total = COSTS_N_INSNS (1);
1414 if (GET_MODE (XEXP (x, 0)) == DImode)
1415 *total = COSTS_N_INSNS (40);
1417 *total = COSTS_N_INSNS (7);
1424 *total = COSTS_N_INSNS (33);
1432 /* Return the cost of an address rtx ADDR. */
1435 s390_address_cost (addr)
1438 struct s390_address ad;
1439 if (!s390_decompose_address (addr, &ad))
1442 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1445 /* Return true if OP is a valid operand for the BRAS instruction.
1446 OP is the current operation.
1447 MODE is the current operation mode. */
1450 bras_sym_operand (op, mode)
1452 enum machine_mode mode ATTRIBUTE_UNUSED;
1454 register enum rtx_code code = GET_CODE (op);
1456 /* Allow SYMBOL_REFs. */
1457 if (code == SYMBOL_REF)
1460 /* Allow @PLT stubs. */
1462 && GET_CODE (XEXP (op, 0)) == UNSPEC
1463 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1468 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1469 otherwise return 0. */
1472 tls_symbolic_operand (op)
1475 if (GET_CODE (op) != SYMBOL_REF)
1477 return SYMBOL_REF_TLS_MODEL (op);
1480 /* Return true if OP is a load multiple operation. It is known to be a
1481 PARALLEL and the first section will be tested.
1482 OP is the current operation.
1483 MODE is the current operation mode. */
1486 load_multiple_operation (op, mode)
1488 enum machine_mode mode ATTRIBUTE_UNUSED;
1490 int count = XVECLEN (op, 0);
1491 unsigned int dest_regno;
1496 /* Perform a quick check so we don't blow up below. */
1498 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1499 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1500 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1503 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1504 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1506 /* Check, is base, or base + displacement. */
1508 if (GET_CODE (src_addr) == REG)
1510 else if (GET_CODE (src_addr) == PLUS
1511 && GET_CODE (XEXP (src_addr, 0)) == REG
1512 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1514 off = INTVAL (XEXP (src_addr, 1));
1515 src_addr = XEXP (src_addr, 0);
1520 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
1523 for (i = 1; i < count; i++)
1525 rtx elt = XVECEXP (op, 0, i);
1527 if (GET_CODE (elt) != SET
1528 || GET_CODE (SET_DEST (elt)) != REG
1529 || GET_MODE (SET_DEST (elt)) != Pmode
1530 || REGNO (SET_DEST (elt)) != dest_regno + i
1531 || GET_CODE (SET_SRC (elt)) != MEM
1532 || GET_MODE (SET_SRC (elt)) != Pmode
1533 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1534 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1535 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1536 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1537 != off + i * UNITS_PER_WORD)
1544 /* Return true if OP is a store multiple operation. It is known to be a
1545 PARALLEL and the first section will be tested.
1546 OP is the current operation.
1547 MODE is the current operation mode. */
1550 store_multiple_operation (op, mode)
1552 enum machine_mode mode ATTRIBUTE_UNUSED;
1554 int count = XVECLEN (op, 0);
1555 unsigned int src_regno;
1559 /* Perform a quick check so we don't blow up below. */
1561 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1562 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1563 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1566 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1567 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1569 /* Check, is base, or base + displacement. */
1571 if (GET_CODE (dest_addr) == REG)
1573 else if (GET_CODE (dest_addr) == PLUS
1574 && GET_CODE (XEXP (dest_addr, 0)) == REG
1575 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1577 off = INTVAL (XEXP (dest_addr, 1));
1578 dest_addr = XEXP (dest_addr, 0);
1583 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
1586 for (i = 1; i < count; i++)
1588 rtx elt = XVECEXP (op, 0, i);
1590 if (GET_CODE (elt) != SET
1591 || GET_CODE (SET_SRC (elt)) != REG
1592 || GET_MODE (SET_SRC (elt)) != Pmode
1593 || REGNO (SET_SRC (elt)) != src_regno + i
1594 || GET_CODE (SET_DEST (elt)) != MEM
1595 || GET_MODE (SET_DEST (elt)) != Pmode
1596 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1597 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1598 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1599 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1600 != off + i * UNITS_PER_WORD)
1607 /* Return true if OP contains a symbol reference */
1610 symbolic_reference_mentioned_p (op)
1613 register const char *fmt;
1616 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1619 fmt = GET_RTX_FORMAT (GET_CODE (op));
1620 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1626 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1627 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1631 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1638 /* Return true if OP contains a reference to a thread-local symbol. */
1641 tls_symbolic_reference_mentioned_p (op)
1644 register const char *fmt;
1647 if (GET_CODE (op) == SYMBOL_REF)
1648 return tls_symbolic_operand (op);
1650 fmt = GET_RTX_FORMAT (GET_CODE (op));
1651 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1657 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1658 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1662 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1670 /* Return true if OP is a legitimate general operand when
1671 generating PIC code. It is given that flag_pic is on
1672 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1675 legitimate_pic_operand_p (op)
1678 /* Accept all non-symbolic constants. */
1679 if (!SYMBOLIC_CONST (op))
1682 /* Reject everything else; must be handled
1683 via emit_symbolic_move. */
1687 /* Returns true if the constant value OP is a legitimate general operand.
1688 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1691 legitimate_constant_p (op)
1694 /* Accept all non-symbolic constants. */
1695 if (!SYMBOLIC_CONST (op))
1698 /* Accept immediate LARL operands. */
1699 if (TARGET_64BIT && larl_operand (op, VOIDmode))
1702 /* Thread-local symbols are never legal constants. This is
1703 so that emit_call knows that computing such addresses
1704 might require a function call. */
1705 if (TLS_SYMBOLIC_CONST (op))
1708 /* In the PIC case, symbolic constants must *not* be
1709 forced into the literal pool. We accept them here,
1710 so that they will be handled by emit_symbolic_move. */
1714 /* All remaining non-PIC symbolic constants are
1715 forced into the literal pool. */
1719 /* Determine if it's legal to put X into the constant pool. This
1720 is not possible if X contains the address of a symbol that is
1721 not constant (TLS) or not known at final link time (PIC). */
1724 s390_cannot_force_const_mem (x)
1727 switch (GET_CODE (x))
1731 /* Accept all non-symbolic constants. */
1735 /* Labels are OK iff we are non-PIC. */
1736 return flag_pic != 0;
1739 /* 'Naked' TLS symbol references are never OK,
1740 non-TLS symbols are OK iff we are non-PIC. */
1741 if (tls_symbolic_operand (x))
1744 return flag_pic != 0;
1747 return s390_cannot_force_const_mem (XEXP (x, 0));
1750 return s390_cannot_force_const_mem (XEXP (x, 0))
1751 || s390_cannot_force_const_mem (XEXP (x, 1));
1754 switch (XINT (x, 1))
1756 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1757 case UNSPEC_LTREL_OFFSET:
1765 case UNSPEC_GOTNTPOFF:
1766 case UNSPEC_INDNTPOFF:
1779 /* Returns true if the constant value OP is a legitimate general
1780 operand during and after reload. The difference to
1781 legitimate_constant_p is that this function will not accept
1782 a constant that would need to be forced to the literal pool
1783 before it can be used as operand. */
1786 legitimate_reload_constant_p (op)
1789 /* Accept la(y) operands. */
1790 if (GET_CODE (op) == CONST_INT
1791 && DISP_IN_RANGE (INTVAL (op)))
1794 /* Accept l(g)hi operands. */
1795 if (GET_CODE (op) == CONST_INT
1796 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1799 /* Accept lliXX operands. */
1801 && s390_single_hi (op, DImode, 0) >= 0)
1804 /* Accept larl operands. */
1806 && larl_operand (op, VOIDmode))
1809 /* Everything else cannot be handled without reload. */
1813 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1814 return the class of reg to actually use. */
1817 s390_preferred_reload_class (op, class)
1819 enum reg_class class;
1821 /* This can happen if a floating point constant is being
1822 reloaded into an integer register. Leave well alone. */
1823 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1824 && class != FP_REGS)
1827 switch (GET_CODE (op))
1829 /* Constants we cannot reload must be forced into the
1834 if (legitimate_reload_constant_p (op))
1839 /* If a symbolic constant or a PLUS is reloaded,
1840 it is most likely being used as an address, so
1841 prefer ADDR_REGS. If 'class' is not a superset
1842 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1847 if (reg_class_subset_p (ADDR_REGS, class))
1859 /* Return the register class of a scratch register needed to
1860 load IN into a register of class CLASS in MODE.
1862 We need a temporary when loading a PLUS expression which
1863 is not a legitimate operand of the LOAD ADDRESS instruction. */
1866 s390_secondary_input_reload_class (class, mode, in)
1867 enum reg_class class ATTRIBUTE_UNUSED;
1868 enum machine_mode mode;
1871 if (s390_plus_operand (in, mode))
1877 /* Return the register class of a scratch register needed to
1878 store a register of class CLASS in MODE into OUT:
1880 We need a temporary when storing a double-word to a
1881 non-offsettable memory address. */
1884 s390_secondary_output_reload_class (class, mode, out)
1885 enum reg_class class;
1886 enum machine_mode mode;
1889 if ((TARGET_64BIT ? mode == TImode
1890 : (mode == DImode || mode == DFmode))
1891 && reg_classes_intersect_p (GENERAL_REGS, class)
1892 && GET_CODE (out) == MEM
1893 && !offsettable_memref_p (out)
1894 && !s_operand (out, VOIDmode))
1900 /* Return true if OP is a PLUS that is not a legitimate
1901 operand for the LA instruction.
1902 OP is the current operation.
1903 MODE is the current operation mode. */
1906 s390_plus_operand (op, mode)
1908 enum machine_mode mode;
1910 if (!check_mode (op, &mode) || mode != Pmode)
1913 if (GET_CODE (op) != PLUS)
1916 if (legitimate_la_operand_p (op))
1922 /* Generate code to load SRC, which is PLUS that is not a
1923 legitimate operand for the LA instruction, into TARGET.
1924 SCRATCH may be used as scratch register. */
1927 s390_expand_plus_operand (target, src, scratch)
1928 register rtx target;
1930 register rtx scratch;
1933 struct s390_address ad;
1935 /* src must be a PLUS; get its two operands. */
1936 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1939 /* Check if any of the two operands is already scheduled
1940 for replacement by reload. This can happen e.g. when
1941 float registers occur in an address. */
1942 sum1 = find_replacement (&XEXP (src, 0));
1943 sum2 = find_replacement (&XEXP (src, 1));
1944 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1946 /* If the address is already strictly valid, there's nothing to do. */
1947 if (!s390_decompose_address (src, &ad)
1948 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1949 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1951 /* Otherwise, one of the operands cannot be an address register;
1952 we reload its value into the scratch register. */
1953 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1955 emit_move_insn (scratch, sum1);
1958 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1960 emit_move_insn (scratch, sum2);
1964 /* According to the way these invalid addresses are generated
1965 in reload.c, it should never happen (at least on s390) that
1966 *neither* of the PLUS components, after find_replacements
1967 was applied, is an address register. */
1968 if (sum1 == scratch && sum2 == scratch)
1974 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1977 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1978 is only ever performed on addresses, so we can mark the
1979 sum as legitimate for LA in any case. */
1980 s390_load_address (target, src);
1984 /* Decompose a RTL expression ADDR for a memory address into
1985 its components, returned in OUT.
1987 Returns 0 if ADDR is not a valid memory address, nonzero
1988 otherwise. If OUT is NULL, don't return the components,
1989 but check for validity only.
1991 Note: Only addresses in canonical form are recognized.
1992 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1993 canonical form so that they will be recognized. */
1996 s390_decompose_address (addr, out)
1998 struct s390_address *out;
2000 rtx base = NULL_RTX;
2001 rtx indx = NULL_RTX;
2002 rtx disp = NULL_RTX;
2003 int pointer = FALSE;
2004 int base_ptr = FALSE;
2005 int indx_ptr = FALSE;
2007 /* Decompose address into base + index + displacement. */
2009 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2012 else if (GET_CODE (addr) == PLUS)
2014 rtx op0 = XEXP (addr, 0);
2015 rtx op1 = XEXP (addr, 1);
2016 enum rtx_code code0 = GET_CODE (op0);
2017 enum rtx_code code1 = GET_CODE (op1);
2019 if (code0 == REG || code0 == UNSPEC)
2021 if (code1 == REG || code1 == UNSPEC)
2023 indx = op0; /* index + base */
2029 base = op0; /* base + displacement */
2034 else if (code0 == PLUS)
2036 indx = XEXP (op0, 0); /* index + base + disp */
2037 base = XEXP (op0, 1);
2048 disp = addr; /* displacement */
2051 /* Validate base register. */
2054 if (GET_CODE (base) == UNSPEC)
2056 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != UNSPEC_LTREL_BASE)
2058 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2061 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
2064 if (REGNO (base) == BASE_REGISTER
2065 || REGNO (base) == STACK_POINTER_REGNUM
2066 || REGNO (base) == FRAME_POINTER_REGNUM
2067 || ((reload_completed || reload_in_progress)
2068 && frame_pointer_needed
2069 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2070 || REGNO (base) == ARG_POINTER_REGNUM
2071 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
2072 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
2074 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2075 pointer = base_ptr = TRUE;
2078 /* Validate index register. */
2081 if (GET_CODE (indx) == UNSPEC)
2083 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != UNSPEC_LTREL_BASE)
2085 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2088 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2091 if (REGNO (indx) == BASE_REGISTER
2092 || REGNO (indx) == STACK_POINTER_REGNUM
2093 || REGNO (indx) == FRAME_POINTER_REGNUM
2094 || ((reload_completed || reload_in_progress)
2095 && frame_pointer_needed
2096 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2097 || REGNO (indx) == ARG_POINTER_REGNUM
2098 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
2099 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
2101 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2102 pointer = indx_ptr = TRUE;
2105 /* Prefer to use pointer as base, not index. */
2106 if (base && indx && !base_ptr
2107 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2114 /* Validate displacement. */
2117 /* Allow integer constant in range. */
2118 if (GET_CODE (disp) == CONST_INT)
2120 /* If the argument pointer is involved, the displacement will change
2121 later anyway as the argument pointer gets eliminated. This could
2122 make a valid displacement invalid, but it is more likely to make
2123 an invalid displacement valid, because we sometimes access the
2124 register save area via negative offsets to the arg pointer.
2125 Thus we don't check the displacement for validity here. If after
2126 elimination the displacement turns out to be invalid after all,
2127 this is fixed up by reload in any case. */
2128 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
2130 if (!DISP_IN_RANGE (INTVAL (disp)))
2135 /* In the small-PIC case, the linker converts @GOT
2136 and @GOTNTPOFF offsets to possible displacements. */
2137 else if (GET_CODE (disp) == CONST
2138 && GET_CODE (XEXP (disp, 0)) == UNSPEC
2139 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
2140 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
2148 /* Accept chunkfied literal pool symbol references. */
2149 else if (GET_CODE (disp) == CONST
2150 && GET_CODE (XEXP (disp, 0)) == MINUS
2151 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
2152 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
2157 /* Likewise if a constant offset is present. */
2158 else if (GET_CODE (disp) == CONST
2159 && GET_CODE (XEXP (disp, 0)) == PLUS
2160 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
2161 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
2162 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
2163 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
2168 /* We can convert literal pool addresses to
2169 displacements by basing them off the base register. */
2172 /* In some cases, we can accept an additional
2173 small constant offset. Split these off here. */
2175 unsigned int offset = 0;
2177 if (GET_CODE (disp) == CONST
2178 && GET_CODE (XEXP (disp, 0)) == PLUS
2179 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2181 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2182 disp = XEXP (XEXP (disp, 0), 0);
2185 /* Now we must have a literal pool address. */
2186 if (GET_CODE (disp) != SYMBOL_REF
2187 || !CONSTANT_POOL_ADDRESS_P (disp))
2190 /* If we have an offset, make sure it does not
2191 exceed the size of the constant pool entry. */
2192 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
2195 /* Either base or index must be free to
2196 hold the base register. */
2200 /* Convert the address. */
2202 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2204 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2206 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2207 UNSPEC_LTREL_OFFSET);
2208 disp = gen_rtx_CONST (Pmode, disp);
2211 disp = plus_constant (disp, offset);
2225 out->pointer = pointer;
2231 /* Return nonzero if ADDR is a valid memory address.
2232 STRICT specifies whether strict register checking applies. */
2235 legitimate_address_p (mode, addr, strict)
2236 enum machine_mode mode ATTRIBUTE_UNUSED;
2240 struct s390_address ad;
2241 if (!s390_decompose_address (addr, &ad))
2246 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2248 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2253 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2255 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2262 /* Return 1 if OP is a valid operand for the LA instruction.
2263 In 31-bit, we need to prove that the result is used as an
2264 address, as LA performs only a 31-bit addition. */
2267 legitimate_la_operand_p (op)
2270 struct s390_address addr;
2271 if (!s390_decompose_address (op, &addr))
2274 if (TARGET_64BIT || addr.pointer)
2280 /* Return 1 if OP is a valid operand for the LA instruction,
2281 and we prefer to use LA over addition to compute it. */
2284 preferred_la_operand_p (op)
2287 struct s390_address addr;
2288 if (!s390_decompose_address (op, &addr))
2291 if (!TARGET_64BIT && !addr.pointer)
2297 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2298 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2304 /* Emit a forced load-address operation to load SRC into DST.
2305 This will use the LOAD ADDRESS instruction even in situations
2306 where legitimate_la_operand_p (SRC) returns false. */
2309 s390_load_address (dst, src)
2314 emit_move_insn (dst, src);
2316 emit_insn (gen_force_la_31 (dst, src));
2319 /* Return a legitimate reference for ORIG (an address) using the
2320 register REG. If REG is 0, a new pseudo is generated.
2322 There are two types of references that must be handled:
2324 1. Global data references must load the address from the GOT, via
2325 the PIC reg. An insn is emitted to do this load, and the reg is
2328 2. Static data references, constant pool addresses, and code labels
2329 compute the address as an offset from the GOT, whose base is in
2330 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2331 differentiate them from global data objects. The returned
2332 address is the PIC reg + an unspec constant.
2334 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2335 reg also appears in the address. */
2338 legitimize_pic_address (orig, reg)
2346 if (GET_CODE (addr) == LABEL_REF
2347 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2349 /* This is a local symbol. */
2350 if (TARGET_64BIT && larl_operand (addr, VOIDmode))
2352 /* Access local symbols PC-relative via LARL.
2353 This is the same as in the non-PIC case, so it is
2354 handled automatically ... */
2358 /* Access local symbols relative to the GOT. */
2360 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2362 if (reload_in_progress || reload_completed)
2363 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2365 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2366 addr = gen_rtx_CONST (Pmode, addr);
2367 addr = force_const_mem (Pmode, addr);
2368 emit_move_insn (temp, addr);
2370 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2373 emit_move_insn (reg, new);
2378 else if (GET_CODE (addr) == SYMBOL_REF)
2381 reg = gen_reg_rtx (Pmode);
2385 /* Assume GOT offset < 4k. This is handled the same way
2386 in both 31- and 64-bit code (@GOT). */
2388 if (reload_in_progress || reload_completed)
2389 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2391 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2392 new = gen_rtx_CONST (Pmode, new);
2393 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2394 new = gen_rtx_MEM (Pmode, new);
2395 RTX_UNCHANGING_P (new) = 1;
2396 emit_move_insn (reg, new);
2399 else if (TARGET_64BIT)
2401 /* If the GOT offset might be >= 4k, we determine the position
2402 of the GOT entry via a PC-relative LARL (@GOTENT). */
2404 rtx temp = gen_reg_rtx (Pmode);
2406 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2407 new = gen_rtx_CONST (Pmode, new);
2408 emit_move_insn (temp, new);
2410 new = gen_rtx_MEM (Pmode, temp);
2411 RTX_UNCHANGING_P (new) = 1;
2412 emit_move_insn (reg, new);
2417 /* If the GOT offset might be >= 4k, we have to load it
2418 from the literal pool (@GOT). */
2420 rtx temp = gen_reg_rtx (Pmode);
2422 if (reload_in_progress || reload_completed)
2423 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2425 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2426 addr = gen_rtx_CONST (Pmode, addr);
2427 addr = force_const_mem (Pmode, addr);
2428 emit_move_insn (temp, addr);
2430 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2431 new = gen_rtx_MEM (Pmode, new);
2432 RTX_UNCHANGING_P (new) = 1;
2433 emit_move_insn (reg, new);
2439 if (GET_CODE (addr) == CONST)
2441 addr = XEXP (addr, 0);
2442 if (GET_CODE (addr) == UNSPEC)
2444 if (XVECLEN (addr, 0) != 1)
2446 switch (XINT (addr, 1))
2448 /* If someone moved a GOT-relative UNSPEC
2449 out of the literal pool, force them back in. */
2452 new = force_const_mem (Pmode, orig);
2455 /* @GOT is OK as is if small. */
2458 new = force_const_mem (Pmode, orig);
2461 /* @GOTENT is OK as is. */
2465 /* @PLT is OK as is on 64-bit, must be converted to
2466 GOT-relative @PLTOFF on 31-bit. */
2470 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2472 if (reload_in_progress || reload_completed)
2473 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2475 addr = XVECEXP (addr, 0, 0);
2476 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2478 addr = gen_rtx_CONST (Pmode, addr);
2479 addr = force_const_mem (Pmode, addr);
2480 emit_move_insn (temp, addr);
2482 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2485 emit_move_insn (reg, new);
2491 /* Everything else cannot happen. */
2496 else if (GET_CODE (addr) != PLUS)
2499 if (GET_CODE (addr) == PLUS)
2501 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2502 /* Check first to see if this is a constant offset
2503 from a local symbol reference. */
2504 if ((GET_CODE (op0) == LABEL_REF
2505 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2506 && GET_CODE (op1) == CONST_INT)
2508 if (TARGET_64BIT && larl_operand (op0, VOIDmode))
2510 if (INTVAL (op1) & 1)
2512 /* LARL can't handle odd offsets, so emit a
2513 pair of LARL and LA. */
2514 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2516 if (!DISP_IN_RANGE (INTVAL (op1)))
2518 int even = INTVAL (op1) - 1;
2519 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2520 op0 = gen_rtx_CONST (Pmode, op0);
2524 emit_move_insn (temp, op0);
2525 new = gen_rtx_PLUS (Pmode, temp, op1);
2529 emit_move_insn (reg, new);
2535 /* If the offset is even, we can just use LARL.
2536 This will happen automatically. */
2541 /* Access local symbols relative to the GOT. */
2543 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2545 if (reload_in_progress || reload_completed)
2546 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2548 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2550 addr = gen_rtx_PLUS (Pmode, addr, op1);
2551 addr = gen_rtx_CONST (Pmode, addr);
2552 addr = force_const_mem (Pmode, addr);
2553 emit_move_insn (temp, addr);
2555 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2558 emit_move_insn (reg, new);
2564 /* Now, check whether it is a GOT relative symbol plus offset
2565 that was pulled out of the literal pool. Force it back in. */
2567 else if (GET_CODE (op0) == UNSPEC
2568 && GET_CODE (op1) == CONST_INT)
2570 if (XVECLEN (op0, 0) != 1)
2572 if (XINT (op0, 1) != UNSPEC_GOTOFF)
2575 new = force_const_mem (Pmode, orig);
2578 /* Otherwise, compute the sum. */
2581 base = legitimize_pic_address (XEXP (addr, 0), reg);
2582 new = legitimize_pic_address (XEXP (addr, 1),
2583 base == reg ? NULL_RTX : reg);
2584 if (GET_CODE (new) == CONST_INT)
2585 new = plus_constant (base, INTVAL (new));
2588 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2590 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2591 new = XEXP (new, 1);
2593 new = gen_rtx_PLUS (Pmode, base, new);
2596 if (GET_CODE (new) == CONST)
2597 new = XEXP (new, 0);
2598 new = force_operand (new, 0);
2605 /* Load the thread pointer into a register. */
2608 get_thread_pointer ()
2612 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2613 tp = force_reg (Pmode, tp);
2614 mark_reg_pointer (tp, BITS_PER_WORD);
2619 /* Construct the SYMBOL_REF for the tls_get_offset function. */
2621 static GTY(()) rtx s390_tls_symbol;
2623 s390_tls_get_offset ()
2625 if (!s390_tls_symbol)
2626 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2628 return s390_tls_symbol;
2631 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2632 this (thread-local) address. REG may be used as temporary. */
2635 legitimize_tls_address (addr, reg)
2639 rtx new, tls_call, temp, base, r2, insn;
2641 if (GET_CODE (addr) == SYMBOL_REF)
2642 switch (tls_symbolic_operand (addr))
2644 case TLS_MODEL_GLOBAL_DYNAMIC:
2646 r2 = gen_rtx_REG (Pmode, 2);
2647 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
2648 new = gen_rtx_CONST (Pmode, tls_call);
2649 new = force_const_mem (Pmode, new);
2650 emit_move_insn (r2, new);
2651 emit_call_insn (gen_call_value_tls (r2, tls_call));
2652 insn = get_insns ();
2655 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2656 temp = gen_reg_rtx (Pmode);
2657 emit_libcall_block (insn, temp, r2, new);
2659 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2662 s390_load_address (reg, new);
2667 case TLS_MODEL_LOCAL_DYNAMIC:
2669 r2 = gen_rtx_REG (Pmode, 2);
2670 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
2671 new = gen_rtx_CONST (Pmode, tls_call);
2672 new = force_const_mem (Pmode, new);
2673 emit_move_insn (r2, new);
2674 emit_call_insn (gen_call_value_tls (r2, tls_call));
2675 insn = get_insns ();
2678 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
2679 temp = gen_reg_rtx (Pmode);
2680 emit_libcall_block (insn, temp, r2, new);
2682 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2683 base = gen_reg_rtx (Pmode);
2684 s390_load_address (base, new);
2686 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
2687 new = gen_rtx_CONST (Pmode, new);
2688 new = force_const_mem (Pmode, new);
2689 temp = gen_reg_rtx (Pmode);
2690 emit_move_insn (temp, new);
2692 new = gen_rtx_PLUS (Pmode, base, temp);
2695 s390_load_address (reg, new);
2700 case TLS_MODEL_INITIAL_EXEC:
2703 /* Assume GOT offset < 4k. This is handled the same way
2704 in both 31- and 64-bit code. */
2706 if (reload_in_progress || reload_completed)
2707 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2709 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2710 new = gen_rtx_CONST (Pmode, new);
2711 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2712 new = gen_rtx_MEM (Pmode, new);
2713 RTX_UNCHANGING_P (new) = 1;
2714 temp = gen_reg_rtx (Pmode);
2715 emit_move_insn (temp, new);
2717 else if (TARGET_64BIT)
2719 /* If the GOT offset might be >= 4k, we determine the position
2720 of the GOT entry via a PC-relative LARL. */
2722 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2723 new = gen_rtx_CONST (Pmode, new);
2724 temp = gen_reg_rtx (Pmode);
2725 emit_move_insn (temp, new);
2727 new = gen_rtx_MEM (Pmode, temp);
2728 RTX_UNCHANGING_P (new) = 1;
2729 temp = gen_reg_rtx (Pmode);
2730 emit_move_insn (temp, new);
2734 /* If the GOT offset might be >= 4k, we have to load it
2735 from the literal pool. */
2737 if (reload_in_progress || reload_completed)
2738 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2740 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2741 new = gen_rtx_CONST (Pmode, new);
2742 new = force_const_mem (Pmode, new);
2743 temp = gen_reg_rtx (Pmode);
2744 emit_move_insn (temp, new);
2746 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2747 new = gen_rtx_MEM (Pmode, new);
2748 RTX_UNCHANGING_P (new) = 1;
2750 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2751 temp = gen_reg_rtx (Pmode);
2752 emit_insn (gen_rtx_SET (Pmode, temp, new));
2756 /* In position-dependent code, load the absolute address of
2757 the GOT entry from the literal pool. */
2759 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2760 new = gen_rtx_CONST (Pmode, new);
2761 new = force_const_mem (Pmode, new);
2762 temp = gen_reg_rtx (Pmode);
2763 emit_move_insn (temp, new);
2766 new = gen_rtx_MEM (Pmode, new);
2767 RTX_UNCHANGING_P (new) = 1;
2769 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2770 temp = gen_reg_rtx (Pmode);
2771 emit_insn (gen_rtx_SET (Pmode, temp, new));
2774 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2777 s390_load_address (reg, new);
2782 case TLS_MODEL_LOCAL_EXEC:
2783 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2784 new = gen_rtx_CONST (Pmode, new);
2785 new = force_const_mem (Pmode, new);
2786 temp = gen_reg_rtx (Pmode);
2787 emit_move_insn (temp, new);
2789 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2792 s390_load_address (reg, new);
2801 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2803 switch (XINT (XEXP (addr, 0), 1))
2805 case UNSPEC_INDNTPOFF:
2818 abort (); /* for now ... */
2823 /* Emit insns to move operands[1] into operands[0]. */
2826 emit_symbolic_move (operands)
2829 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2831 if (GET_CODE (operands[0]) == MEM)
2832 operands[1] = force_reg (Pmode, operands[1]);
2833 else if (TLS_SYMBOLIC_CONST (operands[1]))
2834 operands[1] = legitimize_tls_address (operands[1], temp);
2836 operands[1] = legitimize_pic_address (operands[1], temp);
2839 /* Try machine-dependent ways of modifying an illegitimate address X
2840 to be legitimate. If we find one, return the new, valid address.
2842 OLDX is the address as it was before break_out_memory_refs was called.
2843 In some cases it is useful to look at this to decide what needs to be done.
2845 MODE is the mode of the operand pointed to by X.
2847 When -fpic is used, special handling is needed for symbolic references.
2848 See comments by legitimize_pic_address for details. */
2851 legitimize_address (x, oldx, mode)
2853 register rtx oldx ATTRIBUTE_UNUSED;
2854 enum machine_mode mode ATTRIBUTE_UNUSED;
2856 rtx constant_term = const0_rtx;
2858 if (TLS_SYMBOLIC_CONST (x))
2860 x = legitimize_tls_address (x, 0);
2862 if (legitimate_address_p (mode, x, FALSE))
2867 if (SYMBOLIC_CONST (x)
2868 || (GET_CODE (x) == PLUS
2869 && (SYMBOLIC_CONST (XEXP (x, 0))
2870 || SYMBOLIC_CONST (XEXP (x, 1)))))
2871 x = legitimize_pic_address (x, 0);
2873 if (legitimate_address_p (mode, x, FALSE))
2877 x = eliminate_constant_term (x, &constant_term);
2879 /* Optimize loading of large displacements by splitting them
2880 into the multiple of 4K and the rest; this allows the
2881 former to be CSE'd if possible.
2883 Don't do this if the displacement is added to a register
2884 pointing into the stack frame, as the offsets will
2885 change later anyway. */
2887 if (GET_CODE (constant_term) == CONST_INT
2888 && !TARGET_LONG_DISPLACEMENT
2889 && !DISP_IN_RANGE (INTVAL (constant_term))
2890 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
2892 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2893 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2895 rtx temp = gen_reg_rtx (Pmode);
2896 rtx val = force_operand (GEN_INT (upper), temp);
2898 emit_move_insn (temp, val);
2900 x = gen_rtx_PLUS (Pmode, x, temp);
2901 constant_term = GEN_INT (lower);
2904 if (GET_CODE (x) == PLUS)
2906 if (GET_CODE (XEXP (x, 0)) == REG)
2908 register rtx temp = gen_reg_rtx (Pmode);
2909 register rtx val = force_operand (XEXP (x, 1), temp);
2911 emit_move_insn (temp, val);
2913 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2916 else if (GET_CODE (XEXP (x, 1)) == REG)
2918 register rtx temp = gen_reg_rtx (Pmode);
2919 register rtx val = force_operand (XEXP (x, 0), temp);
2921 emit_move_insn (temp, val);
2923 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2927 if (constant_term != const0_rtx)
2928 x = gen_rtx_PLUS (Pmode, x, constant_term);
2933 /* Emit code to move LEN bytes from DST to SRC. */
2936 s390_expand_movstr (dst, src, len)
2941 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2942 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2943 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2944 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
2947 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2949 if (INTVAL (len) > 0)
2950 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
2953 else if (TARGET_MVCLE)
2955 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2956 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2957 rtx reg0 = gen_reg_rtx (double_mode);
2958 rtx reg1 = gen_reg_rtx (double_mode);
2960 emit_move_insn (gen_highpart (single_mode, reg0),
2961 force_operand (XEXP (dst, 0), NULL_RTX));
2962 emit_move_insn (gen_highpart (single_mode, reg1),
2963 force_operand (XEXP (src, 0), NULL_RTX));
2965 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2966 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2968 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2973 rtx dst_addr, src_addr, count, blocks, temp;
2974 rtx end_label = gen_label_rtx ();
2975 enum machine_mode mode;
2978 mode = GET_MODE (len);
2979 if (mode == VOIDmode)
2982 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2986 dst_addr = gen_reg_rtx (Pmode);
2987 src_addr = gen_reg_rtx (Pmode);
2988 count = gen_reg_rtx (mode);
2989 blocks = gen_reg_rtx (mode);
2991 convert_move (count, len, 1);
2992 emit_cmp_and_jump_insns (count, const0_rtx,
2993 EQ, NULL_RTX, mode, 1, end_label);
2995 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2996 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2997 dst = change_address (dst, VOIDmode, dst_addr);
2998 src = change_address (src, VOIDmode, src_addr);
3000 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3002 emit_move_insn (count, temp);
3004 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3006 emit_move_insn (blocks, temp);
3008 expand_start_loop (1);
3009 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3010 make_tree (type, blocks),
3011 make_tree (type, const0_rtx)));
3013 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
3014 s390_load_address (dst_addr,
3015 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3016 s390_load_address (src_addr,
3017 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3019 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3021 emit_move_insn (blocks, temp);
3025 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
3026 emit_label (end_label);
3030 /* Emit code to clear LEN bytes at DST. */
3033 s390_expand_clrstr (dst, len)
3037 rtx (*gen_short) PARAMS ((rtx, rtx)) =
3038 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
3039 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
3040 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
3043 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3045 if (INTVAL (len) > 0)
3046 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
3049 else if (TARGET_MVCLE)
3051 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3052 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3053 rtx reg0 = gen_reg_rtx (double_mode);
3054 rtx reg1 = gen_reg_rtx (double_mode);
3056 emit_move_insn (gen_highpart (single_mode, reg0),
3057 force_operand (XEXP (dst, 0), NULL_RTX));
3058 convert_move (gen_lowpart (single_mode, reg0), len, 1);
3060 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
3061 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
3063 emit_insn ((*gen_long) (reg0, reg1, reg0));
3068 rtx dst_addr, src_addr, count, blocks, temp;
3069 rtx end_label = gen_label_rtx ();
3070 enum machine_mode mode;
3073 mode = GET_MODE (len);
3074 if (mode == VOIDmode)
3077 type = (*lang_hooks.types.type_for_mode) (mode, 1);
3081 dst_addr = gen_reg_rtx (Pmode);
3082 src_addr = gen_reg_rtx (Pmode);
3083 count = gen_reg_rtx (mode);
3084 blocks = gen_reg_rtx (mode);
3086 convert_move (count, len, 1);
3087 emit_cmp_and_jump_insns (count, const0_rtx,
3088 EQ, NULL_RTX, mode, 1, end_label);
3090 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3091 dst = change_address (dst, VOIDmode, dst_addr);
3093 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3095 emit_move_insn (count, temp);
3097 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3099 emit_move_insn (blocks, temp);
3101 expand_start_loop (1);
3102 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3103 make_tree (type, blocks),
3104 make_tree (type, const0_rtx)));
3106 emit_insn ((*gen_short) (dst, GEN_INT (255)));
3107 s390_load_address (dst_addr,
3108 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3110 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3112 emit_move_insn (blocks, temp);
3116 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
3117 emit_label (end_label);
3121 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3122 and return the result in TARGET. */
3125 s390_expand_cmpmem (target, op0, op1, len)
3131 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
3132 TARGET_64BIT ? gen_cmpmem_short_64 : gen_cmpmem_short_31;
3133 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
3134 TARGET_64BIT ? gen_cmpmem_long_64 : gen_cmpmem_long_31;
3135 rtx (*gen_result) PARAMS ((rtx)) =
3136 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
3138 op0 = protect_from_queue (op0, 0);
3139 op1 = protect_from_queue (op1, 0);
3140 len = protect_from_queue (len, 0);
3142 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3144 if (INTVAL (len) > 0)
3146 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
3147 emit_insn ((*gen_result) (target));
3150 emit_move_insn (target, const0_rtx);
3153 else /* if (TARGET_MVCLE) */
3155 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3156 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3157 rtx reg0 = gen_reg_rtx (double_mode);
3158 rtx reg1 = gen_reg_rtx (double_mode);
3160 emit_move_insn (gen_highpart (single_mode, reg0),
3161 force_operand (XEXP (op0, 0), NULL_RTX));
3162 emit_move_insn (gen_highpart (single_mode, reg1),
3163 force_operand (XEXP (op1, 0), NULL_RTX));
3165 convert_move (gen_lowpart (single_mode, reg0), len, 1);
3166 convert_move (gen_lowpart (single_mode, reg1), len, 1);
3168 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
3169 emit_insn ((*gen_result) (target));
3173 /* Deactivate for now as profile code cannot cope with
3174 CC being live across basic block boundaries. */
3177 rtx addr0, addr1, count, blocks, temp;
3178 rtx end_label = gen_label_rtx ();
3179 enum machine_mode mode;
3182 mode = GET_MODE (len);
3183 if (mode == VOIDmode)
3186 type = (*lang_hooks.types.type_for_mode) (mode, 1);
3190 addr0 = gen_reg_rtx (Pmode);
3191 addr1 = gen_reg_rtx (Pmode);
3192 count = gen_reg_rtx (mode);
3193 blocks = gen_reg_rtx (mode);
3195 convert_move (count, len, 1);
3196 emit_cmp_and_jump_insns (count, const0_rtx,
3197 EQ, NULL_RTX, mode, 1, end_label);
3199 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3200 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3201 op0 = change_address (op0, VOIDmode, addr0);
3202 op1 = change_address (op1, VOIDmode, addr1);
3204 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3206 emit_move_insn (count, temp);
3208 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3210 emit_move_insn (blocks, temp);
3212 expand_start_loop (1);
3213 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3214 make_tree (type, blocks),
3215 make_tree (type, const0_rtx)));
3217 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
3218 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3219 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3220 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3221 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3222 emit_jump_insn (temp);
3224 s390_load_address (addr0,
3225 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3226 s390_load_address (addr1,
3227 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3229 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3231 emit_move_insn (blocks, temp);
3235 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
3236 emit_label (end_label);
3238 emit_insn ((*gen_result) (target));
3243 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3244 We need to emit DTP-relative relocations. */
3247 s390_output_dwarf_dtprel (file, size, x)
3255 fputs ("\t.long\t", file);
3258 fputs ("\t.quad\t", file);
3263 output_addr_const (file, x);
3264 fputs ("@DTPOFF", file);
3267 /* In the name of slightly smaller debug output, and to cater to
3268 general assembler losage, recognize various UNSPEC sequences
3269 and turn them back into a direct symbol reference. */
3272 s390_delegitimize_address (orig_x)
3277 if (GET_CODE (x) != MEM)
3281 if (GET_CODE (x) == PLUS
3282 && GET_CODE (XEXP (x, 1)) == CONST
3283 && GET_CODE (XEXP (x, 0)) == REG
3284 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3286 y = XEXP (XEXP (x, 1), 0);
3287 if (GET_CODE (y) == UNSPEC
3288 && XINT (y, 1) == UNSPEC_GOT)
3289 return XVECEXP (y, 0, 0);
3293 if (GET_CODE (x) == CONST)
3296 if (GET_CODE (y) == UNSPEC
3297 && XINT (y, 1) == UNSPEC_GOTENT)
3298 return XVECEXP (y, 0, 0);
3305 /* Locate some local-dynamic symbol still in use by this function
3306 so that we can print its name in local-dynamic base patterns. */
3309 get_some_local_dynamic_name ()
3313 if (cfun->machine->some_ld_name)
3314 return cfun->machine->some_ld_name;
3316 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3318 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3319 return cfun->machine->some_ld_name;
3325 get_some_local_dynamic_name_1 (px, data)
3327 void *data ATTRIBUTE_UNUSED;
3331 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3333 x = get_pool_constant (x);
3334 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3337 if (GET_CODE (x) == SYMBOL_REF
3338 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3340 cfun->machine->some_ld_name = XSTR (x, 0);
3347 /* Output symbolic constant X in assembler syntax to
3348 stdio stream FILE. */
3351 s390_output_symbolic_const (file, x)
3355 switch (GET_CODE (x))
3360 s390_output_symbolic_const (file, XEXP (x, 0));
3364 s390_output_symbolic_const (file, XEXP (x, 0));
3365 fprintf (file, "+");
3366 s390_output_symbolic_const (file, XEXP (x, 1));
3370 s390_output_symbolic_const (file, XEXP (x, 0));
3371 fprintf (file, "-");
3372 s390_output_symbolic_const (file, XEXP (x, 1));
3379 output_addr_const (file, x);
3383 if (XVECLEN (x, 0) != 1)
3384 output_operand_lossage ("invalid UNSPEC as operand (1)");
3385 switch (XINT (x, 1))
3387 case UNSPEC_LTREL_OFFSET:
3388 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3389 fprintf (file, "-");
3390 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3393 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3394 fprintf (file, "@GOTENT");
3397 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3398 fprintf (file, "@GOT");
3401 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3402 fprintf (file, "@GOTOFF");
3405 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3406 fprintf (file, "@PLT");
3409 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3410 fprintf (file, "@PLTOFF");
3413 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3414 fprintf (file, "@TLSGD");
3417 assemble_name (file, get_some_local_dynamic_name ());
3418 fprintf (file, "@TLSLDM");
3421 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3422 fprintf (file, "@DTPOFF");
3425 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3426 fprintf (file, "@NTPOFF");
3428 case UNSPEC_GOTNTPOFF:
3429 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3430 fprintf (file, "@GOTNTPOFF");
3432 case UNSPEC_INDNTPOFF:
3433 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3434 fprintf (file, "@INDNTPOFF");
3437 output_operand_lossage ("invalid UNSPEC as operand (2)");
3443 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
3448 /* Output address operand ADDR in assembler syntax to
3449 stdio stream FILE. */
3452 print_operand_address (file, addr)
3456 struct s390_address ad;
3458 if (!s390_decompose_address (addr, &ad)
3459 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3460 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3461 output_operand_lossage ("Cannot decompose address.");
3464 s390_output_symbolic_const (file, ad.disp);
3466 fprintf (file, "0");
3468 if (ad.base && ad.indx)
3469 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3470 reg_names[REGNO (ad.base)]);
3472 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3475 /* Output operand X in assembler syntax to stdio stream FILE.
3476 CODE specified the format flag. The following format flags
3479 'C': print opcode suffix for branch condition.
3480 'D': print opcode suffix for inverse branch condition.
3481 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3482 'O': print only the displacement of a memory reference.
3483 'R': print only the base register of a memory reference.
3484 'N': print the second word of a DImode operand.
3485 'M': print the second word of a TImode operand.
3487 'b': print integer X as if it's an unsigned byte.
3488 'x': print integer X as if it's an unsigned word.
3489 'h': print integer X as if it's a signed word. */
3492 print_operand (file, x, code)
3500 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3504 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
3508 if (GET_CODE (x) == SYMBOL_REF)
3510 fprintf (file, "%s", ":tls_load:");
3511 output_addr_const (file, x);
3513 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3515 fprintf (file, "%s", ":tls_gdcall:");
3516 output_addr_const (file, XVECEXP (x, 0, 0));
3518 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3520 fprintf (file, "%s", ":tls_ldcall:");
3521 assemble_name (file, get_some_local_dynamic_name ());
3529 struct s390_address ad;
3531 if (GET_CODE (x) != MEM
3532 || !s390_decompose_address (XEXP (x, 0), &ad)
3533 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3538 s390_output_symbolic_const (file, ad.disp);
3540 fprintf (file, "0");
3546 struct s390_address ad;
3548 if (GET_CODE (x) != MEM
3549 || !s390_decompose_address (XEXP (x, 0), &ad)
3550 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3555 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3557 fprintf (file, "0");
3562 if (GET_CODE (x) == REG)
3563 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3564 else if (GET_CODE (x) == MEM)
3565 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
3571 if (GET_CODE (x) == REG)
3572 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3573 else if (GET_CODE (x) == MEM)
3574 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3580 switch (GET_CODE (x))
3583 fprintf (file, "%s", reg_names[REGNO (x)]);
3587 output_address (XEXP (x, 0));
3594 s390_output_symbolic_const (file, x);
3599 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3600 else if (code == 'x')
3601 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3602 else if (code == 'h')
3603 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3605 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3609 if (GET_MODE (x) != VOIDmode)
3612 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3613 else if (code == 'x')
3614 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3615 else if (code == 'h')
3616 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3622 fatal_insn ("UNKNOWN in print_operand !?", x);
3627 /* Target hook for assembling integer objects. We need to define it
3628 here to work a round a bug in some versions of GAS, which couldn't
3629 handle values smaller than INT_MIN when printed in decimal. */
3632 s390_assemble_integer (x, size, aligned_p)
3637 if (size == 8 && aligned_p
3638 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3640 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
3644 return default_assemble_integer (x, size, aligned_p);
3647 /* Returns true if register REGNO is used for forming
3648 a memory address in expression X. */
3651 reg_used_in_mem_p (regno, x)
3655 enum rtx_code code = GET_CODE (x);
3661 if (refers_to_regno_p (regno, regno+1,
3665 else if (code == SET
3666 && GET_CODE (SET_DEST (x)) == PC)
3668 if (refers_to_regno_p (regno, regno+1,
3673 fmt = GET_RTX_FORMAT (code);
3674 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3677 && reg_used_in_mem_p (regno, XEXP (x, i)))
3680 else if (fmt[i] == 'E')
3681 for (j = 0; j < XVECLEN (x, i); j++)
3682 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3688 /* Returns true if expression DEP_RTX sets an address register
3689 used by instruction INSN to address memory. */
3692 addr_generation_dependency_p (dep_rtx, insn)
3698 if (GET_CODE (dep_rtx) == INSN)
3699 dep_rtx = PATTERN (dep_rtx);
3701 if (GET_CODE (dep_rtx) == SET)
3703 target = SET_DEST (dep_rtx);
3704 if (GET_CODE (target) == STRICT_LOW_PART)
3705 target = XEXP (target, 0);
3706 while (GET_CODE (target) == SUBREG)
3707 target = SUBREG_REG (target);
3709 if (GET_CODE (target) == REG)
3711 int regno = REGNO (target);
3713 if (s390_safe_attr_type (insn) == TYPE_LA)
3715 pat = PATTERN (insn);
3716 if (GET_CODE (pat) == PARALLEL)
3718 if (XVECLEN (pat, 0) != 2)
3720 pat = XVECEXP (pat, 0, 0);
3722 if (GET_CODE (pat) == SET)
3723 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
3727 else if (get_attr_atype (insn) == ATYPE_AGEN)
3728 return reg_used_in_mem_p (regno, PATTERN (insn));
3734 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
3737 s390_agen_dep_p(dep_insn, insn)
3741 rtx dep_rtx = PATTERN (dep_insn);
3744 if (GET_CODE (dep_rtx) == SET
3745 && addr_generation_dependency_p (dep_rtx, insn))
3747 else if (GET_CODE (dep_rtx) == PARALLEL)
3749 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3751 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3758 /* Return the modified cost of the dependency of instruction INSN
3759 on instruction DEP_INSN through the link LINK. COST is the
3760 default cost of that dependency.
3762 Data dependencies are all handled without delay. However, if a
3763 register is modified and subsequently used as base or index
3764 register of a memory reference, at least 4 cycles need to pass
3765 between setting and using the register to avoid pipeline stalls.
3766 An exception is the LA instruction. An address generated by LA can
3767 be used by introducing only a one cycle stall on the pipeline. */
3770 s390_adjust_cost (insn, link, dep_insn, cost)
3779 /* If the dependence is an anti-dependence, there is no cost. For an
3780 output dependence, there is sometimes a cost, but it doesn't seem
3781 worth handling those few cases. */
3783 if (REG_NOTE_KIND (link) != 0)
3786 /* If we can't recognize the insns, we can't really do anything. */
3787 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3790 /* DFA based scheduling checks address dependency in md file. */
3791 if (s390_use_dfa_pipeline_interface ())
3793 /* Operand forward in case of lr, load and la. */
3794 if (s390_tune == PROCESSOR_2084_Z990
3796 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3797 || s390_safe_attr_type (dep_insn) == TYPE_LR
3798 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
3803 dep_rtx = PATTERN (dep_insn);
3805 if (GET_CODE (dep_rtx) == SET
3806 && addr_generation_dependency_p (dep_rtx, insn))
3807 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3808 else if (GET_CODE (dep_rtx) == PARALLEL)
3810 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3812 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3813 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3819 /* A C statement (sans semicolon) to update the integer scheduling priority
3820 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3821 reduce the priority to execute INSN later. Do not define this macro if
3822 you do not need to adjust the scheduling priorities of insns.
3824 A STD instruction should be scheduled earlier,
3825 in order to use the bypass. */
3828 s390_adjust_priority (insn, priority)
3829 rtx insn ATTRIBUTE_UNUSED;
3832 if (! INSN_P (insn))
3835 if (s390_tune != PROCESSOR_2084_Z990)
3838 switch (s390_safe_attr_type (insn))
3842 priority = priority << 3;
3845 priority = priority << 1;
3853 /* The number of instructions that can be issued per cycle. */
3858 if (s390_tune == PROCESSOR_2084_Z990)
3863 /* If the following function returns TRUE, we will use the the DFA
3867 s390_use_dfa_pipeline_interface ()
3869 if (s390_tune == PROCESSOR_2064_Z900
3870 || s390_tune == PROCESSOR_2084_Z990)
3877 s390_first_cycle_multipass_dfa_lookahead ()
3879 return s390_use_dfa_pipeline_interface () ? 4 : 0;
3882 /* Called after issuing each insn.
3883 Triggers default sort algorithm to better slot instructions. */
3886 s390_sched_reorder2 (dump, sched_verbose, ready, pn_ready, clock_var)
3887 FILE *dump ATTRIBUTE_UNUSED;
3888 int sched_verbose ATTRIBUTE_UNUSED;
3889 rtx *ready ATTRIBUTE_UNUSED;
3890 int *pn_ready ATTRIBUTE_UNUSED;
3891 int clock_var ATTRIBUTE_UNUSED;
3893 return s390_issue_rate();
3897 /* Split all branches that exceed the maximum distance.
3898 Returns true if this created a new literal pool entry.
3900 Code generated by this routine is allowed to use
3901 TEMP_REG as temporary scratch register. If this is
3902 done, TEMP_USED is set to true. */
3905 s390_split_branches (temp_reg, temp_used)
3909 int new_literal = 0;
3910 rtx insn, pat, tmp, target;
3913 /* We need correct insn addresses. */
3915 shorten_branches (get_insns ());
3917 /* Find all branches that exceed 64KB, and split them. */
3919 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3921 if (GET_CODE (insn) != JUMP_INSN)
3924 pat = PATTERN (insn);
3925 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3926 pat = XVECEXP (pat, 0, 0);
3927 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
3930 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3932 label = &SET_SRC (pat);
3934 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3936 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3937 label = &XEXP (SET_SRC (pat), 1);
3938 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3939 label = &XEXP (SET_SRC (pat), 2);
3946 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
3953 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3954 INSN_ADDRESSES_NEW (tmp, -1);
3961 tmp = force_const_mem (Pmode, *label);
3962 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3963 INSN_ADDRESSES_NEW (tmp, -1);
3970 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
3971 UNSPEC_LTREL_OFFSET);
3972 target = gen_rtx_CONST (Pmode, target);
3973 target = force_const_mem (Pmode, target);
3974 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
3975 INSN_ADDRESSES_NEW (tmp, -1);
3977 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (target, 0)),
3979 target = gen_rtx_PLUS (Pmode, temp_reg, target);
3982 if (!validate_change (insn, label, target, 0))
3990 /* Find a literal pool symbol referenced in RTX X, and store
3991 it at REF. Will abort if X contains references to more than
3992 one such pool symbol; multiple references to the same symbol
3993 are allowed, however.
3995 The rtx pointed to by REF must be initialized to NULL_RTX
3996 by the caller before calling this routine. */
3999 find_constant_pool_ref (x, ref)
4006 /* Ignore LTREL_BASE references. */
4007 if (GET_CODE (x) == UNSPEC
4008 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4011 if (GET_CODE (x) == SYMBOL_REF
4012 && CONSTANT_POOL_ADDRESS_P (x))
4014 if (*ref == NULL_RTX)
4020 fmt = GET_RTX_FORMAT (GET_CODE (x));
4021 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4025 find_constant_pool_ref (XEXP (x, i), ref);
4027 else if (fmt[i] == 'E')
4029 for (j = 0; j < XVECLEN (x, i); j++)
4030 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4035 /* Replace every reference to the literal pool symbol REF
4036 in X by the address ADDR. Fix up MEMs as required. */
4039 replace_constant_pool_ref (x, ref, addr)
4050 /* Literal pool references can only occur inside a MEM ... */
4051 if (GET_CODE (*x) == MEM)
4053 rtx memref = XEXP (*x, 0);
4057 *x = replace_equiv_address (*x, addr);
4061 if (GET_CODE (memref) == CONST
4062 && GET_CODE (XEXP (memref, 0)) == PLUS
4063 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4064 && XEXP (XEXP (memref, 0), 0) == ref)
4066 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4067 *x = replace_equiv_address (*x, plus_constant (addr, off));
4072 /* ... or a load-address type pattern. */
4073 if (GET_CODE (*x) == SET)
4075 rtx addrref = SET_SRC (*x);
4079 SET_SRC (*x) = addr;
4083 if (GET_CODE (addrref) == CONST
4084 && GET_CODE (XEXP (addrref, 0)) == PLUS
4085 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4086 && XEXP (XEXP (addrref, 0), 0) == ref)
4088 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4089 SET_SRC (*x) = plus_constant (addr, off);
4094 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4095 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4099 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
4101 else if (fmt[i] == 'E')
4103 for (j = 0; j < XVECLEN (*x, i); j++)
4104 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
4109 /* Check whether X contains an UNSPEC_LTREL_BASE.
4110 Return its constant pool symbol if found, NULL_RTX otherwise. */
4119 if (GET_CODE (x) == UNSPEC
4120 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4121 return XVECEXP (x, 0, 0);
4123 fmt = GET_RTX_FORMAT (GET_CODE (x));
4124 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4128 rtx fnd = find_ltrel_base (XEXP (x, i));
4132 else if (fmt[i] == 'E')
4134 for (j = 0; j < XVECLEN (x, i); j++)
4136 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4146 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE. */
4149 replace_ltrel_base (x, base)
4156 if (GET_CODE (*x) == UNSPEC
4157 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4163 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4164 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4168 replace_ltrel_base (&XEXP (*x, i), base);
4170 else if (fmt[i] == 'E')
4172 for (j = 0; j < XVECLEN (*x, i); j++)
4173 replace_ltrel_base (&XVECEXP (*x, i, j), base);
4179 /* We keep a list of constants which we have to add to internal
4180 constant tables in the middle of large functions. */
4182 #define NR_C_MODES 7
4183 enum machine_mode constant_modes[NR_C_MODES] =
4192 rtx (*gen_consttable[NR_C_MODES])(rtx) =
4195 gen_consttable_df, gen_consttable_di,
4196 gen_consttable_sf, gen_consttable_si,
4203 struct constant *next;
4208 struct constant_pool
4210 struct constant_pool *next;
4215 struct constant *constants[NR_C_MODES];
4220 static struct constant_pool * s390_chunkify_start PARAMS ((void));
4221 static void s390_chunkify_finish PARAMS ((struct constant_pool *));
4222 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
4224 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
4225 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
4226 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
4227 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
4228 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4229 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4230 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
4231 static void s390_free_pool PARAMS ((struct constant_pool *));
4233 /* Create new constant pool covering instructions starting at INSN
4234 and chain it to the end of POOL_LIST. */
4236 static struct constant_pool *
4237 s390_start_pool (pool_list, insn)
4238 struct constant_pool **pool_list;
4241 struct constant_pool *pool, **prev;
4244 pool = (struct constant_pool *) xmalloc (sizeof *pool);
4246 for (i = 0; i < NR_C_MODES; i++)
4247 pool->constants[i] = NULL;
4249 pool->label = gen_label_rtx ();
4250 pool->first_insn = insn;
4251 pool->pool_insn = NULL_RTX;
4252 pool->insns = BITMAP_XMALLOC ();
4255 for (prev = pool_list; *prev; prev = &(*prev)->next)
4262 /* End range of instructions covered by POOL at INSN and emit
4263 placeholder insn representing the pool. */
4266 s390_end_pool (pool, insn)
4267 struct constant_pool *pool;
4270 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4273 insn = get_last_insn ();
4275 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4276 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4279 /* Add INSN to the list of insns covered by POOL. */
4282 s390_add_pool_insn (pool, insn)
4283 struct constant_pool *pool;
4286 bitmap_set_bit (pool->insns, INSN_UID (insn));
4289 /* Return pool out of POOL_LIST that covers INSN. */
4291 static struct constant_pool *
4292 s390_find_pool (pool_list, insn)
4293 struct constant_pool *pool_list;
4296 struct constant_pool *pool;
4298 for (pool = pool_list; pool; pool = pool->next)
4299 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4305 /* Add constant VAL of mode MODE to the constant pool POOL. */
4308 s390_add_constant (pool, val, mode)
4309 struct constant_pool *pool;
4311 enum machine_mode mode;
4316 for (i = 0; i < NR_C_MODES; i++)
4317 if (constant_modes[i] == mode)
4319 if (i == NR_C_MODES)
4322 for (c = pool->constants[i]; c != NULL; c = c->next)
4323 if (rtx_equal_p (val, c->value))
4328 c = (struct constant *) xmalloc (sizeof *c);
4330 c->label = gen_label_rtx ();
4331 c->next = pool->constants[i];
4332 pool->constants[i] = c;
4333 pool->size += GET_MODE_SIZE (mode);
4337 /* Find constant VAL of mode MODE in the constant pool POOL.
4338 Return an RTX describing the distance from the start of
4339 the pool to the location of the new constant. */
4342 s390_find_constant (pool, val, mode)
4343 struct constant_pool *pool;
4345 enum machine_mode mode;
4351 for (i = 0; i < NR_C_MODES; i++)
4352 if (constant_modes[i] == mode)
4354 if (i == NR_C_MODES)
4357 for (c = pool->constants[i]; c != NULL; c = c->next)
4358 if (rtx_equal_p (val, c->value))
4364 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4365 gen_rtx_LABEL_REF (Pmode, pool->label));
4366 offset = gen_rtx_CONST (Pmode, offset);
4370 /* Dump out the constants in POOL. */
4373 s390_dump_pool (pool)
4374 struct constant_pool *pool;
4380 /* Pool start insn switches to proper section
4381 and guarantees necessary alignment. */
4383 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4385 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4386 INSN_ADDRESSES_NEW (insn, -1);
4388 insn = emit_label_after (pool->label, insn);
4389 INSN_ADDRESSES_NEW (insn, -1);
4391 /* Dump constants in descending alignment requirement order,
4392 ensuring proper alignment for every constant. */
4393 for (i = 0; i < NR_C_MODES; i++)
4394 for (c = pool->constants[i]; c; c = c->next)
4396 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4397 rtx value = c->value;
4398 if (GET_CODE (value) == CONST
4399 && GET_CODE (XEXP (value, 0)) == UNSPEC
4400 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
4401 && XVECLEN (XEXP (value, 0), 0) == 1)
4403 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4404 gen_rtx_LABEL_REF (VOIDmode, pool->label));
4405 value = gen_rtx_CONST (VOIDmode, value);
4408 insn = emit_label_after (c->label, insn);
4409 INSN_ADDRESSES_NEW (insn, -1);
4410 insn = emit_insn_after (gen_consttable[i] (value), insn);
4411 INSN_ADDRESSES_NEW (insn, -1);
4414 /* Pool end insn switches back to previous section
4415 and guarantees necessary alignment. */
4417 insn = emit_insn_after (gen_pool_end_64 (), insn);
4419 insn = emit_insn_after (gen_pool_end_31 (), insn);
4420 INSN_ADDRESSES_NEW (insn, -1);
4422 insn = emit_barrier_after (insn);
4423 INSN_ADDRESSES_NEW (insn, -1);
4425 /* Remove placeholder insn. */
4426 remove_insn (pool->pool_insn);
4431 /* Free all memory used by POOL. */
4434 s390_free_pool (pool)
4435 struct constant_pool *pool;
4439 for (i = 0; i < NR_C_MODES; i++)
4441 struct constant *c = pool->constants[i];
4444 struct constant *next = c->next;
4450 BITMAP_XFREE (pool->insns);
4455 /* Chunkify the literal pool if required. */
4457 #define S390_POOL_CHUNK_MIN 0xc00
4458 #define S390_POOL_CHUNK_MAX 0xe00
4460 static struct constant_pool *
4461 s390_chunkify_start (void)
4463 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4465 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
4468 rtx pending_ltrel = NULL_RTX;
4471 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
4472 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
4475 /* Do we need to chunkify the literal pool? */
4477 if (get_pool_size () < S390_POOL_CHUNK_MAX)
4480 /* We need correct insn addresses. */
4482 shorten_branches (get_insns ());
4484 /* Scan all insns and move literals to pool chunks. */
4486 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4488 /* Check for pending LTREL_BASE. */
4491 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
4494 if (ltrel_base == pending_ltrel)
4495 pending_ltrel = NULL_RTX;
4501 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4503 rtx pool_ref = NULL_RTX;
4504 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4507 rtx constant = get_pool_constant (pool_ref);
4508 enum machine_mode mode = get_pool_mode (pool_ref);
4511 curr_pool = s390_start_pool (&pool_list, insn);
4513 s390_add_constant (curr_pool, constant, mode);
4514 s390_add_pool_insn (curr_pool, insn);
4516 /* Don't split the pool chunk between a LTREL_OFFSET load
4517 and the corresponding LTREL_BASE. */
4518 if (GET_CODE (constant) == CONST
4519 && GET_CODE (XEXP (constant, 0)) == UNSPEC
4520 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
4524 pending_ltrel = pool_ref;
4529 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4532 s390_add_pool_insn (curr_pool, insn);
4533 /* An LTREL_BASE must follow within the same basic block. */
4539 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4540 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4545 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4548 s390_end_pool (curr_pool, NULL_RTX);
4553 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4554 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4557 /* We will later have to insert base register reload insns.
4558 Those will have an effect on code size, which we need to
4559 consider here. This calculation makes rather pessimistic
4560 worst-case assumptions. */
4561 if (GET_CODE (insn) == CODE_LABEL)
4564 if (chunk_size < S390_POOL_CHUNK_MIN
4565 && curr_pool->size < S390_POOL_CHUNK_MIN)
4568 /* Pool chunks can only be inserted after BARRIERs ... */
4569 if (GET_CODE (insn) == BARRIER)
4571 s390_end_pool (curr_pool, insn);
4576 /* ... so if we don't find one in time, create one. */
4577 else if ((chunk_size > S390_POOL_CHUNK_MAX
4578 || curr_pool->size > S390_POOL_CHUNK_MAX))
4580 rtx label, jump, barrier;
4582 /* We can insert the barrier only after a 'real' insn. */
4583 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4585 if (get_attr_length (insn) == 0)
4588 /* Don't separate LTREL_BASE from the corresponding
4589 LTREL_OFFSET load. */
4593 label = gen_label_rtx ();
4594 jump = emit_jump_insn_after (gen_jump (label), insn);
4595 barrier = emit_barrier_after (jump);
4596 insn = emit_label_after (label, barrier);
4597 JUMP_LABEL (jump) = label;
4598 LABEL_NUSES (label) = 1;
4600 INSN_ADDRESSES_NEW (jump, -1);
4601 INSN_ADDRESSES_NEW (barrier, -1);
4602 INSN_ADDRESSES_NEW (insn, -1);
4604 s390_end_pool (curr_pool, barrier);
4612 s390_end_pool (curr_pool, NULL_RTX);
4617 /* Find all labels that are branched into
4618 from an insn belonging to a different chunk. */
4620 far_labels = BITMAP_XMALLOC ();
4622 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4624 /* Labels marked with LABEL_PRESERVE_P can be target
4625 of non-local jumps, so we have to mark them.
4626 The same holds for named labels.
4628 Don't do that, however, if it is the label before
4631 if (GET_CODE (insn) == CODE_LABEL
4632 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
4634 rtx vec_insn = next_real_insn (insn);
4635 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4636 PATTERN (vec_insn) : NULL_RTX;
4638 || !(GET_CODE (vec_pat) == ADDR_VEC
4639 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4640 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
4643 /* If we have a direct jump (conditional or unconditional)
4644 or a casesi jump, check all potential targets. */
4645 else if (GET_CODE (insn) == JUMP_INSN)
4647 rtx pat = PATTERN (insn);
4648 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4649 pat = XVECEXP (pat, 0, 0);
4651 if (GET_CODE (pat) == SET)
4653 rtx label = JUMP_LABEL (insn);
4656 if (s390_find_pool (pool_list, label)
4657 != s390_find_pool (pool_list, insn))
4658 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4661 else if (GET_CODE (pat) == PARALLEL
4662 && XVECLEN (pat, 0) == 2
4663 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
4664 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
4665 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
4667 /* Find the jump table used by this casesi jump. */
4668 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
4669 rtx vec_insn = next_real_insn (vec_label);
4670 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4671 PATTERN (vec_insn) : NULL_RTX;
4673 && (GET_CODE (vec_pat) == ADDR_VEC
4674 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4676 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
4678 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
4680 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
4682 if (s390_find_pool (pool_list, label)
4683 != s390_find_pool (pool_list, insn))
4684 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4691 /* Insert base register reload insns before every pool. */
4693 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4695 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
4696 rtx insn = curr_pool->first_insn;
4697 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
4700 /* Insert base register reload insns at every far label. */
4702 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4703 if (GET_CODE (insn) == CODE_LABEL
4704 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
4706 struct constant_pool *pool = s390_find_pool (pool_list, insn);
4709 rtx new_insn = gen_reload_base (base_reg, pool->label);
4710 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4715 BITMAP_XFREE (far_labels);
4718 /* Recompute insn addresses. */
4720 init_insn_lengths ();
4721 shorten_branches (get_insns ());
4726 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4727 After we have decided to use this list, finish implementing
4728 all changes to the current function as required. */
4731 s390_chunkify_finish (pool_list)
4732 struct constant_pool *pool_list;
4734 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4735 struct constant_pool *curr_pool = NULL;
4739 /* Replace all literal pool references. */
4741 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4744 replace_ltrel_base (&PATTERN (insn), base_reg);
4746 curr_pool = s390_find_pool (pool_list, insn);
4750 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4752 rtx addr, pool_ref = NULL_RTX;
4753 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4756 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4757 get_pool_mode (pool_ref));
4758 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4759 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4760 INSN_CODE (insn) = -1;
4765 /* Dump out all literal pools. */
4767 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4768 s390_dump_pool (curr_pool);
4770 /* Free pool list. */
4774 struct constant_pool *next = pool_list->next;
4775 s390_free_pool (pool_list);
4780 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4781 We have decided we cannot use this list, so revert all changes
4782 to the current function that were done by s390_chunkify_start. */
4785 s390_chunkify_cancel (pool_list)
4786 struct constant_pool *pool_list;
4788 struct constant_pool *curr_pool = NULL;
4791 /* Remove all pool placeholder insns. */
4793 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4795 /* Did we insert an extra barrier? Remove it. */
4796 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4797 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4798 rtx label = NEXT_INSN (curr_pool->pool_insn);
4800 if (jump && GET_CODE (jump) == JUMP_INSN
4801 && barrier && GET_CODE (barrier) == BARRIER
4802 && label && GET_CODE (label) == CODE_LABEL
4803 && GET_CODE (PATTERN (jump)) == SET
4804 && SET_DEST (PATTERN (jump)) == pc_rtx
4805 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4806 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4809 remove_insn (barrier);
4810 remove_insn (label);
4813 remove_insn (curr_pool->pool_insn);
4816 /* Remove all base register reload insns. */
4818 for (insn = get_insns (); insn; )
4820 rtx next_insn = NEXT_INSN (insn);
4822 if (GET_CODE (insn) == INSN
4823 && GET_CODE (PATTERN (insn)) == SET
4824 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4825 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
4831 /* Free pool list. */
4835 struct constant_pool *next = pool_list->next;
4836 s390_free_pool (pool_list);
4842 /* Index of constant pool chunk that is currently being processed.
4843 Set to -1 before function output has started. */
4844 int s390_pool_count = -1;
4846 /* Number of elements of current constant pool. */
4847 int s390_nr_constants;
4849 /* Output main constant pool to stdio stream FILE. */
4852 s390_output_constant_pool (start_label, end_label)
4858 readonly_data_section ();
4859 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4860 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4861 CODE_LABEL_NUMBER (start_label));
4865 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4866 CODE_LABEL_NUMBER (start_label));
4867 ASM_OUTPUT_ALIGN (asm_out_file, 2);
4870 s390_pool_count = 0;
4871 output_constant_pool (current_function_name, current_function_decl);
4872 s390_pool_count = -1;
4874 function_section (current_function_decl);
4877 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4878 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4879 CODE_LABEL_NUMBER (end_label));
4883 /* Rework the prolog/epilog to avoid saving/restoring
4884 registers unnecessarily. If TEMP_REGNO is nonnegative,
4885 it specifies the number of a caller-saved register used
4886 as temporary scratch register by code emitted during
4887 machine dependent reorg. */
4890 s390_optimize_prolog (temp_regno)
4893 int save_first, save_last, restore_first, restore_last;
4895 rtx insn, new_insn, next_insn;
4897 /* Recompute regs_ever_live data for special registers. */
4898 regs_ever_live[BASE_REGISTER] = 0;
4899 regs_ever_live[RETURN_REGNUM] = 0;
4900 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4902 /* If there is (possibly) any pool entry, we need to
4903 load the base register.
4904 ??? FIXME: this should be more precise. */
4905 if (get_pool_size ())
4906 regs_ever_live[BASE_REGISTER] = 1;
4908 /* In non-leaf functions, the prolog/epilog code relies
4909 on RETURN_REGNUM being saved in any case. */
4910 if (!current_function_is_leaf)
4911 regs_ever_live[RETURN_REGNUM] = 1;
4913 /* We need to save/restore the temporary register. */
4914 if (temp_regno >= 0)
4915 regs_ever_live[temp_regno] = 1;
4918 /* Find first and last gpr to be saved. */
4920 for (i = 6; i < 16; i++)
4921 if (regs_ever_live[i])
4923 || i == STACK_POINTER_REGNUM
4924 || i == RETURN_REGNUM
4925 || i == BASE_REGISTER
4926 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
4929 for (j = 15; j > i; j--)
4930 if (regs_ever_live[j])
4932 || j == STACK_POINTER_REGNUM
4933 || j == RETURN_REGNUM
4934 || j == BASE_REGISTER
4935 || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
4940 /* Nothing to save/restore. */
4941 save_first = restore_first = -1;
4942 save_last = restore_last = -1;
4946 /* Save/restore from i to j. */
4947 save_first = restore_first = i;
4948 save_last = restore_last = j;
4951 /* Varargs functions need to save gprs 2 to 6. */
4952 if (current_function_stdarg)
4960 /* If all special registers are in fact used, there's nothing we
4961 can do, so no point in walking the insn list. */
4962 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4963 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4967 /* Search for prolog/epilog insns and replace them. */
4969 for (insn = get_insns (); insn; insn = next_insn)
4971 int first, last, off;
4972 rtx set, base, offset;
4974 next_insn = NEXT_INSN (insn);
4976 if (GET_CODE (insn) != INSN)
4978 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4981 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4983 set = XVECEXP (PATTERN (insn), 0, 0);
4984 first = REGNO (SET_SRC (set));
4985 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4986 offset = const0_rtx;
4987 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4988 off = INTVAL (offset) - first * UNITS_PER_WORD;
4990 if (GET_CODE (base) != REG || off < 0)
4992 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4994 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4997 if (save_first != -1)
4999 new_insn = save_gprs (base, off, save_first, save_last);
5000 new_insn = emit_insn_before (new_insn, insn);
5001 INSN_ADDRESSES_NEW (new_insn, -1);
5007 if (load_multiple_operation (PATTERN (insn), VOIDmode))
5009 set = XVECEXP (PATTERN (insn), 0, 0);
5010 first = REGNO (SET_DEST (set));
5011 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5012 offset = const0_rtx;
5013 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5014 off = INTVAL (offset) - first * UNITS_PER_WORD;
5016 if (GET_CODE (base) != REG || off < 0)
5018 if (first > BASE_REGISTER && first > RETURN_REGNUM)
5020 if (last < BASE_REGISTER && last < RETURN_REGNUM)
5023 if (restore_first != -1)
5025 new_insn = restore_gprs (base, off, restore_first, restore_last);
5026 new_insn = emit_insn_before (new_insn, insn);
5027 INSN_ADDRESSES_NEW (new_insn, -1);
5035 /* Check whether any insn in the function makes use of the original
5036 value of RETURN_REG (e.g. for __builtin_return_address).
5037 If so, insert an insn reloading that value.
5039 Return true if any such insn was found. */
5042 s390_fixup_clobbered_return_reg (return_reg)
5045 bool replacement_done = 0;
5048 /* If we never called __builtin_return_address, register 14
5049 might have been used as temp during the prolog; we do
5050 not want to touch those uses. */
5051 if (!has_hard_reg_initial_val (Pmode, REGNO (return_reg)))
5054 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5056 rtx reg, off, new_insn;
5058 if (GET_CODE (insn) != INSN)
5060 if (!reg_referenced_p (return_reg, PATTERN (insn)))
5062 if (GET_CODE (PATTERN (insn)) == PARALLEL
5063 && store_multiple_operation (PATTERN (insn), VOIDmode))
5066 if (frame_pointer_needed)
5067 reg = hard_frame_pointer_rtx;
5069 reg = stack_pointer_rtx;
5071 off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
5072 if (!DISP_IN_RANGE (INTVAL (off)))
5074 off = force_const_mem (Pmode, off);
5075 new_insn = gen_rtx_SET (Pmode, return_reg, off);
5076 new_insn = emit_insn_before (new_insn, insn);
5077 INSN_ADDRESSES_NEW (new_insn, -1);
5081 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
5082 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
5083 new_insn = emit_insn_before (new_insn, insn);
5084 INSN_ADDRESSES_NEW (new_insn, -1);
5086 replacement_done = 1;
5089 return replacement_done;
5092 /* Perform machine-dependent processing. */
5097 bool fixed_up_clobbered_return_reg = 0;
5098 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5101 /* Make sure all splits have been performed; splits after
5102 machine_dependent_reorg might confuse insn length counts. */
5103 split_all_insns_noflow ();
5106 /* There are two problematic situations we need to correct:
5108 - the literal pool might be > 4096 bytes in size, so that
5109 some of its elements cannot be directly accessed
5111 - a branch target might be > 64K away from the branch, so that
5112 it is not possible to use a PC-relative instruction.
5114 To fix those, we split the single literal pool into multiple
5115 pool chunks, reloading the pool base register at various
5116 points throughout the function to ensure it always points to
5117 the pool chunk the following code expects, and / or replace
5118 PC-relative branches by absolute branches.
5120 However, the two problems are interdependent: splitting the
5121 literal pool can move a branch further away from its target,
5122 causing the 64K limit to overflow, and on the other hand,
5123 replacing a PC-relative branch by an absolute branch means
5124 we need to put the branch target address into the literal
5125 pool, possibly causing it to overflow.
5127 So, we loop trying to fix up both problems until we manage
5128 to satisfy both conditions at the same time. Note that the
5129 loop is guaranteed to terminate as every pass of the loop
5130 strictly decreases the total number of PC-relative branches
5131 in the function. (This is not completely true as there
5132 might be branch-over-pool insns introduced by chunkify_start.
5133 Those never need to be split however.) */
5137 struct constant_pool *pool_list;
5139 /* Try to chunkify the literal pool. */
5140 pool_list = s390_chunkify_start ();
5142 /* Split out-of-range branches. If this has created new
5143 literal pool entries, cancel current chunk list and
5145 if (s390_split_branches (temp_reg, &temp_used))
5148 s390_chunkify_cancel (pool_list);
5153 /* Check whether we have clobbered a use of the return
5154 register (e.g. for __builtin_return_address). If so,
5155 add insns reloading the register where necessary. */
5156 if (temp_used && !fixed_up_clobbered_return_reg
5157 && s390_fixup_clobbered_return_reg (temp_reg))
5159 fixed_up_clobbered_return_reg = 1;
5161 /* The fixup insns might have caused a jump to overflow. */
5163 s390_chunkify_cancel (pool_list);
5168 /* If we made it up to here, both conditions are satisfied.
5169 Finish up pool chunkification if required. */
5171 s390_chunkify_finish (pool_list);
5176 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
5180 /* Return an RTL expression representing the value of the return address
5181 for the frame COUNT steps up from the current frame. FRAME is the
5182 frame pointer of that frame. */
5185 s390_return_addr_rtx (count, frame)
5191 /* For the current frame, we use the initial value of RETURN_REGNUM.
5192 This works both in leaf and non-leaf functions. */
5195 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
5197 /* For frames farther back, we read the stack slot where the
5198 corresponding RETURN_REGNUM value was saved. */
5200 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5201 addr = memory_address (Pmode, addr);
5202 return gen_rtx_MEM (Pmode, addr);
5205 /* Find first call clobbered register unsused in a function.
5206 This could be used as base register in a leaf function
5207 or for holding the return address before epilogue. */
5210 find_unused_clobbered_reg ()
5213 for (i = 0; i < 6; i++)
5214 if (!regs_ever_live[i])
5219 /* Fill FRAME with info about frame of current function. */
5224 char gprs_ever_live[16];
5226 HOST_WIDE_INT fsize = get_frame_size ();
5228 if (fsize > 0x7fff0000)
5229 fatal_error ("Total size of local variables exceeds architecture limit.");
5231 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5232 cfun->machine->save_fprs_p = 0;
5234 for (i = 24; i < 32; i++)
5235 if (regs_ever_live[i] && !global_regs[i])
5237 cfun->machine->save_fprs_p = 1;
5241 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
5243 /* Does function need to setup frame and save area. */
5245 if (! current_function_is_leaf
5246 || cfun->machine->frame_size > 0
5247 || current_function_calls_alloca
5248 || current_function_stdarg)
5249 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5251 /* Find first and last gpr to be saved. Note that at this point,
5252 we assume the return register and the base register always
5253 need to be saved. This is done because the usage of these
5254 register might change even after the prolog was emitted.
5255 If it turns out later that we really don't need them, the
5256 prolog/epilog code is modified again. */
5258 for (i = 0; i < 16; i++)
5259 gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];
5262 gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
5263 regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
5264 gprs_ever_live[BASE_REGISTER] = 1;
5265 gprs_ever_live[RETURN_REGNUM] = 1;
5266 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5268 for (i = 6; i < 16; i++)
5269 if (gprs_ever_live[i])
5272 for (j = 15; j > i; j--)
5273 if (gprs_ever_live[j])
5277 /* Save / Restore from gpr i to j. */
5278 cfun->machine->first_save_gpr = i;
5279 cfun->machine->first_restore_gpr = i;
5280 cfun->machine->last_save_gpr = j;
5282 /* Varargs functions need to save gprs 2 to 6. */
5283 if (current_function_stdarg)
5284 cfun->machine->first_save_gpr = 2;
5287 /* Return offset between argument pointer and frame pointer
5288 initially after prologue. */
5291 s390_arg_frame_offset ()
5293 HOST_WIDE_INT fsize = get_frame_size ();
5296 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5299 for (i = 24; i < 32; i++)
5300 if (regs_ever_live[i] && !global_regs[i])
5306 fsize = fsize + save_fprs_p * 64;
5308 /* Does function need to setup frame and save area. */
5310 if (! current_function_is_leaf
5312 || current_function_calls_alloca
5313 || current_function_stdarg)
5314 fsize += STARTING_FRAME_OFFSET;
5315 return fsize + STACK_POINTER_OFFSET;
5318 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5319 to register BASE. Return generated insn. */
5322 save_fpr (base, offset, regnum)
5328 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5329 set_mem_alias_set (addr, s390_sr_alias_set);
5331 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5334 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5335 to register BASE. Return generated insn. */
5338 restore_fpr (base, offset, regnum)
5344 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5345 set_mem_alias_set (addr, s390_sr_alias_set);
5347 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5350 /* Generate insn to save registers FIRST to LAST into
5351 the register save area located at offset OFFSET
5352 relative to register BASE. */
5355 save_gprs (base, offset, first, last)
5361 rtx addr, insn, note;
5364 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5365 addr = gen_rtx_MEM (Pmode, addr);
5366 set_mem_alias_set (addr, s390_sr_alias_set);
5368 /* Special-case single register. */
5372 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
5374 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
5376 RTX_FRAME_RELATED_P (insn) = 1;
5381 insn = gen_store_multiple (addr,
5382 gen_rtx_REG (Pmode, first),
5383 GEN_INT (last - first + 1));
5386 /* We need to set the FRAME_RELATED flag on all SETs
5387 inside the store-multiple pattern.
5389 However, we must not emit DWARF records for registers 2..5
5390 if they are stored for use by variable arguments ...
5392 ??? Unfortunately, it is not enough to simply not the the
5393 FRAME_RELATED flags for those SETs, because the first SET
5394 of the PARALLEL is always treated as if it had the flag
5395 set, even if it does not. Therefore we emit a new pattern
5396 without those registers as REG_FRAME_RELATED_EXPR note. */
5400 rtx pat = PATTERN (insn);
5402 for (i = 0; i < XVECLEN (pat, 0); i++)
5403 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5404 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5406 RTX_FRAME_RELATED_P (insn) = 1;
5410 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5411 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5412 gen_rtx_REG (Pmode, 6),
5413 GEN_INT (last - 6 + 1));
5414 note = PATTERN (note);
5417 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5418 note, REG_NOTES (insn));
5420 for (i = 0; i < XVECLEN (note, 0); i++)
5421 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5422 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
5424 RTX_FRAME_RELATED_P (insn) = 1;
5430 /* Generate insn to restore registers FIRST to LAST from
5431 the register save area located at offset OFFSET
5432 relative to register BASE. */
5435 restore_gprs (base, offset, first, last)
5443 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5444 addr = gen_rtx_MEM (Pmode, addr);
5445 set_mem_alias_set (addr, s390_sr_alias_set);
5447 /* Special-case single register. */
5451 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5453 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5458 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5460 GEN_INT (last - first + 1));
5464 /* Emit code to load the GOT register. If MAYBE_DEAD is true,
5465 annotate generated insns with REG_MAYBE_DEAD notes. */
5467 static GTY(()) rtx got_symbol;
5469 s390_load_got (maybe_dead)
5474 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
5475 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
5480 rtx insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
5482 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5489 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
5490 UNSPEC_LTREL_OFFSET);
5491 offset = gen_rtx_CONST (Pmode, offset);
5492 offset = force_const_mem (Pmode, offset);
5494 insn = emit_move_insn (pic_offset_table_rtx, offset);
5496 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5499 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
5501 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
5503 insn = emit_move_insn (pic_offset_table_rtx, offset);
5505 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5510 /* Expand the prologue into a bunch of separate insns. */
5513 s390_emit_prologue ()
5517 rtx pool_start_label, pool_end_label;
5520 /* Compute frame_info. */
5524 /* Choose best register to use for temp use within prologue.
5525 See below for why TPF must use the register 1. */
5527 if (!current_function_is_leaf
5528 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
5529 && get_pool_size () < S390_POOL_CHUNK_MAX / 2
5531 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5533 temp_reg = gen_rtx_REG (Pmode, 1);
5535 /* Save call saved gprs. */
5537 insn = save_gprs (stack_pointer_rtx, 0,
5538 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
5541 /* Dump constant pool and set constant pool register. */
5543 pool_start_label = gen_label_rtx();
5544 pool_end_label = gen_label_rtx();
5545 cfun->machine->literal_pool_label = pool_start_label;
5548 insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
5549 pool_start_label, pool_end_label));
5551 insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
5552 pool_start_label, pool_end_label));
5554 /* Save fprs for variable args. */
5556 if (current_function_stdarg)
5558 /* Save fpr 0 and 2. */
5560 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
5561 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
5565 /* Save fpr 4 and 6. */
5567 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5568 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5572 /* Save fprs 4 and 6 if used (31 bit ABI). */
5576 /* Save fpr 4 and 6. */
5577 if (regs_ever_live[18] && !global_regs[18])
5579 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5580 RTX_FRAME_RELATED_P (insn) = 1;
5582 if (regs_ever_live[19] && !global_regs[19])
5584 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5585 RTX_FRAME_RELATED_P (insn) = 1;
5589 /* Decrement stack pointer. */
5591 if (cfun->machine->frame_size > 0)
5593 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5595 /* Save incoming stack pointer into temp reg. */
5597 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
5599 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
5602 /* Subtract frame size from stack pointer. */
5604 if (DISP_IN_RANGE (INTVAL (frame_off)))
5606 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5607 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5609 insn = emit_insn (insn);
5613 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5614 frame_off = force_const_mem (Pmode, frame_off);
5616 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
5619 RTX_FRAME_RELATED_P (insn) = 1;
5621 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5622 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5623 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5624 GEN_INT (-cfun->machine->frame_size))),
5627 /* Set backchain. */
5629 if (TARGET_BACKCHAIN)
5631 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
5632 set_mem_alias_set (addr, s390_sr_alias_set);
5633 insn = emit_insn (gen_move_insn (addr, temp_reg));
5636 /* If we support asynchronous exceptions (e.g. for Java),
5637 we need to make sure the backchain pointer is set up
5638 before any possibly trapping memory access. */
5640 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
5642 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
5643 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
5647 /* Save fprs 8 - 15 (64 bit ABI). */
5649 if (cfun->machine->save_fprs_p)
5651 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
5653 for (i = 24; i < 32; i++)
5654 if (regs_ever_live[i] && !global_regs[i])
5656 rtx addr = plus_constant (stack_pointer_rtx,
5657 cfun->machine->frame_size - 64 + (i-24)*8);
5659 insn = save_fpr (temp_reg, (i-24)*8, i);
5660 RTX_FRAME_RELATED_P (insn) = 1;
5662 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5663 gen_rtx_SET (VOIDmode,
5664 gen_rtx_MEM (DFmode, addr),
5665 gen_rtx_REG (DFmode, i)),
5670 /* Set frame pointer, if needed. */
5672 if (frame_pointer_needed)
5674 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
5675 RTX_FRAME_RELATED_P (insn) = 1;
5678 /* Set up got pointer, if needed. */
5680 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5681 s390_load_got(true);
5685 /* Generate a BAS instruction to serve as a function
5686 entry intercept to facilitate the use of tracing
5687 algorithms located at the branch target.
5689 This must use register 1. */
5694 addr = GEN_INT (0xfe0);
5695 unkn = CONST0_RTX (SImode);
5696 link = gen_rtx_REG (Pmode, 1);
5698 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
5700 /* Emit a blockage here so that all code
5701 lies between the profiling mechanisms. */
5702 emit_insn (gen_blockage ());
5706 /* Expand the epilogue into a bunch of separate insns. */
5709 s390_emit_epilogue ()
5711 rtx frame_pointer, return_reg;
5712 int area_bottom, area_top, offset = 0;
5718 /* Generate a BAS instruction to serve as a function
5719 entry intercept to facilitate the use of tracing
5720 algorithms located at the branch target.
5722 This must use register 1. */
5728 addr = GEN_INT (0xfe6);
5729 unkn = CONST0_RTX (SImode);
5730 link = gen_rtx_REG (Pmode, 1);
5732 /* Emit a blockage here so that all code
5733 lies between the profiling mechanisms. */
5734 emit_insn (gen_blockage ());
5736 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
5739 /* Check whether to use frame or stack pointer for restore. */
5741 frame_pointer = frame_pointer_needed ?
5742 hard_frame_pointer_rtx : stack_pointer_rtx;
5744 /* Compute which parts of the save area we need to access. */
5746 if (cfun->machine->first_restore_gpr != -1)
5748 area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
5749 area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
5753 area_bottom = INT_MAX;
5759 if (cfun->machine->save_fprs_p)
5761 if (area_bottom > -64)
5769 if (regs_ever_live[18] && !global_regs[18])
5771 if (area_bottom > STACK_POINTER_OFFSET - 16)
5772 area_bottom = STACK_POINTER_OFFSET - 16;
5773 if (area_top < STACK_POINTER_OFFSET - 8)
5774 area_top = STACK_POINTER_OFFSET - 8;
5776 if (regs_ever_live[19] && !global_regs[19])
5778 if (area_bottom > STACK_POINTER_OFFSET - 8)
5779 area_bottom = STACK_POINTER_OFFSET - 8;
5780 if (area_top < STACK_POINTER_OFFSET)
5781 area_top = STACK_POINTER_OFFSET;
5785 /* Check whether we can access the register save area.
5786 If not, increment the frame pointer as required. */
5788 if (area_top <= area_bottom)
5790 /* Nothing to restore. */
5792 else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
5793 && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
5795 /* Area is in range. */
5796 offset = cfun->machine->frame_size;
5800 rtx insn, frame_off;
5802 offset = area_bottom < 0 ? -area_bottom : 0;
5803 frame_off = GEN_INT (cfun->machine->frame_size - offset);
5805 if (DISP_IN_RANGE (INTVAL (frame_off)))
5807 insn = gen_rtx_SET (VOIDmode, frame_pointer,
5808 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
5809 insn = emit_insn (insn);
5813 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5814 frame_off = force_const_mem (Pmode, frame_off);
5816 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
5820 /* Restore call saved fprs. */
5826 if (cfun->machine->save_fprs_p)
5827 for (i = 24; i < 32; i++)
5828 if (regs_ever_live[i] && !global_regs[i])
5829 restore_fpr (frame_pointer,
5830 offset - 64 + (i-24) * 8, i);
5834 if (regs_ever_live[18] && !global_regs[18])
5835 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5836 if (regs_ever_live[19] && !global_regs[19])
5837 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
5840 /* Return register. */
5842 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5844 /* Restore call saved gprs. */
5846 if (cfun->machine->first_restore_gpr != -1)
5851 /* Check for global register and save them
5852 to stack location from where they get restored. */
5854 for (i = cfun->machine->first_restore_gpr;
5855 i <= cfun->machine->last_save_gpr;
5858 /* These registers are special and need to be
5859 restored in any case. */
5860 if (i == STACK_POINTER_REGNUM
5861 || i == RETURN_REGNUM
5862 || i == BASE_REGISTER
5863 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
5868 addr = plus_constant (frame_pointer,
5869 offset + i * UNITS_PER_WORD);
5870 addr = gen_rtx_MEM (Pmode, addr);
5871 set_mem_alias_set (addr, s390_sr_alias_set);
5872 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
5876 /* Fetch return address from stack before load multiple,
5877 this will do good for scheduling. */
5879 if (!current_function_is_leaf)
5881 int return_regnum = find_unused_clobbered_reg();
5884 return_reg = gen_rtx_REG (Pmode, return_regnum);
5886 addr = plus_constant (frame_pointer,
5887 offset + RETURN_REGNUM * UNITS_PER_WORD);
5888 addr = gen_rtx_MEM (Pmode, addr);
5889 set_mem_alias_set (addr, s390_sr_alias_set);
5890 emit_move_insn (return_reg, addr);
5893 /* ??? As references to the base register are not made
5894 explicit in insn RTX code, we have to add a barrier here
5895 to prevent incorrect scheduling. */
5897 emit_insn (gen_blockage());
5899 insn = restore_gprs (frame_pointer, offset,
5900 cfun->machine->first_restore_gpr,
5901 cfun->machine->last_save_gpr);
5905 /* Return to caller. */
5907 p = rtvec_alloc (2);
5909 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5910 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
5911 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
5915 /* Return the size in bytes of a function argument of
5916 type TYPE and/or mode MODE. At least one of TYPE or
5917 MODE must be specified. */
5920 s390_function_arg_size (mode, type)
5921 enum machine_mode mode;
5925 return int_size_in_bytes (type);
5927 /* No type info available for some library calls ... */
5928 if (mode != BLKmode)
5929 return GET_MODE_SIZE (mode);
5931 /* If we have neither type nor mode, abort */
5935 /* Return true if a function argument of type TYPE and mode MODE
5936 is to be passed in a floating-point register, if available. */
5939 s390_function_arg_float (mode, type)
5940 enum machine_mode mode;
5943 /* Soft-float changes the ABI: no floating-point registers are used. */
5944 if (TARGET_SOFT_FLOAT)
5947 /* No type info available for some library calls ... */
5949 return mode == SFmode || mode == DFmode;
5951 /* The ABI says that record types with a single member are treated
5952 just like that member would be. */
5953 while (TREE_CODE (type) == RECORD_TYPE)
5955 tree field, single = NULL_TREE;
5957 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
5959 if (TREE_CODE (field) != FIELD_DECL)
5962 if (single == NULL_TREE)
5963 single = TREE_TYPE (field);
5968 if (single == NULL_TREE)
5974 return TREE_CODE (type) == REAL_TYPE;
5977 /* Return 1 if a function argument of type TYPE and mode MODE
5978 is to be passed by reference. The ABI specifies that only
5979 structures of size 1, 2, 4, or 8 bytes are passed by value,
5980 all other structures (and complex numbers) are passed by
5984 s390_function_arg_pass_by_reference (mode, type)
5985 enum machine_mode mode;
5988 int size = s390_function_arg_size (mode, type);
5992 if (AGGREGATE_TYPE_P (type) &&
5993 size != 1 && size != 2 && size != 4 && size != 8
5994 && !s390_function_arg_float (mode, type))
5997 if (TREE_CODE (type) == COMPLEX_TYPE)
6004 /* Update the data in CUM to advance over an argument of mode MODE and
6005 data type TYPE. (TYPE is null for libcalls where that information
6006 may not be available.). The boolean NAMED specifies whether the
6007 argument is a named argument (as opposed to an unnamed argument
6008 matching an ellipsis). */
6011 s390_function_arg_advance (cum, mode, type, named)
6012 CUMULATIVE_ARGS *cum;
6013 enum machine_mode mode;
6015 int named ATTRIBUTE_UNUSED;
6017 if (s390_function_arg_pass_by_reference (mode, type))
6021 else if (s390_function_arg_float (mode, type))
6027 int size = s390_function_arg_size (mode, type);
6028 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
6032 /* Define where to put the arguments to a function.
6033 Value is zero to push the argument on the stack,
6034 or a hard register in which to store the argument.
6036 MODE is the argument's machine mode.
6037 TYPE is the data type of the argument (as a tree).
6038 This is null for libcalls where that information may
6040 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6041 the preceding args and about the function being called.
6042 NAMED is nonzero if this argument is a named parameter
6043 (otherwise it is an extra parameter matching an ellipsis).
6045 On S/390, we use general purpose registers 2 through 6 to
6046 pass integer, pointer, and certain structure arguments, and
6047 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6048 to pass floating point arguments. All remaining arguments
6049 are pushed to the stack. */
6052 s390_function_arg (cum, mode, type, named)
6053 CUMULATIVE_ARGS *cum;
6054 enum machine_mode mode;
6056 int named ATTRIBUTE_UNUSED;
6058 if (s390_function_arg_pass_by_reference (mode, type))
6061 if (s390_function_arg_float (mode, type))
6063 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
6066 return gen_rtx (REG, mode, cum->fprs + 16);
6070 int size = s390_function_arg_size (mode, type);
6071 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
6073 if (cum->gprs + n_gprs > 5)
6076 return gen_rtx (REG, mode, cum->gprs + 2);
6081 /* Create and return the va_list datatype.
6083 On S/390, va_list is an array type equivalent to
6085 typedef struct __va_list_tag
6089 void *__overflow_arg_area;
6090 void *__reg_save_area;
6094 where __gpr and __fpr hold the number of general purpose
6095 or floating point arguments used up to now, respectively,
6096 __overflow_arg_area points to the stack location of the
6097 next argument passed on the stack, and __reg_save_area
6098 always points to the start of the register area in the
6099 call frame of the current function. The function prologue
6100 saves all registers used for argument passing into this
6101 area if the function uses variable arguments. */
6104 s390_build_va_list ()
6106 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
6108 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
6111 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
6113 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
6114 long_integer_type_node);
6115 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
6116 long_integer_type_node);
6117 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
6119 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
6122 DECL_FIELD_CONTEXT (f_gpr) = record;
6123 DECL_FIELD_CONTEXT (f_fpr) = record;
6124 DECL_FIELD_CONTEXT (f_ovf) = record;
6125 DECL_FIELD_CONTEXT (f_sav) = record;
6127 TREE_CHAIN (record) = type_decl;
6128 TYPE_NAME (record) = type_decl;
6129 TYPE_FIELDS (record) = f_gpr;
6130 TREE_CHAIN (f_gpr) = f_fpr;
6131 TREE_CHAIN (f_fpr) = f_ovf;
6132 TREE_CHAIN (f_ovf) = f_sav;
6134 layout_type (record);
6136 /* The correct type is an array type of one element. */
6137 return build_array_type (record, build_index_type (size_zero_node));
6140 /* Implement va_start by filling the va_list structure VALIST.
6141 STDARG_P is always true, and ignored.
6142 NEXTARG points to the first anonymous stack argument.
6144 The following global variables are used to initialize
6145 the va_list structure:
6147 current_function_args_info:
6148 holds number of gprs and fprs used for named arguments.
6149 current_function_arg_offset_rtx:
6150 holds the offset of the first anonymous stack argument
6151 (relative to the virtual arg pointer). */
6154 s390_va_start (valist, nextarg)
6156 rtx nextarg ATTRIBUTE_UNUSED;
6158 HOST_WIDE_INT n_gpr, n_fpr;
6160 tree f_gpr, f_fpr, f_ovf, f_sav;
6161 tree gpr, fpr, ovf, sav, t;
6163 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6164 f_fpr = TREE_CHAIN (f_gpr);
6165 f_ovf = TREE_CHAIN (f_fpr);
6166 f_sav = TREE_CHAIN (f_ovf);
6168 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6169 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6170 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6171 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6172 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6174 /* Count number of gp and fp argument registers used. */
6176 n_gpr = current_function_args_info.gprs;
6177 n_fpr = current_function_args_info.fprs;
6179 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
6180 TREE_SIDE_EFFECTS (t) = 1;
6181 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6183 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
6184 TREE_SIDE_EFFECTS (t) = 1;
6185 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6187 /* Find the overflow area. */
6188 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
6190 off = INTVAL (current_function_arg_offset_rtx);
6191 off = off < 0 ? 0 : off;
6192 if (TARGET_DEBUG_ARG)
6193 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
6194 (int)n_gpr, (int)n_fpr, off);
6196 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
6198 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6199 TREE_SIDE_EFFECTS (t) = 1;
6200 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6202 /* Find the register save area. */
6203 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
6204 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
6205 build_int_2 (-STACK_POINTER_OFFSET, -1));
6206 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6207 TREE_SIDE_EFFECTS (t) = 1;
6208 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6211 /* Implement va_arg by updating the va_list structure
6212 VALIST as required to retrieve an argument of type
6213 TYPE, and returning that argument.
6215 Generates code equivalent to:
6217 if (integral value) {
6218 if (size <= 4 && args.gpr < 5 ||
6219 size > 4 && args.gpr < 4 )
6220 ret = args.reg_save_area[args.gpr+8]
6222 ret = *args.overflow_arg_area++;
6223 } else if (float value) {
6225 ret = args.reg_save_area[args.fpr+64]
6227 ret = *args.overflow_arg_area++;
6228 } else if (aggregate value) {
6230 ret = *args.reg_save_area[args.gpr]
6232 ret = **args.overflow_arg_area++;
6236 s390_va_arg (valist, type)
6240 tree f_gpr, f_fpr, f_ovf, f_sav;
6241 tree gpr, fpr, ovf, sav, reg, t, u;
6242 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
6243 rtx lab_false, lab_over, addr_rtx, r;
6245 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6246 f_fpr = TREE_CHAIN (f_gpr);
6247 f_ovf = TREE_CHAIN (f_fpr);
6248 f_sav = TREE_CHAIN (f_ovf);
6250 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6251 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6252 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6253 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6254 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6256 size = int_size_in_bytes (type);
6258 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
6260 if (TARGET_DEBUG_ARG)
6262 fprintf (stderr, "va_arg: aggregate type");
6266 /* Aggregates are passed by reference. */
6270 sav_ofs = 2 * UNITS_PER_WORD;
6271 sav_scale = UNITS_PER_WORD;
6272 size = UNITS_PER_WORD;
6275 else if (s390_function_arg_float (TYPE_MODE (type), type))
6277 if (TARGET_DEBUG_ARG)
6279 fprintf (stderr, "va_arg: float type");
6283 /* FP args go in FP registers, if present. */
6287 sav_ofs = 16 * UNITS_PER_WORD;
6289 /* TARGET_64BIT has up to 4 parameter in fprs */
6290 max_reg = TARGET_64BIT ? 3 : 1;
6294 if (TARGET_DEBUG_ARG)
6296 fprintf (stderr, "va_arg: other type");
6300 /* Otherwise into GP registers. */
6303 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6304 sav_ofs = 2 * UNITS_PER_WORD;
6306 if (size < UNITS_PER_WORD)
6307 sav_ofs += UNITS_PER_WORD - size;
6309 sav_scale = UNITS_PER_WORD;
6316 /* Pull the value out of the saved registers ... */
6318 lab_false = gen_label_rtx ();
6319 lab_over = gen_label_rtx ();
6320 addr_rtx = gen_reg_rtx (Pmode);
6322 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
6324 GT, const1_rtx, Pmode, 0, lab_false);
6327 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
6331 u = build (MULT_EXPR, long_integer_type_node,
6332 reg, build_int_2 (sav_scale, 0));
6333 TREE_SIDE_EFFECTS (u) = 1;
6335 t = build (PLUS_EXPR, ptr_type_node, t, u);
6336 TREE_SIDE_EFFECTS (t) = 1;
6338 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6340 emit_move_insn (addr_rtx, r);
6343 emit_jump_insn (gen_jump (lab_over));
6345 emit_label (lab_false);
6347 /* ... Otherwise out of the overflow area. */
6349 t = save_expr (ovf);
6352 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
6353 if (size < UNITS_PER_WORD)
6355 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
6356 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6357 TREE_SIDE_EFFECTS (t) = 1;
6358 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6360 t = save_expr (ovf);
6363 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6365 emit_move_insn (addr_rtx, r);
6367 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
6368 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6369 TREE_SIDE_EFFECTS (t) = 1;
6370 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6372 emit_label (lab_over);
6374 /* If less than max_regs a registers are retrieved out
6375 of register save area, increment. */
6377 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6378 build_int_2 (n_reg, 0));
6379 TREE_SIDE_EFFECTS (u) = 1;
6380 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
6384 r = gen_rtx_MEM (Pmode, addr_rtx);
6385 set_mem_alias_set (r, get_varargs_alias_set ());
6386 emit_move_insn (addr_rtx, r);
6398 S390_BUILTIN_THREAD_POINTER,
6399 S390_BUILTIN_SET_THREAD_POINTER,
6404 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
6409 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
6415 s390_init_builtins ()
6419 ftype = build_function_type (ptr_type_node, void_list_node);
6420 builtin_function ("__builtin_thread_pointer", ftype,
6421 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6424 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6425 builtin_function ("__builtin_set_thread_pointer", ftype,
6426 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6430 /* Expand an expression EXP that calls a built-in function,
6431 with result going to TARGET if that's convenient
6432 (and in mode MODE if that's convenient).
6433 SUBTARGET may be used as the target for computing one of EXP's operands.
6434 IGNORE is nonzero if the value is to be ignored. */
6437 s390_expand_builtin (exp, target, subtarget, mode, ignore)
6440 rtx subtarget ATTRIBUTE_UNUSED;
6441 enum machine_mode mode ATTRIBUTE_UNUSED;
6442 int ignore ATTRIBUTE_UNUSED;
6446 unsigned int const *code_for_builtin =
6447 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6449 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6450 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6451 tree arglist = TREE_OPERAND (exp, 1);
6452 enum insn_code icode;
6453 rtx op[MAX_ARGS], pat;
6457 if (fcode >= S390_BUILTIN_max)
6458 internal_error ("bad builtin fcode");
6459 icode = code_for_builtin[fcode];
6461 internal_error ("bad builtin fcode");
6463 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
6465 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6467 arglist = TREE_CHAIN (arglist), arity++)
6469 const struct insn_operand_data *insn_op;
6471 tree arg = TREE_VALUE (arglist);
6472 if (arg == error_mark_node)
6474 if (arity > MAX_ARGS)
6477 insn_op = &insn_data[icode].operand[arity + nonvoid];
6479 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6481 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6482 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
6487 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6489 || GET_MODE (target) != tmode
6490 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6491 target = gen_reg_rtx (tmode);
6497 pat = GEN_FCN (icode) (target);
6501 pat = GEN_FCN (icode) (target, op[0]);
6503 pat = GEN_FCN (icode) (op[0]);
6506 pat = GEN_FCN (icode) (target, op[0], op[1]);
6522 /* Output assembly code for the trampoline template to
6525 On S/390, we use gpr 1 internally in the trampoline code;
6526 gpr 0 is used to hold the static chain. */
6529 s390_trampoline_template (file)
6534 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6535 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6536 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6537 fprintf (file, "br\t%s\n", reg_names[1]);
6538 fprintf (file, "0:\t.quad\t0\n");
6539 fprintf (file, ".quad\t0\n");
6543 fprintf (file, "basr\t%s,0\n", reg_names[1]);
6544 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6545 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6546 fprintf (file, "br\t%s\n", reg_names[1]);
6547 fprintf (file, ".long\t0\n");
6548 fprintf (file, ".long\t0\n");
6552 /* Emit RTL insns to initialize the variable parts of a trampoline.
6553 FNADDR is an RTX for the address of the function's pure code.
6554 CXT is an RTX for the static chain value for the function. */
6557 s390_initialize_trampoline (addr, fnaddr, cxt)
6562 emit_move_insn (gen_rtx
6564 memory_address (Pmode,
6565 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
6566 emit_move_insn (gen_rtx
6568 memory_address (Pmode,
6569 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6572 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6573 LOW and HIGH, independent of the host word size. */
6576 s390_gen_rtx_const_DI (high, low)
6580 #if HOST_BITS_PER_WIDE_INT >= 64
6582 val = (HOST_WIDE_INT)high;
6584 val |= (HOST_WIDE_INT)low;
6586 return GEN_INT (val);
6588 #if HOST_BITS_PER_WIDE_INT >= 32
6589 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6596 /* Output assembler code to FILE to increment profiler label # LABELNO
6597 for profiling a function entry. */
6600 s390_function_profiler (file, labelno)
6607 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6609 fprintf (file, "# function profiler \n");
6611 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6612 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6613 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6615 op[2] = gen_rtx_REG (Pmode, 1);
6616 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6617 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
6619 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
6622 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
6623 op[4] = gen_rtx_CONST (Pmode, op[4]);
6628 output_asm_insn ("stg\t%0,%1", op);
6629 output_asm_insn ("larl\t%2,%3", op);
6630 output_asm_insn ("brasl\t%0,%4", op);
6631 output_asm_insn ("lg\t%0,%1", op);
6635 op[6] = gen_label_rtx ();
6637 output_asm_insn ("st\t%0,%1", op);
6638 output_asm_insn ("bras\t%2,%l6", op);
6639 output_asm_insn (".long\t%4", op);
6640 output_asm_insn (".long\t%3", op);
6641 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6642 output_asm_insn ("l\t%0,0(%2)", op);
6643 output_asm_insn ("l\t%2,4(%2)", op);
6644 output_asm_insn ("basr\t%0,%0", op);
6645 output_asm_insn ("l\t%0,%1", op);
6649 op[5] = gen_label_rtx ();
6650 op[6] = gen_label_rtx ();
6652 output_asm_insn ("st\t%0,%1", op);
6653 output_asm_insn ("bras\t%2,%l6", op);
6654 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[5]));
6655 output_asm_insn (".long\t%4-%l5", op);
6656 output_asm_insn (".long\t%3-%l5", op);
6657 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6658 output_asm_insn ("lr\t%0,%2", op);
6659 output_asm_insn ("a\t%0,0(%2)", op);
6660 output_asm_insn ("a\t%2,4(%2)", op);
6661 output_asm_insn ("basr\t%0,%0", op);
6662 output_asm_insn ("l\t%0,%1", op);
6666 /* Select section for constant in constant pool. In 32-bit mode,
6667 constants go in the function section; in 64-bit mode in .rodata. */
6670 s390_select_rtx_section (mode, x, align)
6671 enum machine_mode mode ATTRIBUTE_UNUSED;
6672 rtx x ATTRIBUTE_UNUSED;
6673 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
6676 readonly_data_section ();
6678 function_section (current_function_decl);
6681 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6682 into its SYMBOL_REF_FLAGS. */
6685 s390_encode_section_info (decl, rtl, first)
6690 default_encode_section_info (decl, rtl, first);
6692 /* If a variable has a forced alignment to < 2 bytes, mark it with
6693 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
6694 if (TREE_CODE (decl) == VAR_DECL
6695 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
6696 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
6699 /* Output thunk to FILE that implements a C++ virtual function call (with
6700 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
6701 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6702 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6703 relative to the resulting this pointer. */
6706 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
6708 tree thunk ATTRIBUTE_UNUSED;
6709 HOST_WIDE_INT delta;
6710 HOST_WIDE_INT vcall_offset;
6716 /* Operand 0 is the target function. */
6717 op[0] = XEXP (DECL_RTL (function), 0);
6718 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
6721 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
6722 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
6723 op[0] = gen_rtx_CONST (Pmode, op[0]);
6726 /* Operand 1 is the 'this' pointer. */
6727 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6728 op[1] = gen_rtx_REG (Pmode, 3);
6730 op[1] = gen_rtx_REG (Pmode, 2);
6732 /* Operand 2 is the delta. */
6733 op[2] = GEN_INT (delta);
6735 /* Operand 3 is the vcall_offset. */
6736 op[3] = GEN_INT (vcall_offset);
6738 /* Operand 4 is the temporary register. */
6739 op[4] = gen_rtx_REG (Pmode, 1);
6741 /* Operands 5 to 8 can be used as labels. */
6747 /* Operand 9 can be used for temporary register. */
6750 /* Generate code. */
6753 /* Setup literal pool pointer if required. */
6754 if ((!DISP_IN_RANGE (delta)
6755 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6756 || (!DISP_IN_RANGE (vcall_offset)
6757 && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6759 op[5] = gen_label_rtx ();
6760 output_asm_insn ("larl\t%4,%5", op);
6763 /* Add DELTA to this pointer. */
6766 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6767 output_asm_insn ("la\t%1,%2(%1)", op);
6768 else if (DISP_IN_RANGE (delta))
6769 output_asm_insn ("lay\t%1,%2(%1)", op);
6770 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6771 output_asm_insn ("aghi\t%1,%2", op);
6774 op[6] = gen_label_rtx ();
6775 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6779 /* Perform vcall adjustment. */
6782 if (DISP_IN_RANGE (vcall_offset))
6784 output_asm_insn ("lg\t%4,0(%1)", op);
6785 output_asm_insn ("ag\t%1,%3(%4)", op);
6787 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6789 output_asm_insn ("lghi\t%4,%3", op);
6790 output_asm_insn ("ag\t%4,0(%1)", op);
6791 output_asm_insn ("ag\t%1,0(%4)", op);
6795 op[7] = gen_label_rtx ();
6796 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6797 output_asm_insn ("ag\t%4,0(%1)", op);
6798 output_asm_insn ("ag\t%1,0(%4)", op);
6802 /* Jump to target. */
6803 output_asm_insn ("jg\t%0", op);
6805 /* Output literal pool if required. */
6808 output_asm_insn (".align\t4", op);
6809 (*targetm.asm_out.internal_label) (file, "L",
6810 CODE_LABEL_NUMBER (op[5]));
6814 (*targetm.asm_out.internal_label) (file, "L",
6815 CODE_LABEL_NUMBER (op[6]));
6816 output_asm_insn (".long\t%2", op);
6820 (*targetm.asm_out.internal_label) (file, "L",
6821 CODE_LABEL_NUMBER (op[7]));
6822 output_asm_insn (".long\t%3", op);
6827 /* Setup base pointer if required. */
6829 || (!DISP_IN_RANGE (delta)
6830 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6831 || (!DISP_IN_RANGE (delta)
6832 && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
6834 op[5] = gen_label_rtx ();
6835 output_asm_insn ("basr\t%4,0", op);
6836 (*targetm.asm_out.internal_label) (file, "L",
6837 CODE_LABEL_NUMBER (op[5]));
6840 /* Add DELTA to this pointer. */
6843 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6844 output_asm_insn ("la\t%1,%2(%1)", op);
6845 else if (DISP_IN_RANGE (delta))
6846 output_asm_insn ("lay\t%1,%2(%1)", op);
6847 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6848 output_asm_insn ("ahi\t%1,%2", op);
6851 op[6] = gen_label_rtx ();
6852 output_asm_insn ("a\t%1,%6-%5(%4)", op);
6856 /* Perform vcall adjustment. */
6859 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6861 output_asm_insn ("lg\t%4,0(%1)", op);
6862 output_asm_insn ("a\t%1,%3(%4)", op);
6864 else if (DISP_IN_RANGE (vcall_offset))
6866 output_asm_insn ("lg\t%4,0(%1)", op);
6867 output_asm_insn ("ay\t%1,%3(%4)", op);
6869 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6871 output_asm_insn ("lhi\t%4,%3", op);
6872 output_asm_insn ("a\t%4,0(%1)", op);
6873 output_asm_insn ("a\t%1,0(%4)", op);
6877 op[7] = gen_label_rtx ();
6878 output_asm_insn ("l\t%4,%7-%5(%4)", op);
6879 output_asm_insn ("a\t%4,0(%1)", op);
6880 output_asm_insn ("a\t%1,0(%4)", op);
6883 /* We had to clobber the base pointer register.
6884 Re-setup the base pointer (with a different base). */
6885 op[5] = gen_label_rtx ();
6886 output_asm_insn ("basr\t%4,0", op);
6887 (*targetm.asm_out.internal_label) (file, "L",
6888 CODE_LABEL_NUMBER (op[5]));
6891 /* Jump to target. */
6892 op[8] = gen_label_rtx ();
6895 output_asm_insn ("l\t%4,%8-%5(%4)", op);
6897 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6898 /* We cannot call through .plt, since .plt requires %r12 loaded. */
6899 else if (flag_pic == 1)
6901 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6902 output_asm_insn ("l\t%4,%0(%4)", op);
6904 else if (flag_pic == 2)
6906 op[9] = gen_rtx_REG (Pmode, 0);
6907 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
6908 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6909 output_asm_insn ("ar\t%4,%9", op);
6910 output_asm_insn ("l\t%4,0(%4)", op);
6913 output_asm_insn ("br\t%4", op);
6915 /* Output literal pool. */
6916 output_asm_insn (".align\t4", op);
6918 if (nonlocal && flag_pic == 2)
6919 output_asm_insn (".long\t%0", op);
6922 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6923 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
6926 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[8]));
6928 output_asm_insn (".long\t%0", op);
6930 output_asm_insn (".long\t%0-%5", op);
6934 (*targetm.asm_out.internal_label) (file, "L",
6935 CODE_LABEL_NUMBER (op[6]));
6936 output_asm_insn (".long\t%2", op);
6940 (*targetm.asm_out.internal_label) (file, "L",
6941 CODE_LABEL_NUMBER (op[7]));
6942 output_asm_insn (".long\t%3", op);
6948 s390_valid_pointer_mode (mode)
6949 enum machine_mode mode;
6951 return (mode == SImode || (TARGET_64BIT && mode == DImode));
6954 /* How to allocate a 'struct machine_function'. */
6956 static struct machine_function *
6957 s390_init_machine_status ()
6959 return ggc_alloc_cleared (sizeof (struct machine_function));
6962 #include "gt-s390.h"