/* Subroutines used for code generation on IBM S/390 and zSeries
   Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
   Contributed by Hartmut Penner (hpenner@de.ibm.com) and
                  Ulrich Weigand (uweigand@de.ibm.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "conditions.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "recog.h"
#include "expr.h"
#include "reload.h"
#include "toplev.h"
#include "basic-block.h"
#include "integrate.h"
#include "ggc.h"
#include "target.h"
#include "target-def.h"
#include "debug.h"
#include "langhooks.h"
#include "optabs.h"
/* Machine-specific symbol_ref flags.  */
#define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)

static bool s390_assemble_integer (rtx, unsigned int, int);
static void s390_select_rtx_section (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static void s390_encode_section_info (tree, rtx, int);
static bool s390_cannot_force_const_mem (rtx);
static rtx s390_delegitimize_address (rtx);
static void s390_init_builtins (void);
static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                  HOST_WIDE_INT, tree);
static enum attr_type s390_safe_attr_type (rtx);

static int s390_adjust_cost (rtx, rtx, rtx, int);
static int s390_adjust_priority (rtx, int);
static int s390_issue_rate (void);
static int s390_use_dfa_pipeline_interface (void);
static int s390_first_cycle_multipass_dfa_lookahead (void);
static int s390_sched_reorder2 (FILE *, int, rtx *, int *, int);
static bool s390_rtx_costs (rtx, int, int, int *);
static int s390_address_cost (rtx);
static void s390_reorg (void);
static bool s390_valid_pointer_mode (enum machine_mode);
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef  TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER s390_assemble_integer

#undef  TARGET_ASM_OPEN_PAREN
#define TARGET_ASM_OPEN_PAREN ""

#undef  TARGET_ASM_CLOSE_PAREN
#define TARGET_ASM_CLOSE_PAREN ""

#undef  TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

#undef  TARGET_HAVE_TLS
#define TARGET_HAVE_TLS true

#undef  TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem

#undef  TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address

#undef  TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS s390_init_builtins
#undef  TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN s390_expand_builtin

#undef  TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
#undef  TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef  TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST s390_adjust_cost
#undef  TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE s390_issue_rate
#undef  TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
#define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
#undef  TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
#define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
#undef  TARGET_SCHED_REORDER2
#define TARGET_SCHED_REORDER2 s390_sched_reorder2

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS s390_rtx_costs
#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST s390_address_cost

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG s390_reorg

#undef  TARGET_VALID_POINTER_MODE
#define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
struct gcc_target targetm = TARGET_INITIALIZER;

extern int reload_completed;

/* The alias set for prologue/epilogue register save/restore.  */
static int s390_sr_alias_set = 0;

/* Save information from a "cmpxx" operation until the branch
   or scc is emitted.  */
rtx s390_compare_op0, s390_compare_op1;
/* Structure used to hold the components of a S/390 memory
   address.  A legitimate address on S/390 is of the general
   form
        base + index + displacement
   where any of the components is optional.

   base and index are registers of the class ADDR_REGS,
   displacement is an unsigned 12-bit immediate constant.  */

struct s390_address
{
  rtx base;
  rtx indx;
  rtx disp;
  int pointer;
};
/* Which cpu are we tuning for.  */
enum processor_type s390_tune;
enum processor_flags s390_tune_flags;
/* Which instruction set architecture to use.  */
enum processor_type s390_arch;
enum processor_flags s390_arch_flags;

/* Strings to hold which cpu and instruction set architecture to use.  */
const char *s390_tune_string;		/* for -mtune=<xxx> */
const char *s390_arch_string;		/* for -march=<xxx> */
/* Define the structure for the machine field in struct function.  */

struct machine_function GTY(())
{
  /* Label of start of initial literal pool.  */
  rtx literal_pool_label;

  /* Set, if some of the fprs 8-15 need to be saved (64 bit abi).  */
  int save_fprs_p;

  /* Number of first and last gpr to be saved, restored.  */
  int first_save_gpr;
  int first_restore_gpr;
  int last_save_gpr;
  int last_restore_gpr;

  /* Size of stack frame.  */
  HOST_WIDE_INT frame_size;

  /* Some local-dynamic TLS symbol name.  */
  const char *some_ld_name;
};
static int s390_match_ccmode_set (rtx, enum machine_mode);
static int s390_branch_condition_mask (rtx);
static const char *s390_branch_condition_mnemonic (rtx, int);
static int check_mode (rtx, enum machine_mode *);
static int general_s_operand (rtx, enum machine_mode, int);
static int s390_short_displacement (rtx);
static int s390_decompose_address (rtx, struct s390_address *);
static rtx get_thread_pointer (void);
static rtx legitimize_tls_address (rtx, rtx);
static const char *get_some_local_dynamic_name (void);
static int get_some_local_dynamic_name_1 (rtx *, void *);
static int reg_used_in_mem_p (int, rtx);
static int addr_generation_dependency_p (rtx, rtx);
static int s390_split_branches (rtx, bool *);
static void find_constant_pool_ref (rtx, rtx *);
static void replace_constant_pool_ref (rtx *, rtx, rtx);
static rtx find_ltrel_base (rtx);
static void replace_ltrel_base (rtx *, rtx);
static void s390_optimize_prolog (int);
static bool s390_fixup_clobbered_return_reg (rtx);
static int find_unused_clobbered_reg (void);
static void s390_frame_info (void);
static rtx save_fpr (rtx, int, int);
static rtx restore_fpr (rtx, int, int);
static rtx save_gprs (rtx, int, int, int);
static rtx restore_gprs (rtx, int, int, int);
static int s390_function_arg_size (enum machine_mode, tree);
static bool s390_function_arg_float (enum machine_mode, tree);
static struct machine_function * s390_init_machine_status (void);
/* Check whether integer displacement is in range.  */
#define DISP_IN_RANGE(d) \
  (TARGET_LONG_DISPLACEMENT ? ((d) >= -524288 && (d) <= 524287) \
                            : ((d) >= 0 && (d) <= 4095))
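
/* For example, with long displacements the signed 20-bit field of the
   "Y"-form instructions applies, while plain ESA/390 only has the
   unsigned 12-bit displacement field:

     DISP_IN_RANGE (4095)     evaluates to true on either target
     DISP_IN_RANGE (-1)       true only with TARGET_LONG_DISPLACEMENT
     DISP_IN_RANGE (524288)   false on either target  */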
/* Return true if SET either doesn't set the CC register, or else
   the source and destination have matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.  */

static int
s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
{
  enum machine_mode set_mode;

  if (GET_CODE (set) != SET)
    abort ();

  if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
    return 1;

  set_mode = GET_MODE (SET_DEST (set));
  switch (set_mode)
    {
    case CCSmode:
    case CCSRmode:
    case CCUmode:
    case CCURmode:
    case CCLmode:
    case CCL1mode:
    case CCL2mode:
    case CCT1mode:
    case CCT2mode:
    case CCT3mode:
      if (req_mode != set_mode)
        return 0;
      break;

    case CCZmode:
      if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
          && req_mode != CCSRmode && req_mode != CCURmode)
        return 0;
      break;

    case CCAPmode:
    case CCANmode:
      if (req_mode != CCAmode)
        return 0;
      break;

    default:
      abort ();
    }

  return (GET_MODE (SET_SRC (set)) == set_mode);
}
/* Return true if every SET in INSN that sets the CC register
   has source and destination with matching CC modes and that
   CC mode is at least as constrained as REQ_MODE.
   If REQ_MODE is VOIDmode, always return false.  */

int
s390_match_ccmode (rtx insn, enum machine_mode req_mode)
{
  int i;

  /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
  if (req_mode == VOIDmode)
    return 0;

  if (GET_CODE (PATTERN (insn)) == SET)
    return s390_match_ccmode_set (PATTERN (insn), req_mode);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
      {
        rtx set = XVECEXP (PATTERN (insn), 0, i);
        if (GET_CODE (set) == SET)
          if (!s390_match_ccmode_set (set, req_mode))
            return 0;
      }

  return 1;
}
/* If a test-under-mask instruction can be used to implement
   (compare (and ... OP1) OP2), return the CC mode required
   to do that.  Otherwise, return VOIDmode.
   MIXED is true if the instruction can distinguish between
   CC1 and CC2 for mixed selected bits (TMxx), it is false
   if the instruction cannot (TM).  */

enum machine_mode
s390_tm_ccmode (rtx op1, rtx op2, int mixed)
{
  int bit0, bit1;

  /* ??? Fixme: should work on CONST_DOUBLE as well.  */
  if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
    return VOIDmode;

  /* Selected bits all zero: CC0.  */
  if (INTVAL (op2) == 0)
    return CCTmode;

  /* Selected bits all one: CC3.  */
  if (INTVAL (op2) == INTVAL (op1))
    return CCT3mode;

  /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
  if (mixed)
    {
      bit1 = exact_log2 (INTVAL (op2));
      bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
      if (bit0 != -1 && bit1 != -1)
        return bit0 > bit1 ? CCT1mode : CCT2mode;
    }

  return VOIDmode;
}
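
/* A worked example of the two-bit case: for (compare (and x 0x30) 0x20)
   the selected bits are 0x10 and 0x20.  Then bit1 = exact_log2 (0x20) = 5
   and bit0 = exact_log2 (0x30 ^ 0x20) = 4; since bit0 > bit1 is false,
   CCT2mode is chosen.  */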
/* Given a comparison code OP (EQ, NE, etc.) and the operands
   OP0 and OP1 of a COMPARE, return the mode to be used for the
   comparison.  */

enum machine_mode
s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
{
  switch (code)
    {
    case EQ:
    case NE:
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
        return CCAPmode;
      if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
           || GET_CODE (op1) == NEG)
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCLmode;

      if (GET_CODE (op0) == AND)
        {
          /* Check whether we can potentially do it via TM.  */
          enum machine_mode ccmode;
          ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
          if (ccmode != VOIDmode)
            {
              /* Relax CCTmode to CCZmode to allow fall-back to AND
                 if that turns out to be beneficial.  */
              return ccmode == CCTmode ? CCZmode : ccmode;
            }
        }

      if (register_operand (op0, HImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
        return CCT3mode;
      if (register_operand (op0, QImode)
          && GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
        return CCT3mode;

      return CCZmode;

    case LE:
    case LT:
    case GE:
    case GT:
      if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
        {
          if (INTVAL (XEXP((op0), 1)) < 0)
            return CCANmode;
          else
            return CCAPmode;
        }
      /* fall through */
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case UNLE:
    case UNLT:
    case UNGE:
    case UNGT:
    case LTGT:
      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCSRmode;
      return CCSmode;

    case LTU:
    case GEU:
      if (GET_CODE (op0) == PLUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL1mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    case LEU:
    case GTU:
      if (GET_CODE (op0) == MINUS
          && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
        return CCL2mode;

      if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
          && GET_CODE (op1) != CONST_INT)
        return CCURmode;
      return CCUmode;

    default:
      abort ();
    }
}
/* Return branch condition mask to implement a branch
   specified by CODE.  */

static int
s390_branch_condition_mask (rtx code)
{
  const int CC0 = 1 << 3;
  const int CC1 = 1 << 2;
  const int CC2 = 1 << 1;
  const int CC3 = 1 << 0;
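
  /* Note the bit ordering: CC0 is the most significant bit of the
     4-bit mask, exactly as in the mask field of the BRANCH ON
     CONDITION instruction.  An EQ test in CCZmode thus yields the
     mask 8 (binary 1000), while NE yields CC1 | CC2 | CC3 == 7.  */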
  if (GET_CODE (XEXP (code, 0)) != REG
      || REGNO (XEXP (code, 0)) != CC_REGNUM
      || XEXP (code, 1) != const0_rtx)
    abort ();

  switch (GET_MODE (XEXP (code, 0)))
    {
    case CCZmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        default:	abort ();
        }
      break;

    case CCT1mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC1;
        case NE:	return CC0 | CC2 | CC3;
        default:	abort ();
        }
      break;

    case CCT2mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC2;
        case NE:	return CC0 | CC1 | CC3;
        default:	abort ();
        }
      break;

    case CCT3mode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC3;
        case NE:	return CC0 | CC1 | CC2;
        default:	abort ();
        }
      break;

    case CCLmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0 | CC2;
        case NE:	return CC1 | CC3;
        default:	abort ();
        }
      break;

    case CCL1mode:
      switch (GET_CODE (code))
        {
        case LTU:	return CC2 | CC3;  /* carry */
        case GEU:	return CC0 | CC1;  /* no carry */
        default:	abort ();
        }
      break;

    case CCL2mode:
      switch (GET_CODE (code))
        {
        case GTU:	return CC0 | CC1;  /* borrow */
        case LEU:	return CC2 | CC3;  /* no borrow */
        default:	abort ();
        }
      break;

    case CCUmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LTU:	return CC1;
        case GTU:	return CC2;
        case LEU:	return CC0 | CC1;
        case GEU:	return CC0 | CC2;
        default:	abort ();
        }
      break;

    case CCURmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LTU:	return CC2;
        case GTU:	return CC1;
        case LEU:	return CC0 | CC2;
        case GEU:	return CC0 | CC1;
        default:	abort ();
        }
      break;

    case CCAPmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1 | CC3;
        case GT:	return CC2;
        case LE:	return CC0 | CC1 | CC3;
        case GE:	return CC0 | CC2;
        default:	abort ();
        }
      break;

    case CCANmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2 | CC3;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2 | CC3;
        default:	abort ();
        }
      break;

    case CCSmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC1 | CC2 | CC3;
        case LT:	return CC1;
        case GT:	return CC2;
        case LE:	return CC0 | CC1;
        case GE:	return CC0 | CC2;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC1 | CC2;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC1 | CC3;
        case UNGT:	return CC2 | CC3;
        case UNLE:	return CC0 | CC1 | CC3;
        case UNGE:	return CC0 | CC2 | CC3;
        case LTGT:	return CC1 | CC2;
        default:	abort ();
        }
      break;

    case CCSRmode:
      switch (GET_CODE (code))
        {
        case EQ:	return CC0;
        case NE:	return CC2 | CC1 | CC3;
        case LT:	return CC2;
        case GT:	return CC1;
        case LE:	return CC0 | CC2;
        case GE:	return CC0 | CC1;
        case UNORDERED:	return CC3;
        case ORDERED:	return CC0 | CC2 | CC1;
        case UNEQ:	return CC0 | CC3;
        case UNLT:	return CC2 | CC3;
        case UNGT:	return CC1 | CC3;
        case UNLE:	return CC0 | CC2 | CC3;
        case UNGE:	return CC0 | CC1 | CC3;
        case LTGT:	return CC2 | CC1;
        default:	abort ();
        }
      break;

    default:
      abort ();
    }
}
/* If INV is false, return assembler mnemonic string to implement
   a branch specified by CODE.  If INV is true, return mnemonic
   for the corresponding inverted branch.  */

static const char *
s390_branch_condition_mnemonic (rtx code, int inv)
{
  static const char *const mnemonic[16] =
    {
      NULL, "o", "h", "nle",
      "l", "nhe", "lh", "ne",
      "e", "nlh", "he", "nl",
      "le", "nh", "no", NULL
    };

  int mask = s390_branch_condition_mask (code);

  if (inv)
    mask ^= 15;

  if (mask < 1 || mask > 14)
    abort ();

  return mnemonic[mask];
}
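
/* For example, an EQ test in CCZmode produces mask 8 (CC0), which maps
   to the "e" mnemonic; with INV set the mask becomes 7 and the inverted
   branch uses "ne".  */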
/* If OP is an integer constant of mode MODE with exactly one
   HImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all HImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_hi (rtx op, enum machine_mode mode, int def)
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode) / 2;
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 16)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 16;

          if ((value & 0xffff) != (unsigned)(def & 0xffff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  return -1;
}
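
/* Subparts are numbered from the most significant end: e.g. for the
   SImode constant 0x12340000 with DEF 0, only the high halfword
   differs from DEF, so the function returns 0.  */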
/* Extract the HImode part number PART from integer
   constant OP of mode MODE.  */

int
s390_extract_hi (rtx op, enum machine_mode mode, int part)
{
  int n_parts = GET_MODE_SIZE (mode) / 2;
  if (part < 0 || part >= n_parts)
    abort ();
  else
    part = n_parts - 1 - part;

  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
      return ((value >> (16 * part)) & 0xffff);
    }
  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      if (part < HOST_BITS_PER_WIDE_INT / 16)
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
      else
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
        part -= HOST_BITS_PER_WIDE_INT / 16;

      return ((value >> (16 * part)) & 0xffff);
    }

  abort ();
}
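
/* With part numbers again counted from the most significant end,
   s390_extract_hi (GEN_INT (0x12345678), SImode, 0) yields 0x1234,
   and part 1 yields 0x5678.  */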
/* If OP is an integer constant of mode MODE with exactly one
   QImode subpart unequal to DEF, return the number of that
   subpart.  As a special case, if all QImode subparts of OP are
   equal to DEF, return zero.  Otherwise, return -1.  */

int
s390_single_qi (rtx op, enum machine_mode mode, int def)
{
  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) INTVAL (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value = 0;
      int n_parts = GET_MODE_SIZE (mode);
      int i, part = -1;

      for (i = 0; i < n_parts; i++)
        {
          if (i == 0)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
          else if (i == HOST_BITS_PER_WIDE_INT / 8)
            value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
          else
            value >>= 8;

          if ((value & 0xff) != (unsigned)(def & 0xff))
            {
              if (part != -1)
                return -1;
              else
                part = i;
            }
        }

      return part == -1 ? 0 : (n_parts - 1 - part);
    }

  return -1;
}
/* Extract the QImode part number PART from integer
   constant OP of mode MODE.  */

int
s390_extract_qi (rtx op, enum machine_mode mode, int part)
{
  int n_parts = GET_MODE_SIZE (mode);
  if (part < 0 || part >= n_parts)
    abort ();
  else
    part = n_parts - 1 - part;

  if (GET_CODE (op) == CONST_INT)
    {
      unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
      return ((value >> (8 * part)) & 0xff);
    }
  else if (GET_CODE (op) == CONST_DOUBLE
           && GET_MODE (op) == VOIDmode)
    {
      unsigned HOST_WIDE_INT value;
      if (part < HOST_BITS_PER_WIDE_INT / 8)
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
      else
        value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
        part -= HOST_BITS_PER_WIDE_INT / 8;

      return ((value >> (8 * part)) & 0xff);
    }

  abort ();
}
/* Check whether we can (and want to) split a double-word
   move in mode MODE from SRC to DST into two single-word
   moves, moving the subword FIRST_SUBWORD first.  */

bool
s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
{
  /* Floating point registers cannot be split.  */
  if (FP_REG_P (src) || FP_REG_P (dst))
    return false;

  /* We don't need to split if operands are directly accessible.  */
  if (s_operand (src, mode) || s_operand (dst, mode))
    return false;

  /* Non-offsettable memory references cannot be split.  */
  if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
      || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
    return false;

  /* Moving the first subword must not clobber a register
     needed to move the second subword.  */
  if (register_operand (dst, mode))
    {
      rtx subreg = operand_subword (dst, first_subword, 0, mode);
      if (reg_overlap_mentioned_p (subreg, src))
        return false;
    }

  return true;
}
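
/* For instance, splitting (set (reg:DI 2) (mem:DI (reg:SI 3))) on a
   31-bit target with FIRST_SUBWORD == 1 would write (reg:SI 3) first,
   clobbering the address register that is still needed to load the
   remaining subword; the overlap check above rejects that split.  */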
/* Change optimizations to be performed, depending on the
   optimization level.

   LEVEL is the optimization level specified; 2 if `-O2' is
   specified, 1 if `-O' is specified, and 0 if neither is specified.

   SIZE is nonzero if `-Os' is specified and zero otherwise.  */

void
optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
{
  /* ??? There are apparently still problems with -fcaller-saves.  */
  flag_caller_saves = 0;

  /* By default, always emit DWARF-2 unwind info.  This allows debugging
     without maintaining a stack frame back-chain.  */
  flag_asynchronous_unwind_tables = 1;
}
void
override_options (void)
{
  int i;
  static struct pta
    {
      const char *const name;		/* processor name or nickname.  */
      const enum processor_type processor;
      const enum processor_flags flags;
    }
  const processor_alias_table[] =
    {
      {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
      {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
      {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
      {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
				    | PF_LONG_DISPLACEMENT},
    };
  int const pta_size = ARRAY_SIZE (processor_alias_table);
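
  /* So -march=z990, for example, selects PROCESSOR_2084_Z990 and
     enables PF_IEEE_FLOAT, PF_ZARCH and PF_LONG_DISPLACEMENT, while
     -mtune only changes the processor being scheduled for, not the
     instruction set that may be used.  */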
  /* Acquire a unique set number for our register saves and restores.  */
  s390_sr_alias_set = new_alias_set ();

  /* Set up function hooks.  */
  init_machine_status = s390_init_machine_status;

  /* Architecture mode defaults according to ABI.  */
  if (!(target_flags_explicit & MASK_ZARCH))
    {
      if (TARGET_64BIT)
	target_flags |= MASK_ZARCH;
      else
	target_flags &= ~MASK_ZARCH;
    }

  /* Determine processor architectural level.  */
  if (!s390_arch_string)
    s390_arch_string = TARGET_ZARCH ? "z900" : "g5";

  for (i = 0; i < pta_size; i++)
    if (! strcmp (s390_arch_string, processor_alias_table[i].name))
      {
	s390_arch = processor_alias_table[i].processor;
	s390_arch_flags = processor_alias_table[i].flags;
	break;
      }
  if (i == pta_size)
    error ("Unknown cpu used in -march=%s.", s390_arch_string);

  /* Determine processor to tune for.  */
  if (!s390_tune_string)
    {
      s390_tune = s390_arch;
      s390_tune_flags = s390_arch_flags;
      s390_tune_string = s390_arch_string;
    }
  else
    {
      for (i = 0; i < pta_size; i++)
	if (! strcmp (s390_tune_string, processor_alias_table[i].name))
	  {
	    s390_tune = processor_alias_table[i].processor;
	    s390_tune_flags = processor_alias_table[i].flags;
	    break;
	  }
      if (i == pta_size)
	error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
    }

  /* Sanity checks.  */
  if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
    error ("z/Architecture mode not supported on %s.", s390_arch_string);
  if (TARGET_64BIT && !TARGET_ZARCH)
    error ("64-bit ABI not supported in ESA/390 mode.");
}
/* Map for smallest class containing reg regno.  */

const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
{ GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  ADDR_REGS,    ADDR_REGS, ADDR_REGS, ADDR_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  FP_REGS,      FP_REGS,   FP_REGS,   FP_REGS,
  ADDR_REGS,    NO_REGS,   ADDR_REGS
};
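
/* Note that GPR 0 is GENERAL_REGS rather than ADDR_REGS: it can hold
   arbitrary data, but a zero in the base or index field of an
   instruction means "no register", so r0 cannot take part in address
   generation.  */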
/* Return attribute type of insn.  */

static enum attr_type
s390_safe_attr_type (rtx insn)
{
  if (recog_memoized (insn) >= 0)
    return get_attr_type (insn);
  else
    return TYPE_NONE;
}
/* Return true if OP is a (const_int 0) operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
const0_operand (register rtx op, enum machine_mode mode)
{
  return op == CONST0_RTX (mode);
}

/* Return true if OP is constant.
   OP is the current operation.
   MODE is the current operation mode.  */

int
consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return CONSTANT_P (op);
}

/* Return true if the mode of operand OP matches MODE.
   If MODE is set to VOIDmode, set it to the mode of OP.  */

static int
check_mode (register rtx op, enum machine_mode *mode)
{
  if (*mode == VOIDmode)
    *mode = GET_MODE (op);
  else
    {
      if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
	return 0;
    }
  return 1;
}
/* Return true if OP is a valid operand for the LARL instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
larl_operand (register rtx op, enum machine_mode mode)
{
  if (! check_mode (op, &mode))
    return 0;

  /* Allow labels and local symbols.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Everything else must have a CONST, so strip it.  */
  if (GET_CODE (op) != CONST)
    return 0;
  op = XEXP (op, 0);

  /* Allow adding *even* in-range constants.  */
  if (GET_CODE (op) == PLUS)
    {
      if (GET_CODE (XEXP (op, 1)) != CONST_INT
	  || (INTVAL (XEXP (op, 1)) & 1) != 0)
	return 0;
#if HOST_BITS_PER_WIDE_INT > 32
      if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
	  || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
	return 0;
#endif
      op = XEXP (op, 0);
    }

  /* Labels and local symbols allowed here as well.  */
  if (GET_CODE (op) == LABEL_REF)
    return 1;
  if (GET_CODE (op) == SYMBOL_REF)
    return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
	    && SYMBOL_REF_TLS_MODEL (op) == 0
	    && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));

  /* Now we must have a @GOTENT offset or @PLT stub
     or an @INDNTPOFF TLS offset.  */
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_GOTENT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_PLT)
    return 1;
  if (GET_CODE (op) == UNSPEC
      && XINT (op, 1) == UNSPEC_INDNTPOFF)
    return 1;

  return 0;
}
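
/* Accepted forms thus include (symbol_ref "x"), (label_ref ...), and
   (const (plus (symbol_ref "x") (const_int 2))); an odd addend such as
   x+1 is rejected, since LARL operands must be halfword aligned.  */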
/* Helper routine to implement s_operand and s_imm_operand.
   OP is the current operation.
   MODE is the current operation mode.
   ALLOW_IMMEDIATE specifies whether immediate operands should
   be accepted or not.  */

static int
general_s_operand (register rtx op, enum machine_mode mode,
		   int allow_immediate)
{
  struct s390_address addr;

  /* Call general_operand first, so that we don't have to
     check for many special cases.  */
  if (!general_operand (op, mode))
    return 0;

  /* Just like memory_operand, allow (subreg (mem ...))
     after reload.  */
  if (reload_completed
      && GET_CODE (op) == SUBREG
      && GET_CODE (SUBREG_REG (op)) == MEM)
    op = SUBREG_REG (op);

  switch (GET_CODE (op))
    {
      /* Constants that we are sure will be forced to the
         literal pool in reload are OK as s-operand.  Note
	 that we cannot call s390_preferred_reload_class here
	 because it might not be known yet at this point
	 whether the current function is a leaf or not.  */
      case CONST_INT:
      case CONST_DOUBLE:
	if (!allow_immediate || reload_completed)
	  break;
	if (!legitimate_reload_constant_p (op))
	  return 1;
	break;

      /* Memory operands are OK unless they already use an
	 index register.  */
      case MEM:
	if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
	  return 1;
	if (s390_decompose_address (XEXP (op, 0), &addr)
	    && !addr.indx)
	  return 1;
	break;

      default:
	break;
    }

  return 0;
}
/* Return true if OP is a valid S-type operand.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_operand (register rtx op, enum machine_mode mode)
{
  return general_s_operand (op, mode, 0);
}

/* Return true if OP is a valid S-type operand or an immediate
   operand that can be addressed as S-type operand by forcing
   it into the literal pool.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s_imm_operand (register rtx op, enum machine_mode mode)
{
  return general_s_operand (op, mode, 1);
}
/* Return true if DISP is a valid short displacement.  */

static int
s390_short_displacement (rtx disp)
{
  /* No displacement is OK.  */
  if (!disp)
    return 1;

  /* Integer displacement in range.  */
  if (GET_CODE (disp) == CONST_INT)
    return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;

  /* GOT offset is not OK, the GOT can be large.  */
  if (GET_CODE (disp) == CONST
      && GET_CODE (XEXP (disp, 0)) == UNSPEC
      && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
    return 0;

  /* All other symbolic constants are literal pool references,
     which are OK as the literal pool must be small.  */
  if (GET_CODE (disp) == CONST)
    return 1;

  return 0;
}
/* Return true if OP is a valid operand for a C constraint.  */

int
s390_extra_constraint (rtx op, int c)
{
  struct s390_address addr;

  switch (c)
    {
    case 'Q':
      if (GET_CODE (op) != MEM)
	return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
	return 0;
      if (addr.indx)
	return 0;

      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    case 'R':
      if (GET_CODE (op) != MEM)
	return 0;

      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_decompose_address (XEXP (op, 0), &addr))
	    return 0;
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    case 'S':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      if (GET_CODE (op) != MEM)
	return 0;
      if (!s390_decompose_address (XEXP (op, 0), &addr))
	return 0;
      if (addr.indx)
	return 0;
      if (s390_short_displacement (addr.disp))
	return 0;
      break;

    case 'T':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      if (GET_CODE (op) != MEM)
	return 0;
      /* Any invalid address here will be fixed up by reload,
	 so accept it for the most generic constraint.  */
      if (s390_decompose_address (XEXP (op, 0), &addr)
	  && s390_short_displacement (addr.disp))
	return 0;
      break;

    case 'U':
      if (TARGET_LONG_DISPLACEMENT)
	{
	  if (!s390_decompose_address (op, &addr))
	    return 0;
	  if (!s390_short_displacement (addr.disp))
	    return 0;
	}
      break;

    case 'W':
      if (!TARGET_LONG_DISPLACEMENT)
	return 0;
      /* Any invalid address here will be fixed up by reload,
	 so accept it for the most generic constraint.  */
      if (s390_decompose_address (op, &addr)
	  && s390_short_displacement (addr.disp))
	return 0;
      break;

    default:
      return 0;
    }

  return 1;
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
s390_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  switch (code)
    {
    case CONST:
      if (GET_CODE (XEXP (x, 0)) == MINUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
	*total = 1000;
      else
	*total = 0;
      return true;

    case CONST_INT:
      /* Force_const_mem does not work out of reload, because the
	 saveable_obstack is set to reload_obstack, which does not
	 live long enough.  Because of this we cannot use force_const_mem
	 in addsi3.  This leads to problems with gen_add2_insn with a
	 constant greater than a short.  Because of that we give an
	 addition of greater constants a cost of 3 (reload1.c 10096).  */
      /* ??? saveable_obstack no longer exists.  */
      if (outer_code == PLUS
	  && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
	*total = COSTS_N_INSNS (3);
      else
	*total = 0;
      return true;

    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = 0;
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
    case PLUS:
    case AND:
    case IOR:
    case XOR:
    case MINUS:
    case NEG:
    case NOT:
      *total = COSTS_N_INSNS (1);
      return true;

    case MULT:
      if (GET_MODE (XEXP (x, 0)) == DImode)
	*total = COSTS_N_INSNS (40);
      else
	*total = COSTS_N_INSNS (7);
      return true;

    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      *total = COSTS_N_INSNS (33);
      return true;

    default:
      return false;
    }
}
/* Return the cost of an address rtx ADDR.  */

static int
s390_address_cost (rtx addr)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return 1000;

  return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
}
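
/* An address that needs an index register is thus rated slightly more
   expensive than plain base + displacement, nudging the optimizers
   toward the shorter form when both are available.  */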
/* Return true if OP is a valid operand for the BRAS instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  register enum rtx_code code = GET_CODE (op);

  /* Allow SYMBOL_REFs.  */
  if (code == SYMBOL_REF)
    return 1;

  /* Allow @PLT stubs.  */
  if (code == CONST
      && GET_CODE (XEXP (op, 0)) == UNSPEC
      && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
    return 1;
  return 0;
}

/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
   otherwise return 0.  */

int
tls_symbolic_operand (register rtx op)
{
  if (GET_CODE (op) != SYMBOL_REF)
    return 0;
  return SYMBOL_REF_TLS_MODEL (op);
}
/* Return true if OP is a load multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int dest_regno;
  rtx src_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
    return 0;

  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);

  /* Check whether the address is plain base, or base + displacement.  */

  if (GET_CODE (src_addr) == REG)
    off = 0;
  else if (GET_CODE (src_addr) == PLUS
	   && GET_CODE (XEXP (src_addr, 0)) == REG
	   && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (src_addr, 1));
      src_addr = XEXP (src_addr, 0);
    }
  else
    return 0;

  if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || REGNO (SET_DEST (elt)) != dest_regno + i
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }

  return 1;
}
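
/* On a 31-bit target, a matching load multiple thus looks like
     (parallel [(set (reg:SI 6) (mem:SI (reg:SI 9)))
		(set (reg:SI 7) (mem:SI (plus:SI (reg:SI 9) (const_int 4))))
		(set (reg:SI 8) (mem:SI (plus:SI (reg:SI 9) (const_int 8))))])
   i.e. consecutive registers loaded from consecutive words, exactly
   what the LM instruction can do.  */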
/* Return true if OP is a store multiple operation.  It is known to be a
   PARALLEL and the first section will be tested.
   OP is the current operation.
   MODE is the current operation mode.  */

int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count = XVECLEN (op, 0);
  unsigned int src_regno;
  rtx dest_addr;
  int i, off;

  /* Perform a quick check so we don't blow up below.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
    return 0;

  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);

  /* Check whether the address is plain base, or base + displacement.  */

  if (GET_CODE (dest_addr) == REG)
    off = 0;
  else if (GET_CODE (dest_addr) == PLUS
	   && GET_CODE (XEXP (dest_addr, 0)) == REG
	   && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
    {
      off = INTVAL (XEXP (dest_addr, 1));
      dest_addr = XEXP (dest_addr, 0);
    }
  else
    return 0;

  if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
    return 0;

  for (i = 1; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != Pmode
	  || REGNO (SET_SRC (elt)) != src_regno + i
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != Pmode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
	     != off + i * UNITS_PER_WORD)
	return 0;
    }

  return 1;
}
/* Return true if OP contains a symbol reference.  */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}

/* Return true if OP contains a reference to a thread-local symbol.  */

int
tls_symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF)
    return tls_symbolic_operand (op);

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Return true if OP is a legitimate general operand when
   generating PIC code.  It is given that flag_pic is on
   and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_pic_operand_p (register rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Reject everything else; must be handled
     via emit_symbolic_move.  */
  return 0;
}

/* Returns true if the constant value OP is a legitimate general operand.
   It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */

int
legitimate_constant_p (register rtx op)
{
  /* Accept all non-symbolic constants.  */
  if (!SYMBOLIC_CONST (op))
    return 1;

  /* Accept immediate LARL operands.  */
  if (TARGET_64BIT && larl_operand (op, VOIDmode))
    return 1;

  /* Thread-local symbols are never legal constants.  This is
     so that emit_call knows that computing such addresses
     might require a function call.  */
  if (TLS_SYMBOLIC_CONST (op))
    return 0;

  /* In the PIC case, symbolic constants must *not* be
     forced into the literal pool.  We accept them here,
     so that they will be handled by emit_symbolic_move.  */
  if (flag_pic)
    return 1;

  /* All remaining non-PIC symbolic constants are
     forced into the literal pool.  */
  return 1;
}
/* Determine if it's legal to put X into the constant pool.  This
   is not possible if X contains the address of a symbol that is
   not constant (TLS) or not known at final link time (PIC).  */

static bool
s390_cannot_force_const_mem (rtx x)
{
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Accept all non-symbolic constants.  */
      return false;

    case LABEL_REF:
      /* Labels are OK iff we are non-PIC.  */
      return flag_pic != 0;

    case SYMBOL_REF:
      /* 'Naked' TLS symbol references are never OK,
	 non-TLS symbols are OK iff we are non-PIC.  */
      if (tls_symbolic_operand (x))
	return true;
      else
	return flag_pic != 0;

    case CONST:
      return s390_cannot_force_const_mem (XEXP (x, 0));
    case PLUS:
    case MINUS:
      return s390_cannot_force_const_mem (XEXP (x, 0))
	     || s390_cannot_force_const_mem (XEXP (x, 1));

    case UNSPEC:
      switch (XINT (x, 1))
	{
	/* Only lt-relative or GOT-relative UNSPECs are OK.  */
	case UNSPEC_LTREL_OFFSET:
	case UNSPEC_GOT:
	case UNSPEC_GOTOFF:
	case UNSPEC_PLTOFF:
	case UNSPEC_TLSGD:
	case UNSPEC_TLSLDM:
	case UNSPEC_NTPOFF:
	case UNSPEC_DTPOFF:
	case UNSPEC_GOTNTPOFF:
	case UNSPEC_INDNTPOFF:
	  return false;

	default:
	  return true;
	}
      break;

    default:
      abort ();
    }
}
/* Returns true if the constant value OP is a legitimate general
   operand during and after reload.  The difference to
   legitimate_constant_p is that this function will not accept
   a constant that would need to be forced to the literal pool
   before it can be used as operand.  */

int
legitimate_reload_constant_p (register rtx op)
{
  /* Accept la(y) operands.  */
  if (GET_CODE (op) == CONST_INT
      && DISP_IN_RANGE (INTVAL (op)))
    return 1;

  /* Accept l(g)hi operands.  */
  if (GET_CODE (op) == CONST_INT
      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
    return 1;

  /* Accept lliXX operands.  */
  if (TARGET_ZARCH
      && s390_single_hi (op, DImode, 0) >= 0)
    return 1;

  /* Accept larl operands.  */
  if (TARGET_64BIT
      && larl_operand (op, VOIDmode))
    return 1;

  /* Everything else cannot be handled without reload.  */
  return 0;
}
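
/* Without z/Architecture and long displacements, a constant such as
   40000 is rejected here: it fits neither the unsigned 12-bit la
   field nor the signed 16-bit lhi field, so it has to go through the
   literal pool instead.  */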
/* Given an rtx OP being reloaded into a reg required to be in class CLASS,
   return the class of reg to actually use.  */

enum reg_class
s390_preferred_reload_class (rtx op, enum reg_class class)
{
  /* This can happen if a floating point constant is being
     reloaded into an integer register.  Leave well alone.  */
  if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
      && class != FP_REGS)
    return class;

  switch (GET_CODE (op))
    {
      /* Constants we cannot reload must be forced into the
	 literal pool.  */
      case CONST_DOUBLE:
      case CONST_INT:
	if (legitimate_reload_constant_p (op))
	  return class;
	else
	  return NO_REGS;

      /* If a symbolic constant or a PLUS is reloaded,
	 it is most likely being used as an address, so
	 prefer ADDR_REGS.  If 'class' is not a superset
	 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
      case PLUS:
      case LABEL_REF:
      case SYMBOL_REF:
      case CONST:
	if (reg_class_subset_p (ADDR_REGS, class))
	  return ADDR_REGS;
	else
	  return NO_REGS;

      default:
	break;
    }

  return class;
}
/* Return the register class of a scratch register needed to
   load IN into a register of class CLASS in MODE.

   We need a temporary when loading a PLUS expression which
   is not a legitimate operand of the LOAD ADDRESS instruction.  */

enum reg_class
s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
				   enum machine_mode mode, rtx in)
{
  if (s390_plus_operand (in, mode))
    return ADDR_REGS;

  return NO_REGS;
}

/* Return the register class of a scratch register needed to
   store a register of class CLASS in MODE into OUT:

   We need a temporary when storing a double-word to a
   non-offsettable memory address.  */

enum reg_class
s390_secondary_output_reload_class (enum reg_class class,
				    enum machine_mode mode, rtx out)
{
  if ((TARGET_64BIT ? mode == TImode
		    : (mode == DImode || mode == DFmode))
      && reg_classes_intersect_p (GENERAL_REGS, class)
      && GET_CODE (out) == MEM
      && !offsettable_memref_p (out)
      && !s_operand (out, VOIDmode))
    return ADDR_REGS;

  return NO_REGS;
}
/* Return true if OP is a PLUS that is not a legitimate
   operand for the LA instruction.
   OP is the current operation.
   MODE is the current operation mode.  */

int
s390_plus_operand (register rtx op, enum machine_mode mode)
{
  if (!check_mode (op, &mode) || mode != Pmode)
    return 0;

  if (GET_CODE (op) != PLUS)
    return 0;

  if (legitimate_la_operand_p (op))
    return 0;

  return 1;
}

/* Generate code to load SRC, which is PLUS that is not a
   legitimate operand for the LA instruction, into TARGET.
   SCRATCH may be used as scratch register.  */

void
s390_expand_plus_operand (register rtx target, register rtx src,
			  register rtx scratch)
{
  rtx sum1, sum2;
  struct s390_address ad;

  /* src must be a PLUS; get its two operands.  */
  if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
    abort ();

  /* Check if any of the two operands is already scheduled
     for replacement by reload.  This can happen e.g. when
     float registers occur in an address.  */
  sum1 = find_replacement (&XEXP (src, 0));
  sum2 = find_replacement (&XEXP (src, 1));
  src = gen_rtx_PLUS (Pmode, sum1, sum2);

  /* If the address is already strictly valid, there's nothing to do.  */
  if (!s390_decompose_address (src, &ad)
      || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
      || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
    {
      /* Otherwise, one of the operands cannot be an address register;
	 we reload its value into the scratch register.  */
      if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
	{
	  emit_move_insn (scratch, sum1);
	  sum1 = scratch;
	}
      if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
	{
	  emit_move_insn (scratch, sum2);
	  sum2 = scratch;
	}

      /* According to the way these invalid addresses are generated
	 in reload.c, it should never happen (at least on s390) that
	 *neither* of the PLUS components, after find_replacements
	 was applied, is an address register.  */
      if (sum1 == scratch && sum2 == scratch)
	{
	  debug_rtx (src);
	  abort ();
	}

      src = gen_rtx_PLUS (Pmode, sum1, sum2);
    }

  /* Emit the LOAD ADDRESS pattern.  Note that reload of PLUS
     is only ever performed on addresses, so we can mark the
     sum as legitimate for LA in any case.  */
  s390_load_address (target, src);
}
/* Decompose a RTL expression ADDR for a memory address into
   its components, returned in OUT.

   Returns 0 if ADDR is not a valid memory address, nonzero
   otherwise.  If OUT is NULL, don't return the components,
   but check for validity only.

   Note: Only addresses in canonical form are recognized.
   LEGITIMIZE_ADDRESS should convert non-canonical forms to the
   canonical form so that they will be recognized.  */

static int
s390_decompose_address (register rtx addr, struct s390_address *out)
{
  rtx base = NULL_RTX;
  rtx indx = NULL_RTX;
  rtx disp = NULL_RTX;
  int pointer = FALSE;
  int base_ptr = FALSE;
  int indx_ptr = FALSE;

  /* Decompose address into base + index + displacement.  */

  if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
    base = addr;

  else if (GET_CODE (addr) == PLUS)
    {
      rtx op0 = XEXP (addr, 0);
      rtx op1 = XEXP (addr, 1);
      enum rtx_code code0 = GET_CODE (op0);
      enum rtx_code code1 = GET_CODE (op1);

      if (code0 == REG || code0 == UNSPEC)
	{
	  if (code1 == REG || code1 == UNSPEC)
	    {
	      indx = op0;	/* index + base */
	      base = op1;
	    }
	  else
	    {
	      base = op0;	/* base + displacement */
	      disp = op1;
	    }
	}
      else if (code0 == PLUS)
	{
	  indx = XEXP (op0, 0);	/* index + base + disp */
	  base = XEXP (op0, 1);
	  disp = op1;
	}
      else
	return FALSE;
    }

  else
    disp = addr;		/* displacement */

  /* Validate base register.  */
  if (base)
    {
      if (GET_CODE (base) == UNSPEC)
	{
	  if (XVECLEN (base, 0) != 1 || XINT (base, 1) != UNSPEC_LTREL_BASE)
	    return FALSE;
	  base = gen_rtx_REG (Pmode, BASE_REGISTER);
	}

      if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
	return FALSE;

      if (REGNO (base) == BASE_REGISTER
	  || REGNO (base) == STACK_POINTER_REGNUM
	  || REGNO (base) == FRAME_POINTER_REGNUM
	  || ((reload_completed || reload_in_progress)
	      && frame_pointer_needed
	      && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
	  || REGNO (base) == ARG_POINTER_REGNUM
	  || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
	      && REGNO (base) <= LAST_VIRTUAL_REGISTER)
	  || (flag_pic
	      && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
	pointer = base_ptr = TRUE;
    }

  /* Validate index register.  */
  if (indx)
    {
      if (GET_CODE (indx) == UNSPEC)
	{
	  if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != UNSPEC_LTREL_BASE)
	    return FALSE;
	  indx = gen_rtx_REG (Pmode, BASE_REGISTER);
	}

      if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
	return FALSE;

      if (REGNO (indx) == BASE_REGISTER
	  || REGNO (indx) == STACK_POINTER_REGNUM
	  || REGNO (indx) == FRAME_POINTER_REGNUM
	  || ((reload_completed || reload_in_progress)
	      && frame_pointer_needed
	      && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
	  || REGNO (indx) == ARG_POINTER_REGNUM
	  || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
	      && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
	  || (flag_pic
	      && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
	pointer = indx_ptr = TRUE;
    }

  /* Prefer to use pointer as base, not index.  */
  if (base && indx && !base_ptr
      && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
    {
      rtx tmp = base;
      base = indx;
      indx = tmp;
    }

  /* Validate displacement.  */
  if (disp)
    {
      /* Allow integer constant in range.  */
      if (GET_CODE (disp) == CONST_INT)
	{
	  /* If the argument pointer is involved, the displacement will change
	     later anyway as the argument pointer gets eliminated.  This could
	     make a valid displacement invalid, but it is more likely to make
	     an invalid displacement valid, because we sometimes access the
	     register save area via negative offsets to the arg pointer.
	     Thus we don't check the displacement for validity here.  If after
	     elimination the displacement turns out to be invalid after all,
	     this is fixed up by reload in any case.  */
	  if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
	    {
	      if (!DISP_IN_RANGE (INTVAL (disp)))
		return FALSE;
	    }
	}

      /* In the small-PIC case, the linker converts @GOT
	 and @GOTNTPOFF offsets to possible displacements.  */
      else if (GET_CODE (disp) == CONST
	       && GET_CODE (XEXP (disp, 0)) == UNSPEC
	       && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
		   || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
	{
	  if (flag_pic != 1)
	    return FALSE;

	  pointer = TRUE;
	}

      /* Accept chunkified literal pool symbol references.  */
      else if (GET_CODE (disp) == CONST
	       && GET_CODE (XEXP (disp, 0)) == MINUS
	       && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
	       && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
	{
	  pointer = TRUE;
	}

      /* Likewise if a constant offset is present.  */
      else if (GET_CODE (disp) == CONST
	       && GET_CODE (XEXP (disp, 0)) == PLUS
	       && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
	       && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
	       && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
	       && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
	{
	  pointer = TRUE;
	}

      /* We can convert literal pool addresses to
	 displacements by basing them off the base register.  */
      else
	{
	  /* In some cases, we can accept an additional
	     small constant offset.  Split these off here.  */

	  unsigned int offset = 0;

	  if (GET_CODE (disp) == CONST
	      && GET_CODE (XEXP (disp, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
	    {
	      offset = INTVAL (XEXP (XEXP (disp, 0), 1));
	      disp = XEXP (XEXP (disp, 0), 0);
	    }

	  /* Now we must have a literal pool address.  */
	  if (GET_CODE (disp) != SYMBOL_REF
	      || !CONSTANT_POOL_ADDRESS_P (disp))
	    return FALSE;

	  /* If we have an offset, make sure it does not
	     exceed the size of the constant pool entry.  */
	  if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
	    return FALSE;

	  /* Either base or index must be free to
	     hold the base register.  */
	  if (base && indx)
	    return FALSE;

	  /* Convert the address.  */
	  if (base)
	    indx = gen_rtx_REG (Pmode, BASE_REGISTER);
	  else
	    base = gen_rtx_REG (Pmode, BASE_REGISTER);

	  disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
				 UNSPEC_LTREL_OFFSET);
	  disp = gen_rtx_CONST (Pmode, disp);

	  if (offset)
	    disp = plus_constant (disp, offset);

	  pointer = TRUE;
	}
    }

  if (!base && !indx)
    pointer = TRUE;

  if (out)
    {
      out->base = base;
      out->indx = indx;
      out->disp = disp;
      out->pointer = pointer;
    }

  return TRUE;
}
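
/* Note the canonical form requirement above: an address like
   (plus (reg 9) (plus (reg 10) (const_int 16))) is not recognized;
   it must appear as (plus (plus (reg 9) (reg 10)) (const_int 16)),
   which then decomposes into index (reg 9), base (reg 10), and
   displacement 16.  */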
/* Return nonzero if ADDR is a valid memory address.
   STRICT specifies whether strict register checking applies.  */

int
legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
		      register rtx addr, int strict)
{
  struct s390_address ad;
  if (!s390_decompose_address (addr, &ad))
    return FALSE;

  if (strict)
    {
      if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
	return FALSE;
      if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
	return FALSE;
    }
  else
    {
      if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
	return FALSE;
      if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
	return FALSE;
    }

  return TRUE;
}
/* Return 1 if OP is a valid operand for the LA instruction.
   In 31-bit, we need to prove that the result is used as an
   address, as LA performs only a 31-bit addition.  */

int
legitimate_la_operand_p (register rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return FALSE;

  if (TARGET_64BIT || addr.pointer)
    return TRUE;

  return FALSE;
}

/* Return 1 if OP is a valid operand for the LA instruction,
   and we prefer to use LA over addition to compute it.  */

int
preferred_la_operand_p (register rtx op)
{
  struct s390_address addr;
  if (!s390_decompose_address (op, &addr))
    return FALSE;

  if (!TARGET_64BIT && !addr.pointer)
    return FALSE;

  if (addr.pointer)
    return TRUE;

  if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
      || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
    return TRUE;

  return FALSE;
}
/* Emit a forced load-address operation to load SRC into DST.
   This will use the LOAD ADDRESS instruction even in situations
   where legitimate_la_operand_p (SRC) returns false.  */

void
s390_load_address (rtx dst, rtx src)
{
  if (TARGET_64BIT)
    emit_move_insn (dst, src);
  else
    emit_insn (gen_force_la_31 (dst, src));
}
/* Return a legitimate reference for ORIG (an address) using the
   register REG.  If REG is 0, a new pseudo is generated.

   There are two types of references that must be handled:

   1. Global data references must load the address from the GOT, via
      the PIC reg.  An insn is emitted to do this load, and the reg is
      used.

   2. Static data references, constant pool addresses, and code labels
      compute the address as an offset from the GOT, whose base is in
      the PIC reg.  Static data objects have SYMBOL_FLAG_LOCAL set to
      differentiate them from global data objects.  The returned
      address is the PIC reg + an unspec constant.

   GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
   reg also appears in the address.  */
rtx
legitimize_pic_address (rtx orig, rtx reg)
{
  rtx addr = orig;
  rtx new = orig;
  rtx base;

  if (GET_CODE (addr) == LABEL_REF
      || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
    {
      /* This is a local symbol.  */
      if (TARGET_64BIT && larl_operand (addr, VOIDmode))
	{
	  /* Access local symbols PC-relative via LARL.
	     This is the same as in the non-PIC case, so it is
	     handled automatically ...  */
	}
      else
	{
	  /* Access local symbols relative to the GOT.  */

	  rtx temp = reg? reg : gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  if (reg != 0)
	    {
	      emit_move_insn (reg, new);
	      new = reg;
	    }
	}
    }
  else if (GET_CODE (addr) == SYMBOL_REF)
    {
      if (reg == 0)
	reg = gen_reg_rtx (Pmode);

      if (flag_pic == 1)
	{
	  /* Assume GOT offset < 4k.  This is handled the same way
	     in both 31- and 64-bit code (@GOT).  */

	  if (reload_in_progress || reload_completed)
	    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	  new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  new = gen_rtx_CONST (Pmode, new);
	  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
	  new = gen_rtx_MEM (Pmode, new);
	  RTX_UNCHANGING_P (new) = 1;
	  emit_move_insn (reg, new);
	  new = reg;
	}
      else if (TARGET_64BIT)
	{
	  /* If the GOT offset might be >= 4k, we determine the position
	     of the GOT entry via a PC-relative LARL (@GOTENT).  */

	  rtx temp = gen_reg_rtx (Pmode);

	  new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
	  new = gen_rtx_CONST (Pmode, new);
	  emit_move_insn (temp, new);

	  new = gen_rtx_MEM (Pmode, temp);
	  RTX_UNCHANGING_P (new) = 1;
	  emit_move_insn (reg, new);
	  new = reg;
	}
      else
	{
	  /* If the GOT offset might be >= 4k, we have to load it
	     from the literal pool (@GOT).  */

	  rtx temp = gen_reg_rtx (Pmode);

	  if (reload_in_progress || reload_completed)
	    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
	  addr = gen_rtx_CONST (Pmode, addr);
	  addr = force_const_mem (Pmode, addr);
	  emit_move_insn (temp, addr);

	  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	  new = gen_rtx_MEM (Pmode, new);
	  RTX_UNCHANGING_P (new) = 1;
	  emit_move_insn (reg, new);
	  new = reg;
	}
    }
  else
    {
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  if (GET_CODE (addr) == UNSPEC)
	    {
	      if (XVECLEN (addr, 0) != 1)
		abort ();
	      switch (XINT (addr, 1))
		{
		/* If someone moved a GOT-relative UNSPEC
		   out of the literal pool, force them back in.  */
		case UNSPEC_GOTOFF:
		case UNSPEC_PLTOFF:
		  new = force_const_mem (Pmode, orig);
		  break;

		/* @GOT is OK as is if small.  */
		case UNSPEC_GOT:
		  if (flag_pic == 2)
		    new = force_const_mem (Pmode, orig);
		  break;

		/* @GOTENT is OK as is.  */
		case UNSPEC_GOTENT:
		  break;

		/* @PLT is OK as is on 64-bit, must be converted to
		   GOT-relative @PLTOFF on 31-bit.  */
		case UNSPEC_PLT:
		  if (!TARGET_64BIT)
		    {
		      rtx temp = reg? reg : gen_reg_rtx (Pmode);

		      if (reload_in_progress || reload_completed)
			regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

		      addr = XVECEXP (addr, 0, 0);
		      addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
					     UNSPEC_PLTOFF);
		      addr = gen_rtx_CONST (Pmode, addr);
		      addr = force_const_mem (Pmode, addr);
		      emit_move_insn (temp, addr);

		      new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		      if (reg != 0)
			{
			  emit_move_insn (reg, new);
			  new = reg;
			}
		    }
		  break;

		/* Everything else cannot happen.  */
		default:
		  abort ();
		}
	    }
	  else if (GET_CODE (addr) != PLUS)
	    abort ();
	}
      if (GET_CODE (addr) == PLUS)
	{
	  rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
	  /* Check first to see if this is a constant offset
	     from a local symbol reference.  */
	  if ((GET_CODE (op0) == LABEL_REF
	       || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
	      && GET_CODE (op1) == CONST_INT)
	    {
	      if (TARGET_64BIT && larl_operand (op0, VOIDmode))
		{
		  if (INTVAL (op1) & 1)
		    {
		      /* LARL can't handle odd offsets, so emit a
			 pair of LARL and LA.  */
		      rtx temp = reg? reg : gen_reg_rtx (Pmode);

		      if (!DISP_IN_RANGE (INTVAL (op1)))
			{
			  int even = INTVAL (op1) - 1;
			  op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
			  op0 = gen_rtx_CONST (Pmode, op0);
			  op1 = const1_rtx;
			}

		      emit_move_insn (temp, op0);
		      new = gen_rtx_PLUS (Pmode, temp, op1);

		      if (reg != 0)
			{
			  emit_move_insn (reg, new);
			  new = reg;
			}
		    }
		  else
		    {
		      /* If the offset is even, we can just use LARL.
			 This will happen automatically.  */
		    }
		}
	      else
		{
		  /* Access local symbols relative to the GOT.  */

		  rtx temp = reg? reg : gen_reg_rtx (Pmode);

		  if (reload_in_progress || reload_completed)
		    regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

		  addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
					 UNSPEC_GOTOFF);
		  addr = gen_rtx_PLUS (Pmode, addr, op1);
		  addr = gen_rtx_CONST (Pmode, addr);
		  addr = force_const_mem (Pmode, addr);
		  emit_move_insn (temp, addr);

		  new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
		  if (reg != 0)
		    {
		      emit_move_insn (reg, new);
		      new = reg;
		    }
		}
	    }

	  /* Now, check whether it is a GOT relative symbol plus offset
	     that was pulled out of the literal pool.  Force it back in.  */

	  else if (GET_CODE (op0) == UNSPEC
		   && GET_CODE (op1) == CONST_INT)
	    {
	      if (XVECLEN (op0, 0) != 1)
		abort ();
	      if (XINT (op0, 1) != UNSPEC_GOTOFF)
		abort ();

	      new = force_const_mem (Pmode, orig);
	    }

	  /* Otherwise, compute the sum.  */
	  else
	    {
	      base = legitimize_pic_address (XEXP (addr, 0), reg);
	      new  = legitimize_pic_address (XEXP (addr, 1),
					     base == reg ? NULL_RTX : reg);
	      if (GET_CODE (new) == CONST_INT)
		new = plus_constant (base, INTVAL (new));
	      else
		{
		  if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
		    {
		      base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
		      new = XEXP (new, 1);
		    }
		  new = gen_rtx_PLUS (Pmode, base, new);
		}

	      if (GET_CODE (new) == CONST)
		new = XEXP (new, 0);
	      new = force_operand (new, 0);
	    }
	}
    }
  return new;
}
/* Load the thread pointer into a register.  */

static rtx
get_thread_pointer (void)
{
  rtx tp;

  tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
  tp = force_reg (Pmode, tp);
  mark_reg_pointer (tp, BITS_PER_WORD);

  return tp;
}

/* Construct the SYMBOL_REF for the tls_get_offset function.  */

static GTY(()) rtx s390_tls_symbol;
rtx
s390_tls_get_offset (void)
{
  if (!s390_tls_symbol)
    s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");

  return s390_tls_symbol;
}
/* ADDR contains a thread-local SYMBOL_REF.  Generate code to compute
   this (thread-local) address.  REG may be used as temporary.  */

static rtx
legitimize_tls_address (rtx addr, rtx reg)
{
  rtx new, tls_call, temp, base, r2, insn;

  if (GET_CODE (addr) == SYMBOL_REF)
    switch (tls_symbolic_operand (addr))
      {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
	new = gen_rtx_CONST (Pmode, tls_call);
	new = force_const_mem (Pmode, new);
	emit_move_insn (r2, new);
	emit_call_insn (gen_call_value_tls (r2, tls_call));
	insn = get_insns ();
	end_sequence ();

	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new);

	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	start_sequence ();
	r2 = gen_rtx_REG (Pmode, 2);
	tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
	new = gen_rtx_CONST (Pmode, tls_call);
	new = force_const_mem (Pmode, new);
	emit_move_insn (r2, new);
	emit_call_insn (gen_call_value_tls (r2, tls_call));
	insn = get_insns ();
	end_sequence ();

	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
	temp = gen_reg_rtx (Pmode);
	emit_libcall_block (insn, temp, r2, new);

	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	base = gen_reg_rtx (Pmode);
	s390_load_address (base, new);

	new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
	new = gen_rtx_CONST (Pmode, new);
	new = force_const_mem (Pmode, new);
	temp = gen_reg_rtx (Pmode);
	emit_move_insn (temp, new);

	new = gen_rtx_PLUS (Pmode, base, temp);
	if (reg != 0)
	  {
	    s390_load_address (reg, new);
	    new = reg;
	  }
	break;

      case TLS_MODEL_INITIAL_EXEC:
	if (flag_pic == 1)
	  {
	    /* Assume GOT offset < 4k.  This is handled the same way
	       in both 31- and 64-bit code.  */

	    if (reload_in_progress || reload_completed)
	      regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);
	  }
	else if (TARGET_64BIT)
	  {
	    /* If the GOT offset might be >= 4k, we determine the position
	       of the GOT entry via a PC-relative LARL.  */

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

	    new = gen_rtx_MEM (Pmode, temp);
	    RTX_UNCHANGING_P (new) = 1;
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);
	  }
	else if (flag_pic)
	  {
	    /* If the GOT offset might be >= 4k, we have to load it
	       from the literal pool.  */

	    if (reload_in_progress || reload_completed)
	      regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = force_const_mem (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

	    new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new));
	  }
	else
	  {
	    /* In position-dependent code, load the absolute address of
	       the GOT entry from the literal pool.  */

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
	    new = gen_rtx_CONST (Pmode, new);
	    new = force_const_mem (Pmode, new);
	    temp = gen_reg_rtx (Pmode);
	    emit_move_insn (temp, new);

	    new = temp;
	    new = gen_rtx_MEM (Pmode, new);
	    RTX_UNCHANGING_P (new) = 1;

	    new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
	    temp = gen_reg_rtx (Pmode);
	    emit_insn (gen_rtx_SET (Pmode, temp, new));
	  }

	new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
	if (reg != 0)
	  s390_load_address (reg, new);
2693 case TLS_MODEL_LOCAL_EXEC:
2694 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2695 new = gen_rtx_CONST (Pmode, new);
2696 new = force_const_mem (Pmode, new);
2697 temp = gen_reg_rtx (Pmode);
2698 emit_move_insn (temp, new);
2700 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2703 s390_load_address (reg, new);
2712 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2714 switch (XINT (XEXP (addr, 0), 1))
2716 case UNSPEC_INDNTPOFF:
2729 abort (); /* for now ... */
2734 /* Emit insns to move operands[1] into operands[0]. */
2737 emit_symbolic_move (rtx *operands)
2739 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2741 if (GET_CODE (operands[0]) == MEM)
2742 operands[1] = force_reg (Pmode, operands[1]);
2743 else if (TLS_SYMBOLIC_CONST (operands[1]))
2744 operands[1] = legitimize_tls_address (operands[1], temp);
2746 operands[1] = legitimize_pic_address (operands[1], temp);
2749 /* Try machine-dependent ways of modifying an illegitimate address X
2750 to be legitimate. If we find one, return the new, valid address.
2752 OLDX is the address as it was before break_out_memory_refs was called.
2753 In some cases it is useful to look at this to decide what needs to be done.
2755 MODE is the mode of the operand pointed to by X.
2757 When -fpic is used, special handling is needed for symbolic references.
2758 See comments by legitimize_pic_address for details. */
2761 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
2762 enum machine_mode mode ATTRIBUTE_UNUSED)
2764 rtx constant_term = const0_rtx;
2766 if (TLS_SYMBOLIC_CONST (x))
2768 x = legitimize_tls_address (x, 0);
2770 if (legitimate_address_p (mode, x, FALSE))
2775 if (SYMBOLIC_CONST (x)
2776 || (GET_CODE (x) == PLUS
2777 && (SYMBOLIC_CONST (XEXP (x, 0))
2778 || SYMBOLIC_CONST (XEXP (x, 1)))))
2779 x = legitimize_pic_address (x, 0);
2781 if (legitimate_address_p (mode, x, FALSE))
2785 x = eliminate_constant_term (x, &constant_term);
2787 /* Optimize loading of large displacements by splitting them
2788 into the multiple of 4K and the rest; this allows the
2789 former to be CSE'd if possible.
2791 Don't do this if the displacement is added to a register
2792 pointing into the stack frame, as the offsets will
2793 change later anyway. */
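/* Example (illustrative): for constant_term == 0x12345,
       lower = 0x12345 & 0xfff == 0x345
       upper = 0x12345 ^ 0x345 == 0x12000
   so the address becomes (x + 0x12000) + 0x345, and the load of
   0x12000 can be CSE'd across neighboring references.  */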
2795 if (GET_CODE (constant_term) == CONST_INT
2796 && !TARGET_LONG_DISPLACEMENT
2797 && !DISP_IN_RANGE (INTVAL (constant_term))
2798 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
2800 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2801 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2803 rtx temp = gen_reg_rtx (Pmode);
2804 rtx val = force_operand (GEN_INT (upper), temp);
2806 emit_move_insn (temp, val);
2808 x = gen_rtx_PLUS (Pmode, x, temp);
2809 constant_term = GEN_INT (lower);
2812 if (GET_CODE (x) == PLUS)
2814 if (GET_CODE (XEXP (x, 0)) == REG)
2816 register rtx temp = gen_reg_rtx (Pmode);
2817 register rtx val = force_operand (XEXP (x, 1), temp);
2819 emit_move_insn (temp, val);
2821 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2824 else if (GET_CODE (XEXP (x, 1)) == REG)
2826 register rtx temp = gen_reg_rtx (Pmode);
2827 register rtx val = force_operand (XEXP (x, 0), temp);
2829 emit_move_insn (temp, val);
2831 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2835 if (constant_term != const0_rtx)
2836 x = gen_rtx_PLUS (Pmode, x, constant_term);
2841 /* Emit code to move LEN bytes from SRC to DST. */
2844 s390_expand_movstr (rtx dst, rtx src, rtx len)
2846 rtx (*gen_short) (rtx, rtx, rtx) =
2847 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2848 rtx (*gen_long) (rtx, rtx, rtx, rtx) =
2849 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
2852 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2854 if (INTVAL (len) > 0)
2855 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
2858 else if (TARGET_MVCLE)
2860 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2861 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2862 rtx reg0 = gen_reg_rtx (double_mode);
2863 rtx reg1 = gen_reg_rtx (double_mode);
2865 emit_move_insn (gen_highpart (single_mode, reg0),
2866 force_operand (XEXP (dst, 0), NULL_RTX));
2867 emit_move_insn (gen_highpart (single_mode, reg1),
2868 force_operand (XEXP (src, 0), NULL_RTX));
2870 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2871 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2873 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2878 rtx dst_addr, src_addr, count, blocks, temp;
2879 rtx end_label = gen_label_rtx ();
2880 enum machine_mode mode;
2883 mode = GET_MODE (len);
2884 if (mode == VOIDmode)
2887 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2891 dst_addr = gen_reg_rtx (Pmode);
2892 src_addr = gen_reg_rtx (Pmode);
2893 count = gen_reg_rtx (mode);
2894 blocks = gen_reg_rtx (mode);
2896 convert_move (count, len, 1);
2897 emit_cmp_and_jump_insns (count, const0_rtx,
2898 EQ, NULL_RTX, mode, 1, end_label);
2900 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2901 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2902 dst = change_address (dst, VOIDmode, dst_addr);
2903 src = change_address (src, VOIDmode, src_addr);
2905 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2907 emit_move_insn (count, temp);
2909 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2911 emit_move_insn (blocks, temp);
2913 expand_start_loop (1);
2914 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2915 make_tree (type, blocks),
2916 make_tree (type, const0_rtx)));
2918 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2919 s390_load_address (dst_addr,
2920 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2921 s390_load_address (src_addr,
2922 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2924 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2926 emit_move_insn (blocks, temp);
2930 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2931 emit_label (end_label);
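/* C-level sketch of what the expander above emits for a variable
   length (illustrative only; mvc() stands for the hardware MVC
   instruction, which copies LENGTH+1 bytes; a zero length branches
   straight to end_label):

       count = len - 1;
       blocks = count >> 8;
       while (blocks != 0)
         {
           mvc (dst, src, 255);          -- one full 256-byte block
           dst += 256; src += 256;
           blocks--;
         }
       mvc (dst, src, count & 0xff);     -- the remaining 1..256 bytes

   s390_expand_clrstr and s390_expand_cmpmem below use the same
   blocking scheme.  */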
2935 /* Emit code to clear LEN bytes at DST. */
2938 s390_expand_clrstr (rtx dst, rtx len)
2940 rtx (*gen_short) (rtx, rtx) =
2941 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2942 rtx (*gen_long) (rtx, rtx, rtx) =
2943 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2946 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2948 if (INTVAL (len) > 0)
2949 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2952 else if (TARGET_MVCLE)
2954 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2955 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2956 rtx reg0 = gen_reg_rtx (double_mode);
2957 rtx reg1 = gen_reg_rtx (double_mode);
2959 emit_move_insn (gen_highpart (single_mode, reg0),
2960 force_operand (XEXP (dst, 0), NULL_RTX));
2961 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2963 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2964 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2966 emit_insn ((*gen_long) (reg0, reg1, reg0));
2971 rtx dst_addr, src_addr, count, blocks, temp;
2972 rtx end_label = gen_label_rtx ();
2973 enum machine_mode mode;
2976 mode = GET_MODE (len);
2977 if (mode == VOIDmode)
2980 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2984 dst_addr = gen_reg_rtx (Pmode);
2985 src_addr = gen_reg_rtx (Pmode);
2986 count = gen_reg_rtx (mode);
2987 blocks = gen_reg_rtx (mode);
2989 convert_move (count, len, 1);
2990 emit_cmp_and_jump_insns (count, const0_rtx,
2991 EQ, NULL_RTX, mode, 1, end_label);
2993 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2994 dst = change_address (dst, VOIDmode, dst_addr);
2996 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2998 emit_move_insn (count, temp);
3000 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3002 emit_move_insn (blocks, temp);
3004 expand_start_loop (1);
3005 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3006 make_tree (type, blocks),
3007 make_tree (type, const0_rtx)));
3009 emit_insn ((*gen_short) (dst, GEN_INT (255)));
3010 s390_load_address (dst_addr,
3011 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3013 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3015 emit_move_insn (blocks, temp);
3019 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
3020 emit_label (end_label);
3024 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3025 and return the result in TARGET. */
3028 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3030 rtx (*gen_short) (rtx, rtx, rtx) =
3031 TARGET_64BIT ? gen_cmpmem_short_64 : gen_cmpmem_short_31;
3032 rtx (*gen_long) (rtx, rtx, rtx, rtx) =
3033 TARGET_64BIT ? gen_cmpmem_long_64 : gen_cmpmem_long_31;
3034 rtx (*gen_result) (rtx) =
3035 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
3037 op0 = protect_from_queue (op0, 0);
3038 op1 = protect_from_queue (op1, 0);
3039 len = protect_from_queue (len, 0);
3041 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3043 if (INTVAL (len) > 0)
3045 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
3046 emit_insn ((*gen_result) (target));
3049 emit_move_insn (target, const0_rtx);
3052 else /* if (TARGET_MVCLE) */
3054 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
3055 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
3056 rtx reg0 = gen_reg_rtx (double_mode);
3057 rtx reg1 = gen_reg_rtx (double_mode);
3059 emit_move_insn (gen_highpart (single_mode, reg0),
3060 force_operand (XEXP (op0, 0), NULL_RTX));
3061 emit_move_insn (gen_highpart (single_mode, reg1),
3062 force_operand (XEXP (op1, 0), NULL_RTX));
3064 convert_move (gen_lowpart (single_mode, reg0), len, 1);
3065 convert_move (gen_lowpart (single_mode, reg1), len, 1);
3067 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
3068 emit_insn ((*gen_result) (target));
3072 /* Deactivate for now as profile code cannot cope with
3073 CC being live across basic block boundaries. */
3076 rtx addr0, addr1, count, blocks, temp;
3077 rtx end_label = gen_label_rtx ();
3078 enum machine_mode mode;
3081 mode = GET_MODE (len);
3082 if (mode == VOIDmode)
3085 type = (*lang_hooks.types.type_for_mode) (mode, 1);
3089 addr0 = gen_reg_rtx (Pmode);
3090 addr1 = gen_reg_rtx (Pmode);
3091 count = gen_reg_rtx (mode);
3092 blocks = gen_reg_rtx (mode);
3094 convert_move (count, len, 1);
3095 emit_cmp_and_jump_insns (count, const0_rtx,
3096 EQ, NULL_RTX, mode, 1, end_label);
3098 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3099 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3100 op0 = change_address (op0, VOIDmode, addr0);
3101 op1 = change_address (op1, VOIDmode, addr1);
3103 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3105 emit_move_insn (count, temp);
3107 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3109 emit_move_insn (blocks, temp);
3111 expand_start_loop (1);
3112 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3113 make_tree (type, blocks),
3114 make_tree (type, const0_rtx)));
3116 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
3117 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3118 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3119 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3120 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3121 emit_jump_insn (temp);
3123 s390_load_address (addr0,
3124 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3125 s390_load_address (addr1,
3126 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3128 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3130 emit_move_insn (blocks, temp);
3134 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
3135 emit_label (end_label);
3137 emit_insn ((*gen_result) (target));
3142 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3143 We need to emit DTP-relative relocations. */
3146 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3151 fputs ("\t.long\t", file);
3154 fputs ("\t.quad\t", file);
3159 output_addr_const (file, x);
3160 fputs ("@DTPOFF", file);
3163 /* In the name of slightly smaller debug output, and to cater to
3164 general assembler lossage, recognize various UNSPEC sequences
3165 and turn them back into a direct symbol reference. */
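/* Example (illustrative): when the address of a MEM has the shape
     (plus (reg %r12) (const (unspec [(symbol_ref "x")] UNSPEC_GOT)))
   the routine below hands back plain (symbol_ref "x").  */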
3168 s390_delegitimize_address (rtx orig_x)
3172 if (GET_CODE (x) != MEM)
3176 if (GET_CODE (x) == PLUS
3177 && GET_CODE (XEXP (x, 1)) == CONST
3178 && GET_CODE (XEXP (x, 0)) == REG
3179 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3181 y = XEXP (XEXP (x, 1), 0);
3182 if (GET_CODE (y) == UNSPEC
3183 && XINT (y, 1) == UNSPEC_GOT)
3184 return XVECEXP (y, 0, 0);
3188 if (GET_CODE (x) == CONST)
3191 if (GET_CODE (y) == UNSPEC
3192 && XINT (y, 1) == UNSPEC_GOTENT)
3193 return XVECEXP (y, 0, 0);
3200 /* Locate some local-dynamic symbol still in use by this function
3201 so that we can print its name in local-dynamic base patterns. */
3204 get_some_local_dynamic_name (void)
3208 if (cfun->machine->some_ld_name)
3209 return cfun->machine->some_ld_name;
3211 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3213 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3214 return cfun->machine->some_ld_name;
3220 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3224 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3226 x = get_pool_constant (x);
3227 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3230 if (GET_CODE (x) == SYMBOL_REF
3231 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3233 cfun->machine->some_ld_name = XSTR (x, 0);
3240 /* Output symbolic constant X in assembler syntax to
3241 stdio stream FILE. */
3244 s390_output_symbolic_const (FILE *file, rtx x)
3246 switch (GET_CODE (x))
3251 s390_output_symbolic_const (file, XEXP (x, 0));
3255 s390_output_symbolic_const (file, XEXP (x, 0));
3256 fprintf (file, "+");
3257 s390_output_symbolic_const (file, XEXP (x, 1));
3261 s390_output_symbolic_const (file, XEXP (x, 0));
3262 fprintf (file, "-");
3263 s390_output_symbolic_const (file, XEXP (x, 1));
3270 output_addr_const (file, x);
3274 if (XVECLEN (x, 0) != 1)
3275 output_operand_lossage ("invalid UNSPEC as operand (1)");
3276 switch (XINT (x, 1))
3278 case UNSPEC_LTREL_OFFSET:
3279 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3280 fprintf (file, "-");
3281 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3284 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3285 fprintf (file, "@GOTENT");
3288 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3289 fprintf (file, "@GOT");
3292 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3293 fprintf (file, "@GOTOFF");
3296 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3297 fprintf (file, "@PLT");
3300 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3301 fprintf (file, "@PLTOFF");
3304 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3305 fprintf (file, "@TLSGD");
3308 assemble_name (file, get_some_local_dynamic_name ());
3309 fprintf (file, "@TLSLDM");
3312 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3313 fprintf (file, "@DTPOFF");
3316 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3317 fprintf (file, "@NTPOFF");
3319 case UNSPEC_GOTNTPOFF:
3320 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3321 fprintf (file, "@GOTNTPOFF");
3323 case UNSPEC_INDNTPOFF:
3324 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3325 fprintf (file, "@INDNTPOFF");
3328 output_operand_lossage ("invalid UNSPEC as operand (2)");
3334 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
3339 /* Output address operand ADDR in assembler syntax to
3340 stdio stream FILE. */
3343 print_operand_address (FILE *file, rtx addr)
3345 struct s390_address ad;
3347 if (!s390_decompose_address (addr, &ad)
3348 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3349 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3350 output_operand_lossage ("Cannot decompose address.");
3353 s390_output_symbolic_const (file, ad.disp);
3355 fprintf (file, "0");
3357 if (ad.base && ad.indx)
3358 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3359 reg_names[REGNO (ad.base)]);
3361 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3364 /* Output operand X in assembler syntax to stdio stream FILE.
3365 CODE specifies the format flag.  The following format flags are recognized:
3368 'C': print opcode suffix for branch condition.
3369 'D': print opcode suffix for inverse branch condition.
3370 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3371 'O': print only the displacement of a memory reference.
3372 'R': print only the base register of a memory reference.
3373 'N': print the second word of a DImode operand.
3374 'M': print the second word of a TImode operand.
3376 'b': print integer X as if it's an unsigned byte.
3377 'x': print integer X as if it's an unsigned word.
3378 'h': print integer X as if it's a signed word. */
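/* Worked example for 'h' (illustrative): for X == 0x9abc the
   expression ((0x9abc & 0xffff) ^ 0x8000) - 0x8000 prints -25924,
   i.e. the low 16 bits of X read as a signed word.  */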
3381 print_operand (FILE *file, rtx x, int code)
3386 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3390 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
3394 if (GET_CODE (x) == SYMBOL_REF)
3396 fprintf (file, "%s", ":tls_load:");
3397 output_addr_const (file, x);
3399 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3401 fprintf (file, "%s", ":tls_gdcall:");
3402 output_addr_const (file, XVECEXP (x, 0, 0));
3404 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3406 fprintf (file, "%s", ":tls_ldcall:");
3407 assemble_name (file, get_some_local_dynamic_name ());
3415 struct s390_address ad;
3417 if (GET_CODE (x) != MEM
3418 || !s390_decompose_address (XEXP (x, 0), &ad)
3419 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3424 s390_output_symbolic_const (file, ad.disp);
3426 fprintf (file, "0");
3432 struct s390_address ad;
3434 if (GET_CODE (x) != MEM
3435 || !s390_decompose_address (XEXP (x, 0), &ad)
3436 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3441 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3443 fprintf (file, "0");
3448 if (GET_CODE (x) == REG)
3449 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3450 else if (GET_CODE (x) == MEM)
3451 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
3457 if (GET_CODE (x) == REG)
3458 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3459 else if (GET_CODE (x) == MEM)
3460 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3466 switch (GET_CODE (x))
3469 fprintf (file, "%s", reg_names[REGNO (x)]);
3473 output_address (XEXP (x, 0));
3480 s390_output_symbolic_const (file, x);
3485 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3486 else if (code == 'x')
3487 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3488 else if (code == 'h')
3489 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3491 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3495 if (GET_MODE (x) != VOIDmode)
3498 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3499 else if (code == 'x')
3500 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3501 else if (code == 'h')
3502 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3508 fatal_insn ("UNKNOWN in print_operand !?", x);
3513 /* Target hook for assembling integer objects. We need to define it
3514 here to work around a bug in some versions of GAS, which couldn't
3515 handle values smaller than INT_MIN when printed in decimal. */
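/* Example (illustrative): for the 64-bit value -2147483649, which lies
   just below INT_MIN, this emits roughly
       .quad   0xffffffff7fffffff
   instead of the decimal spelling that the buggy assemblers reject.  */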
3518 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
3520 if (size == 8 && aligned_p
3521 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3523 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
3527 return default_assemble_integer (x, size, aligned_p);
3530 /* Returns true if register REGNO is used for forming
3531 a memory address in expression X. */
3534 reg_used_in_mem_p (int regno, rtx x)
3536 enum rtx_code code = GET_CODE (x);
3542 if (refers_to_regno_p (regno, regno+1,
3546 else if (code == SET
3547 && GET_CODE (SET_DEST (x)) == PC)
3549 if (refers_to_regno_p (regno, regno+1,
3554 fmt = GET_RTX_FORMAT (code);
3555 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3558 && reg_used_in_mem_p (regno, XEXP (x, i)))
3561 else if (fmt[i] == 'E')
3562 for (j = 0; j < XVECLEN (x, i); j++)
3563 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3569 /* Returns true if expression DEP_RTX sets an address register
3570 used by instruction INSN to address memory. */
3573 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
3577 if (GET_CODE (dep_rtx) == INSN)
3578 dep_rtx = PATTERN (dep_rtx);
3580 if (GET_CODE (dep_rtx) == SET)
3582 target = SET_DEST (dep_rtx);
3583 if (GET_CODE (target) == STRICT_LOW_PART)
3584 target = XEXP (target, 0);
3585 while (GET_CODE (target) == SUBREG)
3586 target = SUBREG_REG (target);
3588 if (GET_CODE (target) == REG)
3590 int regno = REGNO (target);
3592 if (s390_safe_attr_type (insn) == TYPE_LA)
3594 pat = PATTERN (insn);
3595 if (GET_CODE (pat) == PARALLEL)
3597 if (XVECLEN (pat, 0) != 2)
3599 pat = XVECEXP (pat, 0, 0);
3601 if (GET_CODE (pat) == SET)
3602 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
3606 else if (get_attr_atype (insn) == ATYPE_AGEN)
3607 return reg_used_in_mem_p (regno, PATTERN (insn));
3613 /* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
3616 s390_agen_dep_p (rtx dep_insn, rtx insn)
3618 rtx dep_rtx = PATTERN (dep_insn);
3621 if (GET_CODE (dep_rtx) == SET
3622 && addr_generation_dependency_p (dep_rtx, insn))
3624 else if (GET_CODE (dep_rtx) == PARALLEL)
3626 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3628 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3635 /* Return the modified cost of the dependency of instruction INSN
3636 on instruction DEP_INSN through the link LINK. COST is the
3637 default cost of that dependency.
3639 Data dependencies are all handled without delay. However, if a
3640 register is modified and subsequently used as base or index
3641 register of a memory reference, at least 4 cycles need to pass
3642 between setting and using the register to avoid pipeline stalls.
3643 An exception is the LA instruction.  An address generated by LA can
3644 be used after only a one-cycle stall in the pipeline. */
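/* Example (illustrative; mnemonics are a sketch):
       l    %r1,0(%r2)     -- sets %r1
       l    %r3,0(%r1)     -- uses %r1 to form an address
   gets its dependency cost raised by 4, while replacing the first
   insn by  la %r1,8(%r2)  raises it by only 1.  */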
3647 s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3652 /* If the dependence is an anti-dependence, there is no cost. For an
3653 output dependence, there is sometimes a cost, but it doesn't seem
3654 worth handling those few cases. */
3656 if (REG_NOTE_KIND (link) != 0)
3659 /* If we can't recognize the insns, we can't really do anything. */
3660 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3663 /* DFA-based scheduling checks address dependencies in the md file. */
3664 if (s390_use_dfa_pipeline_interface ())
3666 /* Operand forwarding in the case of lr, load and la. */
3667 if (s390_tune == PROCESSOR_2084_Z990
3669 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3670 || s390_safe_attr_type (dep_insn) == TYPE_LR
3671 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
3676 dep_rtx = PATTERN (dep_insn);
3678 if (GET_CODE (dep_rtx) == SET
3679 && addr_generation_dependency_p (dep_rtx, insn))
3680 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3681 else if (GET_CODE (dep_rtx) == PARALLEL)
3683 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3685 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3686 cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3692 /* A C statement (sans semicolon) to update the integer scheduling priority
3693 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3694 reduce the priority to execute INSN later. Do not define this macro if
3695 you do not need to adjust the scheduling priorities of insns.
3697 A STD instruction should be scheduled earlier,
3698 in order to use the bypass. */
3701 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
3703 if (! INSN_P (insn))
3706 if (s390_tune != PROCESSOR_2084_Z990)
3709 switch (s390_safe_attr_type (insn))
3713 priority = priority << 3;
3716 priority = priority << 1;
3724 /* The number of instructions that can be issued per cycle. */
3727 s390_issue_rate (void)
3729 if (s390_tune == PROCESSOR_2084_Z990)
3734 /* If the following function returns TRUE, we will use the DFA pipeline interface for insn scheduling. */
3738 s390_use_dfa_pipeline_interface (void)
3740 if (s390_tune == PROCESSOR_2064_Z900
3741 || s390_tune == PROCESSOR_2084_Z990)
3748 s390_first_cycle_multipass_dfa_lookahead (void)
3750 return s390_use_dfa_pipeline_interface () ? 4 : 0;
3753 /* Called after issuing each insn.
3754 Triggers the default sort algorithm to better slot instructions. */
3757 s390_sched_reorder2 (FILE *dump ATTRIBUTE_UNUSED,
3758 int sched_verbose ATTRIBUTE_UNUSED,
3759 rtx *ready ATTRIBUTE_UNUSED,
3760 int *pn_ready ATTRIBUTE_UNUSED,
3761 int clock_var ATTRIBUTE_UNUSED)
3763 return s390_issue_rate();
3767 /* Split all branches that exceed the maximum distance.
3768 Returns true if this created a new literal pool entry.
3770 Code generated by this routine is allowed to use
3771 TEMP_REG as temporary scratch register. If this is
3772 done, TEMP_USED is set to true. */
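/* Sketch of the transformation (illustrative): a conditional branch
   whose target lies more than 64K away, say
       jne     .Lfar
   is rewritten to load the target address into TEMP_REG first (via
   LARL on 64-bit, or through the literal pool otherwise) and then
   branch on the register:
       larl    %r14,.Lfar
       bner    %r14  */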
3775 s390_split_branches (rtx temp_reg, bool *temp_used)
3777 int new_literal = 0;
3778 rtx insn, pat, tmp, target;
3781 /* We need correct insn addresses. */
3783 shorten_branches (get_insns ());
3785 /* Find all branches that exceed 64KB, and split them. */
3787 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3789 if (GET_CODE (insn) != JUMP_INSN)
3792 pat = PATTERN (insn);
3793 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3794 pat = XVECEXP (pat, 0, 0);
3795 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
3798 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3800 label = &SET_SRC (pat);
3802 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3804 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3805 label = &XEXP (SET_SRC (pat), 1);
3806 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3807 label = &XEXP (SET_SRC (pat), 2);
3814 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
3821 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3822 INSN_ADDRESSES_NEW (tmp, -1);
3829 tmp = force_const_mem (Pmode, *label);
3830 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3831 INSN_ADDRESSES_NEW (tmp, -1);
3838 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
3839 UNSPEC_LTREL_OFFSET);
3840 target = gen_rtx_CONST (Pmode, target);
3841 target = force_const_mem (Pmode, target);
3842 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
3843 INSN_ADDRESSES_NEW (tmp, -1);
3845 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (target, 0)),
3847 target = gen_rtx_PLUS (Pmode, temp_reg, target);
3850 if (!validate_change (insn, label, target, 0))
3858 /* Find a literal pool symbol referenced in RTX X, and store
3859 it at REF. Will abort if X contains references to more than
3860 one such pool symbol; multiple references to the same symbol
3861 are allowed, however.
3863 The rtx pointed to by REF must be initialized to NULL_RTX
3864 by the caller before calling this routine. */
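/* Typical use, as in s390_chunkify_start below:

       rtx pool_ref = NULL_RTX;
       find_constant_pool_ref (PATTERN (insn), &pool_ref);
       if (pool_ref)
         ... process the referenced pool constant ...  */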
3867 find_constant_pool_ref (rtx x, rtx *ref)
3872 /* Ignore LTREL_BASE references. */
3873 if (GET_CODE (x) == UNSPEC
3874 && XINT (x, 1) == UNSPEC_LTREL_BASE)
3877 if (GET_CODE (x) == SYMBOL_REF
3878 && CONSTANT_POOL_ADDRESS_P (x))
3880 if (*ref == NULL_RTX)
3886 fmt = GET_RTX_FORMAT (GET_CODE (x));
3887 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3891 find_constant_pool_ref (XEXP (x, i), ref);
3893 else if (fmt[i] == 'E')
3895 for (j = 0; j < XVECLEN (x, i); j++)
3896 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3901 /* Replace every reference to the literal pool symbol REF
3902 in X by the address ADDR. Fix up MEMs as required. */
3905 replace_constant_pool_ref (rtx *x, rtx ref, rtx addr)
3913 /* Literal pool references can only occur inside a MEM ... */
3914 if (GET_CODE (*x) == MEM)
3916 rtx memref = XEXP (*x, 0);
3920 *x = replace_equiv_address (*x, addr);
3924 if (GET_CODE (memref) == CONST
3925 && GET_CODE (XEXP (memref, 0)) == PLUS
3926 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3927 && XEXP (XEXP (memref, 0), 0) == ref)
3929 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3930 *x = replace_equiv_address (*x, plus_constant (addr, off));
3935 /* ... or a load-address type pattern. */
3936 if (GET_CODE (*x) == SET)
3938 rtx addrref = SET_SRC (*x);
3942 SET_SRC (*x) = addr;
3946 if (GET_CODE (addrref) == CONST
3947 && GET_CODE (XEXP (addrref, 0)) == PLUS
3948 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3949 && XEXP (XEXP (addrref, 0), 0) == ref)
3951 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3952 SET_SRC (*x) = plus_constant (addr, off);
3957 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3958 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3962 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3964 else if (fmt[i] == 'E')
3966 for (j = 0; j < XVECLEN (*x, i); j++)
3967 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3972 /* Check whether X contains an UNSPEC_LTREL_BASE.
3973 Return its constant pool symbol if found, NULL_RTX otherwise. */
3976 find_ltrel_base (rtx x)
3981 if (GET_CODE (x) == UNSPEC
3982 && XINT (x, 1) == UNSPEC_LTREL_BASE)
3983 return XVECEXP (x, 0, 0);
3985 fmt = GET_RTX_FORMAT (GET_CODE (x));
3986 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3990 rtx fnd = find_ltrel_base (XEXP (x, i));
3994 else if (fmt[i] == 'E')
3996 for (j = 0; j < XVECLEN (x, i); j++)
3998 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4008 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with BASE. */
4011 replace_ltrel_base (rtx *x, rtx base)
4016 if (GET_CODE (*x) == UNSPEC
4017 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4023 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4024 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4028 replace_ltrel_base (&XEXP (*x, i), base);
4030 else if (fmt[i] == 'E')
4032 for (j = 0; j < XVECLEN (*x, i); j++)
4033 replace_ltrel_base (&XVECEXP (*x, i, j), base);
4039 /* We keep a list of constants which we have to add to internal
4040 constant tables in the middle of large functions. */
4042 #define NR_C_MODES 7
4043 enum machine_mode constant_modes[NR_C_MODES] =
4052 rtx (*gen_consttable[NR_C_MODES])(rtx) =
4054 gen_consttable_ti, gen_consttable_df, gen_consttable_di, gen_consttable_sf, gen_consttable_si, gen_consttable_hi, gen_consttable_qi
4059 struct constant *next;
4064 struct constant_pool
4066 struct constant_pool *next;
4071 struct constant *constants[NR_C_MODES];
4076 static struct constant_pool * s390_chunkify_start (void);
4077 static void s390_chunkify_finish (struct constant_pool *);
4078 static void s390_chunkify_cancel (struct constant_pool *);
4080 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4081 static void s390_end_pool (struct constant_pool *, rtx);
4082 static void s390_add_pool_insn (struct constant_pool *, rtx);
4083 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4084 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4085 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4086 static rtx s390_dump_pool (struct constant_pool *);
4087 static void s390_free_pool (struct constant_pool *);
4089 /* Create new constant pool covering instructions starting at INSN
4090 and chain it to the end of POOL_LIST. */
4092 static struct constant_pool *
4093 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4095 struct constant_pool *pool, **prev;
4098 pool = (struct constant_pool *) xmalloc (sizeof *pool);
4100 for (i = 0; i < NR_C_MODES; i++)
4101 pool->constants[i] = NULL;
4103 pool->label = gen_label_rtx ();
4104 pool->first_insn = insn;
4105 pool->pool_insn = NULL_RTX;
4106 pool->insns = BITMAP_XMALLOC ();
4109 for (prev = pool_list; *prev; prev = &(*prev)->next)
4116 /* End range of instructions covered by POOL at INSN and emit
4117 placeholder insn representing the pool. */
4120 s390_end_pool (struct constant_pool *pool, rtx insn)
4122 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4125 insn = get_last_insn ();
4127 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4128 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4131 /* Add INSN to the list of insns covered by POOL. */
4134 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4136 bitmap_set_bit (pool->insns, INSN_UID (insn));
4139 /* Return pool out of POOL_LIST that covers INSN. */
4141 static struct constant_pool *
4142 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4144 struct constant_pool *pool;
4146 for (pool = pool_list; pool; pool = pool->next)
4147 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4153 /* Add constant VAL of mode MODE to the constant pool POOL. */
4156 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4161 for (i = 0; i < NR_C_MODES; i++)
4162 if (constant_modes[i] == mode)
4164 if (i == NR_C_MODES)
4167 for (c = pool->constants[i]; c != NULL; c = c->next)
4168 if (rtx_equal_p (val, c->value))
4173 c = (struct constant *) xmalloc (sizeof *c);
4175 c->label = gen_label_rtx ();
4176 c->next = pool->constants[i];
4177 pool->constants[i] = c;
4178 pool->size += GET_MODE_SIZE (mode);
4182 /* Find constant VAL of mode MODE in the constant pool POOL.
4183 Return an RTX describing the distance from the start of
4184 the pool to the location of the constant. */
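/* The returned expression has the shape
     (const (minus (label_ref Lconst) (label_ref Lpool)))
   i.e. the byte distance of the constant's label from the pool label;
   s390_chunkify_finish later adds the pool base register to it.  */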
4187 s390_find_constant (struct constant_pool *pool, rtx val,
4188 enum machine_mode mode)
4194 for (i = 0; i < NR_C_MODES; i++)
4195 if (constant_modes[i] == mode)
4197 if (i == NR_C_MODES)
4200 for (c = pool->constants[i]; c != NULL; c = c->next)
4201 if (rtx_equal_p (val, c->value))
4207 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4208 gen_rtx_LABEL_REF (Pmode, pool->label));
4209 offset = gen_rtx_CONST (Pmode, offset);
4213 /* Dump out the constants in POOL. */
4216 s390_dump_pool (struct constant_pool *pool)
4222 /* Pool start insn switches to proper section
4223 and guarantees necessary alignment. */
4225 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4227 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4228 INSN_ADDRESSES_NEW (insn, -1);
4230 insn = emit_label_after (pool->label, insn);
4231 INSN_ADDRESSES_NEW (insn, -1);
4233 /* Dump constants in descending alignment requirement order,
4234 ensuring proper alignment for every constant. */
4235 for (i = 0; i < NR_C_MODES; i++)
4236 for (c = pool->constants[i]; c; c = c->next)
4238 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4239 rtx value = c->value;
4240 if (GET_CODE (value) == CONST
4241 && GET_CODE (XEXP (value, 0)) == UNSPEC
4242 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
4243 && XVECLEN (XEXP (value, 0), 0) == 1)
4245 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4246 gen_rtx_LABEL_REF (VOIDmode, pool->label));
4247 value = gen_rtx_CONST (VOIDmode, value);
4250 insn = emit_label_after (c->label, insn);
4251 INSN_ADDRESSES_NEW (insn, -1);
4252 insn = emit_insn_after (gen_consttable[i] (value), insn);
4253 INSN_ADDRESSES_NEW (insn, -1);
4256 /* Pool end insn switches back to previous section
4257 and guarantees necessary alignment. */
4259 insn = emit_insn_after (gen_pool_end_64 (), insn);
4261 insn = emit_insn_after (gen_pool_end_31 (), insn);
4262 INSN_ADDRESSES_NEW (insn, -1);
4264 insn = emit_barrier_after (insn);
4265 INSN_ADDRESSES_NEW (insn, -1);
4267 /* Remove placeholder insn. */
4268 remove_insn (pool->pool_insn);
4273 /* Free all memory used by POOL. */
4276 s390_free_pool (struct constant_pool *pool)
4280 for (i = 0; i < NR_C_MODES; i++)
4282 struct constant *c = pool->constants[i];
4285 struct constant *next = c->next;
4291 BITMAP_XFREE (pool->insns);
4296 /* Chunkify the literal pool if required. */
4298 #define S390_POOL_CHUNK_MIN 0xc00
4299 #define S390_POOL_CHUNK_MAX 0xe00
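/* Note: both limits stay below 4096 bytes, the reach of a 12-bit
   displacement, presumably to leave slack for alignment padding and
   for constants that are still added after the size check.  */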
4301 static struct constant_pool *
4302 s390_chunkify_start (void)
4304 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4306 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
4309 rtx pending_ltrel = NULL_RTX;
4312 rtx (*gen_reload_base) (rtx, rtx) =
4313 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
4316 /* Do we need to chunkify the literal pool? */
4318 if (get_pool_size () < S390_POOL_CHUNK_MAX)
4321 /* We need correct insn addresses. */
4323 shorten_branches (get_insns ());
4325 /* Scan all insns and move literals to pool chunks. */
4327 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4329 /* Check for pending LTREL_BASE. */
4332 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
4335 if (ltrel_base == pending_ltrel)
4336 pending_ltrel = NULL_RTX;
4342 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4344 rtx pool_ref = NULL_RTX;
4345 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4348 rtx constant = get_pool_constant (pool_ref);
4349 enum machine_mode mode = get_pool_mode (pool_ref);
4352 curr_pool = s390_start_pool (&pool_list, insn);
4354 s390_add_constant (curr_pool, constant, mode);
4355 s390_add_pool_insn (curr_pool, insn);
4357 /* Don't split the pool chunk between a LTREL_OFFSET load
4358 and the corresponding LTREL_BASE. */
4359 if (GET_CODE (constant) == CONST
4360 && GET_CODE (XEXP (constant, 0)) == UNSPEC
4361 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
4365 pending_ltrel = pool_ref;
4370 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4373 s390_add_pool_insn (curr_pool, insn);
4374 /* An LTREL_BASE must follow within the same basic block. */
4380 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4381 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4386 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4389 s390_end_pool (curr_pool, NULL_RTX);
4394 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4395 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4398 /* We will later have to insert base register reload insns.
4399 Those will have an effect on code size, which we need to
4400 consider here. This calculation makes rather pessimistic
4401 worst-case assumptions. */
4402 if (GET_CODE (insn) == CODE_LABEL)
4405 if (chunk_size < S390_POOL_CHUNK_MIN
4406 && curr_pool->size < S390_POOL_CHUNK_MIN)
4409 /* Pool chunks can only be inserted after BARRIERs ... */
4410 if (GET_CODE (insn) == BARRIER)
4412 s390_end_pool (curr_pool, insn);
4417 /* ... so if we don't find one in time, create one. */
4418 else if ((chunk_size > S390_POOL_CHUNK_MAX
4419 || curr_pool->size > S390_POOL_CHUNK_MAX))
4421 rtx label, jump, barrier;
4423 /* We can insert the barrier only after a 'real' insn. */
4424 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4426 if (get_attr_length (insn) == 0)
4429 /* Don't separate LTREL_BASE from the corresponding
4430 LTREL_OFFSET load. */
4434 label = gen_label_rtx ();
4435 jump = emit_jump_insn_after (gen_jump (label), insn);
4436 barrier = emit_barrier_after (jump);
4437 insn = emit_label_after (label, barrier);
4438 JUMP_LABEL (jump) = label;
4439 LABEL_NUSES (label) = 1;
4441 INSN_ADDRESSES_NEW (jump, -1);
4442 INSN_ADDRESSES_NEW (barrier, -1);
4443 INSN_ADDRESSES_NEW (insn, -1);
4445 s390_end_pool (curr_pool, barrier);
4453 s390_end_pool (curr_pool, NULL_RTX);
4458 /* Find all labels that are branched into
4459 from an insn belonging to a different chunk. */
4461 far_labels = BITMAP_XMALLOC ();
4463 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4465 /* Labels marked with LABEL_PRESERVE_P can be targets
4466 of non-local jumps, so we have to mark them.
4467 The same holds for named labels.
4469 Don't do that, however, if it is the label before a jump table. */
4472 if (GET_CODE (insn) == CODE_LABEL
4473 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
4475 rtx vec_insn = next_real_insn (insn);
4476 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4477 PATTERN (vec_insn) : NULL_RTX;
4479 || !(GET_CODE (vec_pat) == ADDR_VEC
4480 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4481 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
4484 /* If we have a direct jump (conditional or unconditional)
4485 or a casesi jump, check all potential targets. */
4486 else if (GET_CODE (insn) == JUMP_INSN)
4488 rtx pat = PATTERN (insn);
4489 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4490 pat = XVECEXP (pat, 0, 0);
4492 if (GET_CODE (pat) == SET)
4494 rtx label = JUMP_LABEL (insn);
4497 if (s390_find_pool (pool_list, label)
4498 != s390_find_pool (pool_list, insn))
4499 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4502 else if (GET_CODE (pat) == PARALLEL
4503 && XVECLEN (pat, 0) == 2
4504 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
4505 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
4506 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
4508 /* Find the jump table used by this casesi jump. */
4509 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
4510 rtx vec_insn = next_real_insn (vec_label);
4511 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4512 PATTERN (vec_insn) : NULL_RTX;
4514 && (GET_CODE (vec_pat) == ADDR_VEC
4515 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4517 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
4519 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
4521 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
4523 if (s390_find_pool (pool_list, label)
4524 != s390_find_pool (pool_list, insn))
4525 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4532 /* Insert base register reload insns before every pool. */
4534 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4536 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
4537 rtx insn = curr_pool->first_insn;
4538 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
4541 /* Insert base register reload insns at every far label. */
4543 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4544 if (GET_CODE (insn) == CODE_LABEL
4545 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
4547 struct constant_pool *pool = s390_find_pool (pool_list, insn);
4550 rtx new_insn = gen_reload_base (base_reg, pool->label);
4551 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4556 BITMAP_XFREE (far_labels);
4559 /* Recompute insn addresses. */
4561 init_insn_lengths ();
4562 shorten_branches (get_insns ());
4567 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4568 After we have decided to use this list, finish implementing
4569 all changes to the current function as required. */
4572 s390_chunkify_finish (struct constant_pool *pool_list)
4574 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4575 struct constant_pool *curr_pool = NULL;
4579 /* Replace all literal pool references. */
4581 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4584 replace_ltrel_base (&PATTERN (insn), base_reg);
4586 curr_pool = s390_find_pool (pool_list, insn);
4590 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4592 rtx addr, pool_ref = NULL_RTX;
4593 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4596 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4597 get_pool_mode (pool_ref));
4598 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4599 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4600 INSN_CODE (insn) = -1;
4605 /* Dump out all literal pools. */
4607 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4608 s390_dump_pool (curr_pool);
4610 /* Free pool list. */
4614 struct constant_pool *next = pool_list->next;
4615 s390_free_pool (pool_list);
4620 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4621 We have decided we cannot use this list, so revert all changes
4622 to the current function that were done by s390_chunkify_start. */
4625 s390_chunkify_cancel (struct constant_pool *pool_list)
4627 struct constant_pool *curr_pool = NULL;
4630 /* Remove all pool placeholder insns. */
4632 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4634 /* Did we insert an extra barrier? Remove it. */
4635 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4636 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4637 rtx label = NEXT_INSN (curr_pool->pool_insn);
4639 if (jump && GET_CODE (jump) == JUMP_INSN
4640 && barrier && GET_CODE (barrier) == BARRIER
4641 && label && GET_CODE (label) == CODE_LABEL
4642 && GET_CODE (PATTERN (jump)) == SET
4643 && SET_DEST (PATTERN (jump)) == pc_rtx
4644 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4645 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4648 remove_insn (barrier);
4649 remove_insn (label);
4652 remove_insn (curr_pool->pool_insn);
4655 /* Remove all base register reload insns. */
4657 for (insn = get_insns (); insn; )
4659 rtx next_insn = NEXT_INSN (insn);
4661 if (GET_CODE (insn) == INSN
4662 && GET_CODE (PATTERN (insn)) == SET
4663 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4664 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
4670 /* Free pool list. */
4674 struct constant_pool *next = pool_list->next;
4675 s390_free_pool (pool_list);
4681 /* Index of constant pool chunk that is currently being processed.
4682 Set to -1 before function output has started. */
4683 int s390_pool_count = -1;
4685 /* Number of elements of current constant pool. */
4686 int s390_nr_constants;
4688 /* Output the main constant pool, delimited by START_LABEL and END_LABEL, to the assembler file. */
4691 s390_output_constant_pool (rtx start_label, rtx end_label)
4695 readonly_data_section ();
4696 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4697 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4698 CODE_LABEL_NUMBER (start_label));
4702 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4703 CODE_LABEL_NUMBER (start_label));
4704 ASM_OUTPUT_ALIGN (asm_out_file, 2);
4707 s390_pool_count = 0;
4708 output_constant_pool (current_function_name, current_function_decl);
4709 s390_pool_count = -1;
4711 function_section (current_function_decl);
4714 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4715 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4716 CODE_LABEL_NUMBER (end_label));
4720 /* Rework the prolog/epilog to avoid saving/restoring
4721 registers unnecessarily. If TEMP_REGNO is nonnegative,
4722 it specifies the number of a caller-saved register used
4723 as temporary scratch register by code emitted during
4724 machine dependent reorg. */
4727 s390_optimize_prolog (int temp_regno)
4729 int save_first, save_last, restore_first, restore_last;
4731 rtx insn, new_insn, next_insn;
4733 /* Recompute regs_ever_live data for special registers. */
4734 regs_ever_live[BASE_REGISTER] = 0;
4735 regs_ever_live[RETURN_REGNUM] = 0;
4736 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4738 /* If there is (possibly) any pool entry, we need to
4739 load the base register.
4740 ??? FIXME: this should be more precise. */
4741 if (get_pool_size ())
4742 regs_ever_live[BASE_REGISTER] = 1;
4744 /* In non-leaf functions, the prolog/epilog code relies
4745 on RETURN_REGNUM being saved in any case. */
4746 if (!current_function_is_leaf)
4747 regs_ever_live[RETURN_REGNUM] = 1;
4749 /* We need to save/restore the temporary register. */
4750 if (temp_regno >= 0)
4751 regs_ever_live[temp_regno] = 1;
4754 /* Find first and last gpr to be saved. */
4756 for (i = 6; i < 16; i++)
4757 if (regs_ever_live[i])
4759 || i == STACK_POINTER_REGNUM
4760 || i == RETURN_REGNUM
4761 || i == BASE_REGISTER
4762 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
4765 for (j = 15; j > i; j--)
4766 if (regs_ever_live[j])
4768 || j == STACK_POINTER_REGNUM
4769 || j == RETURN_REGNUM
4770 || j == BASE_REGISTER
4771 || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
4776 /* Nothing to save/restore. */
4777 save_first = restore_first = -1;
4778 save_last = restore_last = -1;
4782 /* Save/restore from i to j. */
4783 save_first = restore_first = i;
4784 save_last = restore_last = j;
4787 /* Varargs functions need to save gprs 2 to 6. */
4788 if (current_function_stdarg)
4796 /* If all special registers are in fact used, there's nothing we
4797 can do, so no point in walking the insn list. */
4798 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4799 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4803 /* Search for prolog/epilog insns and replace them. */
4805 for (insn = get_insns (); insn; insn = next_insn)
4807 int first, last, off;
4808 rtx set, base, offset;
4810 next_insn = NEXT_INSN (insn);
4812 if (GET_CODE (insn) != INSN)
4814 if (GET_CODE (PATTERN (insn)) != PARALLEL)
4817 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4819 set = XVECEXP (PATTERN (insn), 0, 0);
4820 first = REGNO (SET_SRC (set));
4821 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4822 offset = const0_rtx;
4823 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4824 off = INTVAL (offset) - first * UNITS_PER_WORD;
4826 if (GET_CODE (base) != REG || off < 0)
4828 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4830 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4833 if (save_first != -1)
4835 new_insn = save_gprs (base, off, save_first, save_last);
4836 new_insn = emit_insn_before (new_insn, insn);
4837 INSN_ADDRESSES_NEW (new_insn, -1);
4843 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4845 set = XVECEXP (PATTERN (insn), 0, 0);
4846 first = REGNO (SET_DEST (set));
4847 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4848 offset = const0_rtx;
4849 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4850 off = INTVAL (offset) - first * UNITS_PER_WORD;
4852 if (GET_CODE (base) != REG || off < 0)
4854 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4856 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4859 if (restore_first != -1)
4861 new_insn = restore_gprs (base, off, restore_first, restore_last);
4862 new_insn = emit_insn_before (new_insn, insn);
4863 INSN_ADDRESSES_NEW (new_insn, -1);
4871 /* Check whether any insn in the function makes use of the original
4872 value of RETURN_REG (e.g. for __builtin_return_address).
4873 If so, insert an insn reloading that value.
4875 Return true if any such insn was found. */
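/* Illustrative numbers (a sketch, not from the sources): on 64-bit
   with a frame size of 160 bytes, the saved %r14 lives at offset
   160 + 14*8 == 272 from the stack pointer, so the reload emitted
   below amounts to  lg %r14,272(%r15).  */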
4878 s390_fixup_clobbered_return_reg (rtx return_reg)
4880 bool replacement_done = 0;
4883 /* If we never called __builtin_return_address, register 14
4884 might have been used as temp during the prolog; we do
4885 not want to touch those uses. */
4886 if (!has_hard_reg_initial_val (Pmode, REGNO (return_reg)))
4889 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4891 rtx reg, off, new_insn;
4893 if (GET_CODE (insn) != INSN)
4895 if (!reg_referenced_p (return_reg, PATTERN (insn)))
4897 if (GET_CODE (PATTERN (insn)) == PARALLEL
4898 && store_multiple_operation (PATTERN (insn), VOIDmode))
4901 if (frame_pointer_needed)
4902 reg = hard_frame_pointer_rtx;
4904 reg = stack_pointer_rtx;
4906 off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
4907 if (!DISP_IN_RANGE (INTVAL (off)))
4909 off = force_const_mem (Pmode, off);
4910 new_insn = gen_rtx_SET (Pmode, return_reg, off);
4911 new_insn = emit_insn_before (new_insn, insn);
4912 INSN_ADDRESSES_NEW (new_insn, -1);
4916 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4917 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4918 new_insn = emit_insn_before (new_insn, insn);
4919 INSN_ADDRESSES_NEW (new_insn, -1);
4921 replacement_done = 1;
4924 return replacement_done;
4927 /* Perform machine-dependent processing. */
4932 bool fixed_up_clobbered_return_reg = 0;
4933 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4936 /* Make sure all splits have been performed; splits after
4937 machine_dependent_reorg might confuse insn length counts. */
4938 split_all_insns_noflow ();
4941 /* There are two problematic situations we need to correct:
4943 - the literal pool might be > 4096 bytes in size, so that
4944 some of its elements cannot be directly accessed
4946 - a branch target might be > 64K away from the branch, so that
4947 it is not possible to use a PC-relative instruction.
4949 To fix those, we split the single literal pool into multiple
4950 pool chunks, reloading the pool base register at various
4951 points throughout the function to ensure it always points to
4952 the pool chunk the following code expects, and / or replace
4953 PC-relative branches by absolute branches.
4955 However, the two problems are interdependent: splitting the
4956 literal pool can move a branch further away from its target,
4957 causing the 64K limit to overflow, and on the other hand,
4958 replacing a PC-relative branch by an absolute branch means
4959 we need to put the branch target address into the literal
4960 pool, possibly causing it to overflow.
4962 So, we loop trying to fix up both problems until we manage
4963 to satisfy both conditions at the same time. Note that the
4964 loop is guaranteed to terminate as every pass of the loop
4965 strictly decreases the total number of PC-relative branches
4966 in the function. (This is not completely true as there
4967 might be branch-over-pool insns introduced by chunkify_start.
4968 Those never need to be split however.) */
  for (;;)
    {
      struct constant_pool *pool_list;
4974 /* Try to chunkify the literal pool. */
4975 pool_list = s390_chunkify_start ();
      /* Split out-of-range branches.  If this has created new
	 literal pool entries, cancel current chunk list and
	 try again.  */
      if (s390_split_branches (temp_reg, &temp_used))
	{
	  s390_chunkify_cancel (pool_list);
	  continue;
	}
4988 /* Check whether we have clobbered a use of the return
4989 register (e.g. for __builtin_return_address). If so,
4990 add insns reloading the register where necessary. */
      if (temp_used && !fixed_up_clobbered_return_reg
	  && s390_fixup_clobbered_return_reg (temp_reg))
	{
	  fixed_up_clobbered_return_reg = 1;

	  /* The fixup insns might have caused a jump to overflow.  */
	  s390_chunkify_cancel (pool_list);
	  continue;
	}
      /* If we made it up to here, both conditions are satisfied.
	 Finish up pool chunkification if required.  */
      s390_chunkify_finish (pool_list);
      break;
    }

  s390_optimize_prolog (temp_used ? RETURN_REGNUM : -1);
}
5015 /* Return an RTL expression representing the value of the return address
5016 for the frame COUNT steps up from the current frame. FRAME is the
5017 frame pointer of that frame. */
rtx
s390_return_addr_rtx (int count, rtx frame)
{
  rtx addr;

  /* For the current frame, we use the initial value of RETURN_REGNUM.
     This works both in leaf and non-leaf functions.  */
  if (count == 0)
    return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
5030 /* For frames farther back, we read the stack slot where the
5031 corresponding RETURN_REGNUM value was saved. */
5033 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5034 addr = memory_address (Pmode, addr);
  return gen_rtx_MEM (Pmode, addr);
}
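/* Editorial example (not part of the original sources): with the logic
   above, a hypothetical user function like the one below reads the
   initial RETURN_REGNUM value, while __builtin_return_address (1)
   instead loads the save slot at
   FRAME + RETURN_REGNUM * UNITS_PER_WORD of the caller's frame.  */
#if 0
static void *
example_who_called_me (void)
{
  return __builtin_return_address (0);	/* initial value of RETURN_REGNUM */
}
#endif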
/* Find the first call-clobbered register unused in a function.
   This can be used as a base register in a leaf function, or to
   hold the return address before the epilogue.  */
static int
find_unused_clobbered_reg (void)
{
  int i;
  for (i = 0; i < 6; i++)
    if (!regs_ever_live[i])
      return i;
  return 0;
}
/* Fill cfun->machine with info about the frame of the current function.  */
static void
s390_frame_info (void)
{
  char gprs_ever_live[16];
  int i, j;
  HOST_WIDE_INT fsize = get_frame_size ();
5061 if (fsize > 0x7fff0000)
5062 fatal_error ("Total size of local variables exceeds architecture limit.");
  /* fprs 8 - 15 are call-saved (i.e. callee-saved) in the 64-bit ABI.  */
  cfun->machine->save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  cfun->machine->save_fprs_p = 1;
	  break;
	}
5074 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
  /* Does the function need to set up a frame and save area?  */
5078 if (! current_function_is_leaf
5079 || cfun->machine->frame_size > 0
5080 || current_function_calls_alloca
5081 || current_function_stdarg)
5082 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5084 /* Find first and last gpr to be saved. Note that at this point,
5085 we assume the return register and the base register always
   need to be saved.  This is done because the usage of these
   registers might change even after the prologue has been emitted.
5088 If it turns out later that we really don't need them, the
5089 prolog/epilog code is modified again. */
5091 for (i = 0; i < 16; i++)
5092 gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];
  if (flag_pic)
    gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
      regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
5097 gprs_ever_live[BASE_REGISTER] = 1;
5098 gprs_ever_live[RETURN_REGNUM] = 1;
5099 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5101 for (i = 6; i < 16; i++)
    if (gprs_ever_live[i])
      break;

  for (j = 15; j > i; j--)
    if (gprs_ever_live[j])
      break;
5110 /* Save / Restore from gpr i to j. */
5111 cfun->machine->first_save_gpr = i;
5112 cfun->machine->first_restore_gpr = i;
5113 cfun->machine->last_save_gpr = j;
5115 /* Varargs functions need to save gprs 2 to 6. */
5116 if (current_function_stdarg)
    cfun->machine->first_save_gpr = 2;
}
/* Return the offset between the argument pointer and the frame pointer
   initially after the prologue.  */
HOST_WIDE_INT
s390_arg_frame_offset (void)
{
  HOST_WIDE_INT fsize = get_frame_size ();
  int save_fprs_p, i;

  /* fprs 8 - 15 are call-saved (i.e. callee-saved) in the 64-bit ABI.  */
  save_fprs_p = 0;
  if (TARGET_64BIT)
    for (i = 24; i < 32; i++)
      if (regs_ever_live[i] && !global_regs[i])
	{
	  save_fprs_p = 1;
	  break;
	}

  fsize = fsize + save_fprs_p * 64;

  /* Does the function need to set up a frame and save area?  */
  if (! current_function_is_leaf
      || fsize > 0
      || current_function_calls_alloca
      || current_function_stdarg)
    fsize += STARTING_FRAME_OFFSET;

  return fsize + STACK_POINTER_OFFSET;
}
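/* Editorial note (not from the original sources): as an illustrative
   worked example, a 31-bit leaf function with no locals, no alloca and
   no stdarg gets fsize == 0, so the result is just STACK_POINTER_OFFSET
   (96 bytes in the 31-bit ABI, 160 in the 64-bit ABI); any function
   that needs a frame additionally pays STARTING_FRAME_OFFSET on top of
   its local variable size.  */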
5151 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5152 to register BASE. Return generated insn. */
static rtx
save_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
  set_mem_alias_set (addr, s390_sr_alias_set);

  return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
}
5164 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5165 to register BASE. Return generated insn. */
static rtx
restore_fpr (rtx base, int offset, int regnum)
{
  rtx addr;
  addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
  set_mem_alias_set (addr, s390_sr_alias_set);

  return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
}
5177 /* Generate insn to save registers FIRST to LAST into
5178 the register save area located at offset OFFSET
5179 relative to register BASE. */
static rtx
save_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn, note;
  int i;
5187 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5188 addr = gen_rtx_MEM (Pmode, addr);
5189 set_mem_alias_set (addr, s390_sr_alias_set);
  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
      else
	insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));

      RTX_FRAME_RELATED_P (insn) = 1;
      return insn;
    }
5204 insn = gen_store_multiple (addr,
5205 gen_rtx_REG (Pmode, first),
5206 GEN_INT (last - first + 1));
5209 /* We need to set the FRAME_RELATED flag on all SETs
5210 inside the store-multiple pattern.
5212 However, we must not emit DWARF records for registers 2..5
5213 if they are stored for use by variable arguments ...
     ??? Unfortunately, it is not enough to simply not set the
     FRAME_RELATED flags for those SETs, because the first SET
     of the PARALLEL is always treated as if it had the flag
     set, even if it does not.  Therefore we emit a new pattern
     without those registers as a REG_FRAME_RELATED_EXPR note.  */
5223 rtx pat = PATTERN (insn);
5225 for (i = 0; i < XVECLEN (pat, 0); i++)
5226 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5227 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5229 RTX_FRAME_RELATED_P (insn) = 1;
5233 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5234 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5235 gen_rtx_REG (Pmode, 6),
5236 GEN_INT (last - 6 + 1));
5237 note = PATTERN (note);
5240 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5241 note, REG_NOTES (insn));
5243 for (i = 0; i < XVECLEN (note, 0); i++)
5244 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5245 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  return insn;
}
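/* Editorial sketch (not from the original sources): for a store multiple
   of %r6..%r15 at offset 24 from %r15 on 31-bit, the pattern built above
   has roughly the shape

     (parallel
       [(set (mem:SI (plus (reg 15) (const_int 24))) (reg:SI 6))
	(set (mem:SI (plus (reg 15) (const_int 28))) (reg:SI 7))
	...])

   and every SET in it gets RTX_FRAME_RELATED_P, so the DWARF CFI
   machinery records each individual register save.  */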
5253 /* Generate insn to restore registers FIRST to LAST from
5254 the register save area located at offset OFFSET
5255 relative to register BASE. */
static rtx
restore_gprs (rtx base, int offset, int first, int last)
{
  rtx addr, insn;

  addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5263 addr = gen_rtx_MEM (Pmode, addr);
5264 set_mem_alias_set (addr, s390_sr_alias_set);
  /* Special-case single register.  */
  if (first == last)
    {
      if (TARGET_64BIT)
	insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
      else
	insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);

      return insn;
    }

  insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
			    addr,
			    GEN_INT (last - first + 1));

  return insn;
}
5283 /* Emit code to load the GOT register. If MAYBE_DEAD is true,
5284 annotate generated insns with REG_MAYBE_DEAD notes. */
static GTY(()) rtx got_symbol;

void
s390_load_got (int maybe_dead)
{
  if (!got_symbol)
    {
      got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
      SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
    }

  if (TARGET_64BIT)
    {
      rtx insn = emit_move_insn (pic_offset_table_rtx, got_symbol);
      if (maybe_dead)
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					      REG_NOTES (insn));
    }
  else
    {
      rtx offset, insn;
5307 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
5308 UNSPEC_LTREL_OFFSET);
5309 offset = gen_rtx_CONST (Pmode, offset);
5310 offset = force_const_mem (Pmode, offset);
      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					      REG_NOTES (insn));

      offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
			       UNSPEC_LTREL_BASE);
      offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);

      insn = emit_move_insn (pic_offset_table_rtx, offset);
      if (maybe_dead)
	REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
					      REG_NOTES (insn));
    }
}
5328 /* Expand the prologue into a bunch of separate insns. */
void
s390_emit_prologue (void)
{
  rtx insn, addr;
  rtx temp_reg;
  rtx pool_start_label, pool_end_label;
  int i;

  /* Compute frame info.  */
  s390_frame_info ();
5342 /* Choose best register to use for temp use within prologue.
5343 See below for why TPF must use the register 1. */
5345 if (!current_function_is_leaf
5346 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
      && get_pool_size () < S390_POOL_CHUNK_MAX / 2
      && !TARGET_TPF)
    temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
  else
    temp_reg = gen_rtx_REG (Pmode, 1);
5353 /* Save call saved gprs. */
5355 insn = save_gprs (stack_pointer_rtx, 0,
		    cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
  emit_insn (insn);
5359 /* Dump constant pool and set constant pool register. */
  pool_start_label = gen_label_rtx ();
  pool_end_label = gen_label_rtx ();
5363 cfun->machine->literal_pool_label = pool_start_label;
  if (TARGET_64BIT)
    insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
					   pool_start_label, pool_end_label));
  else
    insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
					   pool_start_label, pool_end_label));
  /* Save fprs for variable args.  */
  if (current_function_stdarg)
    {
      /* Save fpr 0 and 2.  */
      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
      save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);

      if (TARGET_64BIT)
	{
	  /* Save fpr 4 and 6.  */
	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
	  save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	}
    }
  /* Save fprs 4 and 6 if used (31-bit ABI).  */
  else if (!TARGET_64BIT)
    {
      /* Save fpr 4 and 6.  */
      if (regs_ever_live[18] && !global_regs[18])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5398 RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (regs_ever_live[19] && !global_regs[19])
	{
	  insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
    }
  /* Decrement stack pointer.  */
  if (cfun->machine->frame_size > 0)
    {
      rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5413 /* Save incoming stack pointer into temp reg. */
      if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
	{
	  insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
	}
5420 /* Subtract frame size from stack pointer. */
      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
					    frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
	}
5437 RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn) =
	gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
			   gen_rtx_SET (VOIDmode, stack_pointer_rtx,
					gen_rtx_PLUS (Pmode, stack_pointer_rtx,
						      GEN_INT (-cfun->machine->frame_size))),
			   REG_NOTES (insn));
5445 /* Set backchain. */
      if (TARGET_BACKCHAIN)
	{
	  addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  insn = emit_insn (gen_move_insn (addr, temp_reg));
	}
5454 /* If we support asynchronous exceptions (e.g. for Java),
5455 we need to make sure the backchain pointer is set up
5456 before any possibly trapping memory access. */
      if (TARGET_BACKCHAIN && flag_non_call_exceptions)
	{
	  addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
	}
    }
  /* Save fprs 8 - 15 (64-bit ABI).  */
  if (cfun->machine->save_fprs_p)
    {
      insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT (-64)));

      for (i = 24; i < 32; i++)
	if (regs_ever_live[i] && !global_regs[i])
	  {
	    rtx addr = plus_constant (stack_pointer_rtx,
				      cfun->machine->frame_size - 64 + (i-24)*8);

	    insn = save_fpr (temp_reg, (i-24)*8, i);
	    RTX_FRAME_RELATED_P (insn) = 1;
	    REG_NOTES (insn) =
	      gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
				 gen_rtx_SET (VOIDmode,
					      gen_rtx_MEM (DFmode, addr),
					      gen_rtx_REG (DFmode, i)),
				 REG_NOTES (insn));
	  }
    }
5488 /* Set frame pointer, if needed. */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }
5496 /* Set up got pointer, if needed. */
5498 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
    s390_load_got (true);
5503 /* Generate a BAS instruction to serve as a function
5504 entry intercept to facilitate the use of tracing
5505 algorithms located at the branch target.
5507 This must use register 1. */
  if (TARGET_TPF)
    {
      rtx unkn, link;

      addr = GEN_INT (0xfe0);
5513 unkn = CONST0_RTX (SImode);
5514 link = gen_rtx_REG (Pmode, 1);
5516 emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
5518 /* Emit a blockage here so that all code
5519 lies between the profiling mechanisms. */
      emit_insn (gen_blockage ());
    }
}
5524 /* Expand the epilogue into a bunch of separate insns. */
void
s390_emit_epilogue (void)
{
  rtx frame_pointer, return_reg;
  int area_bottom, area_top, offset = 0;
  rtvec p;
  int i;
5536 /* Generate a BAS instruction to serve as a function
5537 entry intercept to facilitate the use of tracing
5538 algorithms located at the branch target.
5540 This must use register 1. */
  if (TARGET_TPF)
    {
      rtx unkn, link, addr;

      addr = GEN_INT (0xfe6);
5547 unkn = CONST0_RTX (SImode);
5548 link = gen_rtx_REG (Pmode, 1);
5550 /* Emit a blockage here so that all code
5551 lies between the profiling mechanisms. */
5552 emit_insn (gen_blockage ());
      emit_call_insn (gen_call_exp (gen_rtx_MEM (QImode, addr), unkn, link));
    }
5557 /* Check whether to use frame or stack pointer for restore. */
5559 frame_pointer = frame_pointer_needed ?
5560 hard_frame_pointer_rtx : stack_pointer_rtx;
5562 /* Compute which parts of the save area we need to access. */
  if (cfun->machine->first_restore_gpr != -1)
    {
      area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
      area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
    }
  else
    {
      area_bottom = INT_MAX;
      area_top = INT_MIN;
    }

  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
	{
	  if (area_bottom > -64)
	    area_bottom = -64;
	  if (area_top < 0)
	    area_top = 0;
	}
    }
  else
    {
      if (regs_ever_live[18] && !global_regs[18])
	{
5589 if (area_bottom > STACK_POINTER_OFFSET - 16)
5590 area_bottom = STACK_POINTER_OFFSET - 16;
5591 if (area_top < STACK_POINTER_OFFSET - 8)
5592 area_top = STACK_POINTER_OFFSET - 8;
	}
      if (regs_ever_live[19] && !global_regs[19])
	{
5596 if (area_bottom > STACK_POINTER_OFFSET - 8)
5597 area_bottom = STACK_POINTER_OFFSET - 8;
5598 if (area_top < STACK_POINTER_OFFSET)
	    area_top = STACK_POINTER_OFFSET;
	}
    }
5603 /* Check whether we can access the register save area.
5604 If not, increment the frame pointer as required. */
  if (area_top <= area_bottom)
    {
      /* Nothing to restore.  */
    }
5610 else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
5611 && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
    {
      /* Area is in range.  */
      offset = cfun->machine->frame_size;
    }
  else
    {
      rtx insn, frame_off;
5620 offset = area_bottom < 0 ? -area_bottom : 0;
5621 frame_off = GEN_INT (cfun->machine->frame_size - offset);
      if (DISP_IN_RANGE (INTVAL (frame_off)))
	{
	  insn = gen_rtx_SET (VOIDmode, frame_pointer,
			      gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
	  insn = emit_insn (insn);
	}
      else
	{
	  if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
	    frame_off = force_const_mem (Pmode, frame_off);

	  insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
	}
    }
  /* Restore call saved fprs.  */
  if (TARGET_64BIT)
    {
      if (cfun->machine->save_fprs_p)
	for (i = 24; i < 32; i++)
	  if (regs_ever_live[i] && !global_regs[i])
	    restore_fpr (frame_pointer,
			 offset - 64 + (i-24) * 8, i);
    }
  else
    {
5652 if (regs_ever_live[18] && !global_regs[18])
5653 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5654 if (regs_ever_live[19] && !global_regs[19])
	restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
    }
5658 /* Return register. */
5660 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5662 /* Restore call saved gprs. */
  if (cfun->machine->first_restore_gpr != -1)
    {
      rtx insn, addr;

      /* Check for global registers and save them
	 to the stack locations from which they will be restored.  */
      for (i = cfun->machine->first_restore_gpr;
	   i <= cfun->machine->last_save_gpr;
	   i++)
	{
	  /* These registers are special and need to be
	     restored in any case.  */
	  if (i == STACK_POINTER_REGNUM
	      || i == RETURN_REGNUM
	      || i == BASE_REGISTER
	      || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
	    continue;

	  if (global_regs[i])
	    {
	      addr = plus_constant (frame_pointer,
				    offset + i * UNITS_PER_WORD);
5687 offset + i * UNITS_PER_WORD);
5688 addr = gen_rtx_MEM (Pmode, addr);
5689 set_mem_alias_set (addr, s390_sr_alias_set);
	      emit_move_insn (addr, gen_rtx_REG (Pmode, i));
	    }
	}
      /* Fetch the return address from the stack before the load multiple;
	 this helps scheduling.  */
      if (!current_function_is_leaf)
	{
	  int return_regnum = find_unused_clobbered_reg ();
	  if (!return_regnum)
	    return_regnum = 4;
	  return_reg = gen_rtx_REG (Pmode, return_regnum);

	  addr = plus_constant (frame_pointer,
				offset + RETURN_REGNUM * UNITS_PER_WORD);
	  addr = gen_rtx_MEM (Pmode, addr);
	  set_mem_alias_set (addr, s390_sr_alias_set);
	  emit_move_insn (return_reg, addr);
	}
5711 /* ??? As references to the base register are not made
5712 explicit in insn RTX code, we have to add a barrier here
5713 to prevent incorrect scheduling. */
      emit_insn (gen_blockage ());
5717 insn = restore_gprs (frame_pointer, offset,
5718 cfun->machine->first_restore_gpr,
			   cfun->machine->last_save_gpr);
      emit_insn (insn);
    }
5723 /* Return to caller. */
5725 p = rtvec_alloc (2);
5727 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5728 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
}
5733 /* Return the size in bytes of a function argument of
5734 type TYPE and/or mode MODE. At least one of TYPE or
5735 MODE must be specified. */
static int
s390_function_arg_size (enum machine_mode mode, tree type)
{
  if (type)
    return int_size_in_bytes (type);
5743 /* No type info available for some library calls ... */
5744 if (mode != BLKmode)
5745 return GET_MODE_SIZE (mode);
  /* If we have neither type nor mode, abort.  */
  abort ();
}
5751 /* Return true if a function argument of type TYPE and mode MODE
5752 is to be passed in a floating-point register, if available. */
static int
s390_function_arg_float (enum machine_mode mode, tree type)
{
  /* Soft-float changes the ABI: no floating-point registers are used.  */
  if (TARGET_SOFT_FLOAT)
    return 0;

  /* No type info available for some library calls ...  */
  if (!type)
    return mode == SFmode || mode == DFmode;
5765 /* The ABI says that record types with a single member are treated
5766 just like that member would be. */
5767 while (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field, single = NULL_TREE;

      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL)
	    continue;

	  if (single == NULL_TREE)
	    single = TREE_TYPE (field);
	  else
	    return 0;
	}

      if (single == NULL_TREE)
	return 0;
      else
	type = single;
    }

  return TREE_CODE (type) == REAL_TYPE;
}
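/* Editorial example (not part of the original sources): by the
   single-member-record rule above, both parameters below are treated
   alike and, if a register is free, passed in a floating-point
   register.  */
#if 0
void example_f (double x);			/* REAL_TYPE */
struct example_wrap { double d; };		/* record with one member */
void example_g (struct example_wrap x);		/* treated like double */
#endif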
5791 /* Return 1 if a function argument of type TYPE and mode MODE
5792 is to be passed by reference. The ABI specifies that only
5793 structures of size 1, 2, 4, or 8 bytes are passed by value,
   all other structures (and complex numbers) are passed by
   reference.  */

int
s390_function_arg_pass_by_reference (enum machine_mode mode, tree type)
{
5800 int size = s390_function_arg_size (mode, type);
  if (type)
    {
      if (AGGREGATE_TYPE_P (type) &&
	  size != 1 && size != 2 && size != 4 && size != 8
	  && !s390_function_arg_float (mode, type))
	return 1;

      if (TREE_CODE (type) == COMPLEX_TYPE)
	return 1;
    }

  return 0;
}
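/* Editorial example (not from the original sources): a struct of size 8
   such as 'struct { int a, b; }' is passed by value in registers or on
   the stack, while a struct of size 12 matches none of 1/2/4/8 and is
   therefore passed by reference (the caller passes its address).  */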
5816 /* Update the data in CUM to advance over an argument of mode MODE and
5817 data type TYPE. (TYPE is null for libcalls where that information
5818 may not be available.). The boolean NAMED specifies whether the
5819 argument is a named argument (as opposed to an unnamed argument
5820 matching an ellipsis). */
void
s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			   tree type, int named ATTRIBUTE_UNUSED)
{
  if (s390_function_arg_pass_by_reference (mode, type))
    {
      cum->gprs += 1;
    }
  else if (s390_function_arg_float (mode, type))
    {
      cum->fprs += 1;
    }
  else
    {
      int size = s390_function_arg_size (mode, type);
      cum->gprs += ((size + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
    }
}
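/* Editorial example (not from the original sources): for a 31-bit call
   such as f (1, 2.0, "x"), the cumulative counters advance as
   gprs 0 -> 1 (int), fprs 0 -> 1 (double), gprs 1 -> 2 (pointer),
   matching the assignment of r2, f0 and r3 made by s390_function_arg
   below.  */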
5841 /* Define where to put the arguments to a function.
5842 Value is zero to push the argument on the stack,
5843 or a hard register in which to store the argument.
5845 MODE is the argument's machine mode.
5846 TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
5849 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5850 the preceding args and about the function being called.
5851 NAMED is nonzero if this argument is a named parameter
5852 (otherwise it is an extra parameter matching an ellipsis).
5854 On S/390, we use general purpose registers 2 through 6 to
5855 pass integer, pointer, and certain structure arguments, and
5856 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5857 to pass floating point arguments. All remaining arguments
5858 are pushed to the stack. */
rtx
s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
		   int named ATTRIBUTE_UNUSED)
{
  if (s390_function_arg_pass_by_reference (mode, type))
    return 0;

  if (s390_function_arg_float (mode, type))
    {
      if (cum->fprs + 1 > (TARGET_64BIT ? 4 : 2))
	return 0;
      else
	return gen_rtx (REG, mode, cum->fprs + 16);
    }
  else
    {
      int size = s390_function_arg_size (mode, type);
      int n_gprs = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

      if (cum->gprs + n_gprs > 5)
	return 0;
      else
	return gen_rtx (REG, mode, cum->gprs + 2);
    }
}
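/* Editorial example (not from the original sources): with the rules
   above, the fourth and fifth word-sized integer arguments land in
   %r5 and %r6 (cum->gprs == 3 and 4, plus 2), while a sixth one no
   longer fits (cum->gprs + 1 > 5) and is pushed to the stack.  */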
5887 /* Create and return the va_list datatype.
5889 On S/390, va_list is an array type equivalent to
      typedef struct __va_list_tag
	{
	  long __gpr;
	  long __fpr;
	  void *__overflow_arg_area;
	  void *__reg_save_area;
	} va_list[1];
5900 where __gpr and __fpr hold the number of general purpose
5901 or floating point arguments used up to now, respectively,
5902 __overflow_arg_area points to the stack location of the
5903 next argument passed on the stack, and __reg_save_area
5904 always points to the start of the register area in the
5905 call frame of the current function. The function prologue
5906 saves all registers used for argument passing into this
5907 area if the function uses variable arguments. */
tree
s390_build_va_list (void)
{
  tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5914 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl =
    build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5919 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5920 long_integer_type_node);
5921 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5922 long_integer_type_node);
  f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
		      ptr_type_node);
  f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
		      ptr_type_node);
5928 DECL_FIELD_CONTEXT (f_gpr) = record;
5929 DECL_FIELD_CONTEXT (f_fpr) = record;
5930 DECL_FIELD_CONTEXT (f_ovf) = record;
5931 DECL_FIELD_CONTEXT (f_sav) = record;
5933 TREE_CHAIN (record) = type_decl;
5934 TYPE_NAME (record) = type_decl;
5935 TYPE_FIELDS (record) = f_gpr;
5936 TREE_CHAIN (f_gpr) = f_fpr;
5937 TREE_CHAIN (f_fpr) = f_ovf;
5938 TREE_CHAIN (f_ovf) = f_sav;
5940 layout_type (record);
5942 /* The correct type is an array type of one element. */
  return build_array_type (record, build_index_type (size_zero_node));
}
5946 /* Implement va_start by filling the va_list structure VALIST.
5947 STDARG_P is always true, and ignored.
5948 NEXTARG points to the first anonymous stack argument.
5950 The following global variables are used to initialize
5951 the va_list structure:
5953 current_function_args_info:
5954 holds number of gprs and fprs used for named arguments.
5955 current_function_arg_offset_rtx:
5956 holds the offset of the first anonymous stack argument
5957 (relative to the virtual arg pointer). */
void
s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT n_gpr, n_fpr;
  int off;
  tree f_gpr, f_fpr, f_ovf, f_sav;
  tree gpr, fpr, ovf, sav, t;
5967 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5968 f_fpr = TREE_CHAIN (f_gpr);
5969 f_ovf = TREE_CHAIN (f_fpr);
5970 f_sav = TREE_CHAIN (f_ovf);
5972 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5973 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5974 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5975 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5976 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5978 /* Count number of gp and fp argument registers used. */
5980 n_gpr = current_function_args_info.gprs;
5981 n_fpr = current_function_args_info.fprs;
5983 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5984 TREE_SIDE_EFFECTS (t) = 1;
5985 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5987 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5988 TREE_SIDE_EFFECTS (t) = 1;
5989 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5991 /* Find the overflow area. */
5992 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5994 off = INTVAL (current_function_arg_offset_rtx);
5995 off = off < 0 ? 0 : off;
5996 if (TARGET_DEBUG_ARG)
5997 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5998 (int)n_gpr, (int)n_fpr, off);
6000 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
6002 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6003 TREE_SIDE_EFFECTS (t) = 1;
6004 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6006 /* Find the register save area. */
6007 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
6008 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
6009 build_int_2 (-STACK_POINTER_OFFSET, -1));
6010 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6011 TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
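/* Editorial example (not from the original sources): for
   int f (int a, double b, ...), va_start stores __gpr = 1 and
   __fpr = 1 (one named argument of each kind was consumed), points
   __overflow_arg_area at the first anonymous stack slot, and points
   __reg_save_area at the register save area of the current frame.  */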
6015 /* Implement va_arg by updating the va_list structure
6016 VALIST as required to retrieve an argument of type
6017 TYPE, and returning that argument.
   Generates code equivalent to:

   if (integral value) {
     if (size <= 4 && args.gpr < 5 ||
	 size > 4 && args.gpr < 4)
       ret = args.reg_save_area[args.gpr+8]
     else
       ret = *args.overflow_arg_area++;
   } else if (float value) {
     if (args.fgpr < 2)
       ret = args.reg_save_area[args.fpr+64]
     else
       ret = *args.overflow_arg_area++;
   } else if (aggregate value) {
     if (args.gpr < 5)
       ret = *args.reg_save_area[args.gpr]
     else
       ret = **args.overflow_arg_area++;
   } */

rtx
s390_va_arg (tree valist, tree type)
{
6042 tree f_gpr, f_fpr, f_ovf, f_sav;
6043 tree gpr, fpr, ovf, sav, reg, t, u;
6044 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
6045 rtx lab_false, lab_over, addr_rtx, r;
6047 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6048 f_fpr = TREE_CHAIN (f_gpr);
6049 f_ovf = TREE_CHAIN (f_fpr);
6050 f_sav = TREE_CHAIN (f_ovf);
6052 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6053 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
6054 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
6055 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
6056 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
6058 size = int_size_in_bytes (type);
  if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: aggregate type");
	  debug_tree (type);
	}

      /* Aggregates are passed by reference.  */
      indirect_p = 1;
      reg = gpr;
      n_reg = 1;
6072 sav_ofs = 2 * UNITS_PER_WORD;
6073 sav_scale = UNITS_PER_WORD;
      size = UNITS_PER_WORD;
      max_reg = 4;
    }
  else if (s390_function_arg_float (TYPE_MODE (type), type))
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: float type");
	  debug_tree (type);
	}

      /* FP args go in FP registers, if present.  */
      indirect_p = 0;
      reg = fpr;
      n_reg = 1;
      sav_ofs = 16 * UNITS_PER_WORD;
      sav_scale = 8;

      /* In the 64-bit ABI, up to 4 parameters are passed in fprs.  */
      max_reg = TARGET_64BIT ? 3 : 1;
    }
  else
    {
      if (TARGET_DEBUG_ARG)
	{
	  fprintf (stderr, "va_arg: other type");
	  debug_tree (type);
	}

      /* Otherwise into GP registers.  */
      indirect_p = 0;
      reg = gpr;
      n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6106 sav_ofs = 2 * UNITS_PER_WORD;
6108 if (size < UNITS_PER_WORD)
6109 sav_ofs += UNITS_PER_WORD - size;
      sav_scale = UNITS_PER_WORD;
      if (n_reg > 1)
	max_reg = 3;
      else
	max_reg = 4;
    }
6118 /* Pull the value out of the saved registers ... */
6120 lab_false = gen_label_rtx ();
6121 lab_over = gen_label_rtx ();
6122 addr_rtx = gen_reg_rtx (Pmode);
  emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
			   GEN_INT (max_reg),
			   GT, const1_rtx, Pmode, 0, lab_false);
6129 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
6133 u = build (MULT_EXPR, long_integer_type_node,
6134 reg, build_int_2 (sav_scale, 0));
6135 TREE_SIDE_EFFECTS (u) = 1;
6137 t = build (PLUS_EXPR, ptr_type_node, t, u);
6138 TREE_SIDE_EFFECTS (t) = 1;
6140 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);
  emit_jump_insn (gen_jump (lab_over));
  emit_barrier ();
6147 emit_label (lab_false);
6149 /* ... Otherwise out of the overflow area. */
6151 t = save_expr (ovf);
  /* In the 64-bit ABI, each argument on the stack is allocated a full
     64-bit slot.  */
  if (size < UNITS_PER_WORD)
    {
      t = build (PLUS_EXPR, TREE_TYPE (t), t,
		 build_int_2 (UNITS_PER_WORD - size, 0));
      t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      t = save_expr (ovf);
    }
6165 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
  if (r != addr_rtx)
    emit_move_insn (addr_rtx, r);
6169 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
6170 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6171 TREE_SIDE_EFFECTS (t) = 1;
6172 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6174 emit_label (lab_over);
  /* If fewer than max_reg registers have been retrieved from
     the register save area, increment the register counter.  */
6179 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6180 build_int_2 (n_reg, 0));
6181 TREE_SIDE_EFFECTS (u) = 1;
6182 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* If the argument was passed by reference, dereference the pointer
     that was just loaded.  */
  if (indirect_p)
    {
      r = gen_rtx_MEM (Pmode, addr_rtx);
      set_mem_alias_set (r, get_varargs_alias_set ());
      emit_move_insn (addr_rtx, r);
    }

  return addr_rtx;
}
/* Builtins.  */

enum s390_builtin
{
  S390_BUILTIN_THREAD_POINTER,
  S390_BUILTIN_SET_THREAD_POINTER,

  S390_BUILTIN_max
};

static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_64,
  CODE_FOR_set_tp_64
};

static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
  CODE_FOR_get_tp_31,
  CODE_FOR_set_tp_31
};

static void
s390_init_builtins (void)
{
  tree ftype;
6221 ftype = build_function_type (ptr_type_node, void_list_node);
6222 builtin_function ("__builtin_thread_pointer", ftype,
6223 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6226 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6227 builtin_function ("__builtin_set_thread_pointer", ftype,
6228 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
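/* Editorial example (not part of the original sources): after this
   registration, user code can write the following, which
   s390_expand_builtin below expands through the code_for_builtin
   tables.  */
#if 0
static void
example_tls (void)
{
  void *tp = __builtin_thread_pointer ();
  __builtin_set_thread_pointer (tp);
}
#endif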
6232 /* Expand an expression EXP that calls a built-in function,
6233 with result going to TARGET if that's convenient
6234 (and in mode MODE if that's convenient).
6235 SUBTARGET may be used as the target for computing one of EXP's operands.
6236 IGNORE is nonzero if the value is to be ignored. */
6239 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6240 enum machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
#define MAX_ARGS 2

  unsigned int const *code_for_builtin =
6246 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6248 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6249 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6250 tree arglist = TREE_OPERAND (exp, 1);
6251 enum insn_code icode;
  rtx op[MAX_ARGS], pat;
  int arity;
  bool nonvoid;
6256 if (fcode >= S390_BUILTIN_max)
6257 internal_error ("bad builtin fcode");
6258 icode = code_for_builtin[fcode];
6260 internal_error ("bad builtin fcode");
6262 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
  for (arglist = TREE_OPERAND (exp, 1), arity = 0;
       arglist;
       arglist = TREE_CHAIN (arglist), arity++)
    {
6268 const struct insn_operand_data *insn_op;
6270 tree arg = TREE_VALUE (arglist);
      if (arg == error_mark_node)
	return NULL_RTX;
      if (arity > MAX_ARGS)
	return NULL_RTX;
6276 insn_op = &insn_data[icode].operand[arity + nonvoid];
6278 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6280 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
	op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
    }

  if (nonvoid)
    {
      enum machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
    }
  switch (arity)
    {
    case 0:
      pat = GEN_FCN (icode) (target);
      break;
    case 1:
      if (nonvoid)
	pat = GEN_FCN (icode) (target, op[0]);
      else
	pat = GEN_FCN (icode) (op[0]);
      break;
    case 2:
      pat = GEN_FCN (icode) (target, op[0], op[1]);
      break;
    default:
      abort ();
    }
  if (!pat)
    return NULL_RTX;
  emit_insn (pat);

  if (nonvoid)
    return target;
  else
    return const0_rtx;
}
/* Output assembly code for the trampoline template to
   stdio stream FILE.
6324 On S/390, we use gpr 1 internally in the trampoline code;
6325 gpr 0 is used to hold the static chain. */
void
s390_trampoline_template (FILE *file)
{
  if (TARGET_64BIT)
    {
      fprintf (file, "larl\t%s,0f\n", reg_names[1]);
      fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
      fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
      fprintf (file, "br\t%s\n", reg_names[1]);
      fprintf (file, "0:\t.quad\t0\n");
      fprintf (file, ".quad\t0\n");
    }
  else
    {
      fprintf (file, "basr\t%s,0\n", reg_names[1]);
      fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
      fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
      fprintf (file, "br\t%s\n", reg_names[1]);
      fprintf (file, ".long\t0\n");
      fprintf (file, ".long\t0\n");
    }
}
6350 /* Emit RTL insns to initialize the variable parts of a trampoline.
6351 FNADDR is an RTX for the address of the function's pure code.
6352 CXT is an RTX for the static chain value for the function. */
void
s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
{
  emit_move_insn (gen_rtx
		  (MEM, Pmode,
		   memory_address (Pmode,
		   plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
  emit_move_insn (gen_rtx
		  (MEM, Pmode,
		   memory_address (Pmode,
		   plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
}
6367 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6368 LOW and HIGH, independent of the host word size. */
rtx
s390_gen_rtx_const_DI (int high, int low)
{
#if HOST_BITS_PER_WIDE_INT >= 64
  HOST_WIDE_INT val;
  val = (HOST_WIDE_INT) high;
  val <<= 32;
  /* Mask LOW so that sign-extension cannot clobber the high word.  */
  val |= (HOST_WIDE_INT) low & 0xffffffff;

  return GEN_INT (val);
#else
#if HOST_BITS_PER_WIDE_INT >= 32
  return immed_double_const ((HOST_WIDE_INT) low, (HOST_WIDE_INT) high, DImode);
#else
  abort ();
#endif
#endif
}
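/* Editorial example (not from the original sources):
   s390_gen_rtx_const_DI (0x12345678, 0x9abcdef0) yields a CONST_INT
   (or a CONST_DOUBLE on narrow hosts) holding the 64-bit value
   0x123456789abcdef0, regardless of HOST_BITS_PER_WIDE_INT.  */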
6389 /* Output assembler code to FILE to increment profiler label # LABELNO
6390 for profiling a function entry. */
void
s390_function_profiler (FILE *file, int labelno)
{
  rtx op[7];
  char label[128];

  ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6400 fprintf (file, "# function profiler \n");
6402 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6403 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6404 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6406 op[2] = gen_rtx_REG (Pmode, 1);
6407 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6408 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
6410 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
6413 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
6414 op[4] = gen_rtx_CONST (Pmode, op[4]);
6419 output_asm_insn ("stg\t%0,%1", op);
6420 output_asm_insn ("larl\t%2,%3", op);
6421 output_asm_insn ("brasl\t%0,%4", op);
6422 output_asm_insn ("lg\t%0,%1", op);
6426 op[6] = gen_label_rtx ();
6428 output_asm_insn ("st\t%0,%1", op);
6429 output_asm_insn ("bras\t%2,%l6", op);
6430 output_asm_insn (".long\t%4", op);
6431 output_asm_insn (".long\t%3", op);
6432 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6433 output_asm_insn ("l\t%0,0(%2)", op);
6434 output_asm_insn ("l\t%2,4(%2)", op);
6435 output_asm_insn ("basr\t%0,%0", op);
6436 output_asm_insn ("l\t%0,%1", op);
6440 op[5] = gen_label_rtx ();
6441 op[6] = gen_label_rtx ();
6443 output_asm_insn ("st\t%0,%1", op);
6444 output_asm_insn ("bras\t%2,%l6", op);
6445 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[5]));
6446 output_asm_insn (".long\t%4-%l5", op);
6447 output_asm_insn (".long\t%3-%l5", op);
6448 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6449 output_asm_insn ("lr\t%0,%2", op);
6450 output_asm_insn ("a\t%0,0(%2)", op);
6451 output_asm_insn ("a\t%2,4(%2)", op);
6452 output_asm_insn ("basr\t%0,%0", op);
6453 output_asm_insn ("l\t%0,%1", op);
6457 /* Select section for constant in constant pool. In 32-bit mode,
6458 constants go in the function section; in 64-bit mode in .rodata. */
6461 s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
6462 rtx x ATTRIBUTE_UNUSED,
			 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
{
  if (TARGET_64BIT)
    readonly_data_section ();
  else
    function_section (current_function_decl);
}
6471 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6472 into its SYMBOL_REF_FLAGS. */
static void
s390_encode_section_info (tree decl, rtx rtl, int first)
{
  default_encode_section_info (decl, rtl, first);
6479 /* If a variable has a forced alignment to < 2 bytes, mark it with
6480 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
6481 if (TREE_CODE (decl) == VAR_DECL
6482 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
    SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
}
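/* Editorial example (not part of the original sources): the declaration
   below has DECL_USER_ALIGN set and DECL_ALIGN == 8 bits < 16, so its
   SYMBOL_REF receives SYMBOL_FLAG_ALIGN1 and is never used as a LARL
   operand (LARL requires at least 2-byte alignment).  */
#if 0
char example_byte __attribute__ ((aligned (1)));
#endif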
6486 /* Output thunk to FILE that implements a C++ virtual function call (with
6487 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
6488 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6489 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6490 relative to the resulting this pointer. */
static void
s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
		      HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
		      tree function)
{
  rtx op[10];
  int nonlocal = 0;

  /* Operand 0 is the target function.  */
6501 op[0] = XEXP (DECL_RTL (function), 0);
  if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
    {
      nonlocal = 1;
      op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
			      TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
      op[0] = gen_rtx_CONST (Pmode, op[0]);
    }
6510 /* Operand 1 is the 'this' pointer. */
6511 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
    op[1] = gen_rtx_REG (Pmode, 3);
  else
    op[1] = gen_rtx_REG (Pmode, 2);
6516 /* Operand 2 is the delta. */
6517 op[2] = GEN_INT (delta);
6519 /* Operand 3 is the vcall_offset. */
6520 op[3] = GEN_INT (vcall_offset);
6522 /* Operand 4 is the temporary register. */
6523 op[4] = gen_rtx_REG (Pmode, 1);
  /* Operands 5 to 8 can be used as labels.  */
  op[5] = NULL_RTX;
  op[6] = NULL_RTX;
  op[7] = NULL_RTX;
  op[8] = NULL_RTX;

  /* Operand 9 can be used for temporary register.  */
  op[9] = NULL_RTX;
  /* Generate code.  */
  if (TARGET_64BIT)
    {
      /* Setup literal pool pointer if required.  */
6538 if ((!DISP_IN_RANGE (delta)
6539 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
6540 || (!DISP_IN_RANGE (vcall_offset)
6541 && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
	{
	  op[5] = gen_label_rtx ();
	  output_asm_insn ("larl\t%4,%5", op);
	}
      /* Add DELTA to this pointer.  */
      if (delta)
	{
	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
	    output_asm_insn ("la\t%1,%2(%1)", op);
6552 else if (DISP_IN_RANGE (delta))
6553 output_asm_insn ("lay\t%1,%2(%1)", op);
	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
	    output_asm_insn ("aghi\t%1,%2", op);
	  else
	    {
	      op[6] = gen_label_rtx ();
	      output_asm_insn ("agf\t%1,%6-%5(%4)", op);
	    }
	}
      /* Perform vcall adjustment.  */
      if (vcall_offset)
	{
	  if (DISP_IN_RANGE (vcall_offset))
	    {
	      output_asm_insn ("lg\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,%3(%4)", op);
	    }
	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
	    {
	      output_asm_insn ("lghi\t%4,%3", op);
	      output_asm_insn ("ag\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,0(%4)", op);
	    }
	  else
	    {
	      op[7] = gen_label_rtx ();
	      output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
	      output_asm_insn ("ag\t%4,0(%1)", op);
	      output_asm_insn ("ag\t%1,0(%4)", op);
	    }
	}
6586 /* Jump to target. */
6587 output_asm_insn ("jg\t%0", op);
      /* Output literal pool if required.  */
      if (op[5])
	{
	  output_asm_insn (".align\t4", op);
	  (*targetm.asm_out.internal_label) (file, "L",
					     CODE_LABEL_NUMBER (op[5]));
	}
      if (op[6])
	{
	  (*targetm.asm_out.internal_label) (file, "L",
					     CODE_LABEL_NUMBER (op[6]));
	  output_asm_insn (".long\t%2", op);
	}
      if (op[7])
	{
	  (*targetm.asm_out.internal_label) (file, "L",
					     CODE_LABEL_NUMBER (op[7]));
	  output_asm_insn (".long\t%3", op);
	}
    }
  else
    {
6611 /* Setup base pointer if required. */
6613 || (!DISP_IN_RANGE (delta)
6614 && !CONST_OK_FOR_LETTER_P (delta, 'K'))
	  || (!DISP_IN_RANGE (vcall_offset)
	      && !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')))
	{
6618 op[5] = gen_label_rtx ();
6619 output_asm_insn ("basr\t%4,0", op);
6620 (*targetm.asm_out.internal_label) (file, "L",
					     CODE_LABEL_NUMBER (op[5]));
	}
      /* Add DELTA to this pointer.  */
      if (delta)
	{
	  if (CONST_OK_FOR_LETTER_P (delta, 'J'))
	    output_asm_insn ("la\t%1,%2(%1)", op);
6629 else if (DISP_IN_RANGE (delta))
6630 output_asm_insn ("lay\t%1,%2(%1)", op);
	  else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
	    output_asm_insn ("ahi\t%1,%2", op);
	  else
	    {
	      op[6] = gen_label_rtx ();
	      output_asm_insn ("a\t%1,%6-%5(%4)", op);
	    }
	}
      /* Perform vcall adjustment.  On 31-bit, the vtable pointer loads
	 must use "l", not the 64-bit "lg".  */
      if (vcall_offset)
	{
	  if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
	    {
	      output_asm_insn ("l\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,%3(%4)", op);
	    }
	  else if (DISP_IN_RANGE (vcall_offset))
	    {
	      output_asm_insn ("l\t%4,0(%1)", op);
	      output_asm_insn ("ay\t%1,%3(%4)", op);
	    }
	  else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
	    {
	      output_asm_insn ("lhi\t%4,%3", op);
	      output_asm_insn ("a\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,0(%4)", op);
	    }
	  else
	    {
	      op[7] = gen_label_rtx ();
	      output_asm_insn ("l\t%4,%7-%5(%4)", op);
	      output_asm_insn ("a\t%4,0(%1)", op);
	      output_asm_insn ("a\t%1,0(%4)", op);
	    }
6667 /* We had to clobber the base pointer register.
6668 Re-setup the base pointer (with a different base). */
6669 op[5] = gen_label_rtx ();
6670 output_asm_insn ("basr\t%4,0", op);
6671 (*targetm.asm_out.internal_label) (file, "L",
6672 CODE_LABEL_NUMBER (op[5]));
	}

      /* Jump to target.  */
      op[8] = gen_label_rtx ();

      if (!flag_pic)
	output_asm_insn ("l\t%4,%8-%5(%4)", op);
      else if (!nonlocal)
	output_asm_insn ("a\t%4,%8-%5(%4)", op);
      /* We cannot call through .plt, since .plt requires %r12 loaded.  */
      else if (flag_pic == 1)
	{
	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
	  output_asm_insn ("l\t%4,%0(%4)", op);
	}
      else if (flag_pic == 2)
	{
	  op[9] = gen_rtx_REG (Pmode, 0);
	  output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
	  output_asm_insn ("a\t%4,%8-%5(%4)", op);
	  output_asm_insn ("ar\t%4,%9", op);
	  output_asm_insn ("l\t%4,0(%4)", op);
	}

      output_asm_insn ("br\t%4", op);
6699 /* Output literal pool. */
6700 output_asm_insn (".align\t4", op);
      if (nonlocal && flag_pic == 2)
	output_asm_insn (".long\t%0", op);
      if (nonlocal)
	{
	  op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
	  SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
	}

      (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[8]));
      if (!flag_pic)
	output_asm_insn (".long\t%0", op);
      else
	output_asm_insn (".long\t%0-%5", op);
6718 (*targetm.asm_out.internal_label) (file, "L",
6719 CODE_LABEL_NUMBER (op[6]));
6720 output_asm_insn (".long\t%2", op);
6724 (*targetm.asm_out.internal_label) (file, "L",
6725 CODE_LABEL_NUMBER (op[7]));
6726 output_asm_insn (".long\t%3", op);
static bool
s390_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == SImode || (TARGET_64BIT && mode == DImode));
}
6737 /* How to allocate a 'struct machine_function'. */
6739 static struct machine_function *
s390_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (struct machine_function));
}
6745 #include "gt-s390.h"