1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
/* NOTE(review): this extract is line-sampled -- the embedded original line
   numbers jump (e.g. 68 -> 70, 84 -> 87), so several prototypes are
   truncated mid-declaration and the machine_function struct below is
   missing its opening/closing braces.  Text kept byte-identical; consult
   the full gcc/config/s390/s390.c before editing.  */
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
/* Forward declarations for the target-hook implementations installed in
   the TARGET_* macro table further down.  */
59 static bool s390_assemble_integer (rtx, unsigned int, int);
60 static void s390_select_rtx_section (enum machine_mode, rtx,
61 unsigned HOST_WIDE_INT);
62 static void s390_encode_section_info (tree, rtx, int);
63 static bool s390_cannot_force_const_mem (rtx);
64 static rtx s390_delegitimize_address (rtx);
65 static bool s390_return_in_memory (tree, tree);
66 static void s390_init_builtins (void);
67 static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
68 static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
70 static enum attr_type s390_safe_attr_type (rtx);
72 static int s390_adjust_cost (rtx, rtx, rtx, int);
73 static int s390_adjust_priority (rtx, int);
74 static int s390_issue_rate (void);
75 static int s390_first_cycle_multipass_dfa_lookahead (void);
76 static bool s390_rtx_costs (rtx, int, int, int *);
77 static int s390_address_cost (rtx);
78 static void s390_reorg (void);
79 static bool s390_valid_pointer_mode (enum machine_mode);
80 static tree s390_build_builtin_va_list (void);
81 static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
82 static bool s390_function_ok_for_sibcall (tree, tree);
83 static bool s390_call_saved_register_used (tree);
84 static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
/* Target hook macro table: each #undef/#define pair overrides a default
   target hook with the S/390-specific implementation declared above.  */
87 #undef TARGET_ASM_ALIGNED_HI_OP
88 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
89 #undef TARGET_ASM_ALIGNED_DI_OP
90 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
91 #undef TARGET_ASM_INTEGER
92 #define TARGET_ASM_INTEGER s390_assemble_integer
94 #undef TARGET_ASM_OPEN_PAREN
95 #define TARGET_ASM_OPEN_PAREN ""
97 #undef TARGET_ASM_CLOSE_PAREN
98 #define TARGET_ASM_CLOSE_PAREN ""
100 #undef TARGET_ASM_SELECT_RTX_SECTION
101 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
103 #undef TARGET_ENCODE_SECTION_INFO
104 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
107 #undef TARGET_HAVE_TLS
108 #define TARGET_HAVE_TLS true
110 #undef TARGET_CANNOT_FORCE_CONST_MEM
111 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
113 #undef TARGET_DELEGITIMIZE_ADDRESS
114 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
116 #undef TARGET_RETURN_IN_MEMORY
117 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
119 #undef TARGET_INIT_BUILTINS
120 #define TARGET_INIT_BUILTINS s390_init_builtins
121 #undef TARGET_EXPAND_BUILTIN
122 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
124 #undef TARGET_ASM_OUTPUT_MI_THUNK
125 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
126 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
127 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
129 #undef TARGET_SCHED_ADJUST_COST
130 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
131 #undef TARGET_SCHED_ADJUST_PRIORITY
132 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
133 #undef TARGET_SCHED_ISSUE_RATE
134 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
135 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
136 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE hook_int_void_1
137 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
138 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
140 #undef TARGET_RTX_COSTS
141 #define TARGET_RTX_COSTS s390_rtx_costs
142 #undef TARGET_ADDRESS_COST
143 #define TARGET_ADDRESS_COST s390_address_cost
145 #undef TARGET_MACHINE_DEPENDENT_REORG
146 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
148 #undef TARGET_VALID_POINTER_MODE
149 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
151 #undef TARGET_BUILD_BUILTIN_VA_LIST
152 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
153 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
154 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
156 #undef TARGET_PROMOTE_FUNCTION_ARGS
157 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
158 #undef TARGET_PROMOTE_FUNCTION_RETURN
159 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
160 #undef TARGET_PASS_BY_REFERENCE
161 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
163 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
164 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
/* The single instance of the target hook vector, built from the macros
   above via TARGET_INITIALIZER.  */
166 struct gcc_target targetm = TARGET_INITIALIZER;
168 extern int reload_completed;
170 /* The alias set for prologue/epilogue register save/restore. */
171 static int s390_sr_alias_set = 0;
173 /* Save information from a "cmpxx" operation until the branch or scc is
175 rtx s390_compare_op0, s390_compare_op1;
177 /* Structure used to hold the components of a S/390 memory
178 address. A legitimate address on S/390 is of the general
180 base + index + displacement
181 where any of the components is optional.
183 base and index are registers of the class ADDR_REGS,
184 displacement is an unsigned 12-bit immediate constant. */
194 /* Which cpu are we tuning for. */
195 enum processor_type s390_tune;
196 enum processor_flags s390_tune_flags;
197 /* Which instruction set architecture to use. */
198 enum processor_type s390_arch;
199 enum processor_flags s390_arch_flags;
201 /* Strings to hold which cpu and instruction set architecture to use. */
202 const char *s390_tune_string; /* for -mtune=<xxx> */
203 const char *s390_arch_string; /* for -march=<xxx> */
205 /* Define the structure for the machine field in struct function. */
/* NOTE(review): struct body below is sampled -- braces and several
   members (e.g. save_fprs_p, first_save_gpr, base_reg) are missing.  */
207 struct machine_function GTY(())
209 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
212 /* Set if return address needs to be saved. */
213 bool save_return_addr_p;
215 /* Number of first and last gpr to be saved, restored. */
217 int first_restore_gpr;
219 int last_restore_gpr;
221 /* Size of stack frame. */
222 HOST_WIDE_INT frame_size;
224 /* Literal pool base register. */
227 /* Some local-dynamic TLS symbol name. */
228 const char *some_ld_name;
/* Forward declarations for file-local helpers defined later in the file.  */
231 static int s390_match_ccmode_set (rtx, enum machine_mode);
232 static int s390_branch_condition_mask (rtx);
233 static const char *s390_branch_condition_mnemonic (rtx, int);
234 static int check_mode (rtx, enum machine_mode *);
235 static int general_s_operand (rtx, enum machine_mode, int);
236 static int s390_short_displacement (rtx);
237 static int s390_decompose_address (rtx, struct s390_address *);
238 static rtx get_thread_pointer (void);
239 static rtx legitimize_tls_address (rtx, rtx);
240 static void print_shift_count_operand (FILE *, rtx);
241 static const char *get_some_local_dynamic_name (void);
242 static int get_some_local_dynamic_name_1 (rtx *, void *);
243 static int reg_used_in_mem_p (int, rtx);
244 static int addr_generation_dependency_p (rtx, rtx);
245 static int s390_split_branches (void);
246 static void annotate_constant_pool_refs (rtx *x);
247 static void find_constant_pool_ref (rtx, rtx *);
248 static void replace_constant_pool_ref (rtx *, rtx, rtx);
249 static rtx find_ltrel_base (rtx);
250 static void replace_ltrel_base (rtx *);
251 static void s390_optimize_prolog (bool);
252 static int find_unused_clobbered_reg (void);
253 static void s390_frame_info (int, int);
254 static rtx save_fpr (rtx, int, int);
255 static rtx restore_fpr (rtx, int, int);
256 static rtx save_gprs (rtx, int, int, int);
257 static rtx restore_gprs (rtx, int, int, int);
258 static int s390_function_arg_size (enum machine_mode, tree);
259 static bool s390_function_arg_float (enum machine_mode, tree);
260 static struct machine_function * s390_init_machine_status (void);
262 /* Check whether integer displacement is in range. */
/* 20-bit signed range with the long-displacement facility (z990+),
   otherwise the classic unsigned 12-bit range.  */
263 #define DISP_IN_RANGE(d) \
264 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
265 : ((d) >= 0 && (d) <= 4095))
/* NOTE(review): all functions in this region are line-sampled -- opening
   and closing braces, blank lines and several return statements are
   missing (embedded line numbers jump, e.g. 282 -> 296).  Kept
   byte-identical; do not edit without the complete source file.  */
267 /* Return true if SET either doesn't set the CC register, or else
268 the source and destination have matching CC modes and that
269 CC mode is at least as constrained as REQ_MODE. */
272 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
274 enum machine_mode set_mode;
276 if (GET_CODE (set) != SET)
279 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
282 set_mode = GET_MODE (SET_DEST (set));
/* NOTE(review): the switch over set_mode (original lines 284-295) is
   missing from this extract.  */
296 if (req_mode != set_mode)
301 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
302 && req_mode != CCSRmode && req_mode != CCURmode)
308 if (req_mode != CCAmode)
316 return (GET_MODE (SET_SRC (set)) == set_mode);
319 /* Return true if every SET in INSN that sets the CC register
320 has source and destination with matching CC modes and that
321 CC mode is at least as constrained as REQ_MODE.
322 If REQ_MODE is VOIDmode, always return false. */
325 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
329 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
330 if (req_mode == VOIDmode)
333 if (GET_CODE (PATTERN (insn)) == SET)
334 return s390_match_ccmode_set (PATTERN (insn), req_mode);
336 if (GET_CODE (PATTERN (insn)) == PARALLEL)
337 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
339 rtx set = XVECEXP (PATTERN (insn), 0, i);
340 if (GET_CODE (set) == SET)
341 if (!s390_match_ccmode_set (set, req_mode))
348 /* If a test-under-mask instruction can be used to implement
349 (compare (and ... OP1) OP2), return the CC mode required
350 to do that. Otherwise, return VOIDmode.
351 MIXED is true if the instruction can distinguish between
352 CC1 and CC2 for mixed selected bits (TMxx), it is false
353 if the instruction cannot (TM). */
356 s390_tm_ccmode (rtx op1, rtx op2, int mixed)
360 /* ??? Fixme: should work on CONST_DOUBLE as well. */
361 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
364 /* Selected bits all zero: CC0. */
365 if (INTVAL (op2) == 0)
368 /* Selected bits all one: CC3. */
369 if (INTVAL (op2) == INTVAL (op1))
372 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
375 bit1 = exact_log2 (INTVAL (op2));
376 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
377 if (bit0 != -1 && bit1 != -1)
378 return bit0 > bit1 ? CCT1mode : CCT2mode;
384 /* Given a comparison code OP (EQ, NE, etc.) and the operands
385 OP0 and OP1 of a COMPARE, return the mode to be used for the
389 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
/* NOTE(review): the enclosing switch on CODE (original line ~391) is
   missing; the conditions below are the bodies of its cases.  */
395 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
396 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
398 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
399 || GET_CODE (op1) == NEG)
400 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
403 if (GET_CODE (op0) == AND)
405 /* Check whether we can potentially do it via TM. */
406 enum machine_mode ccmode;
407 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
408 if (ccmode != VOIDmode)
410 /* Relax CCTmode to CCZmode to allow fall-back to AND
411 if that turns out to be beneficial. */
412 return ccmode == CCTmode ? CCZmode : ccmode;
416 if (register_operand (op0, HImode)
417 && GET_CODE (op1) == CONST_INT
418 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
420 if (register_operand (op0, QImode)
421 && GET_CODE (op1) == CONST_INT
422 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
431 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
432 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
434 if (INTVAL (XEXP((op0), 1)) < 0)
447 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
448 && GET_CODE (op1) != CONST_INT)
454 if (GET_CODE (op0) == PLUS
455 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
458 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
459 && GET_CODE (op1) != CONST_INT)
465 if (GET_CODE (op0) == MINUS
466 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
469 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
470 && GET_CODE (op1) != CONST_INT)
479 /* Emit a compare instruction suitable to implement the comparison
480 OP0 CODE OP1. Return the correct condition RTL to be placed in
481 the IF_THEN_ELSE of the conditional branch testing the result. */
484 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
486 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
487 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
489 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
490 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
493 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
494 unconditional jump, else a conditional jump under condition COND. */
497 s390_emit_jump (rtx target, rtx cond)
501 target = gen_rtx_LABEL_REF (VOIDmode, target);
503 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
505 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
506 emit_jump_insn (insn);
509 /* Return nonzero if OP is a valid comparison operator
510 for an ALC condition in mode MODE. */
513 s390_alc_comparison (rtx op, enum machine_mode mode)
515 if (mode != VOIDmode && mode != GET_MODE (op))
518 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
521 if (!COMPARISON_P (op))
524 if (GET_CODE (XEXP (op, 0)) != REG
525 || REGNO (XEXP (op, 0)) != CC_REGNUM
526 || XEXP (op, 1) != const0_rtx)
/* NOTE(review): the case labels of the switch below (CCL1mode etc.)
   were dropped by the sampling; only the return statements remain.  */
529 switch (GET_MODE (XEXP (op, 0)))
532 return GET_CODE (op) == LTU;
535 return GET_CODE (op) == LEU;
538 return GET_CODE (op) == GEU;
541 return GET_CODE (op) == GTU;
544 return GET_CODE (op) == LTU;
547 return GET_CODE (op) == UNGT;
550 return GET_CODE (op) == UNLT;
557 /* Return nonzero if OP is a valid comparison operator
558 for an SLB condition in mode MODE. */
561 s390_slb_comparison (rtx op, enum machine_mode mode)
563 if (mode != VOIDmode && mode != GET_MODE (op))
566 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
569 if (!COMPARISON_P (op))
572 if (GET_CODE (XEXP (op, 0)) != REG
573 || REGNO (XEXP (op, 0)) != CC_REGNUM
574 || XEXP (op, 1) != const0_rtx)
577 switch (GET_MODE (XEXP (op, 0)))
580 return GET_CODE (op) == GEU;
583 return GET_CODE (op) == GTU;
586 return GET_CODE (op) == LTU;
589 return GET_CODE (op) == LEU;
592 return GET_CODE (op) == GEU;
595 return GET_CODE (op) == LE;
598 return GET_CODE (op) == GE;
/* NOTE(review): sampled region -- every switch in
   s390_branch_condition_mask has lost its mode case labels (CCZmode,
   CCSmode, ...), so which CC-mode each group belongs to cannot be
   recovered from this extract.  Kept byte-identical.  */
605 /* Return branch condition mask to implement a branch
606 specified by CODE. */
609 s390_branch_condition_mask (rtx code)
/* Bit N of the 4-bit mask selects condition code N; CC0 is the MSB,
   matching the BRC instruction's M1 field.  */
611 const int CC0 = 1 << 3;
612 const int CC1 = 1 << 2;
613 const int CC2 = 1 << 1;
614 const int CC3 = 1 << 0;
616 if (GET_CODE (XEXP (code, 0)) != REG
617 || REGNO (XEXP (code, 0)) != CC_REGNUM
618 || XEXP (code, 1) != const0_rtx)
621 switch (GET_MODE (XEXP (code, 0)))
624 switch (GET_CODE (code))
627 case NE: return CC1 | CC2 | CC3;
634 switch (GET_CODE (code))
637 case NE: return CC0 | CC2 | CC3;
644 switch (GET_CODE (code))
647 case NE: return CC0 | CC1 | CC3;
654 switch (GET_CODE (code))
657 case NE: return CC0 | CC1 | CC2;
664 switch (GET_CODE (code))
666 case EQ: return CC0 | CC2;
667 case NE: return CC1 | CC3;
674 switch (GET_CODE (code))
676 case LTU: return CC2 | CC3; /* carry */
677 case GEU: return CC0 | CC1; /* no carry */
684 switch (GET_CODE (code))
686 case GTU: return CC0 | CC1; /* borrow */
687 case LEU: return CC2 | CC3; /* no borrow */
694 switch (GET_CODE (code))
696 case EQ: return CC0 | CC2;
697 case NE: return CC1 | CC3;
698 case LTU: return CC1;
699 case GTU: return CC3;
700 case LEU: return CC1 | CC2;
701 case GEU: return CC2 | CC3;
707 switch (GET_CODE (code))
710 case NE: return CC1 | CC2 | CC3;
711 case LTU: return CC1;
712 case GTU: return CC2;
713 case LEU: return CC0 | CC1;
714 case GEU: return CC0 | CC2;
721 switch (GET_CODE (code))
724 case NE: return CC2 | CC1 | CC3;
725 case LTU: return CC2;
726 case GTU: return CC1;
727 case LEU: return CC0 | CC2;
728 case GEU: return CC0 | CC1;
735 switch (GET_CODE (code))
738 case NE: return CC1 | CC2 | CC3;
739 case LT: return CC1 | CC3;
741 case LE: return CC0 | CC1 | CC3;
742 case GE: return CC0 | CC2;
749 switch (GET_CODE (code))
752 case NE: return CC1 | CC2 | CC3;
754 case GT: return CC2 | CC3;
755 case LE: return CC0 | CC1;
756 case GE: return CC0 | CC2 | CC3;
763 switch (GET_CODE (code))
766 case NE: return CC1 | CC2 | CC3;
769 case LE: return CC0 | CC1;
770 case GE: return CC0 | CC2;
771 case UNORDERED: return CC3;
772 case ORDERED: return CC0 | CC1 | CC2;
773 case UNEQ: return CC0 | CC3;
774 case UNLT: return CC1 | CC3;
775 case UNGT: return CC2 | CC3;
776 case UNLE: return CC0 | CC1 | CC3;
777 case UNGE: return CC0 | CC2 | CC3;
778 case LTGT: return CC1 | CC2;
785 switch (GET_CODE (code))
788 case NE: return CC2 | CC1 | CC3;
791 case LE: return CC0 | CC2;
792 case GE: return CC0 | CC1;
793 case UNORDERED: return CC3;
794 case ORDERED: return CC0 | CC2 | CC1;
795 case UNEQ: return CC0 | CC3;
796 case UNLT: return CC2 | CC3;
797 case UNGT: return CC1 | CC3;
798 case UNLE: return CC0 | CC2 | CC3;
799 case UNGE: return CC0 | CC1 | CC3;
800 case LTGT: return CC2 | CC1;
811 /* If INV is false, return assembler mnemonic string to implement
812 a branch specified by CODE. If INV is true, return mnemonic
813 for the corresponding inverted branch. */
816 s390_branch_condition_mnemonic (rtx code, int inv)
/* Indexed by the 4-bit condition mask; entries 0 and 15 (never/always)
   are NULL because they are not valid conditional-branch masks.  */
818 static const char *const mnemonic[16] =
820 NULL, "o", "h", "nle",
821 "l", "nhe", "lh", "ne",
822 "e", "nlh", "he", "nl",
823 "le", "nh", "no", NULL
826 int mask = s390_branch_condition_mask (code);
831 if (mask < 1 || mask > 14)
834 return mnemonic[mask];
837 /* Return the part of op which has a value different from def.
838 The size of the part is determined by mode.
839 Use this function only if you already know that op really
840 contains such a part. */
842 unsigned HOST_WIDE_INT
843 s390_extract_part (rtx op, enum machine_mode mode, int def)
845 unsigned HOST_WIDE_INT value = 0;
846 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
847 int part_bits = GET_MODE_BITSIZE (mode);
848 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
851 for (i = 0; i < max_parts; i++)
854 value = (unsigned HOST_WIDE_INT) INTVAL (op);
858 if ((value & part_mask) != (def & part_mask))
859 return value & part_mask;
865 /* If OP is an integer constant of mode MODE with exactly one
866 part of mode PART_MODE unequal to DEF, return the number of that
867 part. Otherwise, return -1. */
870 s390_single_part (rtx op,
871 enum machine_mode mode,
872 enum machine_mode part_mode,
875 unsigned HOST_WIDE_INT value = 0;
876 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
877 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
880 if (GET_CODE (op) != CONST_INT)
883 for (i = 0; i < n_parts; i++)
886 value = (unsigned HOST_WIDE_INT) INTVAL (op);
888 value >>= GET_MODE_BITSIZE (part_mode);
890 if ((value & part_mask) != (def & part_mask))
/* Parts are numbered from the most significant end, hence the
   reversal here.  */
898 return part == -1 ? -1 : n_parts - 1 - part;
901 /* Check whether we can (and want to) split a double-word
902 move in mode MODE from SRC to DST into two single-word
903 moves, moving the subword FIRST_SUBWORD first. */
906 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
908 /* Floating point registers cannot be split. */
909 if (FP_REG_P (src) || FP_REG_P (dst))
912 /* We don't need to split if operands are directly accessible. */
913 if (s_operand (src, mode) || s_operand (dst, mode))
916 /* Non-offsettable memory references cannot be split. */
917 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
918 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
921 /* Moving the first subword must not clobber a register
922 needed to move the second subword. */
923 if (register_operand (dst, mode))
925 rtx subreg = operand_subword (dst, first_subword, 0, mode);
926 if (reg_overlap_mentioned_p (subreg, src))
/* NOTE(review): sampled region -- braces, return statements and the
   error paths at the ends of the alias-table lookups are missing.
   Kept byte-identical.  */
934 /* Change optimizations to be performed, depending on the
937 LEVEL is the optimization level specified; 2 if `-O2' is
938 specified, 1 if `-O' is specified, and 0 if neither is specified.
940 SIZE is nonzero if `-Os' is specified and zero otherwise. */
943 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
945 /* ??? There are apparently still problems with -fcaller-saves. */
946 flag_caller_saves = 0;
948 /* By default, always emit DWARF-2 unwind info. This allows debugging
949 without maintaining a stack frame back-chain. */
950 flag_asynchronous_unwind_tables = 1;
954 override_options (void)
/* Table mapping -march=/-mtune= names to processor enum + capability
   flags; searched linearly below.  */
959 const char *const name; /* processor name or nickname. */
960 const enum processor_type processor;
961 const enum processor_flags flags;
963 const processor_alias_table[] =
965 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
966 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
967 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
968 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
969 | PF_LONG_DISPLACEMENT},
972 int const pta_size = ARRAY_SIZE (processor_alias_table);
974 /* Acquire a unique set number for our register saves and restores. */
975 s390_sr_alias_set = new_alias_set ();
977 /* Set up function hooks. */
978 init_machine_status = s390_init_machine_status;
980 /* Architecture mode defaults according to ABI. */
981 if (!(target_flags_explicit & MASK_ZARCH))
984 target_flags |= MASK_ZARCH;
986 target_flags &= ~MASK_ZARCH;
989 /* Determine processor architectural level. */
990 if (!s390_arch_string)
991 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
993 for (i = 0; i < pta_size; i++)
994 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
996 s390_arch = processor_alias_table[i].processor;
997 s390_arch_flags = processor_alias_table[i].flags;
1001 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1003 /* Determine processor to tune for. */
1004 if (!s390_tune_string)
1006 s390_tune = s390_arch;
1007 s390_tune_flags = s390_arch_flags;
1008 s390_tune_string = s390_arch_string;
1012 for (i = 0; i < pta_size; i++)
1013 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1015 s390_tune = processor_alias_table[i].processor;
1016 s390_tune_flags = processor_alias_table[i].flags;
1020 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1023 /* Sanity checks. */
1024 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1025 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1026 if (TARGET_64BIT && !TARGET_ZARCH)
1027 error ("64-bit ABI not supported in ESA/390 mode.");
1030 /* Map for smallest class containing reg regno. */
/* GPRs 0-15, FPRs 16-31, then the special registers (AP/CC/SP/RA-style
   slots); see the register layout in s390.h -- TODO confirm ordering.  */
1032 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1033 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1034 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1035 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1036 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1037 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1038 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1039 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1040 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1041 ADDR_REGS, NO_REGS, ADDR_REGS, ADDR_REGS
1044 /* Return attribute type of insn. */
1046 static enum attr_type
1047 s390_safe_attr_type (rtx insn)
1049 if (recog_memoized (insn) >= 0)
1050 return get_attr_type (insn);
1055 /* Return true if OP a (const_int 0) operand.
1056 OP is the current operation.
1057 MODE is the current operation mode. */
1060 const0_operand (register rtx op, enum machine_mode mode)
1062 return op == CONST0_RTX (mode);
1065 /* Return true if OP is constant.
1066 OP is the current operation.
1067 MODE is the current operation mode. */
1070 consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1072 return CONSTANT_P (op);
1075 /* Return true if the mode of operand OP matches MODE.
1076 If MODE is set to VOIDmode, set it to the mode of OP. */
1079 check_mode (register rtx op, enum machine_mode *mode)
1081 if (*mode == VOIDmode)
1082 *mode = GET_MODE (op);
1085 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
/* NOTE(review): sampled region -- function braces and many return
   statements are absent from this extract.  Kept byte-identical.  */
1091 /* Return true if OP a valid operand for the LARL instruction.
1092 OP is the current operation.
1093 MODE is the current operation mode. */
1096 larl_operand (register rtx op, enum machine_mode mode)
1098 if (! check_mode (op, &mode))
1101 /* Allow labels and local symbols. */
1102 if (GET_CODE (op) == LABEL_REF)
1104 if (GET_CODE (op) == SYMBOL_REF)
/* LARL loads addresses relative to a 2-byte-aligned PC, so symbols
   flagged as possibly 1-byte aligned must be rejected.  */
1105 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1106 && SYMBOL_REF_TLS_MODEL (op) == 0
1107 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1109 /* Everything else must have a CONST, so strip it. */
1110 if (GET_CODE (op) != CONST)
1114 /* Allow adding *even* in-range constants. */
1115 if (GET_CODE (op) == PLUS)
1117 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1118 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1120 #if HOST_BITS_PER_WIDE_INT > 32
1121 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1122 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1128 /* Labels and local symbols allowed here as well. */
1129 if (GET_CODE (op) == LABEL_REF)
1131 if (GET_CODE (op) == SYMBOL_REF)
1132 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1133 && SYMBOL_REF_TLS_MODEL (op) == 0
1134 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1136 /* Now we must have a @GOTENT offset or @PLT stub
1137 or an @INDNTPOFF TLS offset. */
1138 if (GET_CODE (op) == UNSPEC
1139 && XINT (op, 1) == UNSPEC_GOTENT)
1141 if (GET_CODE (op) == UNSPEC
1142 && XINT (op, 1) == UNSPEC_PLT)
1144 if (GET_CODE (op) == UNSPEC
1145 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1151 /* Helper routine to implement s_operand and s_imm_operand.
1152 OP is the current operation.
1153 MODE is the current operation mode.
1154 ALLOW_IMMEDIATE specifies whether immediate operands should
1155 be accepted or not. */
1158 general_s_operand (register rtx op, enum machine_mode mode,
1159 int allow_immediate)
1161 struct s390_address addr;
1163 /* Call general_operand first, so that we don't have to
1164 check for many special cases. */
1165 if (!general_operand (op, mode))
1168 /* Just like memory_operand, allow (subreg (mem ...))
1170 if (reload_completed
1171 && GET_CODE (op) == SUBREG
1172 && GET_CODE (SUBREG_REG (op)) == MEM)
1173 op = SUBREG_REG (op);
1175 switch (GET_CODE (op))
1177 /* Constants are OK as s-operand if ALLOW_IMMEDIATE
1178 is true and we are still before reload. */
1181 if (!allow_immediate || reload_completed)
1185 /* Memory operands are OK unless they already use an
1188 if (!s390_decompose_address (XEXP (op, 0), &addr))
1192 /* Do not allow literal pool references unless ALLOW_IMMEDIATE
1193 is true. This prevents compares between two literal pool
1194 entries from being accepted. */
1195 if (!allow_immediate
1196 && addr.base && REGNO (addr.base) == BASE_REGNUM)
1207 /* Return true if OP is a valid S-type operand.
1208 OP is the current operation.
1209 MODE is the current operation mode. */
1212 s_operand (register rtx op, enum machine_mode mode)
1214 return general_s_operand (op, mode, 0);
1217 /* Return true if OP is a valid S-type operand or an immediate
1218 operand that can be addressed as S-type operand by forcing
1219 it into the literal pool.
1220 OP is the current operation.
1221 MODE is the current operation mode. */
1224 s_imm_operand (register rtx op, enum machine_mode mode)
1226 return general_s_operand (op, mode, 1);
1229 /* Return true if OP a valid shift count operand.
1230 OP is the current operation.
1231 MODE is the current operation mode. */
1234 shift_count_operand (rtx op, enum machine_mode mode)
1236 HOST_WIDE_INT offset = 0;
1238 if (! check_mode (op, &mode))
1241 /* We can have an integer constant, an address register,
1242 or a sum of the two. Note that reload already checks
1243 that any register present is an address register, so
1244 we just check for any register here. */
1245 if (GET_CODE (op) == CONST_INT)
1247 offset = INTVAL (op);
1250 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1252 offset = INTVAL (XEXP (op, 1));
1255 while (op && GET_CODE (op) == SUBREG)
1256 op = SUBREG_REG (op);
1257 if (op && GET_CODE (op) != REG)
1260 /* Unfortunately we have to reject constants that are invalid
1261 for an address, or else reload will get confused. */
1262 if (!DISP_IN_RANGE (offset))
1268 /* Return true if DISP is a valid short displacement. */
1271 s390_short_displacement (rtx disp)
1273 /* No displacement is OK. */
1277 /* Integer displacement in range. */
1278 if (GET_CODE (disp) == CONST_INT)
1279 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1281 /* GOT offset is not OK, the GOT can be large. */
1282 if (GET_CODE (disp) == CONST
1283 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1284 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1287 /* All other symbolic constants are literal pool references,
1288 which are OK as the literal pool must be small. */
1289 if (GET_CODE (disp) == CONST)
1295 /* Return true if OP is a valid operand for a C constraint. */
1298 s390_extra_constraint_str (rtx op, int c, const char * str)
1300 struct s390_address addr;
1308 if (GET_CODE (op) != MEM)
1310 if (!s390_decompose_address (XEXP (op, 0), &addr))
1315 if (TARGET_LONG_DISPLACEMENT)
1317 if (!s390_short_displacement (addr.disp))
1323 if (GET_CODE (op) != MEM)
1326 if (TARGET_LONG_DISPLACEMENT)
1328 if (!s390_decompose_address (XEXP (op, 0), &addr))
1330 if (!s390_short_displacement (addr.disp))
1336 if (!TARGET_LONG_DISPLACEMENT)
1338 if (GET_CODE (op) != MEM)
1340 if (!s390_decompose_address (XEXP (op, 0), &addr))
1344 if (s390_short_displacement (addr.disp))
1349 if (!TARGET_LONG_DISPLACEMENT)
1351 if (GET_CODE (op) != MEM)
1353 /* Any invalid address here will be fixed up by reload,
1354 so accept it for the most generic constraint. */
1355 if (s390_decompose_address (XEXP (op, 0), &addr)
1356 && s390_short_displacement (addr.disp))
1361 if (TARGET_LONG_DISPLACEMENT)
1363 if (!s390_decompose_address (op, &addr))
1365 if (!s390_short_displacement (addr.disp))
1371 if (!TARGET_LONG_DISPLACEMENT)
1373 /* Any invalid address here will be fixed up by reload,
1374 so accept it for the most generic constraint. */
1375 if (s390_decompose_address (op, &addr)
1376 && s390_short_displacement (addr.disp))
1381 return shift_count_operand (op, VOIDmode);
1390 /* Return true if VALUE matches the constraint STR. */
1393 s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1397 enum machine_mode mode, part_mode;
1407 return (unsigned int)value < 256;
1410 return (unsigned int)value < 4096;
1413 return value >= -32768 && value < 32768;
1416 return (TARGET_LONG_DISPLACEMENT ?
1417 (value >= -524288 && value <= 524287)
1418 : (value >= 0 && value <= 4095));
1420 return value == 2147483647;
1423 part = str[1] - '0';
1427 case 'H': part_mode = HImode; break;
1428 case 'Q': part_mode = QImode; break;
1434 case 'H': mode = HImode; break;
1435 case 'S': mode = SImode; break;
1436 case 'D': mode = DImode; break;
1442 case '0': def = 0; break;
1443 case 'F': def = -1; break;
1447 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
1450 if (s390_single_part (GEN_INT (value), mode, part_mode, def) != part)
1462 /* Compute a (partial) cost for rtx X. Return true if the complete
1463 cost has been computed, and false if subexpressions should be
1464 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): elided excerpt -- the switch over GET_CODE (x) and most
   case labels are missing between the visible lines; consult the full
   original before editing.  */
1467 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
1472 if (GET_CODE (XEXP (x, 0)) == MINUS
1473 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1480 /* Force_const_mem does not work out of reload, because the
1481 saveable_obstack is set to reload_obstack, which does not
1482 live long enough. Because of this we cannot use force_const_mem
1483 in addsi3. This leads to problems with gen_add2_insn with a
1484 constant greater than a short. Because of that we give an
1485 addition of greater constants a cost of 3 (reload1.c 10096). */
1486 /* ??? saveable_obstack no longer exists. */
/* CONST_INT used as a PLUS operand: constants outside the signed 16-bit
   range are charged 3 insns (see comment above).  */
1487 if (outer_code == PLUS
1488 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1489 *total = COSTS_N_INSNS (3);
1510 *total = COSTS_N_INSNS (1);
/* Division/modulo-like costs (the case labels are elided): DImode-wide
   operands are far more expensive than narrower ones.  */
1514 if (GET_MODE (XEXP (x, 0)) == DImode)
1515 *total = COSTS_N_INSNS (40);
1517 *total = COSTS_N_INSNS (7);
1524 *total = COSTS_N_INSNS (33);
1532 /* Return the cost of an address rtx ADDR. */
/* NOTE(review): elided excerpt -- the failure return after the
   decompose check is missing from this view.  */
1535 s390_address_cost (rtx addr)
1537 struct s390_address ad;
1538 if (!s390_decompose_address (addr, &ad))
/* An address with an index register is slightly more expensive than a
   base(+disp)-only address.  */
1541 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1544 /* Return true if OP is a valid operand for the BRAS instruction.
1545 OP is the current operation.
1546 MODE is the current operation mode. */
/* NOTE(review): elided excerpt -- returns and the CONST case head are
   missing between the visible lines.  */
1549 bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1551 register enum rtx_code code = GET_CODE (op);
1553 /* Allow SYMBOL_REFs. */
1554 if (code == SYMBOL_REF)
1557 /* Allow @PLT stubs. */
/* (CONST ... (UNSPEC ... UNSPEC_PLT)) -- a @PLT-annotated symbol.  */
1559 && GET_CODE (XEXP (op, 0)) == UNSPEC
1560 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1565 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1566 otherwise return 0. */
/* NOTE(review): elided excerpt -- the early "return 0" for the
   non-SYMBOL_REF case is not visible here.  */
1569 tls_symbolic_operand (register rtx op)
1571 if (GET_CODE (op) != SYMBOL_REF)
1573 return SYMBOL_REF_TLS_MODEL (op);
1576 /* Return true if OP is a load multiple operation. It is known to be a
1577 PARALLEL and the first section will be tested.
1578 OP is the current operation.
1579 MODE is the current operation mode. */
/* NOTE(review): elided excerpt -- several declarations, returns and
   braces are missing between the visible lines.  */
1582 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1584 enum machine_mode elt_mode;
1585 int count = XVECLEN (op, 0);
1586 unsigned int dest_regno;
1591 /* Perform a quick check so we don't blow up below. */
/* Element 0 must be (set (reg) (mem ...)).  */
1593 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1594 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1595 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1598 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1599 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1600 elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));
1602 /* Check, is base, or base + displacement. */
1604 if (GET_CODE (src_addr) == REG)
1606 else if (GET_CODE (src_addr) == PLUS
1607 && GET_CODE (XEXP (src_addr, 0)) == REG
1608 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1610 off = INTVAL (XEXP (src_addr, 1));
1611 src_addr = XEXP (src_addr, 0);
/* Remaining elements must load consecutive registers from consecutive
   memory locations: reg dest_regno+i from src_addr + off + i*size.  */
1616 for (i = 1; i < count; i++)
1618 rtx elt = XVECEXP (op, 0, i);
1620 if (GET_CODE (elt) != SET
1621 || GET_CODE (SET_DEST (elt)) != REG
1622 || GET_MODE (SET_DEST (elt)) != elt_mode
1623 || REGNO (SET_DEST (elt)) != dest_regno + i
1624 || GET_CODE (SET_SRC (elt)) != MEM
1625 || GET_MODE (SET_SRC (elt)) != elt_mode
1626 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1627 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1628 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1629 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1630 != off + i * GET_MODE_SIZE (elt_mode))
1637 /* Return true if OP is a store multiple operation. It is known to be a
1638 PARALLEL and the first section will be tested.
1639 OP is the current operation.
1640 MODE is the current operation mode. */
/* NOTE(review): elided excerpt (mirror image of load_multiple_operation
   above) -- declarations, returns and braces are missing between the
   visible lines.  */
1643 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1645 enum machine_mode elt_mode;
1646 int count = XVECLEN (op, 0);
1647 unsigned int src_regno;
1651 /* Perform a quick check so we don't blow up below. */
/* Element 0 must be (set (mem ...) (reg)).  */
1653 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1654 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1655 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1658 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1659 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1660 elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));
1662 /* Check, is base, or base + displacement. */
1664 if (GET_CODE (dest_addr) == REG)
1666 else if (GET_CODE (dest_addr) == PLUS
1667 && GET_CODE (XEXP (dest_addr, 0)) == REG
1668 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1670 off = INTVAL (XEXP (dest_addr, 1));
1671 dest_addr = XEXP (dest_addr, 0);
/* Remaining elements must store consecutive registers to consecutive
   memory locations: reg src_regno+i to dest_addr + off + i*size.  */
1676 for (i = 1; i < count; i++)
1678 rtx elt = XVECEXP (op, 0, i);
1680 if (GET_CODE (elt) != SET
1681 || GET_CODE (SET_SRC (elt)) != REG
1682 || GET_MODE (SET_SRC (elt)) != elt_mode
1683 || REGNO (SET_SRC (elt)) != src_regno + i
1684 || GET_CODE (SET_DEST (elt)) != MEM
1685 || GET_MODE (SET_DEST (elt)) != elt_mode
1686 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1687 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1688 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1689 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1690 != off + i * GET_MODE_SIZE (elt_mode))
1697 /* Return true if OP contains a symbol reference */
/* Recursive walk over the rtx: true as soon as a SYMBOL_REF or
   LABEL_REF is found anywhere inside OP.
   NOTE(review): elided excerpt -- loop-variable declarations and
   returns are missing between the visible lines.  */
1700 symbolic_reference_mentioned_p (rtx op)
1702 register const char *fmt;
1705 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1708 fmt = GET_RTX_FORMAT (GET_CODE (op));
1709 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
/* 'E' operands are rtx vectors; recurse into each element.  */
1715 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1716 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1720 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1727 /* Return true if OP contains a reference to a thread-local symbol. */
/* Same recursive walk as symbolic_reference_mentioned_p, but only
   SYMBOL_REFs with a nonzero TLS model count.
   NOTE(review): elided excerpt -- declarations and returns are missing
   between the visible lines.  */
1730 tls_symbolic_reference_mentioned_p (rtx op)
1732 register const char *fmt;
1735 if (GET_CODE (op) == SYMBOL_REF)
1736 return tls_symbolic_operand (op);
1738 fmt = GET_RTX_FORMAT (GET_CODE (op));
1739 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1745 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1746 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1750 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1758 /* Return true if OP is a legitimate general operand when
1759 generating PIC code. It is given that flag_pic is on
1760 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* NOTE(review): elided excerpt -- the return statements are missing
   from this view.  */
1763 legitimate_pic_operand_p (register rtx op)
1765 /* Accept all non-symbolic constants. */
1766 if (!SYMBOLIC_CONST (op))
1769 /* Reject everything else; must be handled
1770 via emit_symbolic_move. */
1774 /* Returns true if the constant value OP is a legitimate general operand.
1775 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* NOTE(review): elided excerpt -- the return statements and the
   flag_pic test are missing from this view.  */
1778 legitimate_constant_p (register rtx op)
1780 /* Accept all non-symbolic constants. */
1781 if (!SYMBOLIC_CONST (op))
1784 /* Accept immediate LARL operands. */
1785 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
1788 /* Thread-local symbols are never legal constants. This is
1789 so that emit_call knows that computing such addresses
1790 might require a function call. */
1791 if (TLS_SYMBOLIC_CONST (op))
1794 /* In the PIC case, symbolic constants must *not* be
1795 forced into the literal pool. We accept them here,
1796 so that they will be handled by emit_symbolic_move. */
1800 /* All remaining non-PIC symbolic constants are
1801 forced into the literal pool. */
1805 /* Determine if it's legal to put X into the constant pool. This
1806 is not possible if X contains the address of a symbol that is
1807 not constant (TLS) or not known at final link time (PIC). */
/* NOTE(review): elided excerpt -- case labels, returns and the default
   branch are missing between the visible lines.  */
1810 s390_cannot_force_const_mem (rtx x)
1812 switch (GET_CODE (x))
1816 /* Accept all non-symbolic constants. */
1820 /* Labels are OK iff we are non-PIC. */
/* Returns nonzero ("cannot force") when PIC is enabled.  */
1821 return flag_pic != 0;
1824 /* 'Naked' TLS symbol references are never OK,
1825 non-TLS symbols are OK iff we are non-PIC. */
1826 if (tls_symbolic_operand (x))
1829 return flag_pic != 0;
/* Unary wrapper (presumably CONST): recurse into the operand.  */
1832 return s390_cannot_force_const_mem (XEXP (x, 0));
/* Binary rtx (presumably PLUS/MINUS): poolable only if both halves are.  */
1835 return s390_cannot_force_const_mem (XEXP (x, 0))
1836 || s390_cannot_force_const_mem (XEXP (x, 1));
1839 switch (XINT (x, 1))
1841 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1842 case UNSPEC_LTREL_OFFSET:
1850 case UNSPEC_GOTNTPOFF:
1851 case UNSPEC_INDNTPOFF:
1864 /* Returns true if the constant value OP is a legitimate general
1865 operand during and after reload. The difference to
1866 legitimate_constant_p is that this function will not accept
1867 a constant that would need to be forced to the literal pool
1868 before it can be used as operand. */
/* NOTE(review): elided excerpt -- the "return 1"/"return 0" lines are
   missing after each accept/reject comment.  */
1871 legitimate_reload_constant_p (register rtx op)
1873 /* Accept la(y) operands. */
1874 if (GET_CODE (op) == CONST_INT
1875 && DISP_IN_RANGE (INTVAL (op)))
1878 /* Accept l(g)hi operands. */
1879 if (GET_CODE (op) == CONST_INT
1880 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
1883 /* Accept lliXX operands. */
/* llihh/llihl/llilh/llill: a single nonzero 16-bit part of a DImode
   constant (preceding TARGET_ZARCH-style condition is elided).  */
1885 && s390_single_part (op, DImode, HImode, 0) >= 0)
1888 /* Accept larl operands. */
1889 if (TARGET_CPU_ZARCH
1890 && larl_operand (op, VOIDmode))
1893 /* Everything else cannot be handled without reload. */
1897 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1898 return the class of reg to actually use. */
/* NOTE(review): elided excerpt -- case labels, returns and the fallback
   "return class" path are missing between the visible lines.  */
1901 s390_preferred_reload_class (rtx op, enum reg_class class)
1903 /* This can happen if a floating point constant is being
1904 reloaded into an integer register. Leave well alone. */
1905 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1906 && class != FP_REGS)
1909 switch (GET_CODE (op))
1911 /* Constants we cannot reload must be forced into the
1916 if (legitimate_reload_constant_p (op))
1921 /* If a symbolic constant or a PLUS is reloaded,
1922 it is most likely being used as an address, so
1923 prefer ADDR_REGS. If 'class' is not a superset
1924 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1929 if (reg_class_subset_p (ADDR_REGS, class))
1941 /* Return the register class of a scratch register needed to
1942 load IN into a register of class CLASS in MODE.
1944 We need a temporary when loading a PLUS expression which
1945 is not a legitimate operand of the LOAD ADDRESS instruction. */
/* NOTE(review): elided excerpt -- the two return statements (scratch
   class vs. NO_REGS) are missing from this view.  */
1948 s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
1949 enum machine_mode mode, rtx in)
1951 if (s390_plus_operand (in, mode))
1957 /* Return the register class of a scratch register needed to
1958 store a register of class CLASS in MODE into OUT:
1960 We need a temporary when storing a double-word to a
1961 non-offsettable memory address. */
/* NOTE(review): elided excerpt -- the return statements are missing
   from this view.  */
1964 s390_secondary_output_reload_class (enum reg_class class,
1965 enum machine_mode mode, rtx out)
/* Double-word modes: TImode on 64-bit, DImode/DFmode on 31-bit.  */
1967 if ((TARGET_64BIT ? mode == TImode
1968 : (mode == DImode || mode == DFmode))
1969 && reg_classes_intersect_p (GENERAL_REGS, class)
1970 && GET_CODE (out) == MEM
1971 && !offsettable_memref_p (out)
1972 && !s_operand (out, VOIDmode))
1978 /* Return true if OP is a PLUS that is not a legitimate
1979 operand for the LA instruction.
1980 OP is the current operation.
1981 MODE is the current operation mode. */
/* NOTE(review): elided excerpt -- the return statements are missing
   from this view.  */
1984 s390_plus_operand (register rtx op, enum machine_mode mode)
1986 if (!check_mode (op, &mode) || mode != Pmode)
1989 if (GET_CODE (op) != PLUS)
1992 if (legitimate_la_operand_p (op))
1998 /* Generate code to load SRC, which is PLUS that is not a
1999 legitimate operand for the LA instruction, into TARGET.
2000 SCRATCH may be used as scratch register. */
/* NOTE(review): elided excerpt -- declarations (sum1/sum2), aborts,
   assignments of scratch into sum1/sum2, and braces are missing
   between the visible lines.  */
2003 s390_expand_plus_operand (register rtx target, register rtx src,
2004 register rtx scratch)
2007 struct s390_address ad;
2009 /* src must be a PLUS; get its two operands. */
2010 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2013 /* Check if any of the two operands is already scheduled
2014 for replacement by reload. This can happen e.g. when
2015 float registers occur in an address. */
2016 sum1 = find_replacement (&XEXP (src, 0));
2017 sum2 = find_replacement (&XEXP (src, 1));
2018 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2020 /* If the address is already strictly valid, there's nothing to do. */
2021 if (!s390_decompose_address (src, &ad)
2022 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2023 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2025 /* Otherwise, one of the operands cannot be an address register;
2026 we reload its value into the scratch register. */
/* true_regnum outside 1..15 means "not a usable address register"
   (GPR 0 cannot serve as a base/index).  */
2027 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2029 emit_move_insn (scratch, sum1);
2032 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2034 emit_move_insn (scratch, sum2);
2038 /* According to the way these invalid addresses are generated
2039 in reload.c, it should never happen (at least on s390) that
2040 *neither* of the PLUS components, after find_replacements
2041 was applied, is an address register. */
2042 if (sum1 == scratch && sum2 == scratch)
2048 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2051 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2052 is only ever performed on addresses, so we can mark the
2053 sum as legitimate for LA in any case. */
2054 s390_load_address (target, src);
2058 /* Decompose a RTL expression ADDR for a memory address into
2059 its components, returned in OUT.
2061 Returns 0 if ADDR is not a valid memory address, nonzero
2062 otherwise. If OUT is NULL, don't return the components,
2063 but check for validity only.
2065 Note: Only addresses in canonical form are recognized.
2066 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2067 canonical form so that they will be recognized. */
/* NOTE(review): heavily elided excerpt -- many assignments, returns,
   braces and whole branches (e.g. the "base = addr" case, several
   UNSPEC cases, the orig_disp handling, and the out->base/out->indx
   stores) are missing between the visible lines.  Do not edit without
   the full original function.  */
2070 s390_decompose_address (register rtx addr, struct s390_address *out)
2072 HOST_WIDE_INT offset = 0;
2073 rtx base = NULL_RTX;
2074 rtx indx = NULL_RTX;
2075 rtx disp = NULL_RTX;
2077 int pointer = FALSE;
2078 int base_ptr = FALSE;
2079 int indx_ptr = FALSE;
2081 /* Decompose address into base + index + displacement. */
2083 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2086 else if (GET_CODE (addr) == PLUS)
2088 rtx op0 = XEXP (addr, 0);
2089 rtx op1 = XEXP (addr, 1);
2090 enum rtx_code code0 = GET_CODE (op0);
2091 enum rtx_code code1 = GET_CODE (op1);
2093 if (code0 == REG || code0 == UNSPEC)
2095 if (code1 == REG || code1 == UNSPEC)
2097 indx = op0; /* index + base */
2103 base = op0; /* base + displacement */
2108 else if (code0 == PLUS)
2110 indx = XEXP (op0, 0); /* index + base + disp */
2111 base = XEXP (op0, 1);
2122 disp = addr; /* displacement */
2124 /* Extract integer part of displacement. */
2128 if (GET_CODE (disp) == CONST_INT)
2130 offset = INTVAL (disp);
/* (const (plus sym const_int)): split the integer part off.  */
2133 else if (GET_CODE (disp) == CONST
2134 && GET_CODE (XEXP (disp, 0)) == PLUS
2135 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2137 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2138 disp = XEXP (XEXP (disp, 0), 0);
2142 /* Strip off CONST here to avoid special case tests later. */
2143 if (disp && GET_CODE (disp) == CONST)
2144 disp = XEXP (disp, 0);
2146 /* We can convert literal pool addresses to
2147 displacements by basing them off the base register. */
2148 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2150 /* Either base or index must be free to hold the base register. */
2152 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2154 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2158 /* Mark up the displacement. */
2159 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2160 UNSPEC_LTREL_OFFSET);
2163 /* Validate base register. */
2166 if (GET_CODE (base) == UNSPEC)
2167 switch (XINT (base, 1))
/* An LTREF-style UNSPEC in the base position is rewritten into a
   BASE_REGNUM base plus an UNSPEC_LTREL_OFFSET displacement.  */
2171 disp = gen_rtx_UNSPEC (Pmode,
2172 gen_rtvec (1, XVECEXP (base, 0, 0)),
2173 UNSPEC_LTREL_OFFSET);
2177 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2180 case UNSPEC_LTREL_BASE:
2181 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2188 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* Certain hard registers are known to hold pointers.  */
2191 if (REGNO (base) == BASE_REGNUM
2192 || REGNO (base) == STACK_POINTER_REGNUM
2193 || REGNO (base) == FRAME_POINTER_REGNUM
2194 || ((reload_completed || reload_in_progress)
2195 && frame_pointer_needed
2196 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2197 || REGNO (base) == ARG_POINTER_REGNUM
2199 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2200 pointer = base_ptr = TRUE;
2203 /* Validate index register. */
2206 if (GET_CODE (indx) == UNSPEC)
2207 switch (XINT (indx, 1))
2211 disp = gen_rtx_UNSPEC (Pmode,
2212 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2213 UNSPEC_LTREL_OFFSET);
2217 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2220 case UNSPEC_LTREL_BASE:
2221 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2228 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2231 if (REGNO (indx) == BASE_REGNUM
2232 || REGNO (indx) == STACK_POINTER_REGNUM
2233 || REGNO (indx) == FRAME_POINTER_REGNUM
2234 || ((reload_completed || reload_in_progress)
2235 && frame_pointer_needed
2236 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2237 || REGNO (indx) == ARG_POINTER_REGNUM
2239 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2240 pointer = indx_ptr = TRUE;
2243 /* Prefer to use pointer as base, not index. */
2244 if (base && indx && !base_ptr
2245 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2252 /* Validate displacement. */
2255 /* If the argument pointer or the return address pointer are involved,
2256 the displacement will change later anyway as the virtual registers get
2257 eliminated. This could make a valid displacement invalid, but it is
2258 more likely to make an invalid displacement valid, because we sometimes
2259 access the register save area via negative offsets to one of those
2261 Thus we don't check the displacement for validity here. If after
2262 elimination the displacement turns out to be invalid after all,
2263 this is fixed up by reload in any case. */
2264 if (base != arg_pointer_rtx
2265 && indx != arg_pointer_rtx
2266 && base != return_address_pointer_rtx
2267 && indx != return_address_pointer_rtx)
2268 if (!DISP_IN_RANGE (offset))
2273 /* All the special cases are pointers. */
2276 /* In the small-PIC case, the linker converts @GOT
2277 and @GOTNTPOFF offsets to possible displacements. */
2278 if (GET_CODE (disp) == UNSPEC
2279 && (XINT (disp, 1) == UNSPEC_GOT
2280 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2287 /* Accept chunkified literal pool symbol references. */
2288 else if (GET_CODE (disp) == MINUS
2289 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2290 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
2295 /* Accept literal pool references. */
2296 else if (GET_CODE (disp) == UNSPEC
2297 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2299 orig_disp = gen_rtx_CONST (Pmode, disp);
2302 /* If we have an offset, make sure it does not
2303 exceed the size of the constant pool entry. */
2304 rtx sym = XVECEXP (disp, 0, 0);
2305 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2308 orig_disp = plus_constant (orig_disp, offset);
2323 out->disp = orig_disp;
2324 out->pointer = pointer;
2330 /* Return nonzero if ADDR is a valid memory address.
2331 STRICT specifies whether strict register checking applies. */
/* NOTE(review): elided excerpt -- the if (strict) / else split and the
   return statements are missing between the visible lines.  */
2334 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2335 register rtx addr, int strict)
2337 struct s390_address ad;
2338 if (!s390_decompose_address (addr, &ad))
/* Strict checks: only hard registers valid as base/index.  */
2343 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2345 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
/* Non-strict checks: pseudos allowed as well.  */
2350 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2352 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2359 /* Return 1 if OP is a valid operand for the LA instruction.
2360 In 31-bit, we need to prove that the result is used as an
2361 address, as LA performs only a 31-bit addition. */
/* NOTE(review): elided excerpt -- the return statements are missing
   from this view.  */
2364 legitimate_la_operand_p (register rtx op)
2366 struct s390_address addr;
2367 if (!s390_decompose_address (op, &addr))
2370 if (TARGET_64BIT || addr.pointer)
2376 /* Return 1 if OP is a valid operand for the LA instruction,
2377 and we prefer to use LA over addition to compute it. */
/* NOTE(review): elided excerpt -- the return statements are missing
   from this view.  */
2380 preferred_la_operand_p (register rtx op)
2382 struct s390_address addr;
2383 if (!s390_decompose_address (op, &addr))
2386 if (!TARGET_64BIT && !addr.pointer)
/* Prefer LA when either component is known to be a pointer register.  */
2392 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2393 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2399 /* Emit a forced load-address operation to load SRC into DST.
2400 This will use the LOAD ADDRESS instruction even in situations
2401 where legitimate_la_operand_p (SRC) returns false. */
/* NOTE(review): elided excerpt -- the TARGET_64BIT condition selecting
   between the two branches is missing from this view.  */
2404 s390_load_address (rtx dst, rtx src)
2407 emit_move_insn (dst, src);
/* 31-bit: use the force_la_31 pattern to guarantee LA semantics.  */
2409 emit_insn (gen_force_la_31 (dst, src));
2412 /* Return a legitimate reference for ORIG (an address) using the
2413 register REG. If REG is 0, a new pseudo is generated.
2415 There are two types of references that must be handled:
2417 1. Global data references must load the address from the GOT, via
2418 the PIC reg. An insn is emitted to do this load, and the reg is
2421 2. Static data references, constant pool addresses, and code labels
2422 compute the address as an offset from the GOT, whose base is in
2423 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2424 differentiate them from global data objects. The returned
2425 address is the PIC reg + an unspec constant.
2427 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2428 reg also appears in the address. */
/* NOTE(review): heavily elided excerpt -- the declarations of new/addr,
   many assignments (new = ...; returns of new), case labels, aborts and
   braces are missing between the visible lines.  Do not edit without
   the full original function.  */
2431 legitimize_pic_address (rtx orig, rtx reg)
2437 if (GET_CODE (addr) == LABEL_REF
2438 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2440 /* This is a local symbol. */
2441 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2443 /* Access local symbols PC-relative via LARL.
2444 This is the same as in the non-PIC case, so it is
2445 handled automatically ... */
2449 /* Access local symbols relative to the GOT. */
2451 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2453 if (reload_in_progress || reload_completed)
2454 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* @GOTOFF: offset of the symbol from the GOT base, taken from the
   literal pool and added to the PIC register.  */
2456 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2457 addr = gen_rtx_CONST (Pmode, addr);
2458 addr = force_const_mem (Pmode, addr);
2459 emit_move_insn (temp, addr);
2461 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2464 emit_move_insn (reg, new);
2469 else if (GET_CODE (addr) == SYMBOL_REF)
2472 reg = gen_reg_rtx (Pmode);
2476 /* Assume GOT offset < 4k. This is handled the same way
2477 in both 31- and 64-bit code (@GOT). */
2479 if (reload_in_progress || reload_completed)
2480 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2482 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2483 new = gen_rtx_CONST (Pmode, new);
2484 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2485 new = gen_rtx_MEM (Pmode, new);
2486 RTX_UNCHANGING_P (new) = 1;
2487 emit_move_insn (reg, new);
2490 else if (TARGET_CPU_ZARCH)
2492 /* If the GOT offset might be >= 4k, we determine the position
2493 of the GOT entry via a PC-relative LARL (@GOTENT). */
2495 rtx temp = gen_reg_rtx (Pmode);
2497 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2498 new = gen_rtx_CONST (Pmode, new);
2499 emit_move_insn (temp, new);
2501 new = gen_rtx_MEM (Pmode, temp);
2502 RTX_UNCHANGING_P (new) = 1;
2503 emit_move_insn (reg, new);
2508 /* If the GOT offset might be >= 4k, we have to load it
2509 from the literal pool (@GOT). */
2511 rtx temp = gen_reg_rtx (Pmode);
2513 if (reload_in_progress || reload_completed)
2514 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2516 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2517 addr = gen_rtx_CONST (Pmode, addr);
2518 addr = force_const_mem (Pmode, addr);
2519 emit_move_insn (temp, addr);
2521 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2522 new = gen_rtx_MEM (Pmode, new);
2523 RTX_UNCHANGING_P (new) = 1;
2524 emit_move_insn (reg, new);
2530 if (GET_CODE (addr) == CONST)
2532 addr = XEXP (addr, 0);
2533 if (GET_CODE (addr) == UNSPEC)
2535 if (XVECLEN (addr, 0) != 1)
2537 switch (XINT (addr, 1))
2539 /* If someone moved a GOT-relative UNSPEC
2540 out of the literal pool, force them back in. */
2543 new = force_const_mem (Pmode, orig);
2546 /* @GOT is OK as is if small. */
2549 new = force_const_mem (Pmode, orig);
2552 /* @GOTENT is OK as is. */
2556 /* @PLT is OK as is on 64-bit, must be converted to
2557 GOT-relative @PLTOFF on 31-bit. */
2559 if (!TARGET_CPU_ZARCH)
2561 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2563 if (reload_in_progress || reload_completed)
2564 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2566 addr = XVECEXP (addr, 0, 0);
2567 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2569 addr = gen_rtx_CONST (Pmode, addr);
2570 addr = force_const_mem (Pmode, addr);
2571 emit_move_insn (temp, addr);
2573 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2576 emit_move_insn (reg, new);
2582 /* Everything else cannot happen. */
2587 else if (GET_CODE (addr) != PLUS)
2590 if (GET_CODE (addr) == PLUS)
2592 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2593 /* Check first to see if this is a constant offset
2594 from a local symbol reference. */
2595 if ((GET_CODE (op0) == LABEL_REF
2596 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2597 && GET_CODE (op1) == CONST_INT)
2599 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2601 if (INTVAL (op1) & 1)
2603 /* LARL can't handle odd offsets, so emit a
2604 pair of LARL and LA. */
2605 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2607 if (!DISP_IN_RANGE (INTVAL (op1)))
/* Offset too large for LA's displacement: fold the even part into
   the LARL target and leave the remainder for LA.  */
2609 int even = INTVAL (op1) - 1;
2610 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2611 op0 = gen_rtx_CONST (Pmode, op0);
2615 emit_move_insn (temp, op0);
2616 new = gen_rtx_PLUS (Pmode, temp, op1);
2620 emit_move_insn (reg, new);
2626 /* If the offset is even, we can just use LARL.
2627 This will happen automatically. */
2632 /* Access local symbols relative to the GOT. */
2634 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2636 if (reload_in_progress || reload_completed)
2637 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2639 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2641 addr = gen_rtx_PLUS (Pmode, addr, op1);
2642 addr = gen_rtx_CONST (Pmode, addr);
2643 addr = force_const_mem (Pmode, addr);
2644 emit_move_insn (temp, addr);
2646 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2649 emit_move_insn (reg, new);
2655 /* Now, check whether it is a GOT relative symbol plus offset
2656 that was pulled out of the literal pool. Force it back in. */
2658 else if (GET_CODE (op0) == UNSPEC
2659 && GET_CODE (op1) == CONST_INT
2660 && XINT (op0, 1) == UNSPEC_GOTOFF)
2662 if (XVECLEN (op0, 0) != 1)
2665 new = force_const_mem (Pmode, orig)
2668 /* Otherwise, compute the sum. */
2671 base = legitimize_pic_address (XEXP (addr, 0), reg);
2672 new = legitimize_pic_address (XEXP (addr, 1),
2673 base == reg ? NULL_RTX : reg);
2674 if (GET_CODE (new) == CONST_INT)
2675 new = plus_constant (base, INTVAL (new));
2678 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2680 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2681 new = XEXP (new, 1);
2683 new = gen_rtx_PLUS (Pmode, base, new);
2686 if (GET_CODE (new) == CONST)
2687 new = XEXP (new, 0);
2688 new = force_operand (new, 0);
2695 /* Load the thread pointer into a register. */
/* Wraps the thread pointer in an UNSPEC_TP, forces it into a fresh
   pseudo, and marks that pseudo as a pointer.  NOTE(review): the rtx
   declaration and the final return are elided from this view.  */
2698 get_thread_pointer (void)
2702 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2703 tp = force_reg (Pmode, tp);
2704 mark_reg_pointer (tp, BITS_PER_WORD);
2709 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
2710 in s390_tls_symbol which always refers to __tls_get_offset.
2711 The returned offset is written to RESULT_REG and an USE rtx is
2712 generated for TLS_CALL. */
/* Lazily-created SYMBOL_REF for __tls_get_offset; GTY so the GC
   roots it across passes.  */
2714 static GTY(()) rtx s390_tls_symbol;
/* NOTE(review): elided excerpt -- the insn declaration and part of the
   call setup are missing from this view.  */
2717 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
2724 if (!s390_tls_symbol)
2725 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2727 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
2728 gen_rtx_REG (Pmode, RETURN_REGNUM));
/* Record that the call reads RESULT_REG and behaves like a const/pure
   call for CSE purposes.  */
2730 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
2731 CONST_OR_PURE_CALL_P (insn) = 1;
2734 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2735 this (thread-local) address. REG may be used as temporary. */
/* Dispatches on the symbol's TLS model (global-dynamic, local-dynamic,
   initial-exec, local-exec) and on already-wrapped UNSPEC forms.
   NOTE(review): heavily elided excerpt -- start_sequence/end_sequence
   pairs, case labels, braces, returns of `new` and several branch
   conditions are missing between the visible lines.  Do not edit
   without the full original function.  */
2738 legitimize_tls_address (rtx addr, rtx reg)
2740 rtx new, tls_call, temp, base, r2, insn;
2742 if (GET_CODE (addr) == SYMBOL_REF)
2743 switch (tls_symbolic_operand (addr))
2745 case TLS_MODEL_GLOBAL_DYNAMIC:
/* GD: call __tls_get_offset with a @TLSGD literal in r2, then add
   the thread pointer.  */
2747 r2 = gen_rtx_REG (Pmode, 2);
2748 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
2749 new = gen_rtx_CONST (Pmode, tls_call);
2750 new = force_const_mem (Pmode, new);
2751 emit_move_insn (r2, new);
2752 s390_emit_tls_call_insn (r2, tls_call);
2753 insn = get_insns ();
2756 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2757 temp = gen_reg_rtx (Pmode);
2758 emit_libcall_block (insn, temp, r2, new);
2760 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2763 s390_load_address (reg, new);
2768 case TLS_MODEL_LOCAL_DYNAMIC:
/* LD: one call for the module base, then a @DTPOFF literal per symbol.  */
2770 r2 = gen_rtx_REG (Pmode, 2);
2771 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
2772 new = gen_rtx_CONST (Pmode, tls_call);
2773 new = force_const_mem (Pmode, new);
2774 emit_move_insn (r2, new);
2775 s390_emit_tls_call_insn (r2, tls_call);
2776 insn = get_insns ();
2779 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
2780 temp = gen_reg_rtx (Pmode);
2781 emit_libcall_block (insn, temp, r2, new);
2783 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2784 base = gen_reg_rtx (Pmode);
2785 s390_load_address (base, new);
2787 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
2788 new = gen_rtx_CONST (Pmode, new);
2789 new = force_const_mem (Pmode, new);
2790 temp = gen_reg_rtx (Pmode);
2791 emit_move_insn (temp, new);
2793 new = gen_rtx_PLUS (Pmode, base, temp);
2796 s390_load_address (reg, new);
2801 case TLS_MODEL_INITIAL_EXEC:
2804 /* Assume GOT offset < 4k. This is handled the same way
2805 in both 31- and 64-bit code. */
2807 if (reload_in_progress || reload_completed)
2808 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2810 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2811 new = gen_rtx_CONST (Pmode, new);
2812 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2813 new = gen_rtx_MEM (Pmode, new);
2814 RTX_UNCHANGING_P (new) = 1;
2815 temp = gen_reg_rtx (Pmode);
2816 emit_move_insn (temp, new);
2818 else if (TARGET_CPU_ZARCH)
2820 /* If the GOT offset might be >= 4k, we determine the position
2821 of the GOT entry via a PC-relative LARL. */
2823 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2824 new = gen_rtx_CONST (Pmode, new);
2825 temp = gen_reg_rtx (Pmode);
2826 emit_move_insn (temp, new);
2828 new = gen_rtx_MEM (Pmode, temp);
2829 RTX_UNCHANGING_P (new) = 1;
2830 temp = gen_reg_rtx (Pmode);
2831 emit_move_insn (temp, new);
2835 /* If the GOT offset might be >= 4k, we have to load it
2836 from the literal pool. */
2838 if (reload_in_progress || reload_completed)
2839 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2841 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2842 new = gen_rtx_CONST (Pmode, new);
2843 new = force_const_mem (Pmode, new);
2844 temp = gen_reg_rtx (Pmode);
2845 emit_move_insn (temp, new);
2847 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2848 new = gen_rtx_MEM (Pmode, new);
2849 RTX_UNCHANGING_P (new) = 1;
2851 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2852 temp = gen_reg_rtx (Pmode);
2853 emit_insn (gen_rtx_SET (Pmode, temp, new));
2857 /* In position-dependent code, load the absolute address of
2858 the GOT entry from the literal pool. */
2860 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2861 new = gen_rtx_CONST (Pmode, new);
2862 new = force_const_mem (Pmode, new);
2863 temp = gen_reg_rtx (Pmode);
2864 emit_move_insn (temp, new);
2867 new = gen_rtx_MEM (Pmode, new);
2868 RTX_UNCHANGING_P (new) = 1;
2870 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2871 temp = gen_reg_rtx (Pmode);
2872 emit_insn (gen_rtx_SET (Pmode, temp, new));
2875 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2878 s390_load_address (reg, new);
2883 case TLS_MODEL_LOCAL_EXEC:
/* LE: @NTPOFF literal added directly to the thread pointer.  */
2884 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2885 new = gen_rtx_CONST (Pmode, new);
2886 new = force_const_mem (Pmode, new);
2887 temp = gen_reg_rtx (Pmode);
2888 emit_move_insn (temp, new);
2890 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2893 s390_load_address (reg, new);
2902 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2904 switch (XINT (XEXP (addr, 0), 1))
2906 case UNSPEC_INDNTPOFF:
2907 if (TARGET_CPU_ZARCH)
/* (const (plus sym const_int)): legitimize the symbol, then re-add
   the constant offset.  */
2918 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
2919 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
2921 new = XEXP (XEXP (addr, 0), 0);
2922 if (GET_CODE (new) != SYMBOL_REF)
2923 new = gen_rtx_CONST (Pmode, new);
2925 new = legitimize_tls_address (new, reg);
2926 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
2927 new = force_operand (new, 0);
2931 abort (); /* for now ... */
2936 /* Emit insns to move operands[1] into operands[0].  */
2939 emit_symbolic_move (rtx *operands)
/* Scratch register: reuse the destination when no new pseudos may be
   created (during/after reload), otherwise allocate a fresh pseudo.  */
2941 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
/* A store to memory cannot take a symbolic constant directly; force
   the source into a register first.  */
2943 if (GET_CODE (operands[0]) == MEM)
2944 operands[1] = force_reg (Pmode, operands[1]);
/* TLS symbols need the TLS-specific legitimization sequence.  */
2945 else if (TLS_SYMBOLIC_CONST (operands[1]))
2946 operands[1] = legitimize_tls_address (operands[1], temp);
/* Otherwise legitimize as a PIC reference.  NOTE(review): the guard
   for this arm is on an elided line — presumably flag_pic; confirm
   against the full source.  */
2948 operands[1] = legitimize_pic_address (operands[1], temp);
2951 /* Try machine-dependent ways of modifying an illegitimate address X
2952 to be legitimate. If we find one, return the new, valid address.
2954 OLDX is the address as it was before break_out_memory_refs was called.
2955 In some cases it is useful to look at this to decide what needs to be done.
2957 MODE is the mode of the operand pointed to by X.
2959 When -fpic is used, special handling is needed for symbolic references.
2960 See comments by legitimize_pic_address for details. */
2963 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
2964 enum machine_mode mode ATTRIBUTE_UNUSED)
2966 rtx constant_term = const0_rtx;
/* TLS references always go through the TLS legitimizer; if the result
   is already a valid address we are done.  */
2968 if (TLS_SYMBOLIC_CONST (x))
2970 x = legitimize_tls_address (x, 0);
2972 if (legitimate_address_p (mode, x, FALSE))
/* Symbolic constants (alone or as an addend) are routed through the
   PIC legitimizer.  */
2977 if (SYMBOLIC_CONST (x)
2978 || (GET_CODE (x) == PLUS
2979 && (SYMBOLIC_CONST (XEXP (x, 0))
2980 || SYMBOLIC_CONST (XEXP (x, 1)))))
2981 x = legitimize_pic_address (x, 0);
2983 if (legitimate_address_p (mode, x, FALSE))
/* Strip off any constant displacement so the base part can be
   handled separately; it is re-attached at the end.  */
2987 x = eliminate_constant_term (x, &constant_term);
2989 /* Optimize loading of large displacements by splitting them
2990 into the multiple of 4K and the rest; this allows the
2991 former to be CSE'd if possible.
2993 Don't do this if the displacement is added to a register
2994 pointing into the stack frame, as the offsets will
2995 change later anyway. */
2997 if (GET_CODE (constant_term) == CONST_INT
2998 && !TARGET_LONG_DISPLACEMENT
2999 && !DISP_IN_RANGE (INTVAL (constant_term))
3000 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower = low 12 bits (fits a short displacement); upper = the rest,
   which is loaded into a register and added to the base.  */
3002 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3003 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3005 rtx temp = gen_reg_rtx (Pmode);
3006 rtx val = force_operand (GEN_INT (upper), temp);
3008 emit_move_insn (temp, val);
3010 x = gen_rtx_PLUS (Pmode, x, temp);
3011 constant_term = GEN_INT (lower);
/* For a remaining PLUS, force the non-register addend into a
   register so the sum can form a valid base+index address.  */
3014 if (GET_CODE (x) == PLUS)
3016 if (GET_CODE (XEXP (x, 0)) == REG)
3018 register rtx temp = gen_reg_rtx (Pmode);
3019 register rtx val = force_operand (XEXP (x, 1), temp);
3021 emit_move_insn (temp, val);
3023 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3026 else if (GET_CODE (XEXP (x, 1)) == REG)
3028 register rtx temp = gen_reg_rtx (Pmode);
3029 register rtx val = force_operand (XEXP (x, 0), temp);
3031 emit_move_insn (temp, val);
3033 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
/* Re-attach the displacement stripped off above, if any.  */
3037 if (constant_term != const0_rtx)
3038 x = gen_rtx_PLUS (Pmode, x, constant_term);
3043 /* Emit code to move LEN bytes from DST to SRC. */
3046 s390_expand_movmem (rtx dst, rtx src, rtx len)
/* Case 1: constant length 0..256 — a single MVC-style block move.
   The machine encodes the length as LEN - 1; length 0 emits nothing.  */
3048 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3050 if (INTVAL (len) > 0)
3051 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE available — one instruction handles any length.  */
3054 else if (TARGET_MVCLE)
3056 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
/* Case 3: general loop — move 256-byte blocks, then the remainder.  */
3061 rtx dst_addr, src_addr, count, blocks, temp;
3062 rtx loop_start_label = gen_label_rtx ();
3063 rtx loop_end_label = gen_label_rtx ();
3064 rtx end_label = gen_label_rtx ();
3065 enum machine_mode mode;
/* NOTE(review): the fallback when GET_MODE (len) is VOIDmode is on an
   elided line — presumably mode = Pmode; confirm against full source.  */
3067 mode = GET_MODE (len);
3068 if (mode == VOIDmode)
3071 dst_addr = gen_reg_rtx (Pmode);
3072 src_addr = gen_reg_rtx (Pmode);
3073 count = gen_reg_rtx (mode);
3074 blocks = gen_reg_rtx (mode);
/* Skip everything for a zero length.  */
3076 convert_move (count, len, 1);
3077 emit_cmp_and_jump_insns (count, const0_rtx,
3078 EQ, NULL_RTX, mode, 1, end_label);
/* Work on register copies of the addresses so they can be bumped
   by 256 per iteration.  */
3080 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3081 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3082 dst = change_address (dst, VOIDmode, dst_addr);
3083 src = change_address (src, VOIDmode, src_addr);
/* count := len - 1 (machine length encoding); blocks := count >> 8
   is the number of full 256-byte chunks.  */
3085 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3087 emit_move_insn (count, temp);
3089 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3091 emit_move_insn (blocks, temp);
3093 emit_cmp_and_jump_insns (blocks, const0_rtx,
3094 EQ, NULL_RTX, mode, 1, loop_end_label);
3096 emit_label (loop_start_label);
/* Move one full 256-byte block (encoded length 255) and advance
   both addresses.  */
3098 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3099 s390_load_address (dst_addr,
3100 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3101 s390_load_address (src_addr,
3102 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3104 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3106 emit_move_insn (blocks, temp);
3108 emit_cmp_and_jump_insns (blocks, const0_rtx,
3109 EQ, NULL_RTX, mode, 1, loop_end_label);
3111 emit_jump (loop_start_label);
3112 emit_label (loop_end_label);
/* Move the remaining 1..256 bytes; the low 8 bits of count hold the
   encoded remainder length.  */
3114 emit_insn (gen_movmem_short (dst, src,
3115 convert_to_mode (Pmode, count, 1)));
3116 emit_label (end_label);
3120 /* Emit code to clear LEN bytes at DST. */
3123 s390_expand_clrmem (rtx dst, rtx len)
/* Case 1: constant length 0..256 — a single XC-style clear.
   The machine encodes the length as LEN - 1; length 0 emits nothing.  */
3125 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3127 if (INTVAL (len) > 0)
3128 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE available — one instruction handles any length.  */
3131 else if (TARGET_MVCLE)
3133 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
/* Case 3: general loop — clear 256-byte blocks, then the remainder.
   Mirrors the loop in s390_expand_movmem.  */
3138 rtx dst_addr, src_addr, count, blocks, temp;
3139 rtx loop_start_label = gen_label_rtx ();
3140 rtx loop_end_label = gen_label_rtx ();
3141 rtx end_label = gen_label_rtx ();
3142 enum machine_mode mode;
/* NOTE(review): the fallback when GET_MODE (len) is VOIDmode is on an
   elided line — presumably mode = Pmode; confirm against full source.  */
3144 mode = GET_MODE (len);
3145 if (mode == VOIDmode)
3148 dst_addr = gen_reg_rtx (Pmode);
/* NOTE(review): src_addr is assigned here but never referenced again
   in this function — it looks like a leftover from the movmem variant
   and could be removed.  */
3149 src_addr = gen_reg_rtx (Pmode);
3150 count = gen_reg_rtx (mode);
3151 blocks = gen_reg_rtx (mode);
/* Skip everything for a zero length.  */
3153 convert_move (count, len, 1);
3154 emit_cmp_and_jump_insns (count, const0_rtx,
3155 EQ, NULL_RTX, mode, 1, end_label);
3157 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3158 dst = change_address (dst, VOIDmode, dst_addr);
/* count := len - 1 (machine length encoding); blocks := count >> 8
   is the number of full 256-byte chunks.  */
3160 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3162 emit_move_insn (count, temp);
3164 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3166 emit_move_insn (blocks, temp);
3168 emit_cmp_and_jump_insns (blocks, const0_rtx,
3169 EQ, NULL_RTX, mode, 1, loop_end_label);
3171 emit_label (loop_start_label);
/* Clear one full 256-byte block (encoded length 255) per iteration.  */
3173 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3174 s390_load_address (dst_addr,
3175 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3177 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3179 emit_move_insn (blocks, temp);
3181 emit_cmp_and_jump_insns (blocks, const0_rtx,
3182 EQ, NULL_RTX, mode, 1, loop_end_label);
3184 emit_jump (loop_start_label);
3185 emit_label (loop_end_label);
/* Clear the remaining 1..256 bytes.  */
3187 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3188 emit_label (end_label);
3192 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3193 and return the result in TARGET. */
3196 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
/* gen_result converts the CC value left by the compare into an
   integer result in TARGET, in the mode TARGET has.  */
3198 rtx (*gen_result) (rtx) =
3199 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
/* Case 1: constant length 0..256 — a single CLC-style compare.
   Length 0 compares equal by definition: just store 0.  */
3201 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3203 if (INTVAL (len) > 0)
3205 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3206 emit_insn (gen_result (target));
3209 emit_move_insn (target, const0_rtx);
/* Case 2: variable length — CLCLE-style long compare.  */
3212 else /* if (TARGET_MVCLE) */
3214 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3215 emit_insn (gen_result (target));
3219 /* Deactivate for now as profile code cannot cope with
3220 CC being live across basic block boundaries. */
/* NOTE(review): per the comment above, this loop variant appears to be
   preprocessor-disabled in the original file (the #if/#endif lines are
   elided from this listing) — confirm before editing.  */
3223 rtx addr0, addr1, count, blocks, temp;
3224 rtx loop_start_label = gen_label_rtx ();
3225 rtx loop_end_label = gen_label_rtx ();
3226 rtx end_label = gen_label_rtx ();
3227 enum machine_mode mode;
3229 mode = GET_MODE (len);
3230 if (mode == VOIDmode)
3233 addr0 = gen_reg_rtx (Pmode);
3234 addr1 = gen_reg_rtx (Pmode);
3235 count = gen_reg_rtx (mode);
3236 blocks = gen_reg_rtx (mode);
3238 convert_move (count, len, 1);
3239 emit_cmp_and_jump_insns (count, const0_rtx,
3240 EQ, NULL_RTX, mode, 1, end_label);
3242 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3243 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3244 op0 = change_address (op0, VOIDmode, addr0);
3245 op1 = change_address (op1, VOIDmode, addr1);
/* count := len - 1 (machine length encoding); blocks := count >> 8
   is the number of full 256-byte chunks.  */
3247 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3249 emit_move_insn (count, temp);
3251 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3253 emit_move_insn (blocks, temp);
3255 emit_cmp_and_jump_insns (blocks, const0_rtx,
3256 EQ, NULL_RTX, mode, 1, loop_end_label);
3258 emit_label (loop_start_label);
/* Compare one 256-byte block; on inequality (CC != 0) jump straight
   to end_label so gen_result can materialize the ordering.
   NOTE(review): register number 33 is hard-coded here while other code
   in this file uses the CC_REGNUM macro — consider using the macro.  */
3260 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3261 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3262 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3263 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3264 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3265 emit_jump_insn (temp);
3267 s390_load_address (addr0,
3268 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3269 s390_load_address (addr1,
3270 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3272 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3274 emit_move_insn (blocks, temp);
3276 emit_cmp_and_jump_insns (blocks, const0_rtx,
3277 EQ, NULL_RTX, mode, 1, loop_end_label);
3279 emit_jump (loop_start_label);
3280 emit_label (loop_end_label);
/* Compare the remaining 1..256 bytes, then convert CC to the
   integer result.  */
3282 emit_insn (gen_cmpmem_short (op0, op1,
3283 convert_to_mode (Pmode, count, 1)));
3284 emit_label (end_label);
3286 emit_insn (gen_result (target));
3292 /* Expand conditional increment or decrement using alc/slb instructions.
3293 Should generate code setting DST to either SRC or SRC + INCREMENT,
3294 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3295 Returns true if successful, false otherwise. */
3298 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3299 rtx dst, rtx src, rtx increment)
3301 enum machine_mode cmp_mode;
3302 enum machine_mode cc_mode;
/* Determine the mode of the comparison; both operands must agree
   (VOIDmode constants adapt to the other operand).  Only SImode and
   DImode comparisons are handled.  */
3307 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3308 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3310 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3311 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3316 /* Try ADD LOGICAL WITH CARRY. */
3317 if (increment == const1_rtx)
3319 /* Determine CC mode to use. */
3320 if (cmp_code == EQ || cmp_code == NE)
3322 if (cmp_op1 != const0_rtx)
/* Reduce EQ/NE against a nonzero operand to a comparison of
   (op0 XOR op1) against zero.  */
3324 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3325 NULL_RTX, 0, OPTAB_WIDEN);
3326 cmp_op1 = const0_rtx;
/* EQ/NE on zero map onto the unsigned conditions the carry
   mechanism can represent.  */
3329 cmp_code = cmp_code == EQ ? LEU : GTU;
/* Canonicalize LTU/LEU by swapping the operands.
   NOTE(review): the cc_mode assignments for the individual condition
   codes are on elided lines; confirm against the full source.  */
3332 if (cmp_code == LTU || cmp_code == LEU)
3337 cmp_code = swap_condition (cmp_code);
3354 /* Emit comparison instruction pattern. */
3355 if (!register_operand (cmp_op0, cmp_mode))
3356 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3358 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3359 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3360 /* We use insn_invalid_p here to add clobbers if required. */
3361 if (insn_invalid_p (emit_insn (insn)))
3364 /* Emit ALC instruction pattern. */
/* dst = src + (condition on CC) — the condition rtx supplies the
   carry-in of the add-logical-with-carry.  */
3365 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3366 gen_rtx_REG (cc_mode, CC_REGNUM),
3369 if (src != const0_rtx)
3371 if (!register_operand (src, GET_MODE (dst)))
3372 src = force_reg (GET_MODE (dst), src);
/* PLUS src const0 keeps the pattern shape the ALC insn expects.  */
3374 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3375 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
/* The ALC pattern clobbers CC, so wrap SET + CLOBBER in a PARALLEL.  */
3378 p = rtvec_alloc (2);
3380 gen_rtx_SET (VOIDmode, dst, op_res);
3382 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3383 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3388 /* Try SUBTRACT LOGICAL WITH BORROW. */
3389 if (increment == constm1_rtx)
3391 /* Determine CC mode to use. */
3392 if (cmp_code == EQ || cmp_code == NE)
3394 if (cmp_op1 != const0_rtx)
/* Same EQ/NE-to-zero reduction as in the ALC case above.  */
3396 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3397 NULL_RTX, 0, OPTAB_WIDEN);
3398 cmp_op1 = const0_rtx;
3401 cmp_code = cmp_code == EQ ? LEU : GTU;
/* Canonicalize GTU/GEU by swapping the operands (the mirror of the
   ALC canonicalization; cc_mode assignments again elided).  */
3404 if (cmp_code == GTU || cmp_code == GEU)
3409 cmp_code = swap_condition (cmp_code);
3426 /* Emit comparison instruction pattern. */
3427 if (!register_operand (cmp_op0, cmp_mode))
3428 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3430 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3431 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3432 /* We use insn_invalid_p here to add clobbers if required. */
3433 if (insn_invalid_p (emit_insn (insn)))
3436 /* Emit SLB instruction pattern. */
3437 if (!register_operand (src, GET_MODE (dst)))
3438 src = force_reg (GET_MODE (dst), src);
/* dst = (src - 0) - (condition on CC): the condition supplies the
   borrow-in of subtract-logical-with-borrow.  */
3440 op_res = gen_rtx_MINUS (GET_MODE (dst),
3441 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3442 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3443 gen_rtx_REG (cc_mode, CC_REGNUM),
/* SLB also clobbers CC; emit SET + CLOBBER as a PARALLEL.  */
3445 p = rtvec_alloc (2);
3447 gen_rtx_SET (VOIDmode, dst, op_res);
3449 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3450 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3459 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3460 We need to emit DTP-relative relocations. */
3463 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
/* Pick the assembler directive by relocation size: .long for 4-byte,
   .quad for 8-byte (the switch/case lines are elided in this listing).  */
3468 fputs ("\t.long\t", file);
3471 fputs ("\t.quad\t", file);
/* Emit the symbol followed by the @DTPOFF relocation suffix.  */
3476 output_addr_const (file, x);
3477 fputs ("@DTPOFF", file);
3480 /* In the name of slightly smaller debug output, and to cater to
3481 general assembler losage, recognize various UNSPEC sequences
3482 and turn them back into a direct symbol reference. */
3485 s390_delegitimize_address (rtx orig_x)
/* Only MEM references are delegitimized here; anything else is
   returned unchanged.  */
3489 if (GET_CODE (x) != MEM)
/* Pattern 1: (mem (plus pic_reg (const (unspec [sym] UNSPEC_GOT))))
   — a GOT slot addressed off the PIC register; recover SYM.  */
3493 if (GET_CODE (x) == PLUS
3494 && GET_CODE (XEXP (x, 1)) == CONST
3495 && GET_CODE (XEXP (x, 0)) == REG
3496 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3498 y = XEXP (XEXP (x, 1), 0);
3499 if (GET_CODE (y) == UNSPEC
3500 && XINT (y, 1) == UNSPEC_GOT)
3501 return XVECEXP (y, 0, 0);
/* Pattern 2: (mem (const (unspec [sym] UNSPEC_GOTENT))) — a
   LARL-addressed GOT entry; recover SYM.  */
3505 if (GET_CODE (x) == CONST)
3508 if (GET_CODE (y) == UNSPEC
3509 && XINT (y, 1) == UNSPEC_GOTENT)
3510 return XVECEXP (y, 0, 0);
3517 /* Output shift count operand OP to stdio stream FILE. */
3520 print_shift_count_operand (FILE *file, rtx op)
3522 HOST_WIDE_INT offset = 0;
3524 /* We can have an integer constant, an address register,
3525 or a sum of the two. */
3526 if (GET_CODE (op) == CONST_INT)
3528 offset = INTVAL (op);
/* For reg+const, split into displacement and base register.  */
3531 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3533 offset = INTVAL (XEXP (op, 1));
/* Look through paradoxical/partial SUBREGs to the underlying reg.  */
3536 while (op && GET_CODE (op) == SUBREG)
3537 op = SUBREG_REG (op);
/* Any remaining register must be a hard address register.  */
3540 if (op && (GET_CODE (op) != REG
3541 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3542 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3545 /* Shift counts are truncated to the low six bits anyway. */
3546 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
/* Emit "(reg)" if a base register is present.  */
3548 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3551 /* Locate some local-dynamic symbol still in use by this function
3552 so that we can print its name in local-dynamic base patterns. */
3555 get_some_local_dynamic_name (void)
/* Cached from a previous call?  */
3559 if (cfun->machine->some_ld_name)
3560 return cfun->machine->some_ld_name;
/* Scan every insn; the for_each_rtx callback stores the first
   local-dynamic TLS symbol it finds into cfun->machine->some_ld_name
   and returns nonzero.  */
3562 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3564 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3565 return cfun->machine->some_ld_name;
3571 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
/* Constant-pool references are followed into the pooled constant,
   which may itself contain the TLS symbol we are looking for.  */
3575 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3577 x = get_pool_constant (x);
3578 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
/* Found a local-dynamic TLS symbol: record its name and stop the
   for_each_rtx walk by returning nonzero.  */
3581 if (GET_CODE (x) == SYMBOL_REF
3582 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3584 cfun->machine->some_ld_name = XSTR (x, 0);
3591 /* Output machine-dependent UNSPECs occurring in address constant X
3592 in assembler syntax to stdio stream FILE. Returns true if the
3593 constant X could be recognized, false otherwise. */
3596 s390_output_addr_const_extra (FILE *file, rtx x)
/* Each recognized single-operand UNSPEC prints its operand followed by
   the matching assembler relocation suffix.  The case labels between
   the branches are elided in this listing.  */
3598 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3599 switch (XINT (x, 1))
3602 output_addr_const (file, XVECEXP (x, 0, 0));
3603 fprintf (file, "@GOTENT");
3606 output_addr_const (file, XVECEXP (x, 0, 0));
3607 fprintf (file, "@GOT");
3610 output_addr_const (file, XVECEXP (x, 0, 0));
3611 fprintf (file, "@GOTOFF");
3614 output_addr_const (file, XVECEXP (x, 0, 0));
3615 fprintf (file, "@PLT");
3618 output_addr_const (file, XVECEXP (x, 0, 0));
3619 fprintf (file, "@PLTOFF");
3622 output_addr_const (file, XVECEXP (x, 0, 0));
3623 fprintf (file, "@TLSGD");
/* TLSLDM has no symbol operand of its own; it uses the module's
   representative local-dynamic symbol.  */
3626 assemble_name (file, get_some_local_dynamic_name ());
3627 fprintf (file, "@TLSLDM");
3630 output_addr_const (file, XVECEXP (x, 0, 0));
3631 fprintf (file, "@DTPOFF");
3634 output_addr_const (file, XVECEXP (x, 0, 0));
3635 fprintf (file, "@NTPOFF");
3637 case UNSPEC_GOTNTPOFF:
3638 output_addr_const (file, XVECEXP (x, 0, 0));
3639 fprintf (file, "@GOTNTPOFF");
3641 case UNSPEC_INDNTPOFF:
3642 output_addr_const (file, XVECEXP (x, 0, 0));
3643 fprintf (file, "@INDNTPOFF");
3650 /* Output address operand ADDR in assembler syntax to
3651 stdio stream FILE. */
3654 print_operand_address (FILE *file, rtx addr)
3656 struct s390_address ad;
/* The address must decompose into base/index/displacement with hard
   registers valid under strict checking; otherwise report an error.  */
3658 if (!s390_decompose_address (addr, &ad)
3659 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3660 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3661 output_operand_lossage ("Cannot decompose address.");
/* Displacement first ("0" when absent) ...  */
3664 output_addr_const (file, ad.disp);
3666 fprintf (file, "0");
/* ... then "(index,base)" or "(base)" as appropriate.  */
3668 if (ad.base && ad.indx)
3669 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3670 reg_names[REGNO (ad.base)]);
3672 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3675 /* Output operand X in assembler syntax to stdio stream FILE.
3676 CODE specified the format flag. The following format flags
3679 'C': print opcode suffix for branch condition.
3680 'D': print opcode suffix for inverse branch condition.
3681 'J': print tls_load/tls_gdcall/tls_ldcall suffix
3682 'O': print only the displacement of a memory reference.
3683 'R': print only the base register of a memory reference.
3684 'N': print the second word of a DImode operand.
3685 'M': print the second word of a TImode operand.
3686 'Y': print shift count operand.
3688 'b': print integer X as if it's an unsigned byte.
3689 'x': print integer X as if it's an unsigned word.
3690 'h': print integer X as if it's a signed word.
3691 'i': print the first nonzero HImode part of X.
3692 'j': print the first HImode part unequal to 0xffff of X. */
3695 print_operand (FILE *file, rtx x, int code)
/* 'C'/'D': branch-condition mnemonic, normal resp. inverted.
   (The switch/case lines for CODE are elided in this listing.)  */
3700 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3704 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'J': TLS call/load annotation depending on the operand form.  */
3708 if (GET_CODE (x) == SYMBOL_REF)
3710 fprintf (file, "%s", ":tls_load:");
3711 output_addr_const (file, x);
3713 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3715 fprintf (file, "%s", ":tls_gdcall:");
3716 output_addr_const (file, XVECEXP (x, 0, 0));
3718 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3720 fprintf (file, "%s", ":tls_ldcall:");
3721 assemble_name (file, get_some_local_dynamic_name ());
/* 'O': displacement only.  Requires a decomposable MEM with no index
   register (some of the validity checks are on elided lines).  */
3729 struct s390_address ad;
3731 if (GET_CODE (x) != MEM
3732 || !s390_decompose_address (XEXP (x, 0), &ad)
3733 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3738 output_addr_const (file, ad.disp);
3740 fprintf (file, "0");
/* 'R': base register only, "0" when the address has none.  */
3746 struct s390_address ad;
3748 if (GET_CODE (x) != MEM
3749 || !s390_decompose_address (XEXP (x, 0), &ad)
3750 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3755 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3757 fprintf (file, "0");
/* 'N': second word of a DImode operand — next register, or the
   memory word 4 bytes further on.  */
3762 if (GET_CODE (x) == REG)
3763 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3764 else if (GET_CODE (x) == MEM)
3765 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand — next register, or the
   memory word 8 bytes further on.  */
3771 if (GET_CODE (x) == REG)
3772 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3773 else if (GET_CODE (x) == MEM)
3774 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
3780 print_shift_count_operand (file, x);
/* No (or consumed) flag: print the operand by its rtx class.  */
3784 switch (GET_CODE (x))
3787 fprintf (file, "%s", reg_names[REGNO (x)]);
3791 output_address (XEXP (x, 0));
3798 output_addr_const (file, x);
/* CONST_INT with 'b'/'x'/'h'/'i'/'j' modifiers, else plain decimal.  */
3803 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3804 else if (code == 'x')
3805 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3806 else if (code == 'h')
3807 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3808 else if (code == 'i')
3809 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
3810 s390_extract_part (x, HImode, 0));
3811 else if (code == 'j')
3812 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
3813 s390_extract_part (x, HImode, -1));
3815 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE: only VOIDmode (integer) doubles are supported, with
   the same 'b'/'x'/'h' modifiers applied to the low word.  */
3819 if (GET_MODE (x) != VOIDmode)
3822 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3823 else if (code == 'x')
3824 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3825 else if (code == 'h')
3826 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3832 fatal_insn ("UNKNOWN in print_operand !?", x);
3837 /* Target hook for assembling integer objects. We need to define it
3838 here to work a round a bug in some versions of GAS, which couldn't
3839 handle values smaller than INT_MIN when printed in decimal. */
3842 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* For an aligned 8-byte integer below INT_MIN, emit it in hex
   ourselves (.quad) to dodge the GAS decimal-parsing bug.  */
3844 if (size == 8 && aligned_p
3845 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3847 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
/* Everything else goes through the generic handler.  */
3851 return default_assemble_integer (x, size, aligned_p);
3854 /* Returns true if register REGNO is used for forming
3855 a memory address in expression X. */
3858 reg_used_in_mem_p (int regno, rtx x)
3860 enum rtx_code code = GET_CODE (x);
/* A MEM whose address mentions REGNO is a direct hit.  */
3866 if (refers_to_regno_p (regno, regno+1,
/* A branch target (SET with PC destination) counts as address use
   as well.  */
3870 else if (code == SET
3871 && GET_CODE (SET_DEST (x)) == PC)
3873 if (refers_to_regno_p (regno, regno+1,
/* Otherwise recurse over all sub-rtxes.  */
3878 fmt = GET_RTX_FORMAT (code);
3879 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3882 && reg_used_in_mem_p (regno, XEXP (x, i)))
3885 else if (fmt[i] == 'E')
3886 for (j = 0; j < XVECLEN (x, i); j++)
3887 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3893 /* Returns true if expression DEP_RTX sets an address register
3894 used by instruction INSN to address memory. */
3897 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
/* Look through the insn wrapper to its pattern.  */
3901 if (GET_CODE (dep_rtx) == INSN)
3902 dep_rtx = PATTERN (dep_rtx);
3904 if (GET_CODE (dep_rtx) == SET)
/* Find the actual register being set, looking through
   STRICT_LOW_PART and SUBREGs.  */
3906 target = SET_DEST (dep_rtx);
3907 if (GET_CODE (target) == STRICT_LOW_PART)
3908 target = XEXP (target, 0);
3909 while (GET_CODE (target) == SUBREG)
3910 target = SUBREG_REG (target);
3912 if (GET_CODE (target) == REG)
3914 int regno = REGNO (target);
/* For an LA-type insn, the address is computed in the SET_SRC:
   check whether the register feeds that computation.  A PARALLEL
   must be exactly {SET, something} to be analyzable.  */
3916 if (s390_safe_attr_type (insn) == TYPE_LA)
3918 pat = PATTERN (insn);
3919 if (GET_CODE (pat) == PARALLEL)
3921 if (XVECLEN (pat, 0) != 2)
3923 pat = XVECEXP (pat, 0, 0);
3925 if (GET_CODE (pat) == SET)
3926 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* Otherwise, insns using the address-generation unit depend on the
   register whenever it forms part of a memory address.  */
3930 else if (get_attr_atype (insn) == ATYPE_AGEN)
3931 return reg_used_in_mem_p (regno, PATTERN (insn));
3937 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
3940 s390_agen_dep_p (rtx dep_insn, rtx insn)
3942 rtx dep_rtx = PATTERN (dep_insn);
/* Single SET: check it directly; PARALLEL: check each element.  */
3945 if (GET_CODE (dep_rtx) == SET
3946 && addr_generation_dependency_p (dep_rtx, insn))
3948 else if (GET_CODE (dep_rtx) == PARALLEL)
3950 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3952 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3959 /* Return the modified cost of the dependency of instruction INSN
3960 on instruction DEP_INSN through the link LINK. COST is the
3961 default cost of that dependency.
3963 Data dependencies are all handled without delay. However, if a
3964 register is modified and subsequently used as base or index
3965 register of a memory reference, at least 4 cycles need to pass
3966 between setting and using the register to avoid pipeline stalls.
3967 An exception is the LA instruction. An address generated by LA can
3968 be used by introducing only a one cycle stall on the pipeline. */
3971 s390_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
3973 /* If the dependence is an anti-dependence, there is no cost. For an
3974 output dependence, there is sometimes a cost, but it doesn't seem
3975 worth handling those few cases. */
/* REG_NOTE_KIND 0 means a true data dependence; everything else
   (anti/output) is treated as free.  */
3977 if (REG_NOTE_KIND (link) != 0)
3980 /* If we can't recognize the insns, we can't really do anything. */
3981 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3984 /* Operand forward in case of lr, load and la. */
/* On z990, results of LA/LR/LOAD are forwarded; the adjusted cost
   value is on an elided line.  */
3985 if (s390_tune == PROCESSOR_2084_Z990
3987 && (s390_safe_attr_type (dep_insn) == TYPE_LA
3988 || s390_safe_attr_type (dep_insn) == TYPE_LR
3989 || s390_safe_attr_type (dep_insn) == TYPE_LOAD))
3994 /* A C statement (sans semicolon) to update the integer scheduling priority
3995 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
3996 reduce the priority to execute INSN later. Do not define this macro if
3997 you do not need to adjust the scheduling priorities of insns.
3999 A STD instruction should be scheduled earlier,
4000 in order to use the bypass. */
4003 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
/* Only real insns on the z990 pipeline get their priority boosted.  */
4005 if (! INSN_P (insn))
4008 if (s390_tune != PROCESSOR_2084_Z990)
/* Boost by insn type: one class gets priority << 3, another << 1
   (the case labels are elided in this listing).  */
4011 switch (s390_safe_attr_type (insn))
4015 priority = priority << 3;
4018 priority = priority << 1;
4026 /* The number of instructions that can be issued per cycle. */
4029 s390_issue_rate (void)
/* z990 is multi-issue; the return values are on elided lines.  */
4031 if (s390_tune == PROCESSOR_2084_Z990)
/* Scheduler hook: how many ready insns to examine on the first cycle
   of multipass lookahead (body elided in this listing).  */
4037 s390_first_cycle_multipass_dfa_lookahead (void)
4043 /* Split all branches that exceed the maximum distance.
4044 Returns true if this created a new literal pool entry. */
4047 s390_split_branches (void)
/* The return-address register is used as a scratch for the indirect
   branch sequence.  */
4049 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4050 int new_literal = 0;
4051 rtx insn, pat, tmp, target;
4054 /* We need correct insn addresses. */
4056 shorten_branches (get_insns ());
4058 /* Find all branches that exceed 64KB, and split them. */
4060 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4062 if (GET_CODE (insn) != JUMP_INSN)
/* Dig out the (set pc ...) part of the jump pattern.  */
4065 pat = PATTERN (insn);
4066 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4067 pat = XVECEXP (pat, 0, 0);
4068 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the LABEL_REF: either an unconditional jump target or one
   arm of an IF_THEN_ELSE.  */
4071 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4073 label = &SET_SRC (pat);
4075 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4077 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4078 label = &XEXP (SET_SRC (pat), 1);
4079 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4080 label = &XEXP (SET_SRC (pat), 2);
/* Length <= 4 means the branch already reaches; leave it alone.  */
4087 if (get_attr_length (insn) <= 4)
4090 /* We are going to use the return register as scratch register,
4091 make sure it will be saved/restored by the prologue/epilogue. */
4092 cfun->machine->save_return_addr_p = 1;
/* Variant A (guard on an elided line): load the label address itself
   from the literal pool into temp_reg.  */
4097 tmp = force_const_mem (Pmode, *label);
4098 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4099 INSN_ADDRESSES_NEW (tmp, -1);
4100 annotate_constant_pool_refs (&PATTERN (tmp));
/* Variant B: load a literal-pool-relative offset and add the pool
   base register, yielding the label address in temp_reg.  */
4107 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4108 UNSPEC_LTREL_OFFSET);
4109 target = gen_rtx_CONST (Pmode, target);
4110 target = force_const_mem (Pmode, target);
4111 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4112 INSN_ADDRESSES_NEW (tmp, -1);
4113 annotate_constant_pool_refs (&PATTERN (tmp));
4115 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4116 cfun->machine->base_reg),
4118 target = gen_rtx_PLUS (Pmode, temp_reg, target);
/* Redirect the branch through the scratch register.  */
4121 if (!validate_change (insn, label, target, 0))
4128 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4129 Fix up MEMs as required. */
4132 annotate_constant_pool_refs (rtx *x)
/* A bare pool SYMBOL_REF at this point is an error (guard body on an
   elided line) — pool symbols must sit inside a MEM or SET as handled
   below.  */
4137 if (GET_CODE (*x) == SYMBOL_REF
4138 && CONSTANT_POOL_ADDRESS_P (*x))
4141 /* Literal pool references can only occur inside a MEM ... */
4142 if (GET_CODE (*x) == MEM)
4144 rtx memref = XEXP (*x, 0);
/* (mem (symbol_ref)): rewrite address as an UNSPEC pairing the
   symbol with the function's literal pool base register.  */
4146 if (GET_CODE (memref) == SYMBOL_REF
4147 && CONSTANT_POOL_ADDRESS_P (memref))
4149 rtx base = cfun->machine->base_reg;
4150 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4153 *x = replace_equiv_address (*x, addr);
/* (mem (const (plus (symbol_ref) (const_int)))): same, preserving
   the constant offset outside the UNSPEC.  */
4157 if (GET_CODE (memref) == CONST
4158 && GET_CODE (XEXP (memref, 0)) == PLUS
4159 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4160 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4161 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4163 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4164 rtx sym = XEXP (XEXP (memref, 0), 0);
4165 rtx base = cfun->machine->base_reg;
4166 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4169 *x = replace_equiv_address (*x, plus_constant (addr, off));
4174 /* ... or a load-address type pattern. */
4175 if (GET_CODE (*x) == SET)
4177 rtx addrref = SET_SRC (*x);
/* (set reg (symbol_ref)): annotate the source the same way.  */
4179 if (GET_CODE (addrref) == SYMBOL_REF
4180 && CONSTANT_POOL_ADDRESS_P (addrref))
4182 rtx base = cfun->machine->base_reg;
4183 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4186 SET_SRC (*x) = addr;
/* (set reg (const (plus (symbol_ref) (const_int)))): ditto with
   the offset preserved.  */
4190 if (GET_CODE (addrref) == CONST
4191 && GET_CODE (XEXP (addrref, 0)) == PLUS
4192 && GET_CODE (XEXP (addrref, 0), 1)) == CONST_INT
4193 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4194 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4196 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4197 rtx sym = XEXP (XEXP (addrref, 0), 0);
4198 rtx base = cfun->machine->base_reg;
4199 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4202 SET_SRC (*x) = plus_constant (addr, off);
4207 /* Annotate LTREL_BASE as well. */
4208 if (GET_CODE (*x) == UNSPEC
4209 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4211 rtx base = cfun->machine->base_reg;
4212 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
/* Recurse into all sub-rtxes.  */
4217 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4218 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4222 annotate_constant_pool_refs (&XEXP (*x, i));
4224 else if (fmt[i] == 'E')
4226 for (j = 0; j < XVECLEN (*x, i); j++)
4227 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4233 /* Find an annotated literal pool symbol referenced in RTX X,
4234 and store it at REF. Will abort if X contains references to
4235 more than one such pool symbol; multiple references to the same
4236 symbol are allowed, however.
4238 The rtx pointed to by REF must be initialized to NULL_RTX
4239 by the caller before calling this routine. */
4242 find_constant_pool_ref (rtx x, rtx *ref)
4247 /* Ignore LTREL_BASE references. */
4248 if (GET_CODE (x) == UNSPEC
4249 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4251 /* Likewise POOL_ENTRY insns. */
4252 if (GET_CODE (x) == UNSPEC_VOLATILE
4253 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
/* A bare (unannotated) pool SYMBOL_REF is an error at this stage
   (action on an elided line).  */
4256 if (GET_CODE (x) == SYMBOL_REF
4257 && CONSTANT_POOL_ADDRESS_P (x))
/* Annotated reference: record the symbol, enforcing that the whole
   expression references at most one distinct pool symbol.  */
4260 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4262 rtx sym = XVECEXP (x, 0, 0);
4263 if (GET_CODE (sym) != SYMBOL_REF
4264 || !CONSTANT_POOL_ADDRESS_P (sym))
4267 if (*ref == NULL_RTX)
4269 else if (*ref != sym)
/* Recurse into all sub-rtxes.  */
4275 fmt = GET_RTX_FORMAT (GET_CODE (x));
4276 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4280 find_constant_pool_ref (XEXP (x, i), ref);
4282 else if (fmt[i] == 'E')
4284 for (j = 0; j < XVECLEN (x, i); j++)
4285 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4290 /* Replace every reference to the annotated literal pool
4291 symbol REF in X by its base plus OFFSET. */
4294 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
/* Case 1: a plain (unspec [REF base] LTREF) — rewrite as
   (plus base OFFSET); operand 1 of the LTREF is the base register.  */
4302 if (GET_CODE (*x) == UNSPEC
4303 && XINT (*x, 1) == UNSPEC_LTREF
4304 && XVECEXP (*x, 0, 0) == ref)
4306 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
/* Case 2: (plus (unspec ... LTREF) const_int) — fold the existing
   displacement into the rewritten address.  */
4310 if (GET_CODE (*x) == PLUS
4311 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4312 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4313 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4314 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4316 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4317 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
/* Recurse into all subexpressions and rtx vectors.  */
4321 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4322 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4326 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4328 else if (fmt[i] == 'E')
4330 for (j = 0; j < XVECLEN (*x, i); j++)
4331 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4336 /* Check whether X contains an UNSPEC_LTREL_BASE.
4337 Return its constant pool symbol if found, NULL_RTX otherwise. */
4340 find_ltrel_base (rtx x)
/* Operand 0 of an LTREL_BASE unspec is the pool symbol it refers to.  */
4345 if (GET_CODE (x) == UNSPEC
4346 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4347 return XVECEXP (x, 0, 0);
/* Depth-first search through subexpressions; the first hit wins.  */
4349 fmt = GET_RTX_FORMAT (GET_CODE (x));
4350 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4354 rtx fnd = find_ltrel_base (XEXP (x, i));
4358 else if (fmt[i] == 'E')
4360 for (j = 0; j < XVECLEN (x, i); j++)
4362 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4372 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4375 replace_ltrel_base (rtx *x)
/* Operand 1 of the (annotated) LTREL_BASE unspec is the base register;
   substitute it in place of the whole unspec.  */
4380 if (GET_CODE (*x) == UNSPEC
4381 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4383 *x = XVECEXP (*x, 0, 1);
/* Recurse into all subexpressions and rtx vectors.  */
4387 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4388 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4392 replace_ltrel_base (&XEXP (*x, i));
4394 else if (fmt[i] == 'E')
4396 for (j = 0; j < XVECLEN (*x, i); j++)
4397 replace_ltrel_base (&XVECEXP (*x, i, j));
4403 /* We keep a list of constants which we have to add to internal
4404 constant tables in the middle of large functions. */
/* Number of distinct machine modes a pool entry may have; indexes
   both constant_modes[] and constant_pool.constants[].  */
4406 #define NR_C_MODES 7
4407 enum machine_mode constant_modes[NR_C_MODES] =
/* One pool entry: chained per mode (value/label fields used by the
   routines below).  */
4418 struct constant *next;
/* One literal pool (chunk): pools are chained, and constants are kept
   in per-mode lists.  */
4423 struct constant_pool
4425 struct constant_pool *next;
4430 struct constant *constants[NR_C_MODES];
/* Two strategies: a single "main" pool for small functions, and
   chunkified pools when the 4K displacement limit would overflow.  */
4435 static struct constant_pool * s390_mainpool_start (void);
4436 static void s390_mainpool_finish (struct constant_pool *);
4437 static void s390_mainpool_cancel (struct constant_pool *);
4439 static struct constant_pool * s390_chunkify_start (void);
4440 static void s390_chunkify_finish (struct constant_pool *);
4441 static void s390_chunkify_cancel (struct constant_pool *);
/* Low-level helpers shared by both strategies.  */
4443 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4444 static void s390_end_pool (struct constant_pool *, rtx);
4445 static void s390_add_pool_insn (struct constant_pool *, rtx);
4446 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4447 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4448 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4449 static rtx s390_dump_pool (struct constant_pool *, bool);
4450 static struct constant_pool *s390_alloc_pool (void);
4451 static void s390_free_pool (struct constant_pool *);
4453 /* Create new constant pool covering instructions starting at INSN
4454 and chain it to the end of POOL_LIST. */
4456 static struct constant_pool *
4457 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4459 struct constant_pool *pool, **prev;
4461 pool = s390_alloc_pool ();
4462 pool->first_insn = insn;
/* Append at the tail so the list stays in instruction order.  */
4464 for (prev = pool_list; *prev; prev = &(*prev)->next)
4471 /* End range of instructions covered by POOL at INSN and emit
4472 placeholder insn representing the pool. */
4475 s390_end_pool (struct constant_pool *pool, rtx insn)
/* Reserve extra bytes so the pool can be aligned when dumped.  */
4477 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
/* NOTE(review): reached when no explicit INSN was supplied — the pool
   placeholder then goes after the last insn of the function.  */
4480 insn = get_last_insn ();
/* The placeholder is later replaced by the real pool contents in
   s390_dump_pool; address -1 means "not yet computed".  */
4482 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4483 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4486 /* Add INSN to the list of insns covered by POOL. */
4489 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
/* Membership is tracked by insn UID in a bitmap (see s390_find_pool).  */
4491 bitmap_set_bit (pool->insns, INSN_UID (insn));
4494 /* Return pool out of POOL_LIST that covers INSN. */
4496 static struct constant_pool *
4497 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4499 struct constant_pool *pool;
/* Linear search over the chunk list; coverage was recorded by UID
   in s390_add_pool_insn.  */
4501 for (pool = pool_list; pool; pool = pool->next)
4502 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4508 /* Add constant VAL of mode MODE to the constant pool POOL. */
4511 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
/* Map MODE to its bucket index; an unknown mode (i == NR_C_MODES)
   is a caller error.  */
4516 for (i = 0; i < NR_C_MODES; i++)
4517 if (constant_modes[i] == mode)
4519 if (i == NR_C_MODES)
/* De-duplicate: if an equal constant is already pooled, reuse it.  */
4522 for (c = pool->constants[i]; c != NULL; c = c->next)
4523 if (rtx_equal_p (val, c->value))
/* Otherwise prepend a new entry with a fresh label and grow the
   pool's byte size accordingly.  */
4528 c = (struct constant *) xmalloc (sizeof *c);
4530 c->label = gen_label_rtx ();
4531 c->next = pool->constants[i];
4532 pool->constants[i] = c;
4533 pool->size += GET_MODE_SIZE (mode);
4537 /* Find constant VAL of mode MODE in the constant pool POOL.
4538 Return an RTX describing the distance from the start of
4539 the pool to the location of the new constant. */
4542 s390_find_constant (struct constant_pool *pool, rtx val,
4543 enum machine_mode mode)
/* Same bucket lookup as in s390_add_constant.  */
4549 for (i = 0; i < NR_C_MODES; i++)
4550 if (constant_modes[i] == mode)
4552 if (i == NR_C_MODES)
4555 for (c = pool->constants[i]; c != NULL; c = c->next)
4556 if (rtx_equal_p (val, c->value))
/* The offset is the symbolic difference between the entry's label and
   the pool's base label: (const (minus entry pool-base)).  */
4562 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4563 gen_rtx_LABEL_REF (Pmode, pool->label));
4564 offset = gen_rtx_CONST (Pmode, offset);
4568 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
4569 do not emit the pool base label. */
4572 s390_dump_pool (struct constant_pool *pool, bool remote_label)
4578 /* Pool start insn switches to proper section
4579 and guarantees necessary alignment. */
4580 if (TARGET_CPU_ZARCH)
4581 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4583 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4584 INSN_ADDRESSES_NEW (insn, -1);
/* Emit the pool base label here only when it was not already placed
   elsewhere by the caller (REMOTE_LABEL).  */
4588 insn = emit_label_after (pool->label, insn);
4589 INSN_ADDRESSES_NEW (insn, -1);
4592 /* Dump constants in descending alignment requirement order,
4593 ensuring proper alignment for every constant. */
4594 for (i = 0; i < NR_C_MODES; i++)
4595 for (c = pool->constants[i]; c; c = c->next)
4597 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
4598 rtx value = c->value;
4599 if (GET_CODE (value) == CONST
4600 && GET_CODE (XEXP (value, 0)) == UNSPEC
4601 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
4602 && XVECLEN (XEXP (value, 0), 0) == 1)
4604 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4605 gen_rtx_LABEL_REF (VOIDmode, pool->label));
4606 value = gen_rtx_CONST (VOIDmode, value);
/* Each entry gets its label, then a POOL_ENTRY insn carrying the
   value; the entry is expanded to data at final output time.  */
4609 insn = emit_label_after (c->label, insn);
4610 INSN_ADDRESSES_NEW (insn, -1);
4612 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
4613 gen_rtvec (1, value),
4614 UNSPECV_POOL_ENTRY);
4615 insn = emit_insn_after (value, insn);
4616 INSN_ADDRESSES_NEW (insn, -1);
4619 /* Pool end insn switches back to previous section
4620 and guarantees necessary alignment. */
4621 if (TARGET_CPU_ZARCH)
4622 insn = emit_insn_after (gen_pool_end_64 (), insn);
4624 insn = emit_insn_after (gen_pool_end_31 (), insn);
4625 INSN_ADDRESSES_NEW (insn, -1);
/* The barrier keeps the pool data out of the normal control flow.  */
4627 insn = emit_barrier_after (insn);
4628 INSN_ADDRESSES_NEW (insn, -1);
4630 /* Remove placeholder insn. */
4631 remove_insn (pool->pool_insn);
4636 /* Allocate new constant_pool structure. */
4638 static struct constant_pool *
4639 s390_alloc_pool (void)
4641 struct constant_pool *pool;
4644 pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* Start with empty per-mode constant lists, a fresh base label,
   and an empty insn-coverage bitmap.  */
4646 for (i = 0; i < NR_C_MODES; i++)
4647 pool->constants[i] = NULL;
4649 pool->label = gen_label_rtx ();
4650 pool->first_insn = NULL_RTX;
4651 pool->pool_insn = NULL_RTX;
4652 pool->insns = BITMAP_XMALLOC ();
4658 /* Free all memory used by POOL. */
4661 s390_free_pool (struct constant_pool *pool)
/* Free every constant entry in every mode bucket, then the bitmap.
   Entries were allocated with xmalloc in s390_add_constant.  */
4665 for (i = 0; i < NR_C_MODES; i++)
4667 struct constant *c = pool->constants[i];
4670 struct constant *next = c->next;
4676 BITMAP_XFREE (pool->insns);
4681 /* Collect main literal pool. Return NULL on overflow. */
4683 static struct constant_pool *
4684 s390_mainpool_start (void)
4686 struct constant_pool *pool;
4689 pool = s390_alloc_pool ();
4691 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Remember the UNSPECV_MAIN_POOL placeholder emitted by the prologue;
   there must be exactly one.  */
4693 if (GET_CODE (insn) == INSN
4694 && GET_CODE (PATTERN (insn)) == SET
4695 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
4696 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
4698 if (pool->pool_insn)
4700 pool->pool_insn = insn;
/* Collect every literal pool constant referenced by real insns.  */
4703 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4705 rtx pool_ref = NULL_RTX;
4706 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4709 rtx constant = get_pool_constant (pool_ref);
4710 enum machine_mode mode = get_pool_mode (pool_ref);
4711 s390_add_constant (pool, constant, mode);
4716 if (!pool->pool_insn)
/* 4096 is the S/390 12-bit displacement limit; a pool at least that
   big cannot be addressed from a single base, so signal overflow.  */
4719 if (pool->size >= 4096)
4721 /* We're going to chunkify the pool, so remove the main
4722 pool placeholder insn. */
4723 remove_insn (pool->pool_insn);
4725 s390_free_pool (pool);
4732 /* POOL holds the main literal pool as collected by s390_mainpool_start.
4733 Modify the current function to output the pool constants as well as
4734 the pool register setup instruction. */
4737 s390_mainpool_finish (struct constant_pool *pool)
/* The main-pool placeholder's SET destination is the pool base reg.  */
4739 rtx base_reg = SET_DEST (PATTERN (pool->pool_insn));
4742 /* If the pool is empty, we're done. */
4743 if (pool->size == 0)
4745 remove_insn (pool->pool_insn);
4746 s390_free_pool (pool);
4750 /* We need correct insn addresses. */
4751 shorten_branches (get_insns ());
4753 /* On zSeries, we use a LARL to load the pool register. The pool is
4754 located in the .rodata section, so we emit it after the function. */
4755 if (TARGET_CPU_ZARCH)
4757 insn = gen_main_base_64 (base_reg, pool->label);
4758 insn = emit_insn_after (insn, pool->pool_insn);
4759 INSN_ADDRESSES_NEW (insn, -1);
4760 remove_insn (pool->pool_insn);
/* Re-anchor the pool placeholder at the end of the function; the base
   label is emitted by the base-load insn, so pass remote_label == 0
   ... NOTE(review): here s390_dump_pool's second argument is 0, i.e.
   the pool label IS emitted by the dump.  */
4762 insn = get_last_insn ();
4763 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
4764 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4766 s390_dump_pool (pool, 0);
4769 /* On S/390, if the total size of the function's code plus literal pool
4770 does not exceed 4096 bytes, we use BASR to set up a function base
4771 pointer, and emit the literal pool at the end of the function. */
4772 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
4773 + pool->size + 8 /* alignment slop */ < 4096)
4775 insn = gen_main_base_31_small (base_reg, pool->label);
4776 insn = emit_insn_after (insn, pool->pool_insn);
4777 INSN_ADDRESSES_NEW (insn, -1);
4778 remove_insn (pool->pool_insn);
/* Here the base label is emitted right after the base-load insn, so
   the dump is told not to emit it again (remote_label == 1).  */
4780 insn = emit_label_after (pool->label, insn);
4781 INSN_ADDRESSES_NEW (insn, -1);
4783 insn = get_last_insn ();
4784 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
4785 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4787 s390_dump_pool (pool, 1);
4790 /* Otherwise, we emit an inline literal pool and use BASR to branch
4791 over it, setting up the pool register at the same time. */
4794 rtx pool_end = gen_label_rtx ();
4796 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
4797 insn = emit_insn_after (insn, pool->pool_insn);
4798 INSN_ADDRESSES_NEW (insn, -1);
4799 remove_insn (pool->pool_insn);
4801 insn = emit_label_after (pool->label, insn);
4802 INSN_ADDRESSES_NEW (insn, -1);
4804 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
4805 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4807 insn = emit_label_after (pool_end, pool->pool_insn);
4808 INSN_ADDRESSES_NEW (insn, -1);
4810 s390_dump_pool (pool, 1);
4814 /* Replace all literal pool references. */
4816 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4819 replace_ltrel_base (&PATTERN (insn));
4821 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4823 rtx addr, pool_ref = NULL_RTX;
4824 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4827 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
4828 get_pool_mode (pool_ref));
4829 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
/* Force re-recognition after the pattern was rewritten.  */
4830 INSN_CODE (insn) = -1;
4836 /* Free the pool. */
4837 s390_free_pool (pool);
4840 /* POOL holds the main literal pool as collected by s390_mainpool_start.
4841 We have decided we cannot use this pool, so revert all changes
4842 to the current function that were done by s390_mainpool_start. */
4844 s390_mainpool_cancel (struct constant_pool *pool)
4846 /* We didn't actually change the instruction stream, so simply
4847 free the pool memory. */
4848 s390_free_pool (pool);
4852 /* Chunkify the literal pool. */
/* Target window for one pool chunk: start looking for a place to end a
   chunk at MIN bytes, and force one before MAX (must stay below the 4K
   displacement limit with slack for reload insns).  */
4854 #define S390_POOL_CHUNK_MIN 0xc00
4855 #define S390_POOL_CHUNK_MAX 0xe00
4857 static struct constant_pool *
4858 s390_chunkify_start (void)
4860 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
4863 rtx pending_ltrel = NULL_RTX;
/* Pool base reload pattern differs between 31-bit and zARCH.  */
4866 rtx (*gen_reload_base) (rtx, rtx) =
4867 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
4870 /* We need correct insn addresses. */
4872 shorten_branches (get_insns ());
4874 /* Scan all insns and move literals to pool chunks. */
4876 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4878 /* Check for pending LTREL_BASE. */
4881 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
4884 if (ltrel_base == pending_ltrel)
4885 pending_ltrel = NULL_RTX;
4891 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4893 rtx pool_ref = NULL_RTX;
4894 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4897 rtx constant = get_pool_constant (pool_ref);
4898 enum machine_mode mode = get_pool_mode (pool_ref);
/* Lazily open a new chunk at the first pool reference after the
   previous chunk was closed.  */
4901 curr_pool = s390_start_pool (&pool_list, insn);
4903 s390_add_constant (curr_pool, constant, mode);
4904 s390_add_pool_insn (curr_pool, insn);
4906 /* Don't split the pool chunk between a LTREL_OFFSET load
4907 and the corresponding LTREL_BASE. */
4908 if (GET_CODE (constant) == CONST
4909 && GET_CODE (XEXP (constant, 0)) == UNSPEC
4910 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
4914 pending_ltrel = pool_ref;
4919 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4922 s390_add_pool_insn (curr_pool, insn);
4923 /* An LTREL_BASE must follow within the same basic block. */
/* Skip insns without a valid computed address.  */
4929 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4930 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
/* On zARCH, chunk size is bounded only by pool size (LARL reaches
   the pool regardless of distance).  */
4933 if (TARGET_CPU_ZARCH)
4935 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4938 s390_end_pool (curr_pool, NULL_RTX);
/* On 31-bit, also bound the code distance covered by the chunk.  */
4943 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4944 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4947 /* We will later have to insert base register reload insns.
4948 Those will have an effect on code size, which we need to
4949 consider here. This calculation makes rather pessimistic
4950 worst-case assumptions. */
4951 if (GET_CODE (insn) == CODE_LABEL)
4954 if (chunk_size < S390_POOL_CHUNK_MIN
4955 && curr_pool->size < S390_POOL_CHUNK_MIN)
4958 /* Pool chunks can only be inserted after BARRIERs ... */
4959 if (GET_CODE (insn) == BARRIER)
4961 s390_end_pool (curr_pool, insn)
4966 /* ... so if we don't find one in time, create one. */
4967 else if ((chunk_size > S390_POOL_CHUNK_MAX
4968 || curr_pool->size > S390_POOL_CHUNK_MAX))
4970 rtx label, jump, barrier;
4972 /* We can insert the barrier only after a 'real' insn. */
4973 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4975 if (get_attr_length (insn) == 0)
4978 /* Don't separate LTREL_BASE from the corresponding
4979 LTREL_OFFSET load. */
/* Synthesize jump-over-pool: jump L; barrier; (pool); L:  */
4983 label = gen_label_rtx ();
4984 jump = emit_jump_insn_after (gen_jump (label), insn);
4985 barrier = emit_barrier_after (jump);
4986 insn = emit_label_after (label, barrier);
4987 JUMP_LABEL (jump) = label;
4988 LABEL_NUSES (label) = 1;
4990 INSN_ADDRESSES_NEW (jump, -1);
4991 INSN_ADDRESSES_NEW (barrier, -1);
4992 INSN_ADDRESSES_NEW (insn, -1);
4994 s390_end_pool (curr_pool, barrier);
5002 s390_end_pool (curr_pool, NULL_RTX);
5007 /* Find all labels that are branched into
5008 from an insn belonging to a different chunk. */
5010 far_labels = BITMAP_XMALLOC ();
5012 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5014 /* Labels marked with LABEL_PRESERVE_P can be target
5015 of non-local jumps, so we have to mark them.
5016 The same holds for named labels.
5018 Don't do that, however, if it is the label before
5021 if (GET_CODE (insn) == CODE_LABEL
5022 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5024 rtx vec_insn = next_real_insn (insn);
5025 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5026 PATTERN (vec_insn) : NULL_RTX;
5028 || !(GET_CODE (vec_pat) == ADDR_VEC
5029 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5030 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5033 /* If we have a direct jump (conditional or unconditional)
5034 or a casesi jump, check all potential targets. */
5035 else if (GET_CODE (insn) == JUMP_INSN)
5037 rtx pat = PATTERN (insn);
5038 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5039 pat = XVECEXP (pat, 0, 0);
/* Direct jump: far if source and target live in different chunks.  */
5041 if (GET_CODE (pat) == SET)
5043 rtx label = JUMP_LABEL (insn);
5046 if (s390_find_pool (pool_list, label)
5047 != s390_find_pool (pool_list, insn))
5048 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5051 else if (GET_CODE (pat) == PARALLEL
5052 && XVECLEN (pat, 0) == 2
5053 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5054 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5055 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5057 /* Find the jump table used by this casesi jump. */
5058 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5059 rtx vec_insn = next_real_insn (vec_label);
5060 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5061 PATTERN (vec_insn) : NULL_RTX;
5063 && (GET_CODE (vec_pat) == ADDR_VEC
5064 || GET_CODE (vec_pat) == ADDR_DIFF_VEC)
5066 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
/* Check every entry of the dispatch table for far targets.  */
5068 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5070 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5072 if (s390_find_pool (pool_list, label)
5073 != s390_find_pool (pool_list, insn))
5074 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5081 /* Insert base register reload insns before every pool. */
5083 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5085 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5087 rtx insn = curr_pool->first_insn;
5088 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5091 /* Insert base register reload insns at every far label. */
5093 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5094 if (GET_CODE (insn) == CODE_LABEL
5095 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5097 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5100 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5102 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5107 BITMAP_XFREE (far_labels);
5110 /* Recompute insn addresses. */
5112 init_insn_lengths ();
5113 shorten_branches (get_insns ());
5118 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5119 After we have decided to use this list, finish implementing
5120 all changes to the current function as required. */
5123 s390_chunkify_finish (struct constant_pool *pool_list)
5125 struct constant_pool *curr_pool = NULL;
5129 /* Replace all literal pool references. */
5131 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5134 replace_ltrel_base (&PATTERN (insn));
/* Resolve each pool reference against the chunk covering this insn.  */
5136 curr_pool = s390_find_pool (pool_list, insn);
5140 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5142 rtx addr, pool_ref = NULL_RTX;
5143 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5146 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
5147 get_pool_mode (pool_ref));
5148 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
/* Force re-recognition of the rewritten pattern.  */
5149 INSN_CODE (insn) = -1;
5154 /* Dump out all literal pools. */
5156 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5157 s390_dump_pool (curr_pool, 0);
5159 /* Free pool list. */
5163 struct constant_pool *next = pool_list->next;
5164 s390_free_pool (pool_list);
5169 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5170 We have decided we cannot use this list, so revert all changes
5171 to the current function that were done by s390_chunkify_start. */
5174 s390_chunkify_cancel (struct constant_pool *pool_list)
5176 struct constant_pool *curr_pool = NULL;
5179 /* Remove all pool placeholder insns. */
5181 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5183 /* Did we insert an extra barrier? Remove it. */
5184 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5185 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5186 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Recognize exactly the jump-over-pool sequence synthesized by
   s390_chunkify_start: jump L; barrier; placeholder; L:  */
5188 if (jump && GET_CODE (jump) == JUMP_INSN
5189 && barrier && GET_CODE (barrier) == BARRIER
5190 && label && GET_CODE (label) == CODE_LABEL
5191 && GET_CODE (PATTERN (jump)) == SET
5192 && SET_DEST (PATTERN (jump)) == pc_rtx
5193 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5194 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5197 remove_insn (barrier);
5198 remove_insn (label);
5201 remove_insn (curr_pool->pool_insn);
5204 /* Remove all base register reload insns. */
5206 for (insn = get_insns (); insn; )
5208 rtx next_insn = NEXT_INSN (insn);
5210 if (GET_CODE (insn) == INSN
5211 && GET_CODE (PATTERN (insn)) == SET
5212 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5213 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5219 /* Free pool list. */
5223 struct constant_pool *next = pool_list->next;
5224 s390_free_pool (pool_list);
5230 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5233 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
/* Dispatch on the mode class: floats go through assemble_real,
   everything else through assemble_integer.  */
5237 switch (GET_MODE_CLASS (mode))
5240 if (GET_CODE (exp) != CONST_DOUBLE)
5243 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5244 assemble_real (r, mode, align);
5248 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5257 /* Rework the prolog/epilog to avoid saving/restoring
5258 registers unnecessarily. BASE_USED specifies whether
5259 the literal pool base register needs to be saved. */
5262 s390_optimize_prolog (bool base_used)
5264 rtx insn, new_insn, next_insn;
5266 /* Do a final recompute of the frame-related data. */
5268 s390_frame_info (base_used, cfun->machine->save_return_addr_p);
5269 regs_ever_live[BASE_REGNUM] = base_used;
5270 regs_ever_live[RETURN_REGNUM] = cfun->machine->save_return_addr_p;
5271 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5273 /* If all special registers are in fact used, there's nothing we
5274 can do, so no point in walking the insn list. */
5276 if (cfun->machine->first_save_gpr <= BASE_REGNUM
5277 && cfun->machine->last_save_gpr >= BASE_REGNUM
5278 && (TARGET_CPU_ZARCH
5279 || (cfun->machine->first_save_gpr <= RETURN_REGNUM
5280 && cfun->machine->last_save_gpr >= RETURN_REGNUM)))
5283 /* Search for prolog/epilog insns and replace them. */
5285 for (insn = get_insns (); insn; insn = next_insn)
5287 int first, last, off;
5288 rtx set, base, offset;
5290 next_insn = NEXT_INSN (insn);
5292 if (GET_CODE (insn) != INSN)
/* Case 1: multi-register save (STM) covering BASE_REGNUM — re-emit
   with the recomputed (possibly narrower) save range.  */
5295 if (GET_CODE (PATTERN (insn)) == PARALLEL
5296 && store_multiple_operation (PATTERN (insn), VOIDmode))
5298 set = XVECEXP (PATTERN (insn), 0, 0);
5299 first = REGNO (SET_SRC (set));
5300 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5301 offset = const0_rtx;
5302 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
/* off = slot address of r0 relative to BASE in this save area.  */
5303 off = INTVAL (offset) - first * UNITS_PER_WORD;
5305 if (GET_CODE (base) != REG || off < 0)
5307 if (first > BASE_REGNUM || last < BASE_REGNUM)
5310 if (cfun->machine->first_save_gpr != -1)
5312 new_insn = save_gprs (base, off, cfun->machine->first_save_gpr,
5313 cfun->machine->last_save_gpr);
5314 new_insn = emit_insn_before (new_insn, insn);
5315 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 2: single store of the base register.  */
5322 if (GET_CODE (PATTERN (insn)) == SET
5323 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
5324 && REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5325 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5327 set = PATTERN (insn);
5328 offset = const0_rtx;
5329 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5330 off = INTVAL (offset) - BASE_REGNUM * UNITS_PER_WORD;
5332 if (GET_CODE (base) != REG || off < 0)
5335 if (cfun->machine->first_save_gpr != -1)
5337 new_insn = save_gprs (base, off, cfun->machine->first_save_gpr,
5338 cfun->machine->last_save_gpr);
5339 new_insn = emit_insn_before (new_insn, insn);
5340 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 3: multi-register restore (LM) covering BASE_REGNUM.  */
5347 if (GET_CODE (PATTERN (insn)) == PARALLEL
5348 && load_multiple_operation (PATTERN (insn), VOIDmode))
5350 set = XVECEXP (PATTERN (insn), 0, 0);
5351 first = REGNO (SET_DEST (set));
5352 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5353 offset = const0_rtx;
5354 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5355 off = INTVAL (offset) - first * UNITS_PER_WORD;
5357 if (GET_CODE (base) != REG || off < 0)
5359 if (first > BASE_REGNUM || last < BASE_REGNUM)
5362 if (cfun->machine->first_restore_gpr != -1)
5364 new_insn = restore_gprs (base, off, cfun->machine->first_restore_gpr,
5365 cfun->machine->last_restore_gpr);
5366 new_insn = emit_insn_before (new_insn, insn);
5367 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 4: single load of the base register.  */
5374 if (GET_CODE (PATTERN (insn)) == SET
5375 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5376 && REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5377 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5379 set = PATTERN (insn);
5380 offset = const0_rtx;
5381 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5382 off = INTVAL (offset) - BASE_REGNUM * UNITS_PER_WORD;
5384 if (GET_CODE (base) != REG || off < 0)
5387 if (cfun->machine->first_restore_gpr != -1)
5389 new_insn = restore_gprs (base, off, cfun->machine->first_restore_gpr,
5390 cfun->machine->last_restore_gpr);
5391 new_insn = emit_insn_before (new_insn, insn);
5392 INSN_ADDRESSES_NEW (new_insn, -1);
5401 /* Perform machine-dependent processing. */
/* NOTE(review): function signature is not shown here; presumably the
   TARGET_MACHINE_DEPENDENT_REORG hook (s390_reorg) — confirm.  */
5406 bool base_used = false;
5407 bool pool_overflow = false;
5409 /* Make sure all splits have been performed; splits after
5410 machine_dependent_reorg might confuse insn length counts. */
5411 split_all_insns_noflow ();
5414 /* Install the main literal pool and the associated base
5415 register load insns.
5417 In addition, there are two problematic situations we need
5420 - the literal pool might be > 4096 bytes in size, so that
5421 some of its elements cannot be directly accessed
5423 - a branch target might be > 64K away from the branch, so that
5424 it is not possible to use a PC-relative instruction.
5426 To fix those, we split the single literal pool into multiple
5427 pool chunks, reloading the pool base register at various
5428 points throughout the function to ensure it always points to
5429 the pool chunk the following code expects, and / or replace
5430 PC-relative branches by absolute branches.
5432 However, the two problems are interdependent: splitting the
5433 literal pool can move a branch further away from its target,
5434 causing the 64K limit to overflow, and on the other hand,
5435 replacing a PC-relative branch by an absolute branch means
5436 we need to put the branch target address into the literal
5437 pool, possibly causing it to overflow.
5439 So, we loop trying to fix up both problems until we manage
5440 to satisfy both conditions at the same time. Note that the
5441 loop is guaranteed to terminate as every pass of the loop
5442 strictly decreases the total number of PC-relative branches
5443 in the function. (This is not completely true as there
5444 might be branch-over-pool insns introduced by chunkify_start.
5445 Those never need to be split however.) */
5449 struct constant_pool *pool = NULL;
5451 /* Collect the literal pool. */
/* s390_mainpool_start returns NULL on overflow (pool >= 4096 bytes).  */
5454 pool = s390_mainpool_start ();
5456 pool_overflow = true;
5459 /* If literal pool overflowed, start to chunkify it. */
5461 pool = s390_chunkify_start ();
5463 /* Split out-of-range branches. If this has created new
5464 literal pool entries, cancel current chunk list and
5465 recompute it. zSeries machines have large branch
5466 instructions, so we never need to split a branch. */
5467 if (!TARGET_CPU_ZARCH && s390_split_branches ())
5470 s390_chunkify_cancel (pool);
5472 s390_mainpool_cancel (pool);
/* Splitting added new pool entries — retry the whole loop.  */
5477 /* If we made it up to here, both conditions are satisfied.
5478 Finish up literal pool related changes. */
5479 if ((pool_overflow || pool->size > 0)
5480 && REGNO (cfun->machine->base_reg) == BASE_REGNUM)
5484 s390_chunkify_finish (pool);
5486 s390_mainpool_finish (pool);
/* Finally shrink the prolog/epilog save ranges now that register
   usage is settled.  */
5491 s390_optimize_prolog (base_used);
5495 /* Return an RTL expression representing the value of the return address
5496 for the frame COUNT steps up from the current frame. FRAME is the
5497 frame pointer of that frame. */
5500 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
5504 /* Without backchain, we fail for all but the current frame. */
5506 if (!TARGET_BACKCHAIN && count > 0)
5509 /* For the current frame, we need to make sure the initial
5510 value of RETURN_REGNUM is actually saved. */
/* Mark the return address as live-saved so the prologue keeps it.  */
5514 cfun->machine->save_return_addr_p = true;
5515 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
/* Outer frames: the return address lives at its standard slot in the
   register save area of FRAME.  */
5518 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5519 addr = memory_address (Pmode, addr);
5520 return gen_rtx_MEM (Pmode, addr);
5523 /* Find first call clobbered register unused in a function.
5524 This could be used as base register in a leaf function
5525 or for holding the return address before epilogue. */
5528 find_unused_clobbered_reg (void)
/* GPRs 0-5 are the call-clobbered registers on S/390.  */
5531 for (i = 0; i < 6; i++)
5532 if (!regs_ever_live[i])
5537 /* Fill cfun->machine with info about frame of current function.
5538 BASE_USED and RETURN_ADDR_USED specify whether we assume the
5539 base and return address register will need to be saved. */
5542 s390_frame_info (int base_used, int return_addr_used)
5546 HOST_WIDE_INT fsize = get_frame_size ();
5548 if (!TARGET_64BIT && fsize > 0x7fff0000)
5549 fatal_error ("Total size of local variables exceeds architecture limit.");
5551 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5552 cfun->machine->save_fprs_p = 0;
/* Hard regs 24-31 are FPRs 8-15; if any is live, reserve 8 * 8 bytes
   of save area below.  */
5554 for (i = 24; i < 32; i++)
5555 if (regs_ever_live[i] && !global_regs[i])
5557 cfun->machine->save_fprs_p = 1;
5561 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
5563 /* Does function need to setup frame and save area. */
5565 if (!current_function_is_leaf
5566 || TARGET_TPF_PROFILING
5567 || cfun->machine->frame_size > 0
5568 || current_function_calls_alloca
5569 || current_function_stdarg)
5570 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5572 /* Find first and last gpr to be saved. We trust regs_ever_live
5573 data, except that we don't save and restore global registers.
5575 Also, all registers with special meaning to the compiler need
5576 to be handled extra. */
5578 for (i = 0; i < 16; i++)
5579 live_regs[i] = regs_ever_live[i] && !global_regs[i];
5582 live_regs[PIC_OFFSET_TABLE_REGNUM] =
5583 regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
/* Base / return-address / stack-pointer liveness is dictated by the
   caller's assumptions, not by regs_ever_live.  */
5585 live_regs[BASE_REGNUM] = base_used;
5586 live_regs[RETURN_REGNUM] = return_addr_used;
5587 live_regs[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
/* Scan call-saved GPRs 6-15 for the first (i) and last (j) live one.  */
5589 for (i = 6; i < 16; i++)
5592 for (j = 15; j > i; j--)
5598 /* Nothing to save/restore. */
5599 cfun->machine->first_save_gpr = -1;
5600 cfun->machine->first_restore_gpr = -1;
5601 cfun->machine->last_save_gpr = -1;
5602 cfun->machine->last_restore_gpr = -1;
5606 /* Save / Restore from gpr i to j. */
5607 cfun->machine->first_save_gpr = i;
5608 cfun->machine->first_restore_gpr = i;
5609 cfun->machine->last_save_gpr = j;
5610 cfun->machine->last_restore_gpr = j;
5613 /* Varargs functions need to save gprs 2 to 6. */
5614 if (current_function_stdarg)
5616 if (cfun->machine->first_save_gpr == -1
5617 || cfun->machine->first_save_gpr > 2)
5618 cfun->machine->first_save_gpr = 2;
5620 if (cfun->machine->last_save_gpr == -1
5621 || cfun->machine->last_save_gpr < 6)
5622 cfun->machine->last_save_gpr = 6;
5626 /* Return offset between argument pointer and frame pointer
5627 initially after prologue. */
5630 s390_arg_frame_offset (void)
5632 /* See the comment in s390_emit_prologue about the assumptions we make
5633 whether or not the base and return address register need to be saved. */
5634 int return_addr_used = !current_function_is_leaf
5635 || TARGET_TPF_PROFILING
5636 || regs_ever_live[RETURN_REGNUM]
5637 || cfun->machine->save_return_addr_p;
     /* Recompute frame info with the base register assumed saved; on
        pre-z/Architecture CPUs the return address is always assumed saved.  */
5639 s390_frame_info (1, !TARGET_CPU_ZARCH || return_addr_used);
5640 return cfun->machine->frame_size + STACK_POINTER_OFFSET;
5643 /* Return offset between return address pointer (location of r14
5644 on the stack) and frame pointer initially after prologue. */
5647 s390_return_address_offset (void)
     /* Force frame layout with both base and return register saved so the
        r14 slot is guaranteed to exist in the register save area.  */
5649 s390_frame_info (1, 1);
5651 return cfun->machine->frame_size + RETURN_REGNUM * UNITS_PER_WORD;
5654 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5655 to register BASE. Return generated insn. */
5658 save_fpr (rtx base, int offset, int regnum)
5661 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
     /* Tag with the save-area alias set so it cannot conflict with
        ordinary memory references.  */
5662 set_mem_alias_set (addr, s390_sr_alias_set)
5664 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5667 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5668 to register BASE. Return generated insn. */
5671 restore_fpr (rtx base, int offset, int regnum)
5674 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
     /* Same alias set as the corresponding save in save_fpr.  */
5675 set_mem_alias_set (addr, s390_sr_alias_set);
5677 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5680 /* Generate insn to save registers FIRST to LAST into
5681 the register save area located at offset OFFSET
5682 relative to register BASE. */
5685 save_gprs (rtx base, int offset, int first, int last)
5687 rtx addr, insn, note;
5690 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5691 addr = gen_rtx_MEM (Pmode, addr);
5692 set_mem_alias_set (addr, s390_sr_alias_set);
5694 /* Special-case single register. */
     /* A single register save is a plain word move (movdi in 64-bit
        mode, movsi otherwise) and is always frame-related.  */
5698 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
5700 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
5702 RTX_FRAME_RELATED_P (insn) = 1;
     /* Otherwise use a store-multiple (STM/STMG) pattern.  */
5707 insn = gen_store_multiple (addr,
5708 gen_rtx_REG (Pmode, first),
5709 GEN_INT (last - first + 1));
5712 /* We need to set the FRAME_RELATED flag on all SETs
5713 inside the store-multiple pattern.
5715 However, we must not emit DWARF records for registers 2..5
5716 if they are stored for use by variable arguments ...
5718 ??? Unfortunately, it is not enough to simply not set the
5719 FRAME_RELATED flags for those SETs, because the first SET
5720 of the PARALLEL is always treated as if it had the flag
5721 set, even if it does not. Therefore we emit a new pattern
5722 without those registers as REG_FRAME_RELATED_EXPR note. */
     /* Case 1: no varargs registers involved — mark every SET of the
        PARALLEL frame-related.  */
5726 rtx pat = PATTERN (insn);
5728 for (i = 0; i < XVECLEN (pat, 0); i++)
5729 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5730 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5732 RTX_FRAME_RELATED_P (insn) = 1;
     /* Case 2: range includes varargs regs — build a substitute
        store-multiple starting at GPR 6 and attach it as the DWARF
        note so regs 2..5 produce no unwind records.  */
5736 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5737 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5738 gen_rtx_REG (Pmode, 6),
5739 GEN_INT (last - 6 + 1));
5740 note = PATTERN (note);
5743 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5744 note, REG_NOTES (insn));
5746 for (i = 0; i < XVECLEN (note, 0); i++)
5747 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5748 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
5750 RTX_FRAME_RELATED_P (insn) = 1;
5756 /* Generate insn to restore registers FIRST to LAST from
5757 the register save area located at offset OFFSET
5758 relative to register BASE. */
5761 restore_gprs (rtx base, int offset, int first, int last)
5765 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5766 addr = gen_rtx_MEM (Pmode, addr);
5767 set_mem_alias_set (addr, s390_sr_alias_set);
5769 /* Special-case single register. */
5773 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5775 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
     /* Multiple registers: emit a load-multiple (LM/LMG).
        Restores carry no FRAME_RELATED annotations.  */
5780 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5782 GEN_INT (last - first + 1));
5786 /* Return insn sequence to load the GOT register. */
5788 static GTY(()) rtx got_symbol;
5790 s390_load_got (void)
     /* Lazily create the _GLOBAL_OFFSET_TABLE_ symbol; it resolves
        locally within the current module.  */
5796 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
5797 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
5802 if (TARGET_CPU_ZARCH)
     /* z/Architecture: a single LARL-style move loads the GOT address.  */
5804 emit_move_insn (pic_offset_table_rtx, got_symbol);
     /* Pre-z/Architecture: load the literal-pool-relative GOT offset
        from the constant pool, then add the base register to it.  */
5810 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
5811 UNSPEC_LTREL_OFFSET);
5812 offset = gen_rtx_CONST (Pmode, offset);
5813 offset = force_const_mem (Pmode, offset);
5815 emit_move_insn (pic_offset_table_rtx, offset);
5817 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
5819 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
5821 emit_move_insn (pic_offset_table_rtx, offset);
5824 insns = get_insns ();
5829 /* Expand the prologue into a bunch of separate insns. */
5832 s390_emit_prologue (void)
5838 /* At this point, we decide whether we'll need to save/restore the
5839 return address register. This decision is final on zSeries machines;
5840 on S/390 it can still be overridden in s390_split_branches. */
5842 if (!current_function_is_leaf
5843 || TARGET_TPF_PROFILING
5844 || regs_ever_live[RETURN_REGNUM])
5845 cfun->machine->save_return_addr_p = 1;
5847 /* Decide which register to use as literal pool base. In small leaf
5848 functions, try to use an unused call-clobbered register as base
5849 register to avoid save/restore overhead. */
5851 if (current_function_is_leaf && !regs_ever_live[5])
5852 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
5854 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
5856 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
5858 /* Compute frame info. Note that at this point, we assume the base
5859 register and -on S/390- the return register always need to be saved.
5860 This is done because the usage of these registers might change even
5861 after the prolog was emitted. If it turns out later that we really
5862 don't need them, the prolog/epilog code is modified again. */
5864 s390_frame_info (1, !TARGET_CPU_ZARCH || cfun->machine->save_return_addr_p);
5866 /* We need to update regs_ever_live to avoid data-flow problems. */
5868 regs_ever_live[BASE_REGNUM] = 1;
5869 regs_ever_live[RETURN_REGNUM] = !TARGET_CPU_ZARCH
5870 || cfun->machine->save_return_addr_p;
5871 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5873 /* Annotate all constant pool references to let the scheduler know
5874 they implicitly use the base register. */
5876 push_topmost_sequence ();
5878 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5880 annotate_constant_pool_refs (&PATTERN (insn));
5882 pop_topmost_sequence ();
5884 /* Choose best register to use for temp use within prologue.
5885 See below for why TPF must use the register 1. */
5887 if (!current_function_is_leaf
5888 && !TARGET_TPF_PROFILING)
5889 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5891 temp_reg = gen_rtx_REG (Pmode, 1);
5893 /* Save call saved gprs. */
5895 insn = save_gprs (stack_pointer_rtx, 0,
5896 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
5899 /* Dummy insn to mark literal pool slot. */
5901 emit_insn (gen_main_pool (cfun->machine->base_reg));
5903 /* Save fprs for variable args. */
     /* Hard regs 16+ are FPRs; 64-bit ABI passes args in 4 FPRs, 31-bit in 2.  */
5905 if (current_function_stdarg)
5906 for (i = 16; i < (TARGET_64BIT ? 20 : 18); i++)
5907 save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);
5909 /* Save fprs 4 and 6 if used (31 bit ABI). */
5912 for (i = 18; i < 20; i++)
5913 if (regs_ever_live[i] && !global_regs[i])
5915 insn = save_fpr (stack_pointer_rtx, 16*UNITS_PER_WORD + 8*(i-16), i);
5916 RTX_FRAME_RELATED_P (insn) = 1;
5919 /* Decrement stack pointer. */
5921 if (cfun->machine->frame_size > 0)
5923 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5925 /* Save incoming stack pointer into temp reg. */
     /* Needed later to set the backchain word and/or address the FPR
        save area above the new stack pointer.  */
5927 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
5929 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
5932 /* Subtract frame size from stack pointer. */
5934 if (DISP_IN_RANGE (INTVAL (frame_off)))
5936 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5937 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5939 insn = emit_insn (insn);
     /* Offset too large for a displacement: materialize it, from the
        literal pool if it does not fit the 'K' immediate constraint.  */
5943 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
5944 frame_off = force_const_mem (Pmode, frame_off);
5946 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
5947 annotate_constant_pool_refs (&PATTERN (insn));
     /* Record the SP adjustment for DWARF unwinding regardless of how
        it was actually emitted.  */
5950 RTX_FRAME_RELATED_P (insn) = 1;
5952 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5953 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5954 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5955 GEN_INT (-cfun->machine->frame_size))),
5958 /* Set backchain. */
     /* The backchain word at the bottom of the new frame points to the
        caller's frame (old SP, saved in temp_reg above).  */
5960 if (TARGET_BACKCHAIN)
5962 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
5963 set_mem_alias_set (addr, s390_sr_alias_set);
5964 insn = emit_insn (gen_move_insn (addr, temp_reg));
5967 /* If we support asynchronous exceptions (e.g. for Java),
5968 we need to make sure the backchain pointer is set up
5969 before any possibly trapping memory access. */
5971 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
5973 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
5974 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
5978 /* Save fprs 8 - 15 (64 bit ABI). */
5980 if (cfun->machine->save_fprs_p)
     /* temp_reg holds the old SP; step it down to the 64-byte FPR area.  */
5982 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
5984 for (i = 24; i < 32; i++)
5985 if (regs_ever_live[i] && !global_regs[i])
5987 rtx addr = plus_constant (stack_pointer_rtx,
5988 cfun->machine->frame_size - 64 + (i-24)*8);
5990 insn = save_fpr (temp_reg, (i-24)*8, i);
5991 RTX_FRAME_RELATED_P (insn) = 1;
     /* DWARF note expresses the save relative to the new SP.  */
5993 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5994 gen_rtx_SET (VOIDmode,
5995 gen_rtx_MEM (DFmode, addr),
5996 gen_rtx_REG (DFmode, i)),
6001 /* Set frame pointer, if needed. */
6003 if (frame_pointer_needed)
6005 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6006 RTX_FRAME_RELATED_P (insn) = 1;
6009 /* Set up got pointer, if needed. */
6011 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6013 rtx insns = s390_load_got ();
6015 for (insn = insns; insn; insn = NEXT_INSN (insn))
6017 annotate_constant_pool_refs (&PATTERN (insn));
     /* REG_MAYBE_DEAD lets flow delete the GOT setup if it turns out
        to be unused.  */
6019 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6026 if (TARGET_TPF_PROFILING)
6028 /* Generate a BAS instruction to serve as a function
6029 entry intercept to facilitate the use of tracing
6030 algorithms located at the branch target. */
6031 emit_insn (gen_prologue_tpf ());
6033 /* Emit a blockage here so that all code
6034 lies between the profiling mechanisms. */
6035 emit_insn (gen_blockage ());
6039 /* Expand the epilogue into a bunch of separate insns.
     SIBCALL is true when the epilogue precedes a sibling call, in which
     case no return jump is emitted (handled past this excerpt).  */
6042 s390_emit_epilogue (bool sibcall)
6044 rtx frame_pointer, return_reg;
6045 int area_bottom, area_top, offset = 0;
6049 if (TARGET_TPF_PROFILING)
6052 /* Generate a BAS instruction to serve as a function
6053 entry intercept to facilitate the use of tracing
6054 algorithms located at the branch target. */
6056 /* Emit a blockage here so that all code
6057 lies between the profiling mechanisms. */
6058 emit_insn (gen_blockage ());
6060 emit_insn (gen_epilogue_tpf ());
6063 /* Check whether to use frame or stack pointer for restore. */
6065 frame_pointer = frame_pointer_needed ?
6066 hard_frame_pointer_rtx : stack_pointer_rtx;
6068 /* Compute which parts of the save area we need to access. */
     /* [area_bottom, area_top) are byte offsets relative to the incoming
        stack pointer; empty range means nothing to restore.  */
6070 if (cfun->machine->first_restore_gpr != -1)
6072 area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
6073 area_top = (cfun->machine->last_restore_gpr + 1) * UNITS_PER_WORD;
6077 area_bottom = INT_MAX;
     /* The 64-bit FPR save area sits 64 bytes below the save area.  */
6083 if (cfun->machine->save_fprs_p)
6085 if (area_bottom > -64)
     /* 31-bit ABI: fprs 4 and 6 (hard regs 18, 19) in the caller frame.  */
6093 for (i = 18; i < 20; i++)
6094 if (regs_ever_live[i] && !global_regs[i])
6096 if (area_bottom > 16*UNITS_PER_WORD + 8*(i-16))
6097 area_bottom = 16*UNITS_PER_WORD + 8*(i-16);
6098 if (area_top < 16*UNITS_PER_WORD + 8*(i-16) + 8)
6099 area_top = 16*UNITS_PER_WORD + 8*(i-16) + 8;
6103 /* Check whether we can access the register save area.
6104 If not, increment the frame pointer as required. */
6106 if (area_top <= area_bottom)
6108 /* Nothing to restore. */
6110 else if (DISP_IN_RANGE (cfun->machine->frame_size + area_bottom)
6111 && DISP_IN_RANGE (cfun->machine->frame_size + area_top-1))
6113 /* Area is in range. */
6114 offset = cfun->machine->frame_size;
     /* Out of displacement range: bump the frame pointer toward the
        save area so the remaining offsets fit.  */
6118 rtx insn, frame_off;
6120 offset = area_bottom < 0 ? -area_bottom : 0;
6121 frame_off = GEN_INT (cfun->machine->frame_size - offset);
6123 if (DISP_IN_RANGE (INTVAL (frame_off)))
6125 insn = gen_rtx_SET (VOIDmode, frame_pointer,
6126 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
6127 insn = emit_insn (insn);
6131 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6132 frame_off = force_const_mem (Pmode, frame_off);
6134 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
6135 annotate_constant_pool_refs (&PATTERN (insn));
6139 /* Restore call saved fprs. */
6143 if (cfun->machine->save_fprs_p)
6144 for (i = 24; i < 32; i++)
6145 if (regs_ever_live[i] && !global_regs[i])
6146 restore_fpr (frame_pointer,
6147 offset - 64 + (i-24) * 8, i);
6151 for (i = 18; i < 20; i++)
6152 if (regs_ever_live[i] && !global_regs[i])
6153 restore_fpr (frame_pointer,
6154 offset + 16*UNITS_PER_WORD + 8*(i-16), i);
6157 /* Return register. */
6159 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6161 /* Restore call saved gprs. */
6163 if (cfun->machine->first_restore_gpr != -1)
6168 /* Check for global register and save them
6169 to stack location from where they get restored. */
     /* Global registers must keep their current value through the
        load-multiple, so store them back into their slots first.  */
6171 for (i = cfun->machine->first_restore_gpr;
6172 i <= cfun->machine->last_restore_gpr;
6175 /* These registers are special and need to be
6176 restored in any case. */
6177 if (i == STACK_POINTER_REGNUM
6178 || i == RETURN_REGNUM
6180 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
6185 addr = plus_constant (frame_pointer,
6186 offset + i * UNITS_PER_WORD);
6187 addr = gen_rtx_MEM (Pmode, addr);
6188 set_mem_alias_set (addr, s390_sr_alias_set);
6189 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
6195 /* Fetch return address from stack before load multiple,
6196 this will do good for scheduling. */
6198 if (cfun->machine->save_return_addr_p
6199 || (cfun->machine->first_restore_gpr < BASE_REGNUM
6200 && cfun->machine->last_restore_gpr > RETURN_REGNUM))
     /* Use a free call-clobbered register so the early load does not
        clash with the load-multiple below.  */
6202 int return_regnum = find_unused_clobbered_reg();
6205 return_reg = gen_rtx_REG (Pmode, return_regnum);
6207 addr = plus_constant (frame_pointer,
6208 offset + RETURN_REGNUM * UNITS_PER_WORD);
6209 addr = gen_rtx_MEM (Pmode, addr);
6210 set_mem_alias_set (addr, s390_sr_alias_set);
6211 emit_move_insn (return_reg, addr);
6215 insn = restore_gprs (frame_pointer, offset,
6216 cfun->machine->first_restore_gpr,
6217 cfun->machine->last_restore_gpr);
6224 /* Return to caller. */
     /* PARALLEL of (return) + (use return_reg) keeps the return-address
        register live up to the jump.  */
6226 p = rtvec_alloc (2);
6228 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
6229 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
6230 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
6235 /* Return the size in bytes of a function argument of
6236 type TYPE and/or mode MODE. At least one of TYPE or
6237 MODE must be specified. */
6240 s390_function_arg_size (enum machine_mode mode, tree type)
     /* Prefer the type's size when type info is available.  */
6243 return int_size_in_bytes (type);
6245 /* No type info available for some library calls ... */
6246 if (mode != BLKmode)
6247 return GET_MODE_SIZE (mode);
6249 /* If we have neither type nor mode, abort */
6253 /* Return true if a function argument of type TYPE and mode MODE
6254 is to be passed in a floating-point register, if available. */
6257 s390_function_arg_float (enum machine_mode mode, tree type)
6259 int size = s390_function_arg_size (mode, type);
6263 /* Soft-float changes the ABI: no floating-point registers are used. */
6264 if (TARGET_SOFT_FLOAT)
6267 /* No type info available for some library calls ... */
6269 return mode == SFmode || mode == DFmode;
6271 /* The ABI says that record types with a single member are treated
6272 just like that member would be. */
     /* Peel nested single-field records; more than one FIELD_DECL means
        the record is not float-like (handling elided in this excerpt).  */
6273 while (TREE_CODE (type) == RECORD_TYPE)
6275 tree field, single = NULL_TREE;
6277 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6279 if (TREE_CODE (field) != FIELD_DECL)
6282 if (single == NULL_TREE)
6283 single = TREE_TYPE (field);
6288 if (single == NULL_TREE)
6294 return TREE_CODE (type) == REAL_TYPE;
6297 /* Return true if a function argument of type TYPE and mode MODE
6298 is to be passed in an integer register, or a pair of integer
6299 registers, if available. */
6302 s390_function_arg_integer (enum machine_mode mode, tree type)
6304 int size = s390_function_arg_size (mode, type);
6308 /* No type info available for some library calls ... */
6310 return GET_MODE_CLASS (mode) == MODE_INT
6311 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
6313 /* We accept small integral (and similar) types. */
6314 if (INTEGRAL_TYPE_P (type)
6315 || POINTER_TYPE_P (type)
6316 || TREE_CODE (type) == OFFSET_TYPE
6317 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
6320 /* We also accept structs of size 1, 2, 4, 8 that are not
6321 passed in floating-point registers. */
     /* exact_log2 >= 0 checks size is a power of two (1/2/4/8).  */
6322 if (AGGREGATE_TYPE_P (type)
6323 && exact_log2 (size) >= 0
6324 && !s390_function_arg_float (mode, type))
6330 /* Return 1 if a function argument of type TYPE and mode MODE
6331 is to be passed by reference. The ABI specifies that only
6332 structures of size 1, 2, 4, or 8 bytes are passed by value,
6333 all other structures (and complex numbers) are passed by
6337 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
6338 enum machine_mode mode, tree type,
6339 bool named ATTRIBUTE_UNUSED)
6341 int size = s390_function_arg_size (mode, type);
     /* Aggregates whose size is not a power of two go by reference.  */
6347 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
     /* Complex and vector values are always passed by reference.  */
6350 if (TREE_CODE (type) == COMPLEX_TYPE
6351 || TREE_CODE (type) == VECTOR_TYPE)
6358 /* Update the data in CUM to advance over an argument of mode MODE and
6359 data type TYPE. (TYPE is null for libcalls where that information
6360 may not be available.). The boolean NAMED specifies whether the
6361 argument is a named argument (as opposed to an unnamed argument
6362 matching an ellipsis). */
6365 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
6366 tree type, int named ATTRIBUTE_UNUSED)
     /* A float arg consumes one FPR (increment elided in this excerpt).  */
6368 if (s390_function_arg_float (mode, type))
     /* An integer arg consumes one GPR per word of its size.  */
6372 else if (s390_function_arg_integer (mode, type))
6374 int size = s390_function_arg_size (mode, type);
6375 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
6381 /* Define where to put the arguments to a function.
6382 Value is zero to push the argument on the stack,
6383 or a hard register in which to store the argument.
6385 MODE is the argument's machine mode.
6386 TYPE is the data type of the argument (as a tree).
6387 This is null for libcalls where that information may
6389 CUM is a variable of type CUMULATIVE_ARGS which gives info about
6390 the preceding args and about the function being called.
6391 NAMED is nonzero if this argument is a named parameter
6392 (otherwise it is an extra parameter matching an ellipsis).
6394 On S/390, we use general purpose registers 2 through 6 to
6395 pass integer, pointer, and certain structure arguments, and
6396 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
6397 to pass floating point arguments. All remaining arguments
6398 are pushed to the stack. */
6401 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
6402 int named ATTRIBUTE_UNUSED)
6404 if (s390_function_arg_float (mode, type))
     /* 64-bit ABI has 4 FP arg registers, 31-bit has 2; FPRs start at
        hard reg 16.  */
6406 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
6409 return gen_rtx_REG (mode, cum->fprs + 16);
6411 else if (s390_function_arg_integer (mode, type))
6413 int size = s390_function_arg_size (mode, type);
6414 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
     /* GPR args occupy r2..r6; spill to stack if they do not all fit.  */
6416 if (cum->gprs + n_gprs > 5)
6419 return gen_rtx_REG (mode, cum->gprs + 2);
6422 /* After the real arguments, expand_call calls us once again
6423 with a void_type_node type. Whatever we return here is
6424 passed as operand 2 to the call expanders.
6426 We don't need this feature ... */
6427 else if (type == void_type_node)
6433 /* Return true if return values of type TYPE should be returned
6434 in a memory buffer whose address is passed by the caller as
6435 hidden first argument. */
6438 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
6440 /* We accept small integral (and similar) types. */
     /* Scalars up to 8 bytes come back in registers.  */
6441 if (INTEGRAL_TYPE_P (type)
6442 || POINTER_TYPE_P (type)
6443 || TREE_CODE (type) == OFFSET_TYPE
6444 || TREE_CODE (type) == REAL_TYPE)
6445 return int_size_in_bytes (type) > 8;
6447 /* Aggregates and similar constructs are always returned
6449 if (AGGREGATE_TYPE_P (type)
6450 || TREE_CODE (type) == COMPLEX_TYPE
6451 || TREE_CODE (type) == VECTOR_TYPE)
6454 /* ??? We get called on all sorts of random stuff from
6455 aggregate_value_p. We can't abort, but it's not clear
6456 what's safe to return. Pretend it's a struct I guess. */
6460 /* Define where to return a (scalar) value of type TYPE.
6461 If TYPE is null, define where to return a (scalar)
6462 value of mode MODE from a libcall. */
6465 s390_function_value (tree type, enum machine_mode mode)
     /* Promote small integer return types per PROMOTE_MODE.  */
6469 int unsignedp = TYPE_UNSIGNED (type);
6470 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
6473 if (GET_MODE_CLASS (mode) != MODE_INT
6474 && GET_MODE_CLASS (mode) != MODE_FLOAT)
6476 if (GET_MODE_SIZE (mode) > 8)
     /* Floats return in f0 (hard reg 16), everything else in r2.  */
6479 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
6480 return gen_rtx_REG (mode, 16);
6482 return gen_rtx_REG (mode, 2);
6486 /* Create and return the va_list datatype.
6488 On S/390, va_list is an array type equivalent to
6490 typedef struct __va_list_tag
6494 void *__overflow_arg_area;
6495 void *__reg_save_area;
6498 where __gpr and __fpr hold the number of general purpose
6499 or floating point arguments used up to now, respectively,
6500 __overflow_arg_area points to the stack location of the
6501 next argument passed on the stack, and __reg_save_area
6502 always points to the start of the register area in the
6503 call frame of the current function. The function prologue
6504 saves all registers used for argument passing into this
6505 area if the function uses variable arguments. */
6508 s390_build_builtin_va_list (void)
6510 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
6512 record = lang_hooks.types.make_type (RECORD_TYPE);
6515 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
     /* Four fields, in the fixed order va_start/va_arg rely on:
        __gpr, __fpr, __overflow_arg_area, __reg_save_area.  */
6517 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
6518 long_integer_type_node);
6519 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
6520 long_integer_type_node);
6521 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
6523 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
6526 DECL_FIELD_CONTEXT (f_gpr) = record;
6527 DECL_FIELD_CONTEXT (f_fpr) = record;
6528 DECL_FIELD_CONTEXT (f_ovf) = record;
6529 DECL_FIELD_CONTEXT (f_sav) = record;
6531 TREE_CHAIN (record) = type_decl;
6532 TYPE_NAME (record) = type_decl;
6533 TYPE_FIELDS (record) = f_gpr;
6534 TREE_CHAIN (f_gpr) = f_fpr;
6535 TREE_CHAIN (f_fpr) = f_ovf;
6536 TREE_CHAIN (f_ovf) = f_sav;
6538 layout_type (record);
6540 /* The correct type is an array type of one element. */
6541 return build_array_type (record, build_index_type (size_zero_node));
6544 /* Implement va_start by filling the va_list structure VALIST.
6545 STDARG_P is always true, and ignored.
6546 NEXTARG points to the first anonymous stack argument.
6548 The following global variables are used to initialize
6549 the va_list structure:
6551 current_function_args_info:
6552 holds number of gprs and fprs used for named arguments.
6553 current_function_arg_offset_rtx:
6554 holds the offset of the first anonymous stack argument
6555 (relative to the virtual arg pointer). */
6558 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
6560 HOST_WIDE_INT n_gpr, n_fpr;
6562 tree f_gpr, f_fpr, f_ovf, f_sav;
6563 tree gpr, fpr, ovf, sav, t;
     /* Field order matches s390_build_builtin_va_list.  */
6565 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6566 f_fpr = TREE_CHAIN (f_gpr);
6567 f_ovf = TREE_CHAIN (f_fpr);
6568 f_sav = TREE_CHAIN (f_ovf);
6570 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6571 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6572 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6573 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6574 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6576 /* Count number of gp and fp argument registers used. */
6578 n_gpr = current_function_args_info.gprs;
6579 n_fpr = current_function_args_info.fprs;
6581 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
6582 TREE_SIDE_EFFECTS (t) = 1;
6583 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6585 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
6586 TREE_SIDE_EFFECTS (t) = 1;
6587 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6589 /* Find the overflow area. */
6590 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
     /* Negative offsets cannot occur for anonymous args; clamp at 0.  */
6592 off = INTVAL (current_function_arg_offset_rtx);
6593 off = off < 0 ? 0 : off;
6594 if (TARGET_DEBUG_ARG)
6595 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
6596 (int)n_gpr, (int)n_fpr, off);
6598 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
6600 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6601 TREE_SIDE_EFFECTS (t) = 1;
6602 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6604 /* Find the register save area. */
     /* The register save area begins STACK_POINTER_OFFSET bytes below
        the virtual incoming-args pointer.  */
6605 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
6606 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
6607 build_int_2 (-STACK_POINTER_OFFSET, -1));
6608 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
6609 TREE_SIDE_EFFECTS (t) = 1;
6610 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6613 /* Implement va_arg by updating the va_list structure
6614 VALIST as required to retrieve an argument of type
6615 TYPE, and returning that argument.
6617 Generates code equivalent to:
6619 if (integral value) {
6620 if (size <= 4 && args.gpr < 5 ||
6621 size > 4 && args.gpr < 4 )
6622 ret = args.reg_save_area[args.gpr+8]
6624 ret = *args.overflow_arg_area++;
6625 } else if (float value) {
6627 ret = args.reg_save_area[args.fpr+64]
6629 ret = *args.overflow_arg_area++;
6630 } else if (aggregate value) {
6632 ret = *args.reg_save_area[args.gpr]
6634 ret = **args.overflow_arg_area++;
6638 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
6639 tree *post_p ATTRIBUTE_UNUSED)
6641 tree f_gpr, f_fpr, f_ovf, f_sav;
6642 tree gpr, fpr, ovf, sav, reg, t, u;
6643 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
6644 tree lab_false, lab_over, addr;
     /* Field order matches s390_build_builtin_va_list.  */
6646 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
6647 f_fpr = TREE_CHAIN (f_gpr);
6648 f_ovf = TREE_CHAIN (f_fpr);
6649 f_sav = TREE_CHAIN (f_ovf);
6651 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
6652 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
6653 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
6654 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
6655 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
6657 size = int_size_in_bytes (type);
     /* Case 1: passed by reference — fetch a pointer from a GPR slot
        and dereference it once more at the end (indirect_p).  */
6659 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
6661 if (TARGET_DEBUG_ARG)
6663 fprintf (stderr, "va_arg: aggregate type");
6667 /* Aggregates are passed by reference. */
6671 sav_ofs = 2 * UNITS_PER_WORD;
6672 sav_scale = UNITS_PER_WORD;
6673 size = UNITS_PER_WORD;
     /* Case 2: floating-point argument in an FPR slot.  */
6676 else if (s390_function_arg_float (TYPE_MODE (type), type))
6678 if (TARGET_DEBUG_ARG)
6680 fprintf (stderr, "va_arg: float type");
6684 /* FP args go in FP registers, if present. */
6688 sav_ofs = 16 * UNITS_PER_WORD;
6690 /* TARGET_64BIT has up to 4 parameter in fprs */
6691 max_reg = TARGET_64BIT ? 3 : 1;
     /* Case 3: integer-like argument in one or more GPR slots.  */
6695 if (TARGET_DEBUG_ARG)
6697 fprintf (stderr, "va_arg: other type");
6701 /* Otherwise into GP registers. */
6704 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6705 sav_ofs = 2 * UNITS_PER_WORD;
     /* Small scalars are right-justified within their word slot.  */
6707 if (size < UNITS_PER_WORD)
6708 sav_ofs += UNITS_PER_WORD - size;
6710 sav_scale = UNITS_PER_WORD;
6717 /* Pull the value out of the saved registers ... */
6719 lab_false = create_artificial_label ();
6720 lab_over = create_artificial_label ();
6721 addr = create_tmp_var (ptr_type_node, "addr");
     /* if (reg > max_reg) goto lab_false;  -- argument is on the stack.  */
6723 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
6724 t = build2 (GT_EXPR, boolean_type_node, reg, t);
6725 u = build1 (GOTO_EXPR, void_type_node, lab_false);
6726 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
6727 gimplify_and_add (t, pre_p);
     /* addr = sav + sav_ofs + reg * sav_scale;  */
6729 t = build2 (PLUS_EXPR, ptr_type_node, sav,
6730 fold_convert (ptr_type_node, size_int (sav_ofs)));
6731 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
6732 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
6733 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
6735 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
6736 gimplify_and_add (t, pre_p);
6738 t = build1 (GOTO_EXPR, void_type_node, lab_over);
6739 gimplify_and_add (t, pre_p);
6741 t = build1 (LABEL_EXPR, void_type_node, lab_false);
6742 append_to_statement_list (t, pre_p);
6745 /* ... Otherwise out of the overflow area. */
     /* Right-justify small scalars within the overflow slot, too.  */
6748 if (size < UNITS_PER_WORD)
6749 t = build2 (PLUS_EXPR, ptr_type_node, t,
6750 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
6752 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
6754 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
6755 gimplify_and_add (u, pre_p);
     /* ovf += size;  -- advance to the next stack argument.  */
6757 t = build2 (PLUS_EXPR, ptr_type_node, t,
6758 fold_convert (ptr_type_node, size_int (size)));
6759 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
6760 gimplify_and_add (t, pre_p);
6762 t = build1 (LABEL_EXPR, void_type_node, lab_over);
6763 append_to_statement_list (t, pre_p);
6766 /* Increment register save count. */
6768 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6769 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
6770 gimplify_and_add (u, pre_p);
     /* For by-reference args, dereference the fetched pointer once
        more before the final indirection below.  */
6774 t = build_pointer_type (build_pointer_type (type));
6775 addr = fold_convert (t, addr);
6776 addr = build_fold_indirect_ref (addr);
6780 t = build_pointer_type (type);
6781 addr = fold_convert (t, addr);
6784 return build_fold_indirect_ref (addr);
6792 S390_BUILTIN_THREAD_POINTER,
6793 S390_BUILTIN_SET_THREAD_POINTER,
6798 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
6803 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
/* Register the machine-specific builtins: __builtin_thread_pointer
   (returns void *) and __builtin_set_thread_pointer (takes void *).  */
6809 s390_init_builtins (void)
6813 ftype = build_function_type (ptr_type_node, void_list_node);
6814 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
6815 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6818 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6819 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
6820 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
6824 /* Expand an expression EXP that calls a built-in function,
6825 with result going to TARGET if that's convenient
6826 (and in mode MODE if that's convenient).
6827 SUBTARGET may be used as the target for computing one of EXP's operands.
6828 IGNORE is nonzero if the value is to be ignored. */
6831 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6832 enum machine_mode mode ATTRIBUTE_UNUSED,
6833 int ignore ATTRIBUTE_UNUSED)
     /* Insn codes differ between the 64-bit and 31-bit tables.  */
6837 unsigned int const *code_for_builtin =
6838 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6840 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6841 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6842 tree arglist = TREE_OPERAND (exp, 1);
6843 enum insn_code icode;
6844 rtx op[MAX_ARGS], pat;
6848 if (fcode >= S390_BUILTIN_max)
6849 internal_error ("bad builtin fcode");
6850 icode = code_for_builtin[fcode];
6852 internal_error ("bad builtin fcode");
     /* Non-void builtins reserve operand 0 of the insn for the result.  */
6854 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
     /* Expand each argument and coerce it to the insn operand's
        predicate/mode, copying into a register when necessary.  */
6856 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6858 arglist = TREE_CHAIN (arglist), arity++)
6860 const struct insn_operand_data *insn_op;
6862 tree arg = TREE_VALUE (arglist);
6863 if (arg == error_mark_node)
6865 if (arity > MAX_ARGS)
6868 insn_op = &insn_data[icode].operand[arity + nonvoid];
6870 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6872 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6873 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
     /* Make sure TARGET is a register of the insn's result mode.  */
6878 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6880 || GET_MODE (target) != tmode
6881 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6882 target = gen_reg_rtx (tmode);
     /* Dispatch on arity/voidness to the matching generator call.  */
6888 pat = GEN_FCN (icode) (target);
6892 pat = GEN_FCN (icode) (target, op[0]);
6894 pat = GEN_FCN (icode) (op[0]);
6897 pat = GEN_FCN (icode) (target, op[0], op[1]);
6913 /* Output assembly code for the trampoline template to
6916 On S/390, we use gpr 1 internally in the trampoline code;
6917 gpr 0 is used to hold the static chain. */
6920 s390_trampoline_template (FILE *file)
/* 64-bit sequence (the TARGET_64BIT test selecting between the two
   sequences fell on lines lost in this extract): larl addresses the
   two .quad literal slots at label 0; the first holds the static
   chain, the second the target address.  */
6924 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6925 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6926 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6927 fprintf (file, "br\t%s\n", reg_names[1]);
6928 fprintf (file, "0:\t.quad\t0\n");
6929 fprintf (file, ".quad\t0\n");
/* 31-bit sequence: basr makes gpr 1 point just past itself; the two
   .long literal slots then sit at displacements 10 and 14.  */
6933 fprintf (file, "basr\t%s,0\n", reg_names[1]);
6934 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6935 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6936 fprintf (file, "br\t%s\n", reg_names[1]);
6937 fprintf (file, ".long\t0\n");
6938 fprintf (file, ".long\t0\n");
6942 /* Emit RTL insns to initialize the variable parts of a trampoline.
6943 FNADDR is an RTX for the address of the function's pure code.
6944 CXT is an RTX for the static chain value for the function. */
6947 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
/* Store the static chain into the first literal slot of the template:
   offset 20 (64-bit: after larl+lg+lg+br) or 12 (31-bit: after
   basr+l+l+br).  */
6949 emit_move_insn (gen_rtx_MEM (Pmode,
6950 memory_address (Pmode,
6951 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
/* Store the target address into the second slot, one word further.  */
6952 emit_move_insn (gen_rtx_MEM (Pmode,
6953 memory_address (Pmode,
6954 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6957 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6958 LOW and HIGH, independent of the host word size. */
6961 s390_gen_rtx_const_DI (int high, int low)
6963 #if HOST_BITS_PER_WIDE_INT >= 64
6965 val = (HOST_WIDE_INT)high;
/* NOTE(review): the `val <<= 32;` step combining HIGH and LOW (line
   6966) was lost in extraction; also verify that LOW is masked to 32
   bits before the OR, otherwise sign extension of a negative LOW would
   corrupt the high word.  */
6967 val |= (HOST_WIDE_INT)low;
6969 return GEN_INT (val);
/* Narrow-host fallback: build the DImode constant as a double-int.  */
6971 #if HOST_BITS_PER_WIDE_INT >= 32
6972 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
6979 /* Output assembler code to FILE to increment profiler label # LABELNO
6980 for profiling a function entry. */
6983 s390_function_profiler (FILE *file, int labelno)
6988 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6990 fprintf (file, "# function profiler \n");
/* op[0]: return-address register, op[1]: its save slot above the
   stack pointer, op[2]: scratch gpr 1, op[3]: the per-call-site
   counter label, op[4]: the _mcount symbol (wrapped in @PLT when
   compiling PIC).  */
6992 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6993 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6994 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6996 op[2] = gen_rtx_REG (Pmode, 1);
6997 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6998 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
7000 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
7003 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
7004 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit sequence: save the return address, load the counter label
   PC-relative, call _mcount, restore.  (The TARGET_64BIT / flag_pic
   guards selecting among the three sequences fell on lines lost in
   this extract.)  */
7009 output_asm_insn ("stg\t%0,%1", op);
7010 output_asm_insn ("larl\t%2,%3", op);
7011 output_asm_insn ("brasl\t%0,%4", op);
7012 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit non-PIC sequence: bras skips over an inline literal pool
   holding the absolute addresses of _mcount and the counter label.  */
7016 op[6] = gen_label_rtx ();
7018 output_asm_insn ("st\t%0,%1", op);
7019 output_asm_insn ("bras\t%2,%l6", op);
7020 output_asm_insn (".long\t%4", op);
7021 output_asm_insn (".long\t%3", op);
7022 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7023 output_asm_insn ("l\t%0,0(%2)", op);
7024 output_asm_insn ("l\t%2,4(%2)", op);
7025 output_asm_insn ("basr\t%0,%0", op);
7026 output_asm_insn ("l\t%0,%1", op);
/* 31-bit PIC sequence: the literal pool holds base-relative offsets
   (%4-%l5, %3-%l5) which are turned back into addresses at run time
   by adding the pool base in %2.  */
7030 op[5] = gen_label_rtx ();
7031 op[6] = gen_label_rtx ();
7033 output_asm_insn ("st\t%0,%1", op);
7034 output_asm_insn ("bras\t%2,%l6", op);
7035 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
7036 output_asm_insn (".long\t%4-%l5", op);
7037 output_asm_insn (".long\t%3-%l5", op);
7038 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7039 output_asm_insn ("lr\t%0,%2", op);
7040 output_asm_insn ("a\t%0,0(%2)", op);
7041 output_asm_insn ("a\t%2,4(%2)", op);
7042 output_asm_insn ("basr\t%0,%0", op);
7043 output_asm_insn ("l\t%0,%1", op);
7047 /* Select section for constant in constant pool. In 32-bit mode,
7048 constants go in the function section; in 64-bit mode in .rodata. */
7051 s390_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED,
7052 rtx x ATTRIBUTE_UNUSED,
7053 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
/* NOTE(review): the comment above says "64-bit mode" but the code
   actually keys on TARGET_CPU_ZARCH (z/Architecture, which also covers
   31-bit mode on z900+); on z/Arch larl can reach .rodata directly.  */
7055 if (TARGET_CPU_ZARCH)
7056 readonly_data_section ();
7058 function_section (current_function_decl);
7061 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7062 into its SYMBOL_REF_FLAGS. */
7065 s390_encode_section_info (tree decl, rtx rtl, int first)
/* Let the generic hook set the standard flags first.  */
7067 default_encode_section_info (decl, rtl, first);
7069 /* If a variable has a forced alignment to < 2 bytes, mark it with
7070 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
/* (LARL addresses are implicitly even, so odd-aligned symbols must not
   be reached through it; 16 is DECL_ALIGN's bit count for 2 bytes.)  */
7071 if (TREE_CODE (decl) == VAR_DECL
7072 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
7073 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
7076 /* Output thunk to FILE that implements a C++ virtual function call (with
7077 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7078 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7079 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7080 relative to the resulting this pointer. */
7083 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
7084 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
7090 /* Operand 0 is the target function. */
7091 op[0] = XEXP (DECL_RTL (function), 0);
7092 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
/* Non-local target under PIC: reach it through the PLT (64-bit) or the
   GOT (31-bit).  */
7095 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
7096 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
7097 op[0] = gen_rtx_CONST (Pmode, op[0]);
7100 /* Operand 1 is the 'this' pointer. */
/* With a hidden struct-return pointer, 'this' moves from %r2 to %r3.  */
7101 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
7102 op[1] = gen_rtx_REG (Pmode, 3);
7104 op[1] = gen_rtx_REG (Pmode, 2);
7106 /* Operand 2 is the delta. */
7107 op[2] = GEN_INT (delta);
7109 /* Operand 3 is the vcall_offset. */
7110 op[3] = GEN_INT (vcall_offset);
7112 /* Operand 4 is the temporary register. */
7113 op[4] = gen_rtx_REG (Pmode, 1);
7115 /* Operands 5 to 8 can be used as labels. */
7121 /* Operand 9 can be used for temporary register. */
7124 /* Generate code. */
/* (The TARGET_64BIT test splitting the two code generators below fell
   on lines lost in this extract.)  */
7127 /* Setup literal pool pointer if required. */
/* A pool is only needed when delta or vcall_offset fits neither a
   20-bit displacement ('DISP_IN_RANGE') nor a 16-bit immediate ('K').  */
7128 if ((!DISP_IN_RANGE (delta)
7129 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
7130 || (!DISP_IN_RANGE (vcall_offset)
7131 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
7133 op[5] = gen_label_rtx ();
7134 output_asm_insn ("larl\t%4,%5", op);
7137 /* Add DELTA to this pointer. */
7140 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
7141 output_asm_insn ("la\t%1,%2(%1)", op);
7142 else if (DISP_IN_RANGE (delta))
7143 output_asm_insn ("lay\t%1,%2(%1)", op);
7144 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
7145 output_asm_insn ("aghi\t%1,%2", op);
/* Otherwise fetch DELTA from the literal pool (slot %6).  */
7148 op[6] = gen_label_rtx ();
7149 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
7153 /* Perform vcall adjustment. */
/* this += *((*this) + vcall_offset), using %4 as scratch.  */
7156 if (DISP_IN_RANGE (vcall_offset))
7158 output_asm_insn ("lg\t%4,0(%1)", op);
7159 output_asm_insn ("ag\t%1,%3(%4)", op);
7161 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
7163 output_asm_insn ("lghi\t%4,%3", op);
7164 output_asm_insn ("ag\t%4,0(%1)", op);
7165 output_asm_insn ("ag\t%1,0(%4)", op);
/* Otherwise fetch VCALL_OFFSET from the literal pool (slot %7).  */
7169 op[7] = gen_label_rtx ();
7170 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
7171 output_asm_insn ("ag\t%4,0(%1)", op);
7172 output_asm_insn ("ag\t%1,0(%4)", op);
7176 /* Jump to target. */
7177 output_asm_insn ("jg\t%0", op);
7179 /* Output literal pool if required. */
7182 output_asm_insn (".align\t4", op);
7183 targetm.asm_out.internal_label (file, "L",
7184 CODE_LABEL_NUMBER (op[5]));
7188 targetm.asm_out.internal_label (file, "L",
7189 CODE_LABEL_NUMBER (op[6]));
7190 output_asm_insn (".long\t%2", op);
7194 targetm.asm_out.internal_label (file, "L",
7195 CODE_LABEL_NUMBER (op[7]));
7196 output_asm_insn (".long\t%3", op);
/* ---- 31-bit code generator ---- */
7201 /* Setup base pointer if required. */
/* Fixed: the third clause must range-check VCALL_OFFSET to match its
   CONST_OK_FOR_CONSTRAINT_P partner below (it previously re-checked
   DELTA, mirroring neither the 64-bit test above nor its own 'K'
   test).  */
7203 || (!DISP_IN_RANGE (delta)
7204 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
7205 || (!DISP_IN_RANGE (vcall_offset)
7206 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
7208 op[5] = gen_label_rtx ();
7209 output_asm_insn ("basr\t%4,0", op);
7210 targetm.asm_out.internal_label (file, "L",
7211 CODE_LABEL_NUMBER (op[5]));
7214 /* Add DELTA to this pointer. */
7217 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
7218 output_asm_insn ("la\t%1,%2(%1)", op);
7219 else if (DISP_IN_RANGE (delta))
7220 output_asm_insn ("lay\t%1,%2(%1)", op);
7221 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
7222 output_asm_insn ("ahi\t%1,%2", op);
7225 op[6] = gen_label_rtx ();
7226 output_asm_insn ("a\t%1,%6-%5(%4)", op);
7230 /* Perform vcall adjustment. */
7233 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
/* Fixed: use 32-bit "l" rather than the 64-bit "lg" here and below;
   "lg" is a z/Architecture instruction and loads 8 bytes, which is
   wrong for the 4-byte vtable pointer in 31-bit mode (same fix as
   later upstream GCC).  */
7235 output_asm_insn ("l\t%4,0(%1)", op);
7236 output_asm_insn ("a\t%1,%3(%4)", op);
7238 else if (DISP_IN_RANGE (vcall_offset))
7240 output_asm_insn ("l\t%4,0(%1)", op);
7241 output_asm_insn ("ay\t%1,%3(%4)", op);
7243 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
7245 output_asm_insn ("lhi\t%4,%3", op);
7246 output_asm_insn ("a\t%4,0(%1)", op);
7247 output_asm_insn ("a\t%1,0(%4)", op);
/* Otherwise fetch VCALL_OFFSET from the literal pool (slot %7).  */
7251 op[7] = gen_label_rtx ();
7252 output_asm_insn ("l\t%4,%7-%5(%4)", op);
7253 output_asm_insn ("a\t%4,0(%1)", op);
7254 output_asm_insn ("a\t%1,0(%4)", op);
7257 /* We had to clobber the base pointer register.
7258 Re-setup the base pointer (with a different base). */
7259 op[5] = gen_label_rtx ();
7260 output_asm_insn ("basr\t%4,0", op);
7261 targetm.asm_out.internal_label (file, "L",
7262 CODE_LABEL_NUMBER (op[5]));
7265 /* Jump to target. */
/* Without larl/brcl, the target address is loaded from a literal slot
   (%8) and dispatched via br.  (The !flag_pic / !nonlocal guards for
   the first two alternatives fell on lines lost in this extract.)  */
7266 op[8] = gen_label_rtx ();
7269 output_asm_insn ("l\t%4,%8-%5(%4)", op);
7271 output_asm_insn ("a\t%4,%8-%5(%4)", op);
7272 /* We cannot call through .plt, since .plt requires %r12 loaded. */
7273 else if (flag_pic == 1)
/* Small GOT: add the base-relative GOT offset, then load the entry.  */
7275 output_asm_insn ("a\t%4,%8-%5(%4)", op);
7276 output_asm_insn ("l\t%4,%0(%4)", op);
7278 else if (flag_pic == 2)
/* Large GOT: literal pool holds _GLOBAL_OFFSET_TABLE_ and the symbol's
   GOT offset; combine them in %4/%9 and load the entry.  */
7280 op[9] = gen_rtx_REG (Pmode, 0);
7281 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
7282 output_asm_insn ("a\t%4,%8-%5(%4)", op);
7283 output_asm_insn ("ar\t%4,%9", op);
7284 output_asm_insn ("l\t%4,0(%4)", op);
7287 output_asm_insn ("br\t%4", op);
7289 /* Output literal pool. */
7290 output_asm_insn (".align\t4", op);
7292 if (nonlocal && flag_pic == 2)
7293 output_asm_insn (".long\t%0", op);
7296 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7297 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
7300 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
7302 output_asm_insn (".long\t%0", op);
7304 output_asm_insn (".long\t%0-%5", op);
7308 targetm.asm_out.internal_label (file, "L",
7309 CODE_LABEL_NUMBER (op[6]));
7310 output_asm_insn (".long\t%2", op);
7314 targetm.asm_out.internal_label (file, "L",
7315 CODE_LABEL_NUMBER (op[7]));
7316 output_asm_insn (".long\t%3", op);
/* Return true if MODE is a valid pointer mode: SImode always, and
   DImode additionally in 64-bit mode.  (The return type and braces of
   this definition fell on lines lost in this extract.)  */
7322 s390_valid_pointer_mode (enum machine_mode mode)
7324 return (mode == SImode || (TARGET_64BIT && mode == DImode));
7327 /* How to allocate a 'struct machine_function'. */
7329 static struct machine_function *
7330 s390_init_machine_status (void)
/* GC-allocated and zero-initialized; registered as init_machine_status
   so it is created lazily per function.  */
7332 return ggc_alloc_cleared (sizeof (struct machine_function));
7335 /* Checks whether the given ARGUMENT_LIST would use a caller
7336 saved register. This is used to decide whether sibling call
7337 optimization could be performed on the respective function
7341 s390_call_saved_register_used (tree argument_list)
7343 CUMULATIVE_ARGS cum;
7345 enum machine_mode mode;
/* Walk the argument list with a fresh CUMULATIVE_ARGS, mirroring how
   the arguments would actually be assigned to registers.  */
7350 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
7352 while (argument_list)
7354 parameter = TREE_VALUE (argument_list);
7355 argument_list = TREE_CHAIN (argument_list);
7360 /* For an undeclared variable passed as parameter we will get
7361 an ERROR_MARK node here. */
7362 if (TREE_CODE (parameter) == ERROR_MARK)
7365 if (! (type = TREE_TYPE (parameter)))
7368 if (! (mode = TYPE_MODE (TREE_TYPE (parameter))))
/* Pass-by-reference arguments are really a pointer in a register.  */
7371 if (pass_by_reference (&cum, mode, type, true))
7374 type = build_pointer_type (type);
7377 parm_rtx = s390_function_arg (&cum, mode, type, 0);
7379 s390_function_arg_advance (&cum, mode, type, 0);
/* If the argument landed in registers, check every hard register it
   occupies against call_used_regs.  */
7381 if (parm_rtx && REG_P (parm_rtx))
7384 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
7386 if (! call_used_regs[reg + REGNO (parm_rtx)])
7393 /* Return true if the given call expression can be
7394 turned into a sibling call.
7395 DECL holds the declaration of the function to be called whereas
7396 EXP is the call expression itself. */
7399 s390_function_ok_for_sibcall (tree decl, tree exp)
7401 /* The TPF epilogue uses register 1. */
7402 if (TARGET_TPF_PROFILING)
7405 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
7406 which would have to be restored before the sibcall. */
7407 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
7410 /* Register 6 on s390 is available as an argument register but unfortunately
7411 "caller saved". This makes functions needing this register for arguments
7412 not suitable for sibcalls. */
7413 if (TREE_OPERAND (exp, 1)
7414 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
7420 /* This function is used by the call expanders of the machine description.
7421 It emits the call insn itself together with the necessary operations
7422 to adjust the target address and returns the emitted insn.
7423 ADDR_LOCATION is the target address rtx
7424 TLS_CALL the location of the thread-local symbol
7425 RESULT_REG the register where the result of the call should be stored
7426 RETADDR_REG the register where the return address should be stored
7427 If this parameter is NULL_RTX the call is considered
7428 to be a sibling call. */
7431 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
7434 bool plt_call = false;
7440 /* Direct function calls need special treatment. */
7441 if (GET_CODE (addr_location) == SYMBOL_REF)
7443 /* When calling a global routine in PIC mode, we must
7444 replace the symbol itself with the PLT stub. */
7445 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
7447 addr_location = gen_rtx_UNSPEC (Pmode,
7448 gen_rtvec (1, addr_location),
7450 addr_location = gen_rtx_CONST (Pmode, addr_location);
/* (The `plt_call = true;` assignment presumably sits on a line lost in
   this extract -- it is read below at line 7498.)  */
7454 /* Unless we can use the bras(l) insn, force the
7455 routine address into a register. */
7456 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
7459 addr_location = legitimize_pic_address (addr_location, 0);
7461 addr_location = force_reg (Pmode, addr_location);
7465 /* If it is already an indirect call or the code above moved the
7466 SYMBOL_REF to somewhere else make sure the address can be found in
/* For sibcalls the target address must sit in SIBCALL_REGNUM so the
   epilogue does not clobber it.  */
7468 if (retaddr_reg == NULL_RTX
7469 && GET_CODE (addr_location) != SYMBOL_REF
7472 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
7473 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
/* Build the call pattern: (call (mem:QI addr) 0), optionally wrapped
   in a SET for the result, plus a clobber of the return-address
   register and, for TLS calls, a USE of the TLS symbol.  */
7476 addr_location = gen_rtx_MEM (QImode, addr_location);
7477 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
7479 if (result_reg != NULL_RTX)
7480 call = gen_rtx_SET (VOIDmode, result_reg, call);
7482 if (retaddr_reg != NULL_RTX)
7484 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
7486 if (tls_call != NULL_RTX)
7487 vec = gen_rtvec (3, call, clobber,
7488 gen_rtx_USE (VOIDmode, tls_call));
7490 vec = gen_rtvec (2, call, clobber);
7492 call = gen_rtx_PARALLEL (VOIDmode, vec);
7495 insn = emit_call_insn (call);
7497 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
7498 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
7500 /* s390_function_ok_for_sibcall should
7501 have denied sibcalls in this case. */
7502 if (retaddr_reg == NULL_RTX)
7505 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx)
7510 /* Implement CONDITIONAL_REGISTER_USAGE. */
7513 s390_conditional_register_usage (void)
/* Reserve the GOT pointer when compiling PIC (the `if (flag_pic)` guard
   presumably sits on a line lost in this extract -- confirm).  */
7519 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
7520 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
/* On z/Architecture the return-address register is freely allocatable
   (brasl/larl remove the need to reserve it as a literal pool base).  */
7522 if (TARGET_CPU_ZARCH)
7524 fixed_regs[RETURN_REGNUM] = 0;
7525 call_used_regs[RETURN_REGNUM] = 0;
/* FP registers 24..31 (f8-f15) are call-saved under the 64-bit ABI;
   under the 31-bit ABI only 18..19 (f4/f6) are.  (The TARGET_64BIT
   if/else selecting between the loops fell on lines lost here.)  */
7529 for (i = 24; i < 32; i++)
7530 call_used_regs[i] = call_really_used_regs[i] = 0;
7534 for (i = 18; i < 20; i++)
7535 call_used_regs[i] = call_really_used_regs[i] = 0;
7540 #include "gt-s390.h"