1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
59 static bool s390_assemble_integer (rtx, unsigned int, int);
60 static void s390_encode_section_info (tree, rtx, int);
61 static bool s390_cannot_force_const_mem (rtx);
62 static rtx s390_delegitimize_address (rtx);
63 static bool s390_return_in_memory (tree, tree);
64 static void s390_init_builtins (void);
65 static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
66 static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
68 static enum attr_type s390_safe_attr_type (rtx);
70 static int s390_adjust_priority (rtx, int);
71 static int s390_issue_rate (void);
72 static int s390_first_cycle_multipass_dfa_lookahead (void);
73 static bool s390_cannot_copy_insn_p (rtx);
74 static bool s390_rtx_costs (rtx, int, int, int *);
75 static int s390_address_cost (rtx);
76 static void s390_reorg (void);
77 static bool s390_valid_pointer_mode (enum machine_mode);
78 static tree s390_build_builtin_va_list (void);
79 static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
80 static bool s390_function_ok_for_sibcall (tree, tree);
81 static bool s390_call_saved_register_used (tree);
82 static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
84 static bool s390_fixed_condition_code_regs (unsigned int *, unsigned int *);
86 #undef TARGET_ASM_ALIGNED_HI_OP
87 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
88 #undef TARGET_ASM_ALIGNED_DI_OP
89 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
90 #undef TARGET_ASM_INTEGER
91 #define TARGET_ASM_INTEGER s390_assemble_integer
93 #undef TARGET_ASM_OPEN_PAREN
94 #define TARGET_ASM_OPEN_PAREN ""
96 #undef TARGET_ASM_CLOSE_PAREN
97 #define TARGET_ASM_CLOSE_PAREN ""
99 #undef TARGET_ENCODE_SECTION_INFO
100 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
103 #undef TARGET_HAVE_TLS
104 #define TARGET_HAVE_TLS true
106 #undef TARGET_CANNOT_FORCE_CONST_MEM
107 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
109 #undef TARGET_DELEGITIMIZE_ADDRESS
110 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
112 #undef TARGET_RETURN_IN_MEMORY
113 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
115 #undef TARGET_INIT_BUILTINS
116 #define TARGET_INIT_BUILTINS s390_init_builtins
117 #undef TARGET_EXPAND_BUILTIN
118 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
120 #undef TARGET_ASM_OUTPUT_MI_THUNK
121 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
122 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
123 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
125 #undef TARGET_SCHED_ADJUST_PRIORITY
126 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
127 #undef TARGET_SCHED_ISSUE_RATE
128 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
129 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
130 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
132 #undef TARGET_CANNOT_COPY_INSN_P
133 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
134 #undef TARGET_RTX_COSTS
135 #define TARGET_RTX_COSTS s390_rtx_costs
136 #undef TARGET_ADDRESS_COST
137 #define TARGET_ADDRESS_COST s390_address_cost
139 #undef TARGET_MACHINE_DEPENDENT_REORG
140 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
142 #undef TARGET_VALID_POINTER_MODE
143 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
145 #undef TARGET_BUILD_BUILTIN_VA_LIST
146 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
147 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
148 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
150 #undef TARGET_PROMOTE_FUNCTION_ARGS
151 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
152 #undef TARGET_PROMOTE_FUNCTION_RETURN
153 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
154 #undef TARGET_PASS_BY_REFERENCE
155 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
157 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
158 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
160 #undef TARGET_FIXED_CONDITION_CODE_REGS
161 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
163 struct gcc_target targetm = TARGET_INITIALIZER;
165 extern int reload_completed;
167 /* The alias set for prologue/epilogue register save/restore. */
168 static int s390_sr_alias_set = 0;
170 /* Save information from a "cmpxx" operation until the branch or scc is
172 rtx s390_compare_op0, s390_compare_op1;
174 /* Structure used to hold the components of a S/390 memory
175 address. A legitimate address on S/390 is of the general
177 base + index + displacement
178 where any of the components is optional.
180 base and index are registers of the class ADDR_REGS,
181 displacement is an unsigned 12-bit immediate constant. */
191 /* Which cpu are we tuning for. */
192 enum processor_type s390_tune;
193 enum processor_flags s390_tune_flags;
194 /* Which instruction set architecture to use. */
195 enum processor_type s390_arch;
196 enum processor_flags s390_arch_flags;
198 /* Strings to hold which cpu and instruction set architecture to use. */
199 const char *s390_tune_string; /* for -mtune=<xxx> */
200 const char *s390_arch_string; /* for -march=<xxx> */
202 /* String to specify backchain mode:
203 "" no-backchain, "1" backchain, "2" kernel-backchain. */
204 const char *s390_backchain_string = TARGET_DEFAULT_BACKCHAIN;
206 const char *s390_warn_framesize_string;
207 const char *s390_warn_dynamicstack_string;
208 const char *s390_stack_size_string;
209 const char *s390_stack_guard_string;
211 HOST_WIDE_INT s390_warn_framesize = 0;
212 bool s390_warn_dynamicstack_p = 0;
213 HOST_WIDE_INT s390_stack_size = 0;
214 HOST_WIDE_INT s390_stack_guard = 0;
216 /* The following structure is embedded in the machine
217 specific part of struct function. */
219 struct s390_frame_layout GTY (())
221 /* Offset within stack frame. */
222 HOST_WIDE_INT gprs_offset;
223 HOST_WIDE_INT f0_offset;
224 HOST_WIDE_INT f4_offset;
225 HOST_WIDE_INT f8_offset;
226 HOST_WIDE_INT backchain_offset;
228 /* Number of first and last gpr to be saved, restored. */
230 int first_restore_gpr;
232 int last_restore_gpr;
234 /* Bits standing for floating point registers. Set, if the
235 respective register has to be saved. Starting with reg 16 (f0)
236 at the rightmost bit.
237 Bit 15 - 8 7 6 5 4 3 2 1 0
238 fpr 15 - 8 7 5 3 1 6 4 2 0
239 reg 31 - 24 23 22 21 20 19 18 17 16 */
240 unsigned int fpr_bitmap;
242 /* Number of floating point registers f8-f15 which must be saved. */
245 /* Set if return address needs to be saved. */
246 bool save_return_addr_p;
248 /* Set if backchain needs to be saved. */
249 bool save_backchain_p;
251 /* Size of stack frame. */
252 HOST_WIDE_INT frame_size;
255 /* Define the structure for the machine field in struct function. */
257 struct machine_function GTY(())
259 struct s390_frame_layout frame_layout;
261 /* Literal pool base register. */
264 /* True if we may need to perform branch splitting. */
265 bool split_branches_pending_p;
267 /* Some local-dynamic TLS symbol name. */
268 const char *some_ld_name;
271 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
273 #define cfun_frame_layout (cfun->machine->frame_layout)
274 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
275 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
276 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
277 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
279 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
282 static int s390_match_ccmode_set (rtx, enum machine_mode);
283 static int s390_branch_condition_mask (rtx);
284 static const char *s390_branch_condition_mnemonic (rtx, int);
285 static int check_mode (rtx, enum machine_mode *);
286 static int s390_short_displacement (rtx);
287 static int s390_decompose_address (rtx, struct s390_address *);
288 static rtx get_thread_pointer (void);
289 static rtx legitimize_tls_address (rtx, rtx);
290 static void print_shift_count_operand (FILE *, rtx);
291 static const char *get_some_local_dynamic_name (void);
292 static int get_some_local_dynamic_name_1 (rtx *, void *);
293 static int reg_used_in_mem_p (int, rtx);
294 static int addr_generation_dependency_p (rtx, rtx);
295 static int s390_split_branches (void);
296 static void annotate_constant_pool_refs (rtx *x);
297 static void find_constant_pool_ref (rtx, rtx *);
298 static void replace_constant_pool_ref (rtx *, rtx, rtx);
299 static rtx find_ltrel_base (rtx);
300 static void replace_ltrel_base (rtx *);
301 static void s390_optimize_prologue (void);
302 static int find_unused_clobbered_reg (void);
303 static void s390_frame_area (int *, int *);
304 static void s390_register_info (int []);
305 static void s390_frame_info (void);
306 static void s390_init_frame_layout (void);
307 static void s390_update_frame_layout (void);
308 static rtx save_fpr (rtx, int, int);
309 static rtx restore_fpr (rtx, int, int);
310 static rtx save_gprs (rtx, int, int, int);
311 static rtx restore_gprs (rtx, int, int, int);
312 static int s390_function_arg_size (enum machine_mode, tree);
313 static bool s390_function_arg_float (enum machine_mode, tree);
314 static struct machine_function * s390_init_machine_status (void);
316 /* Check whether integer displacement is in range. */
317 #define DISP_IN_RANGE(d) \
318 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
319 : ((d) >= 0 && (d) <= 4095))
321 /* Return true if SET either doesn't set the CC register, or else
322 the source and destination have matching CC modes and that
323 CC mode is at least as constrained as REQ_MODE. */
326 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
328 enum machine_mode set_mode;
330 if (GET_CODE (set) != SET)
333 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
336 set_mode = GET_MODE (SET_DEST (set));
350 if (req_mode != set_mode)
355 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
356 && req_mode != CCSRmode && req_mode != CCURmode)
362 if (req_mode != CCAmode)
370 return (GET_MODE (SET_SRC (set)) == set_mode);
373 /* Return true if every SET in INSN that sets the CC register
374 has source and destination with matching CC modes and that
375 CC mode is at least as constrained as REQ_MODE.
376 If REQ_MODE is VOIDmode, always return false. */
379 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
383 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
384 if (req_mode == VOIDmode)
387 if (GET_CODE (PATTERN (insn)) == SET)
388 return s390_match_ccmode_set (PATTERN (insn), req_mode);
390 if (GET_CODE (PATTERN (insn)) == PARALLEL)
391 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
393 rtx set = XVECEXP (PATTERN (insn), 0, i);
394 if (GET_CODE (set) == SET)
395 if (!s390_match_ccmode_set (set, req_mode))
402 /* If a test-under-mask instruction can be used to implement
403 (compare (and ... OP1) OP2), return the CC mode required
404 to do that. Otherwise, return VOIDmode.
405 MIXED is true if the instruction can distinguish between
406 CC1 and CC2 for mixed selected bits (TMxx), it is false
407 if the instruction cannot (TM). */
410 s390_tm_ccmode (rtx op1, rtx op2, int mixed)
414 /* ??? Fixme: should work on CONST_DOUBLE as well. */
415 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
418 /* Selected bits all zero: CC0. */
419 if (INTVAL (op2) == 0)
422 /* Selected bits all one: CC3. */
423 if (INTVAL (op2) == INTVAL (op1))
426 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
429 bit1 = exact_log2 (INTVAL (op2));
430 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
431 if (bit0 != -1 && bit1 != -1)
432 return bit0 > bit1 ? CCT1mode : CCT2mode;
438 /* Given a comparison code OP (EQ, NE, etc.) and the operands
439 OP0 and OP1 of a COMPARE, return the mode to be used for the
443 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
449 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
450 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
452 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
453 || GET_CODE (op1) == NEG)
454 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
457 if (GET_CODE (op0) == AND)
459 /* Check whether we can potentially do it via TM. */
460 enum machine_mode ccmode;
461 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
462 if (ccmode != VOIDmode)
464 /* Relax CCTmode to CCZmode to allow fall-back to AND
465 if that turns out to be beneficial. */
466 return ccmode == CCTmode ? CCZmode : ccmode;
470 if (register_operand (op0, HImode)
471 && GET_CODE (op1) == CONST_INT
472 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
474 if (register_operand (op0, QImode)
475 && GET_CODE (op1) == CONST_INT
476 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
485 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
486 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
488 if (INTVAL (XEXP((op0), 1)) < 0)
501 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
502 && GET_CODE (op1) != CONST_INT)
508 if (GET_CODE (op0) == PLUS
509 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
512 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
513 && GET_CODE (op1) != CONST_INT)
519 if (GET_CODE (op0) == MINUS
520 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
523 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
524 && GET_CODE (op1) != CONST_INT)
533 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
534 that we can implement more efficiently. */
537 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
539 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
540 if ((*code == EQ || *code == NE)
541 && *op1 == const0_rtx
542 && GET_CODE (*op0) == ZERO_EXTRACT
543 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
544 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
545 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
547 rtx inner = XEXP (*op0, 0);
548 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
549 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
550 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
552 if (len > 0 && len < modesize
553 && pos >= 0 && pos + len <= modesize
554 && modesize <= HOST_BITS_PER_WIDE_INT)
556 unsigned HOST_WIDE_INT block;
557 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
558 block <<= modesize - pos - len;
560 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
561 gen_int_mode (block, GET_MODE (inner)));
565 /* Narrow AND of memory against immediate to enable TM. */
566 if ((*code == EQ || *code == NE)
567 && *op1 == const0_rtx
568 && GET_CODE (*op0) == AND
569 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
570 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
572 rtx inner = XEXP (*op0, 0);
573 rtx mask = XEXP (*op0, 1);
575 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
576 if (GET_CODE (inner) == SUBREG
577 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
578 && (GET_MODE_SIZE (GET_MODE (inner))
579 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
581 & GET_MODE_MASK (GET_MODE (inner))
582 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
584 inner = SUBREG_REG (inner);
586 /* Do not change volatile MEMs. */
587 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
589 int part = s390_single_part (XEXP (*op0, 1),
590 GET_MODE (inner), QImode, 0);
593 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
594 inner = adjust_address_nv (inner, QImode, part);
595 *op0 = gen_rtx_AND (QImode, inner, mask);
600 /* Narrow comparisons against 0xffff to HImode if possible. */
601 if ((*code == EQ || *code == NE)
602 && GET_CODE (*op1) == CONST_INT
603 && INTVAL (*op1) == 0xffff
604 && SCALAR_INT_MODE_P (GET_MODE (*op0))
605 && (nonzero_bits (*op0, GET_MODE (*op0))
606 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
608 *op0 = gen_lowpart (HImode, *op0);
613 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
614 if (GET_CODE (*op0) == UNSPEC
615 && XINT (*op0, 1) == UNSPEC_CMPINT
616 && XVECLEN (*op0, 0) == 1
617 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
618 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
619 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
620 && *op1 == const0_rtx)
622 enum rtx_code new_code = UNKNOWN;
625 case EQ: new_code = EQ; break;
626 case NE: new_code = NE; break;
627 case LT: new_code = LTU; break;
628 case GT: new_code = GTU; break;
629 case LE: new_code = LEU; break;
630 case GE: new_code = GEU; break;
634 if (new_code != UNKNOWN)
636 *op0 = XVECEXP (*op0, 0, 0);
642 /* Emit a compare instruction suitable to implement the comparison
643 OP0 CODE OP1. Return the correct condition RTL to be placed in
644 the IF_THEN_ELSE of the conditional branch testing the result. */
647 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
649 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
650 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
652 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
653 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
656 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
657 unconditional jump, else a conditional jump under condition COND. */
660 s390_emit_jump (rtx target, rtx cond)
664 target = gen_rtx_LABEL_REF (VOIDmode, target);
666 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
668 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
669 emit_jump_insn (insn);
672 /* Return nonzero if OP is a valid comparison operator
673 for a branch condition in mode MODE. */
676 s390_comparison (rtx op, enum machine_mode mode)
678 if (mode != VOIDmode && mode != GET_MODE (op))
681 if (!COMPARISON_P (op))
684 if (GET_CODE (XEXP (op, 0)) != REG
685 || REGNO (XEXP (op, 0)) != CC_REGNUM
686 || XEXP (op, 1) != const0_rtx)
689 return s390_branch_condition_mask (op) >= 0;
692 /* Return nonzero if OP is a valid comparison operator
693 for an ALC condition in mode MODE. */
696 s390_alc_comparison (rtx op, enum machine_mode mode)
698 if (mode != VOIDmode && mode != GET_MODE (op))
701 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
704 if (!COMPARISON_P (op))
707 if (GET_CODE (XEXP (op, 0)) != REG
708 || REGNO (XEXP (op, 0)) != CC_REGNUM
709 || XEXP (op, 1) != const0_rtx)
712 switch (GET_MODE (XEXP (op, 0)))
715 return GET_CODE (op) == LTU;
718 return GET_CODE (op) == LEU;
721 return GET_CODE (op) == GEU;
724 return GET_CODE (op) == GTU;
727 return GET_CODE (op) == LTU;
730 return GET_CODE (op) == UNGT;
733 return GET_CODE (op) == UNLT;
740 /* Return nonzero if OP is a valid comparison operator
741 for an SLB condition in mode MODE. */
744 s390_slb_comparison (rtx op, enum machine_mode mode)
746 if (mode != VOIDmode && mode != GET_MODE (op))
749 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
752 if (!COMPARISON_P (op))
755 if (GET_CODE (XEXP (op, 0)) != REG
756 || REGNO (XEXP (op, 0)) != CC_REGNUM
757 || XEXP (op, 1) != const0_rtx)
760 switch (GET_MODE (XEXP (op, 0)))
763 return GET_CODE (op) == GEU;
766 return GET_CODE (op) == GTU;
769 return GET_CODE (op) == LTU;
772 return GET_CODE (op) == LEU;
775 return GET_CODE (op) == GEU;
778 return GET_CODE (op) == LE;
781 return GET_CODE (op) == GE;
788 /* Return branch condition mask to implement a branch
789 specified by CODE. Return -1 for invalid comparisons. */
792 s390_branch_condition_mask (rtx code)
794 const int CC0 = 1 << 3;
795 const int CC1 = 1 << 2;
796 const int CC2 = 1 << 1;
797 const int CC3 = 1 << 0;
799 if (GET_CODE (XEXP (code, 0)) != REG
800 || REGNO (XEXP (code, 0)) != CC_REGNUM
801 || XEXP (code, 1) != const0_rtx)
804 switch (GET_MODE (XEXP (code, 0)))
807 switch (GET_CODE (code))
810 case NE: return CC1 | CC2 | CC3;
816 switch (GET_CODE (code))
819 case NE: return CC0 | CC2 | CC3;
825 switch (GET_CODE (code))
828 case NE: return CC0 | CC1 | CC3;
834 switch (GET_CODE (code))
837 case NE: return CC0 | CC1 | CC2;
843 switch (GET_CODE (code))
845 case EQ: return CC0 | CC2;
846 case NE: return CC1 | CC3;
852 switch (GET_CODE (code))
854 case LTU: return CC2 | CC3; /* carry */
855 case GEU: return CC0 | CC1; /* no carry */
861 switch (GET_CODE (code))
863 case GTU: return CC0 | CC1; /* borrow */
864 case LEU: return CC2 | CC3; /* no borrow */
870 switch (GET_CODE (code))
872 case EQ: return CC0 | CC2;
873 case NE: return CC1 | CC3;
874 case LTU: return CC1;
875 case GTU: return CC3;
876 case LEU: return CC1 | CC2;
877 case GEU: return CC2 | CC3;
882 switch (GET_CODE (code))
885 case NE: return CC1 | CC2 | CC3;
886 case LTU: return CC1;
887 case GTU: return CC2;
888 case LEU: return CC0 | CC1;
889 case GEU: return CC0 | CC2;
895 switch (GET_CODE (code))
898 case NE: return CC2 | CC1 | CC3;
899 case LTU: return CC2;
900 case GTU: return CC1;
901 case LEU: return CC0 | CC2;
902 case GEU: return CC0 | CC1;
908 switch (GET_CODE (code))
911 case NE: return CC1 | CC2 | CC3;
912 case LT: return CC1 | CC3;
914 case LE: return CC0 | CC1 | CC3;
915 case GE: return CC0 | CC2;
921 switch (GET_CODE (code))
924 case NE: return CC1 | CC2 | CC3;
926 case GT: return CC2 | CC3;
927 case LE: return CC0 | CC1;
928 case GE: return CC0 | CC2 | CC3;
934 switch (GET_CODE (code))
937 case NE: return CC1 | CC2 | CC3;
940 case LE: return CC0 | CC1;
941 case GE: return CC0 | CC2;
942 case UNORDERED: return CC3;
943 case ORDERED: return CC0 | CC1 | CC2;
944 case UNEQ: return CC0 | CC3;
945 case UNLT: return CC1 | CC3;
946 case UNGT: return CC2 | CC3;
947 case UNLE: return CC0 | CC1 | CC3;
948 case UNGE: return CC0 | CC2 | CC3;
949 case LTGT: return CC1 | CC2;
955 switch (GET_CODE (code))
958 case NE: return CC2 | CC1 | CC3;
961 case LE: return CC0 | CC2;
962 case GE: return CC0 | CC1;
963 case UNORDERED: return CC3;
964 case ORDERED: return CC0 | CC2 | CC1;
965 case UNEQ: return CC0 | CC3;
966 case UNLT: return CC2 | CC3;
967 case UNGT: return CC1 | CC3;
968 case UNLE: return CC0 | CC2 | CC3;
969 case UNGE: return CC0 | CC1 | CC3;
970 case LTGT: return CC2 | CC1;
980 /* If INV is false, return assembler mnemonic string to implement
981 a branch specified by CODE. If INV is true, return mnemonic
982 for the corresponding inverted branch. */
985 s390_branch_condition_mnemonic (rtx code, int inv)
987 static const char *const mnemonic[16] =
989 NULL, "o", "h", "nle",
990 "l", "nhe", "lh", "ne",
991 "e", "nlh", "he", "nl",
992 "le", "nh", "no", NULL
995 int mask = s390_branch_condition_mask (code);
996 gcc_assert (mask >= 0);
1001 if (mask < 1 || mask > 14)
1004 return mnemonic[mask];
1007 /* Return the part of op which has a value different from def.
1008 The size of the part is determined by mode.
1009 Use this function only if you already know that op really
1010 contains such a part. */
1012 unsigned HOST_WIDE_INT
1013 s390_extract_part (rtx op, enum machine_mode mode, int def)
1015 unsigned HOST_WIDE_INT value = 0;
1016 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1017 int part_bits = GET_MODE_BITSIZE (mode);
1018 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
1021 for (i = 0; i < max_parts; i++)
1024 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1026 value >>= part_bits;
1028 if ((value & part_mask) != (def & part_mask))
1029 return value & part_mask;
1035 /* If OP is an integer constant of mode MODE with exactly one
1036 part of mode PART_MODE unequal to DEF, return the number of that
1037 part. Otherwise, return -1. */
1040 s390_single_part (rtx op,
1041 enum machine_mode mode,
1042 enum machine_mode part_mode,
1045 unsigned HOST_WIDE_INT value = 0;
1046 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1047 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
1050 if (GET_CODE (op) != CONST_INT)
1053 for (i = 0; i < n_parts; i++)
1056 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1058 value >>= GET_MODE_BITSIZE (part_mode);
1060 if ((value & part_mask) != (def & part_mask))
1068 return part == -1 ? -1 : n_parts - 1 - part;
1071 /* Check whether we can (and want to) split a double-word
1072 move in mode MODE from SRC to DST into two single-word
1073 moves, moving the subword FIRST_SUBWORD first. */
1076 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1078 /* Floating point registers cannot be split. */
1079 if (FP_REG_P (src) || FP_REG_P (dst))
1082 /* We don't need to split if operands are directly accessible. */
1083 if (s_operand (src, mode) || s_operand (dst, mode))
1086 /* Non-offsettable memory references cannot be split. */
1087 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1088 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1091 /* Moving the first subword must not clobber a register
1092 needed to move the second subword. */
1093 if (register_operand (dst, mode))
1095 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1096 if (reg_overlap_mentioned_p (subreg, src))
1103 /* Check whether the address of memory reference MEM2 equals exactly
1104 the address of memory reference MEM1 plus DELTA. Return true if
1105 we can prove this to be the case, false otherwise. */
1108 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1110 rtx addr1, addr2, addr_delta;
1112 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1115 addr1 = XEXP (mem1, 0);
1116 addr2 = XEXP (mem2, 0);
1118 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1119 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1125 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1128 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1131 enum machine_mode wmode = mode;
1132 rtx dst = operands[0];
1133 rtx src1 = operands[1];
1134 rtx src2 = operands[2];
1137 /* If we cannot handle the operation directly, use a temp register. */
1138 if (!s390_logical_operator_ok_p (operands))
1139 dst = gen_reg_rtx (mode);
1141 /* QImode and HImode patterns make sense only if we have a destination
1142 in memory. Otherwise perform the operation in SImode. */
1143 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1146 /* Widen operands if required. */
1149 if (GET_CODE (dst) == SUBREG
1150 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1152 else if (REG_P (dst))
1153 dst = gen_rtx_SUBREG (wmode, dst, 0);
1155 dst = gen_reg_rtx (wmode);
1157 if (GET_CODE (src1) == SUBREG
1158 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1160 else if (GET_MODE (src1) != VOIDmode)
1161 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1163 if (GET_CODE (src2) == SUBREG
1164 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1166 else if (GET_MODE (src2) != VOIDmode)
1167 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1170 /* Emit the instruction. */
1171 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1172 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1173 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1175 /* Fix up the destination if needed. */
1176 if (dst != operands[0])
1177 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1180 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1183 s390_logical_operator_ok_p (rtx *operands)
1185 /* If the destination operand is in memory, it needs to coincide
1186 with one of the source operands. After reload, it has to be
1187 the first source operand. */
1188 if (GET_CODE (operands[0]) == MEM)
1189 return rtx_equal_p (operands[0], operands[1])
1190 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1195 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1196 operand IMMOP to switch from SS to SI type instructions. */
1199 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1201 int def = code == AND ? -1 : 0;
1205 gcc_assert (GET_CODE (*memop) == MEM);
1206 gcc_assert (!MEM_VOLATILE_P (*memop));
1208 mask = s390_extract_part (*immop, QImode, def);
1209 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1210 gcc_assert (part >= 0);
1212 *memop = adjust_address (*memop, QImode, part);
1213 *immop = gen_int_mode (mask, QImode);
1217 /* Change optimizations to be performed, depending on the
1220 LEVEL is the optimization level specified; 2 if `-O2' is
1221 specified, 1 if `-O' is specified, and 0 if neither is specified.
1223 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1226 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1228 /* ??? There are apparently still problems with -fcaller-saves. */
1229 flag_caller_saves = 0;
1231 /* By default, always emit DWARF-2 unwind info. This allows debugging
1232 without maintaining a stack frame back-chain. */
1233 flag_asynchronous_unwind_tables = 1;
1237 override_options (void)
1242 const char *const name; /* processor name or nickname. */
1243 const enum processor_type processor;
1244 const enum processor_flags flags;
1246 const processor_alias_table[] =
1248 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1249 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1250 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1251 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1252 | PF_LONG_DISPLACEMENT},
1255 int const pta_size = ARRAY_SIZE (processor_alias_table);
1257 /* Acquire a unique set number for our register saves and restores. */
1258 s390_sr_alias_set = new_alias_set ();
1260 /* Set up function hooks. */
1261 init_machine_status = s390_init_machine_status;
1263 /* Architecture mode defaults according to ABI. */
1264 if (!(target_flags_explicit & MASK_ZARCH))
1267 target_flags |= MASK_ZARCH;
1269 target_flags &= ~MASK_ZARCH;
1272 /* Determine processor architectural level. */
1273 if (!s390_arch_string)
1274 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1276 for (i = 0; i < pta_size; i++)
1277 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
1279 s390_arch = processor_alias_table[i].processor;
1280 s390_arch_flags = processor_alias_table[i].flags;
1284 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1286 /* Determine processor to tune for. */
1287 if (!s390_tune_string)
1289 s390_tune = s390_arch;
1290 s390_tune_flags = s390_arch_flags;
1291 s390_tune_string = s390_arch_string;
1295 for (i = 0; i < pta_size; i++)
1296 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1298 s390_tune = processor_alias_table[i].processor;
1299 s390_tune_flags = processor_alias_table[i].flags;
1303 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1306 /* Sanity checks. */
1307 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1308 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1309 if (TARGET_64BIT && !TARGET_ZARCH)
1310 error ("64-bit ABI not supported in ESA/390 mode.");
1312 if (s390_warn_framesize_string)
1314 if (sscanf (s390_warn_framesize_string, HOST_WIDE_INT_PRINT_DEC,
1315 &s390_warn_framesize) != 1)
1316 error ("invalid value for -mwarn-framesize");
1319 if (s390_warn_dynamicstack_string)
1320 s390_warn_dynamicstack_p = 1;
1322 if (s390_stack_size_string)
1324 if (sscanf (s390_stack_size_string, HOST_WIDE_INT_PRINT_DEC,
1325 &s390_stack_size) != 1)
1326 error ("invalid value for -mstack-size");
1328 if (exact_log2 (s390_stack_size) == -1)
1329 error ("stack size must be an exact power of 2");
1331 if (s390_stack_guard_string)
1333 if (sscanf (s390_stack_guard_string, HOST_WIDE_INT_PRINT_DEC,
1334 &s390_stack_guard) != 1)
1335 error ("invalid value for -mstack-guard");
1337 if (s390_stack_guard >= s390_stack_size)
1338 error ("stack size must be greater than the stack guard value");
1340 if (exact_log2 (s390_stack_guard) == -1)
1341 error ("stack guard value must be an exact power of 2");
1344 error ("-mstack-size implies use of -mstack-guard");
1347 if (s390_stack_guard_string && !s390_stack_size_string)
1348 error ("-mstack-guard implies use of -mstack-size");
1351 /* Map for smallest class containing reg regno. */
1353 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1354 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1355 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1356 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1357 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1358 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1359 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1360 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1361 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1362 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1363 ACCESS_REGS, ACCESS_REGS
1366 /* Return attribute type of insn. */
1368 static enum attr_type
1369 s390_safe_attr_type (rtx insn)
1371 if (recog_memoized (insn) >= 0)
1372 return get_attr_type (insn);
1377 /* Return true if OP a (const_int 0) operand.
1378 OP is the current operation.
1379 MODE is the current operation mode. */
1382 const0_operand (register rtx op, enum machine_mode mode)
1384 return op == CONST0_RTX (mode);
1387 /* Return true if OP is constant.
1388 OP is the current operation.
1389 MODE is the current operation mode. */
1392 consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1394 return CONSTANT_P (op);
1397 /* Return true if the mode of operand OP matches MODE.
1398 If MODE is set to VOIDmode, set it to the mode of OP. */
1401 check_mode (register rtx op, enum machine_mode *mode)
1403 if (*mode == VOIDmode)
1404 *mode = GET_MODE (op);
1407 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1413 /* Return true if OP a valid operand for the LARL instruction.
1414 OP is the current operation.
1415 MODE is the current operation mode. */
1418 larl_operand (register rtx op, enum machine_mode mode)
1420 if (! check_mode (op, &mode))
1423 /* Allow labels and local symbols. */
1424 if (GET_CODE (op) == LABEL_REF)
1426 if (GET_CODE (op) == SYMBOL_REF)
1427 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1428 && SYMBOL_REF_TLS_MODEL (op) == 0
1429 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1431 /* Everything else must have a CONST, so strip it. */
1432 if (GET_CODE (op) != CONST)
1436 /* Allow adding *even* in-range constants. */
1437 if (GET_CODE (op) == PLUS)
1439 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1440 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1442 #if HOST_BITS_PER_WIDE_INT > 32
1443 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1444 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1450 /* Labels and local symbols allowed here as well. */
1451 if (GET_CODE (op) == LABEL_REF)
1453 if (GET_CODE (op) == SYMBOL_REF)
1454 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1455 && SYMBOL_REF_TLS_MODEL (op) == 0
1456 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1458 /* Now we must have a @GOTENT offset or @PLT stub
1459 or an @INDNTPOFF TLS offset. */
1460 if (GET_CODE (op) == UNSPEC
1461 && XINT (op, 1) == UNSPEC_GOTENT)
1463 if (GET_CODE (op) == UNSPEC
1464 && XINT (op, 1) == UNSPEC_PLT)
1466 if (GET_CODE (op) == UNSPEC
1467 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1473 /* Return true if OP is a valid S-type operand.
1474 OP is the current operation.
1475 MODE is the current operation mode. */
1478 s_operand (rtx op, enum machine_mode mode)
1480 struct s390_address addr;
1482 /* Call general_operand first, so that we don't have to
1483 check for many special cases. */
1484 if (!general_operand (op, mode))
1487 /* Just like memory_operand, allow (subreg (mem ...))
1489 if (reload_completed
1490 && GET_CODE (op) == SUBREG
1491 && GET_CODE (SUBREG_REG (op)) == MEM)
1492 op = SUBREG_REG (op);
1494 if (GET_CODE (op) != MEM)
1496 if (!s390_decompose_address (XEXP (op, 0), &addr))
1504 /* Return true if OP is a memory operand pointing to the
1505 literal pool, or an immediate operand. */
1508 s390_pool_operand (rtx op)
1510 struct s390_address addr;
1512 /* Just like memory_operand, allow (subreg (mem ...))
1514 if (reload_completed
1515 && GET_CODE (op) == SUBREG
1516 && GET_CODE (SUBREG_REG (op)) == MEM)
1517 op = SUBREG_REG (op);
1519 switch (GET_CODE (op))
1526 if (!s390_decompose_address (XEXP (op, 0), &addr))
1528 if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
1530 if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
1539 /* Return true if OP a valid shift count operand.
1540 OP is the current operation.
1541 MODE is the current operation mode. */
1544 shift_count_operand (rtx op, enum machine_mode mode)
1546 HOST_WIDE_INT offset = 0;
1548 if (! check_mode (op, &mode))
1551 /* We can have an integer constant, an address register,
1552 or a sum of the two. Note that reload already checks
1553 that any register present is an address register, so
1554 we just check for any register here. */
1555 if (GET_CODE (op) == CONST_INT)
1557 offset = INTVAL (op);
1560 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1562 offset = INTVAL (XEXP (op, 1));
1565 while (op && GET_CODE (op) == SUBREG)
1566 op = SUBREG_REG (op);
1567 if (op && GET_CODE (op) != REG)
1570 /* Unfortunately we have to reject constants that are invalid
1571 for an address, or else reload will get confused. */
1572 if (!DISP_IN_RANGE (offset))
1578 /* Return true if DISP is a valid short displacement. */
1581 s390_short_displacement (rtx disp)
1583 /* No displacement is OK. */
1587 /* Integer displacement in range. */
1588 if (GET_CODE (disp) == CONST_INT)
1589 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1591 /* GOT offset is not OK, the GOT can be large. */
1592 if (GET_CODE (disp) == CONST
1593 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1594 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1597 /* All other symbolic constants are literal pool references,
1598 which are OK as the literal pool must be small. */
1599 if (GET_CODE (disp) == CONST)
1605 /* Return true if OP is a valid operand for a C constraint. */
1608 s390_extra_constraint_str (rtx op, int c, const char * str)
1610 struct s390_address addr;
1615 /* Check for offsettable variants of memory constraints. */
1618 /* Only accept non-volatile MEMs. */
1619 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1622 if ((reload_completed || reload_in_progress)
1623 ? !offsettable_memref_p (op)
1624 : !offsettable_nonstrict_memref_p (op))
1633 if (GET_CODE (op) != MEM)
1635 if (!s390_decompose_address (XEXP (op, 0), &addr))
1640 if (TARGET_LONG_DISPLACEMENT)
1642 if (!s390_short_displacement (addr.disp))
1648 if (GET_CODE (op) != MEM)
1651 if (TARGET_LONG_DISPLACEMENT)
1653 if (!s390_decompose_address (XEXP (op, 0), &addr))
1655 if (!s390_short_displacement (addr.disp))
1661 if (!TARGET_LONG_DISPLACEMENT)
1663 if (GET_CODE (op) != MEM)
1665 if (!s390_decompose_address (XEXP (op, 0), &addr))
1669 if (s390_short_displacement (addr.disp))
1674 if (!TARGET_LONG_DISPLACEMENT)
1676 if (GET_CODE (op) != MEM)
1678 /* Any invalid address here will be fixed up by reload,
1679 so accept it for the most generic constraint. */
1680 if (s390_decompose_address (XEXP (op, 0), &addr)
1681 && s390_short_displacement (addr.disp))
1686 if (TARGET_LONG_DISPLACEMENT)
1688 if (!s390_decompose_address (op, &addr))
1690 if (!s390_short_displacement (addr.disp))
1696 if (!TARGET_LONG_DISPLACEMENT)
1698 /* Any invalid address here will be fixed up by reload,
1699 so accept it for the most generic constraint. */
1700 if (s390_decompose_address (op, &addr)
1701 && s390_short_displacement (addr.disp))
1706 return shift_count_operand (op, VOIDmode);
1715 /* Return true if VALUE matches the constraint STR. */
1718 s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1722 enum machine_mode mode, part_mode;
1724 int part, part_goal;
1732 return (unsigned int)value < 256;
1735 return (unsigned int)value < 4096;
1738 return value >= -32768 && value < 32768;
1741 return (TARGET_LONG_DISPLACEMENT ?
1742 (value >= -524288 && value <= 524287)
1743 : (value >= 0 && value <= 4095));
1745 return value == 2147483647;
1751 part_goal = str[1] - '0';
1755 case 'H': part_mode = HImode; break;
1756 case 'Q': part_mode = QImode; break;
1762 case 'H': mode = HImode; break;
1763 case 'S': mode = SImode; break;
1764 case 'D': mode = DImode; break;
1770 case '0': def = 0; break;
1771 case 'F': def = -1; break;
1775 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
1778 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
1781 if (part_goal != -1 && part_goal != part)
1793 /* Compute a (partial) cost for rtx X. Return true if the complete
1794 cost has been computed, and false if subexpressions should be
1795 scanned. In either case, *TOTAL contains the cost result. */
1798 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
1803 if (GET_CODE (XEXP (x, 0)) == MINUS
1804 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1811 /* Force_const_mem does not work out of reload, because the
1812 saveable_obstack is set to reload_obstack, which does not
1813 live long enough. Because of this we cannot use force_const_mem
1814 in addsi3. This leads to problems with gen_add2_insn with a
1815 constant greater than a short. Because of that we give an
1816 addition of greater constants a cost of 3 (reload1.c 10096). */
1817 /* ??? saveable_obstack no longer exists. */
1818 if (outer_code == PLUS
1819 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1820 *total = COSTS_N_INSNS (3);
1841 *total = COSTS_N_INSNS (1);
1845 if (GET_MODE (XEXP (x, 0)) == DImode)
1846 *total = COSTS_N_INSNS (40);
1848 *total = COSTS_N_INSNS (7);
1855 *total = COSTS_N_INSNS (33);
1863 /* Return the cost of an address rtx ADDR. */
1866 s390_address_cost (rtx addr)
1868 struct s390_address ad;
1869 if (!s390_decompose_address (addr, &ad))
1872 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1875 /* Return true if OP is a valid operand for the BRAS instruction.
1876 OP is the current operation.
1877 MODE is the current operation mode. */
1880 bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1882 register enum rtx_code code = GET_CODE (op);
1884 /* Allow SYMBOL_REFs. */
1885 if (code == SYMBOL_REF)
1888 /* Allow @PLT stubs. */
1890 && GET_CODE (XEXP (op, 0)) == UNSPEC
1891 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1896 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1897 otherwise return 0. */
1900 tls_symbolic_operand (register rtx op)
1902 if (GET_CODE (op) != SYMBOL_REF)
1904 return SYMBOL_REF_TLS_MODEL (op);
1907 /* Return true if OP is a load multiple operation. It is known to be a
1908 PARALLEL and the first section will be tested.
1909 OP is the current operation.
1910 MODE is the current operation mode. */
1913 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1915 enum machine_mode elt_mode;
1916 int count = XVECLEN (op, 0);
1917 unsigned int dest_regno;
1922 /* Perform a quick check so we don't blow up below. */
1924 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1925 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1926 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1929 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1930 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1931 elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));
1933 /* Check, is base, or base + displacement. */
1935 if (GET_CODE (src_addr) == REG)
1937 else if (GET_CODE (src_addr) == PLUS
1938 && GET_CODE (XEXP (src_addr, 0)) == REG
1939 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1941 off = INTVAL (XEXP (src_addr, 1));
1942 src_addr = XEXP (src_addr, 0);
1947 for (i = 1; i < count; i++)
1949 rtx elt = XVECEXP (op, 0, i);
1951 if (GET_CODE (elt) != SET
1952 || GET_CODE (SET_DEST (elt)) != REG
1953 || GET_MODE (SET_DEST (elt)) != elt_mode
1954 || REGNO (SET_DEST (elt)) != dest_regno + i
1955 || GET_CODE (SET_SRC (elt)) != MEM
1956 || GET_MODE (SET_SRC (elt)) != elt_mode
1957 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1958 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1959 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1960 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1961 != off + i * GET_MODE_SIZE (elt_mode))
1968 /* Return true if OP is a store multiple operation. It is known to be a
1969 PARALLEL and the first section will be tested.
1970 OP is the current operation.
1971 MODE is the current operation mode. */
1974 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1976 enum machine_mode elt_mode;
1977 int count = XVECLEN (op, 0);
1978 unsigned int src_regno;
1982 /* Perform a quick check so we don't blow up below. */
1984 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1985 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1986 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1989 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1990 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1991 elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));
1993 /* Check, is base, or base + displacement. */
1995 if (GET_CODE (dest_addr) == REG)
1997 else if (GET_CODE (dest_addr) == PLUS
1998 && GET_CODE (XEXP (dest_addr, 0)) == REG
1999 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
2001 off = INTVAL (XEXP (dest_addr, 1));
2002 dest_addr = XEXP (dest_addr, 0);
2007 for (i = 1; i < count; i++)
2009 rtx elt = XVECEXP (op, 0, i);
2011 if (GET_CODE (elt) != SET
2012 || GET_CODE (SET_SRC (elt)) != REG
2013 || GET_MODE (SET_SRC (elt)) != elt_mode
2014 || REGNO (SET_SRC (elt)) != src_regno + i
2015 || GET_CODE (SET_DEST (elt)) != MEM
2016 || GET_MODE (SET_DEST (elt)) != elt_mode
2017 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2018 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2019 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2020 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
2021 != off + i * GET_MODE_SIZE (elt_mode))
2027 /* Split DImode access register reference REG (on 64-bit) into its constituent
2028 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2029 gen_highpart cannot be used as they assume all registers are word-sized,
2030 while our access registers have only half that size. */
2033 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2035 gcc_assert (TARGET_64BIT);
2036 gcc_assert (ACCESS_REG_P (reg));
2037 gcc_assert (GET_MODE (reg) == DImode);
2038 gcc_assert (!(REGNO (reg) & 1));
2040 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2041 *hi = gen_rtx_REG (SImode, REGNO (reg));
2044 /* Return true if OP contains a symbol reference */
2047 symbolic_reference_mentioned_p (rtx op)
2049 register const char *fmt;
2052 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2055 fmt = GET_RTX_FORMAT (GET_CODE (op));
2056 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2062 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2063 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2067 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2074 /* Return true if OP contains a reference to a thread-local symbol. */
2077 tls_symbolic_reference_mentioned_p (rtx op)
2079 register const char *fmt;
2082 if (GET_CODE (op) == SYMBOL_REF)
2083 return tls_symbolic_operand (op);
2085 fmt = GET_RTX_FORMAT (GET_CODE (op));
2086 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2092 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2093 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2097 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2105 /* Return true if OP is a legitimate general operand when
2106 generating PIC code. It is given that flag_pic is on
2107 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2110 legitimate_pic_operand_p (register rtx op)
2112 /* Accept all non-symbolic constants. */
2113 if (!SYMBOLIC_CONST (op))
2116 /* Reject everything else; must be handled
2117 via emit_symbolic_move. */
2121 /* Returns true if the constant value OP is a legitimate general operand.
2122 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2125 legitimate_constant_p (register rtx op)
2127 /* Accept all non-symbolic constants. */
2128 if (!SYMBOLIC_CONST (op))
2131 /* Accept immediate LARL operands. */
2132 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2135 /* Thread-local symbols are never legal constants. This is
2136 so that emit_call knows that computing such addresses
2137 might require a function call. */
2138 if (TLS_SYMBOLIC_CONST (op))
2141 /* In the PIC case, symbolic constants must *not* be
2142 forced into the literal pool. We accept them here,
2143 so that they will be handled by emit_symbolic_move. */
2147 /* All remaining non-PIC symbolic constants are
2148 forced into the literal pool. */
2152 /* Determine if it's legal to put X into the constant pool. This
2153 is not possible if X contains the address of a symbol that is
2154 not constant (TLS) or not known at final link time (PIC). */
2157 s390_cannot_force_const_mem (rtx x)
2159 switch (GET_CODE (x))
2163 /* Accept all non-symbolic constants. */
2167 /* Labels are OK iff we are non-PIC. */
2168 return flag_pic != 0;
2171 /* 'Naked' TLS symbol references are never OK,
2172 non-TLS symbols are OK iff we are non-PIC. */
2173 if (tls_symbolic_operand (x))
2176 return flag_pic != 0;
2179 return s390_cannot_force_const_mem (XEXP (x, 0));
2182 return s390_cannot_force_const_mem (XEXP (x, 0))
2183 || s390_cannot_force_const_mem (XEXP (x, 1));
2186 switch (XINT (x, 1))
2188 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2189 case UNSPEC_LTREL_OFFSET:
2197 case UNSPEC_GOTNTPOFF:
2198 case UNSPEC_INDNTPOFF:
2201 /* If the literal pool shares the code section, be put
2202 execute template placeholders into the pool as well. */
2204 return TARGET_CPU_ZARCH;
2216 /* Returns true if the constant value OP is a legitimate general
2217 operand during and after reload. The difference to
2218 legitimate_constant_p is that this function will not accept
2219 a constant that would need to be forced to the literal pool
2220 before it can be used as operand. */
2223 legitimate_reload_constant_p (register rtx op)
2225 /* Accept la(y) operands. */
2226 if (GET_CODE (op) == CONST_INT
2227 && DISP_IN_RANGE (INTVAL (op)))
2230 /* Accept l(g)hi operands. */
2231 if (GET_CODE (op) == CONST_INT
2232 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
2235 /* Accept lliXX operands. */
2237 && s390_single_part (op, DImode, HImode, 0) >= 0)
2240 /* Accept larl operands. */
2241 if (TARGET_CPU_ZARCH
2242 && larl_operand (op, VOIDmode))
2245 /* Everything else cannot be handled without reload. */
2249 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2250 return the class of reg to actually use. */
2253 s390_preferred_reload_class (rtx op, enum reg_class class)
2255 switch (GET_CODE (op))
2257 /* Constants we cannot reload must be forced into the
2262 if (legitimate_reload_constant_p (op))
2267 /* If a symbolic constant or a PLUS is reloaded,
2268 it is most likely being used as an address, so
2269 prefer ADDR_REGS. If 'class' is not a superset
2270 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2275 if (reg_class_subset_p (ADDR_REGS, class))
2287 /* Return the register class of a scratch register needed to
2288 load IN into a register of class CLASS in MODE.
2290 We need a temporary when loading a PLUS expression which
2291 is not a legitimate operand of the LOAD ADDRESS instruction. */
2294 s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
2295 enum machine_mode mode, rtx in)
2297 if (s390_plus_operand (in, mode))
2300 if (GET_MODE_CLASS (mode) == MODE_CC)
2301 return GENERAL_REGS;
2306 /* Return the register class of a scratch register needed to
2307 store a register of class CLASS in MODE into OUT:
2309 We need a temporary when storing a double-word to a
2310 non-offsettable memory address. */
2313 s390_secondary_output_reload_class (enum reg_class class,
2314 enum machine_mode mode, rtx out)
2316 if ((TARGET_64BIT ? mode == TImode
2317 : (mode == DImode || mode == DFmode))
2318 && reg_classes_intersect_p (GENERAL_REGS, class)
2319 && GET_CODE (out) == MEM
2320 && !offsettable_memref_p (out)
2321 && !s_operand (out, VOIDmode))
2324 if (GET_MODE_CLASS (mode) == MODE_CC)
2325 return GENERAL_REGS;
2330 /* Return true if OP is a PLUS that is not a legitimate
2331 operand for the LA instruction.
2332 OP is the current operation.
2333 MODE is the current operation mode. */
2336 s390_plus_operand (register rtx op, enum machine_mode mode)
2338 if (!check_mode (op, &mode) || mode != Pmode)
2341 if (GET_CODE (op) != PLUS)
2344 if (legitimate_la_operand_p (op))
2350 /* Generate code to load SRC, which is PLUS that is not a
2351 legitimate operand for the LA instruction, into TARGET.
2352 SCRATCH may be used as scratch register. */
2355 s390_expand_plus_operand (register rtx target, register rtx src,
2356 register rtx scratch)
2359 struct s390_address ad;
2361 /* src must be a PLUS; get its two operands. */
2362 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2365 /* Check if any of the two operands is already scheduled
2366 for replacement by reload. This can happen e.g. when
2367 float registers occur in an address. */
2368 sum1 = find_replacement (&XEXP (src, 0));
2369 sum2 = find_replacement (&XEXP (src, 1));
2370 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2372 /* If the address is already strictly valid, there's nothing to do. */
2373 if (!s390_decompose_address (src, &ad)
2374 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2375 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2377 /* Otherwise, one of the operands cannot be an address register;
2378 we reload its value into the scratch register. */
2379 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2381 emit_move_insn (scratch, sum1);
2384 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2386 emit_move_insn (scratch, sum2);
2390 /* According to the way these invalid addresses are generated
2391 in reload.c, it should never happen (at least on s390) that
2392 *neither* of the PLUS components, after find_replacements
2393 was applied, is an address register. */
2394 if (sum1 == scratch && sum2 == scratch)
2400 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2403 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2404 is only ever performed on addresses, so we can mark the
2405 sum as legitimate for LA in any case. */
2406 s390_load_address (target, src);
2410 /* Decompose a RTL expression ADDR for a memory address into
2411 its components, returned in OUT.
2413 Returns 0 if ADDR is not a valid memory address, nonzero
2414 otherwise. If OUT is NULL, don't return the components,
2415 but check for validity only.
2417 Note: Only addresses in canonical form are recognized.
2418 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2419 canonical form so that they will be recognized. */
2422 s390_decompose_address (register rtx addr, struct s390_address *out)
2424 HOST_WIDE_INT offset = 0;
2425 rtx base = NULL_RTX;
2426 rtx indx = NULL_RTX;
2427 rtx disp = NULL_RTX;
2429 int pointer = FALSE;
2430 int base_ptr = FALSE;
2431 int indx_ptr = FALSE;
2433 /* Decompose address into base + index + displacement. */
2435 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2438 else if (GET_CODE (addr) == PLUS)
2440 rtx op0 = XEXP (addr, 0);
2441 rtx op1 = XEXP (addr, 1);
2442 enum rtx_code code0 = GET_CODE (op0);
2443 enum rtx_code code1 = GET_CODE (op1);
2445 if (code0 == REG || code0 == UNSPEC)
2447 if (code1 == REG || code1 == UNSPEC)
2449 indx = op0; /* index + base */
2455 base = op0; /* base + displacement */
2460 else if (code0 == PLUS)
2462 indx = XEXP (op0, 0); /* index + base + disp */
2463 base = XEXP (op0, 1);
2474 disp = addr; /* displacement */
2476 /* Extract integer part of displacement. */
2480 if (GET_CODE (disp) == CONST_INT)
2482 offset = INTVAL (disp);
2485 else if (GET_CODE (disp) == CONST
2486 && GET_CODE (XEXP (disp, 0)) == PLUS
2487 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2489 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2490 disp = XEXP (XEXP (disp, 0), 0);
2494 /* Strip off CONST here to avoid special case tests later. */
2495 if (disp && GET_CODE (disp) == CONST)
2496 disp = XEXP (disp, 0);
2498 /* We can convert literal pool addresses to
2499 displacements by basing them off the base register. */
2500 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2502 /* Either base or index must be free to hold the base register. */
2504 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2506 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2510 /* Mark up the displacement. */
2511 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2512 UNSPEC_LTREL_OFFSET);
2515 /* Validate base register. */
2518 if (GET_CODE (base) == UNSPEC)
2519 switch (XINT (base, 1))
2523 disp = gen_rtx_UNSPEC (Pmode,
2524 gen_rtvec (1, XVECEXP (base, 0, 0)),
2525 UNSPEC_LTREL_OFFSET);
2529 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2532 case UNSPEC_LTREL_BASE:
2533 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2540 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
2543 if (REGNO (base) == BASE_REGNUM
2544 || REGNO (base) == STACK_POINTER_REGNUM
2545 || REGNO (base) == FRAME_POINTER_REGNUM
2546 || ((reload_completed || reload_in_progress)
2547 && frame_pointer_needed
2548 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2549 || REGNO (base) == ARG_POINTER_REGNUM
2551 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2552 pointer = base_ptr = TRUE;
2555 /* Validate index register. */
2558 if (GET_CODE (indx) == UNSPEC)
2559 switch (XINT (indx, 1))
2563 disp = gen_rtx_UNSPEC (Pmode,
2564 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2565 UNSPEC_LTREL_OFFSET);
2569 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2572 case UNSPEC_LTREL_BASE:
2573 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2580 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2583 if (REGNO (indx) == BASE_REGNUM
2584 || REGNO (indx) == STACK_POINTER_REGNUM
2585 || REGNO (indx) == FRAME_POINTER_REGNUM
2586 || ((reload_completed || reload_in_progress)
2587 && frame_pointer_needed
2588 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2589 || REGNO (indx) == ARG_POINTER_REGNUM
2591 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2592 pointer = indx_ptr = TRUE;
2595 /* Prefer to use pointer as base, not index. */
2596 if (base && indx && !base_ptr
2597 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2604 /* Validate displacement. */
2607 /* If the argument pointer or the return address pointer are involved,
2608 the displacement will change later anyway as the virtual registers get
2609 eliminated. This could make a valid displacement invalid, but it is
2610 more likely to make an invalid displacement valid, because we sometimes
2611 access the register save area via negative offsets to one of those
2613 Thus we don't check the displacement for validity here. If after
2614 elimination the displacement turns out to be invalid after all,
2615 this is fixed up by reload in any case. */
2616 if (base != arg_pointer_rtx
2617 && indx != arg_pointer_rtx
2618 && base != return_address_pointer_rtx
2619 && indx != return_address_pointer_rtx)
2620 if (!DISP_IN_RANGE (offset))
2625 /* All the special cases are pointers. */
2628 /* In the small-PIC case, the linker converts @GOT
2629 and @GOTNTPOFF offsets to possible displacements. */
2630 if (GET_CODE (disp) == UNSPEC
2631 && (XINT (disp, 1) == UNSPEC_GOT
2632 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2639 /* Accept chunkified literal pool symbol references. */
2640 else if (GET_CODE (disp) == MINUS
2641 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2642 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
2647 /* Accept literal pool references. */
2648 else if (GET_CODE (disp) == UNSPEC
2649 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2651 orig_disp = gen_rtx_CONST (Pmode, disp);
2654 /* If we have an offset, make sure it does not
2655 exceed the size of the constant pool entry. */
2656 rtx sym = XVECEXP (disp, 0, 0);
2657 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2660 orig_disp = plus_constant (orig_disp, offset);
2675 out->disp = orig_disp;
2676 out->pointer = pointer;
2682 /* Return nonzero if ADDR is a valid memory address.
2683 STRICT specifies whether strict register checking applies. */
2686 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2687 register rtx addr, int strict)
2689 struct s390_address ad;
2690 if (!s390_decompose_address (addr, &ad))
2695 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2697 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2702 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2704 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2711 /* Return 1 if OP is a valid operand for the LA instruction.
2712 In 31-bit, we need to prove that the result is used as an
2713 address, as LA performs only a 31-bit addition. */
2716 legitimate_la_operand_p (register rtx op)
2718 struct s390_address addr;
2719 if (!s390_decompose_address (op, &addr))
2722 if (TARGET_64BIT || addr.pointer)
2728 /* Return 1 if it is valid *and* preferable to use LA to
2729 compute the sum of OP1 and OP2. */
2732 preferred_la_operand_p (rtx op1, rtx op2)
2734 struct s390_address addr;
2736 if (op2 != const0_rtx)
2737 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2739 if (!s390_decompose_address (op1, &addr))
2741 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2743 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
2746 if (!TARGET_64BIT && !addr.pointer)
2752 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2753 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2759 /* Emit a forced load-address operation to load SRC into DST.
2760 This will use the LOAD ADDRESS instruction even in situations
2761 where legitimate_la_operand_p (SRC) returns false. */
2764 s390_load_address (rtx dst, rtx src)
2767 emit_move_insn (dst, src);
2769 emit_insn (gen_force_la_31 (dst, src));
2772 /* Return a legitimate reference for ORIG (an address) using the
2773 register REG. If REG is 0, a new pseudo is generated.
2775 There are two types of references that must be handled:
2777 1. Global data references must load the address from the GOT, via
2778 the PIC reg. An insn is emitted to do this load, and the reg is
2781 2. Static data references, constant pool addresses, and code labels
2782 compute the address as an offset from the GOT, whose base is in
2783 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2784 differentiate them from global data objects. The returned
2785 address is the PIC reg + an unspec constant.
2787 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2788 reg also appears in the address. */
2791 legitimize_pic_address (rtx orig, rtx reg)
2797 if (GET_CODE (addr) == LABEL_REF
2798 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2800 /* This is a local symbol. */
2801 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2803 /* Access local symbols PC-relative via LARL.
2804 This is the same as in the non-PIC case, so it is
2805 handled automatically ... */
2809 /* Access local symbols relative to the GOT. */
2811 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2813 if (reload_in_progress || reload_completed)
2814 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2816 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2817 addr = gen_rtx_CONST (Pmode, addr);
2818 addr = force_const_mem (Pmode, addr);
2819 emit_move_insn (temp, addr);
2821 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2824 emit_move_insn (reg, new);
2829 else if (GET_CODE (addr) == SYMBOL_REF)
2832 reg = gen_reg_rtx (Pmode);
2836 /* Assume GOT offset < 4k. This is handled the same way
2837 in both 31- and 64-bit code (@GOT). */
2839 if (reload_in_progress || reload_completed)
2840 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2842 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2843 new = gen_rtx_CONST (Pmode, new);
2844 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2845 new = gen_const_mem (Pmode, new);
2846 emit_move_insn (reg, new);
2849 else if (TARGET_CPU_ZARCH)
2851 /* If the GOT offset might be >= 4k, we determine the position
2852 of the GOT entry via a PC-relative LARL (@GOTENT). */
2854 rtx temp = gen_reg_rtx (Pmode);
2856 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2857 new = gen_rtx_CONST (Pmode, new);
2858 emit_move_insn (temp, new);
2860 new = gen_const_mem (Pmode, temp);
2861 emit_move_insn (reg, new);
2866 /* If the GOT offset might be >= 4k, we have to load it
2867 from the literal pool (@GOT). */
2869 rtx temp = gen_reg_rtx (Pmode);
2871 if (reload_in_progress || reload_completed)
2872 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2874 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2875 addr = gen_rtx_CONST (Pmode, addr);
2876 addr = force_const_mem (Pmode, addr);
2877 emit_move_insn (temp, addr);
2879 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2880 new = gen_const_mem (Pmode, new);
2881 emit_move_insn (reg, new);
2887 if (GET_CODE (addr) == CONST)
2889 addr = XEXP (addr, 0);
2890 if (GET_CODE (addr) == UNSPEC)
2892 if (XVECLEN (addr, 0) != 1)
2894 switch (XINT (addr, 1))
2896 /* If someone moved a GOT-relative UNSPEC
2897 out of the literal pool, force them back in. */
2900 new = force_const_mem (Pmode, orig);
2903 /* @GOT is OK as is if small. */
2906 new = force_const_mem (Pmode, orig);
2909 /* @GOTENT is OK as is. */
2913 /* @PLT is OK as is on 64-bit, must be converted to
2914 GOT-relative @PLTOFF on 31-bit. */
2916 if (!TARGET_CPU_ZARCH)
2918 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2920 if (reload_in_progress || reload_completed)
2921 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2923 addr = XVECEXP (addr, 0, 0);
2924 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2926 addr = gen_rtx_CONST (Pmode, addr);
2927 addr = force_const_mem (Pmode, addr);
2928 emit_move_insn (temp, addr);
2930 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2933 emit_move_insn (reg, new);
2939 /* Everything else cannot happen. */
2944 else if (GET_CODE (addr) != PLUS)
2947 if (GET_CODE (addr) == PLUS)
2949 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2950 /* Check first to see if this is a constant offset
2951 from a local symbol reference. */
2952 if ((GET_CODE (op0) == LABEL_REF
2953 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2954 && GET_CODE (op1) == CONST_INT)
2956 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2958 if (INTVAL (op1) & 1)
2960 /* LARL can't handle odd offsets, so emit a
2961 pair of LARL and LA. */
2962 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2964 if (!DISP_IN_RANGE (INTVAL (op1)))
2966 int even = INTVAL (op1) - 1;
2967 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2968 op0 = gen_rtx_CONST (Pmode, op0);
2972 emit_move_insn (temp, op0);
2973 new = gen_rtx_PLUS (Pmode, temp, op1);
2977 emit_move_insn (reg, new);
2983 /* If the offset is even, we can just use LARL.
2984 This will happen automatically. */
2989 /* Access local symbols relative to the GOT. */
2991 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2993 if (reload_in_progress || reload_completed)
2994 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2996 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2998 addr = gen_rtx_PLUS (Pmode, addr, op1);
2999 addr = gen_rtx_CONST (Pmode, addr);
3000 addr = force_const_mem (Pmode, addr);
3001 emit_move_insn (temp, addr);
3003 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3006 emit_move_insn (reg, new);
3012 /* Now, check whether it is a GOT relative symbol plus offset
3013 that was pulled out of the literal pool. Force it back in. */
3015 else if (GET_CODE (op0) == UNSPEC
3016 && GET_CODE (op1) == CONST_INT
3017 && XINT (op0, 1) == UNSPEC_GOTOFF)
3019 if (XVECLEN (op0, 0) != 1)
3022 new = force_const_mem (Pmode, orig);
3025 /* Otherwise, compute the sum. */
3028 base = legitimize_pic_address (XEXP (addr, 0), reg);
3029 new = legitimize_pic_address (XEXP (addr, 1),
3030 base == reg ? NULL_RTX : reg);
3031 if (GET_CODE (new) == CONST_INT)
3032 new = plus_constant (base, INTVAL (new));
3035 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3037 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3038 new = XEXP (new, 1);
3040 new = gen_rtx_PLUS (Pmode, base, new);
3043 if (GET_CODE (new) == CONST)
3044 new = XEXP (new, 0);
3045 new = force_operand (new, 0);
3052 /* Load the thread pointer into a register. */
3055 get_thread_pointer (void)
3057 rtx tp = gen_reg_rtx (Pmode);
3059 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3060 mark_reg_pointer (tp, BITS_PER_WORD);
3065 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3066 in s390_tls_symbol which always refers to __tls_get_offset.
3067 The returned offset is written to RESULT_REG and an USE rtx is
3068 generated for TLS_CALL. */
3070 static GTY(()) rtx s390_tls_symbol;
3073 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3080 if (!s390_tls_symbol)
3081 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3083 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3084 gen_rtx_REG (Pmode, RETURN_REGNUM));
3086 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3087 CONST_OR_PURE_CALL_P (insn) = 1;
3090 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3091 this (thread-local) address. REG may be used as temporary. */
3094 legitimize_tls_address (rtx addr, rtx reg)
3096 rtx new, tls_call, temp, base, r2, insn;
3098 if (GET_CODE (addr) == SYMBOL_REF)
3099 switch (tls_symbolic_operand (addr))
3101 case TLS_MODEL_GLOBAL_DYNAMIC:
3103 r2 = gen_rtx_REG (Pmode, 2);
3104 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3105 new = gen_rtx_CONST (Pmode, tls_call);
3106 new = force_const_mem (Pmode, new);
3107 emit_move_insn (r2, new);
3108 s390_emit_tls_call_insn (r2, tls_call);
3109 insn = get_insns ();
3112 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3113 temp = gen_reg_rtx (Pmode);
3114 emit_libcall_block (insn, temp, r2, new);
3116 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3119 s390_load_address (reg, new);
3124 case TLS_MODEL_LOCAL_DYNAMIC:
3126 r2 = gen_rtx_REG (Pmode, 2);
3127 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3128 new = gen_rtx_CONST (Pmode, tls_call);
3129 new = force_const_mem (Pmode, new);
3130 emit_move_insn (r2, new);
3131 s390_emit_tls_call_insn (r2, tls_call);
3132 insn = get_insns ();
3135 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3136 temp = gen_reg_rtx (Pmode);
3137 emit_libcall_block (insn, temp, r2, new);
3139 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3140 base = gen_reg_rtx (Pmode);
3141 s390_load_address (base, new);
3143 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3144 new = gen_rtx_CONST (Pmode, new);
3145 new = force_const_mem (Pmode, new);
3146 temp = gen_reg_rtx (Pmode);
3147 emit_move_insn (temp, new);
3149 new = gen_rtx_PLUS (Pmode, base, temp);
3152 s390_load_address (reg, new);
3157 case TLS_MODEL_INITIAL_EXEC:
3160 /* Assume GOT offset < 4k. This is handled the same way
3161 in both 31- and 64-bit code. */
3163 if (reload_in_progress || reload_completed)
3164 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3166 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3167 new = gen_rtx_CONST (Pmode, new);
3168 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3169 new = gen_const_mem (Pmode, new);
3170 temp = gen_reg_rtx (Pmode);
3171 emit_move_insn (temp, new);
3173 else if (TARGET_CPU_ZARCH)
3175 /* If the GOT offset might be >= 4k, we determine the position
3176 of the GOT entry via a PC-relative LARL. */
3178 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3179 new = gen_rtx_CONST (Pmode, new);
3180 temp = gen_reg_rtx (Pmode);
3181 emit_move_insn (temp, new);
3183 new = gen_const_mem (Pmode, temp);
3184 temp = gen_reg_rtx (Pmode);
3185 emit_move_insn (temp, new);
3189 /* If the GOT offset might be >= 4k, we have to load it
3190 from the literal pool. */
3192 if (reload_in_progress || reload_completed)
3193 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3195 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3196 new = gen_rtx_CONST (Pmode, new);
3197 new = force_const_mem (Pmode, new);
3198 temp = gen_reg_rtx (Pmode);
3199 emit_move_insn (temp, new);
3201 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3202 new = gen_const_mem (Pmode, new);
3204 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3205 temp = gen_reg_rtx (Pmode);
3206 emit_insn (gen_rtx_SET (Pmode, temp, new));
3210 /* In position-dependent code, load the absolute address of
3211 the GOT entry from the literal pool. */
3213 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3214 new = gen_rtx_CONST (Pmode, new);
3215 new = force_const_mem (Pmode, new);
3216 temp = gen_reg_rtx (Pmode);
3217 emit_move_insn (temp, new);
3220 new = gen_const_mem (Pmode, new);
3221 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3222 temp = gen_reg_rtx (Pmode);
3223 emit_insn (gen_rtx_SET (Pmode, temp, new));
3226 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3229 s390_load_address (reg, new);
3234 case TLS_MODEL_LOCAL_EXEC:
3235 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3236 new = gen_rtx_CONST (Pmode, new);
3237 new = force_const_mem (Pmode, new);
3238 temp = gen_reg_rtx (Pmode);
3239 emit_move_insn (temp, new);
3241 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3244 s390_load_address (reg, new);
3253 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3255 switch (XINT (XEXP (addr, 0), 1))
3257 case UNSPEC_INDNTPOFF:
3258 if (TARGET_CPU_ZARCH)
3269 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3270 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3272 new = XEXP (XEXP (addr, 0), 0);
3273 if (GET_CODE (new) != SYMBOL_REF)
3274 new = gen_rtx_CONST (Pmode, new);
3276 new = legitimize_tls_address (new, reg);
3277 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3278 new = force_operand (new, 0);
3282 abort (); /* for now ... */
3287 /* Emit insns to move operands[1] into operands[0]. */
3290 emit_symbolic_move (rtx *operands)
3292 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
3294 if (GET_CODE (operands[0]) == MEM)
3295 operands[1] = force_reg (Pmode, operands[1]);
3296 else if (TLS_SYMBOLIC_CONST (operands[1]))
3297 operands[1] = legitimize_tls_address (operands[1], temp);
3299 operands[1] = legitimize_pic_address (operands[1], temp);
3302 /* Try machine-dependent ways of modifying an illegitimate address X
3303 to be legitimate. If we find one, return the new, valid address.
3305 OLDX is the address as it was before break_out_memory_refs was called.
3306 In some cases it is useful to look at this to decide what needs to be done.
3308 MODE is the mode of the operand pointed to by X.
3310 When -fpic is used, special handling is needed for symbolic references.
3311 See comments by legitimize_pic_address for details. */
3314 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
3315 enum machine_mode mode ATTRIBUTE_UNUSED)
3317 rtx constant_term = const0_rtx;
3319 if (TLS_SYMBOLIC_CONST (x))
3321 x = legitimize_tls_address (x, 0);
3323 if (legitimate_address_p (mode, x, FALSE))
3328 if (SYMBOLIC_CONST (x)
3329 || (GET_CODE (x) == PLUS
3330 && (SYMBOLIC_CONST (XEXP (x, 0))
3331 || SYMBOLIC_CONST (XEXP (x, 1)))))
3332 x = legitimize_pic_address (x, 0);
3334 if (legitimate_address_p (mode, x, FALSE))
3338 x = eliminate_constant_term (x, &constant_term);
3340 /* Optimize loading of large displacements by splitting them
3341 into the multiple of 4K and the rest; this allows the
3342 former to be CSE'd if possible.
3344 Don't do this if the displacement is added to a register
3345 pointing into the stack frame, as the offsets will
3346 change later anyway. */
3348 if (GET_CODE (constant_term) == CONST_INT
3349 && !TARGET_LONG_DISPLACEMENT
3350 && !DISP_IN_RANGE (INTVAL (constant_term))
3351 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3353 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3354 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3356 rtx temp = gen_reg_rtx (Pmode);
3357 rtx val = force_operand (GEN_INT (upper), temp);
3359 emit_move_insn (temp, val);
3361 x = gen_rtx_PLUS (Pmode, x, temp);
3362 constant_term = GEN_INT (lower);
3365 if (GET_CODE (x) == PLUS)
3367 if (GET_CODE (XEXP (x, 0)) == REG)
3369 register rtx temp = gen_reg_rtx (Pmode);
3370 register rtx val = force_operand (XEXP (x, 1), temp);
3372 emit_move_insn (temp, val);
3374 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3377 else if (GET_CODE (XEXP (x, 1)) == REG)
3379 register rtx temp = gen_reg_rtx (Pmode);
3380 register rtx val = force_operand (XEXP (x, 0), temp);
3382 emit_move_insn (temp, val);
3384 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3388 if (constant_term != const0_rtx)
3389 x = gen_rtx_PLUS (Pmode, x, constant_term);
3394 /* Try a machine-dependent way of reloading an illegitimate address AD
3395 operand. If we find one, push the reload and and return the new address.
3397 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3398 and TYPE is the reload type of the current reload. */
3401 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3402 int opnum, int type)
3404 if (!optimize || TARGET_LONG_DISPLACEMENT)
3407 if (GET_CODE (ad) == PLUS)
3409 rtx tem = simplify_binary_operation (PLUS, Pmode,
3410 XEXP (ad, 0), XEXP (ad, 1));
3415 if (GET_CODE (ad) == PLUS
3416 && GET_CODE (XEXP (ad, 0)) == REG
3417 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3418 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3420 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3421 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3424 cst = GEN_INT (upper);
3425 if (!legitimate_reload_constant_p (cst))
3426 cst = force_const_mem (Pmode, cst);
3428 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3429 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3431 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3432 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3433 opnum, (enum reload_type) type);
3440 /* Emit code to move LEN bytes from DST to SRC. */
3443 s390_expand_movmem (rtx dst, rtx src, rtx len)
3445 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3447 if (INTVAL (len) > 0)
3448 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3451 else if (TARGET_MVCLE)
3453 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3458 rtx dst_addr, src_addr, count, blocks, temp;
3459 rtx loop_start_label = gen_label_rtx ();
3460 rtx loop_end_label = gen_label_rtx ();
3461 rtx end_label = gen_label_rtx ();
3462 enum machine_mode mode;
3464 mode = GET_MODE (len);
3465 if (mode == VOIDmode)
3468 dst_addr = gen_reg_rtx (Pmode);
3469 src_addr = gen_reg_rtx (Pmode);
3470 count = gen_reg_rtx (mode);
3471 blocks = gen_reg_rtx (mode);
3473 convert_move (count, len, 1);
3474 emit_cmp_and_jump_insns (count, const0_rtx,
3475 EQ, NULL_RTX, mode, 1, end_label);
3477 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3478 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3479 dst = change_address (dst, VOIDmode, dst_addr);
3480 src = change_address (src, VOIDmode, src_addr);
3482 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3484 emit_move_insn (count, temp);
3486 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3488 emit_move_insn (blocks, temp);
3490 emit_cmp_and_jump_insns (blocks, const0_rtx,
3491 EQ, NULL_RTX, mode, 1, loop_end_label);
3493 emit_label (loop_start_label);
3495 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3496 s390_load_address (dst_addr,
3497 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3498 s390_load_address (src_addr,
3499 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3501 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3503 emit_move_insn (blocks, temp);
3505 emit_cmp_and_jump_insns (blocks, const0_rtx,
3506 EQ, NULL_RTX, mode, 1, loop_end_label);
3508 emit_jump (loop_start_label);
3509 emit_label (loop_end_label);
3511 emit_insn (gen_movmem_short (dst, src,
3512 convert_to_mode (Pmode, count, 1)));
3513 emit_label (end_label);
3517 /* Emit code to clear LEN bytes at DST. */
3520 s390_expand_clrmem (rtx dst, rtx len)
3522 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3524 if (INTVAL (len) > 0)
3525 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3528 else if (TARGET_MVCLE)
3530 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
3535 rtx dst_addr, src_addr, count, blocks, temp;
3536 rtx loop_start_label = gen_label_rtx ();
3537 rtx loop_end_label = gen_label_rtx ();
3538 rtx end_label = gen_label_rtx ();
3539 enum machine_mode mode;
3541 mode = GET_MODE (len);
3542 if (mode == VOIDmode)
3545 dst_addr = gen_reg_rtx (Pmode);
3546 src_addr = gen_reg_rtx (Pmode);
3547 count = gen_reg_rtx (mode);
3548 blocks = gen_reg_rtx (mode);
3550 convert_move (count, len, 1);
3551 emit_cmp_and_jump_insns (count, const0_rtx,
3552 EQ, NULL_RTX, mode, 1, end_label);
3554 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3555 dst = change_address (dst, VOIDmode, dst_addr);
3557 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3559 emit_move_insn (count, temp);
3561 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3563 emit_move_insn (blocks, temp);
3565 emit_cmp_and_jump_insns (blocks, const0_rtx,
3566 EQ, NULL_RTX, mode, 1, loop_end_label);
3568 emit_label (loop_start_label);
3570 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3571 s390_load_address (dst_addr,
3572 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3574 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3576 emit_move_insn (blocks, temp);
3578 emit_cmp_and_jump_insns (blocks, const0_rtx,
3579 EQ, NULL_RTX, mode, 1, loop_end_label);
3581 emit_jump (loop_start_label);
3582 emit_label (loop_end_label);
3584 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3585 emit_label (end_label);
3589 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3590 and return the result in TARGET. */
3593 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3595 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3596 rtx result = gen_rtx_UNSPEC (SImode, gen_rtvec (1, ccreg), UNSPEC_CMPINT);
3598 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3600 if (INTVAL (len) > 0)
3602 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3603 emit_move_insn (target, result);
3606 emit_move_insn (target, const0_rtx);
3608 else if (TARGET_MVCLE)
3610 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3611 emit_move_insn (target, result);
3615 rtx addr0, addr1, count, blocks, temp;
3616 rtx loop_start_label = gen_label_rtx ();
3617 rtx loop_end_label = gen_label_rtx ();
3618 rtx end_label = gen_label_rtx ();
3619 enum machine_mode mode;
3621 mode = GET_MODE (len);
3622 if (mode == VOIDmode)
3625 addr0 = gen_reg_rtx (Pmode);
3626 addr1 = gen_reg_rtx (Pmode);
3627 count = gen_reg_rtx (mode);
3628 blocks = gen_reg_rtx (mode);
3630 convert_move (count, len, 1);
3631 emit_cmp_and_jump_insns (count, const0_rtx,
3632 EQ, NULL_RTX, mode, 1, end_label);
3634 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3635 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3636 op0 = change_address (op0, VOIDmode, addr0);
3637 op1 = change_address (op1, VOIDmode, addr1);
3639 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3641 emit_move_insn (count, temp);
3643 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3645 emit_move_insn (blocks, temp);
3647 emit_cmp_and_jump_insns (blocks, const0_rtx,
3648 EQ, NULL_RTX, mode, 1, loop_end_label);
3650 emit_label (loop_start_label);
3652 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3653 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3654 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3655 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3656 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3657 emit_jump_insn (temp);
3659 s390_load_address (addr0,
3660 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3661 s390_load_address (addr1,
3662 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3664 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3666 emit_move_insn (blocks, temp);
3668 emit_cmp_and_jump_insns (blocks, const0_rtx,
3669 EQ, NULL_RTX, mode, 1, loop_end_label);
3671 emit_jump (loop_start_label);
3672 emit_label (loop_end_label);
3674 emit_insn (gen_cmpmem_short (op0, op1,
3675 convert_to_mode (Pmode, count, 1)));
3676 emit_label (end_label);
3678 emit_move_insn (target, result);
3683 /* Expand conditional increment or decrement using alc/slb instructions.
3684 Should generate code setting DST to either SRC or SRC + INCREMENT,
3685 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3686 Returns true if successful, false otherwise. */
3689 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3690 rtx dst, rtx src, rtx increment)
3692 enum machine_mode cmp_mode;
3693 enum machine_mode cc_mode;
3698 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3699 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3701 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3702 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3707 /* Try ADD LOGICAL WITH CARRY. */
3708 if (increment == const1_rtx)
3710 /* Determine CC mode to use. */
3711 if (cmp_code == EQ || cmp_code == NE)
3713 if (cmp_op1 != const0_rtx)
3715 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3716 NULL_RTX, 0, OPTAB_WIDEN);
3717 cmp_op1 = const0_rtx;
3720 cmp_code = cmp_code == EQ ? LEU : GTU;
3723 if (cmp_code == LTU || cmp_code == LEU)
3728 cmp_code = swap_condition (cmp_code);
3745 /* Emit comparison instruction pattern. */
3746 if (!register_operand (cmp_op0, cmp_mode))
3747 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3749 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3750 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3751 /* We use insn_invalid_p here to add clobbers if required. */
3752 if (insn_invalid_p (emit_insn (insn)))
3755 /* Emit ALC instruction pattern. */
3756 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3757 gen_rtx_REG (cc_mode, CC_REGNUM),
3760 if (src != const0_rtx)
3762 if (!register_operand (src, GET_MODE (dst)))
3763 src = force_reg (GET_MODE (dst), src);
3765 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3766 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3769 p = rtvec_alloc (2);
3771 gen_rtx_SET (VOIDmode, dst, op_res);
3773 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3774 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3779 /* Try SUBTRACT LOGICAL WITH BORROW. */
3780 if (increment == constm1_rtx)
3782 /* Determine CC mode to use. */
3783 if (cmp_code == EQ || cmp_code == NE)
3785 if (cmp_op1 != const0_rtx)
3787 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3788 NULL_RTX, 0, OPTAB_WIDEN);
3789 cmp_op1 = const0_rtx;
3792 cmp_code = cmp_code == EQ ? LEU : GTU;
3795 if (cmp_code == GTU || cmp_code == GEU)
3800 cmp_code = swap_condition (cmp_code);
3817 /* Emit comparison instruction pattern. */
3818 if (!register_operand (cmp_op0, cmp_mode))
3819 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3821 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3822 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3823 /* We use insn_invalid_p here to add clobbers if required. */
3824 if (insn_invalid_p (emit_insn (insn)))
3827 /* Emit SLB instruction pattern. */
3828 if (!register_operand (src, GET_MODE (dst)))
3829 src = force_reg (GET_MODE (dst), src);
3831 op_res = gen_rtx_MINUS (GET_MODE (dst),
3832 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3833 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3834 gen_rtx_REG (cc_mode, CC_REGNUM),
3836 p = rtvec_alloc (2);
3838 gen_rtx_SET (VOIDmode, dst, op_res);
3840 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3841 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3850 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3851 We need to emit DTP-relative relocations. */
3854 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3859 fputs ("\t.long\t", file);
3862 fputs ("\t.quad\t", file);
3867 output_addr_const (file, x);
3868 fputs ("@DTPOFF", file);
3871 /* In the name of slightly smaller debug output, and to cater to
3872 general assembler losage, recognize various UNSPEC sequences
3873 and turn them back into a direct symbol reference. */
3876 s390_delegitimize_address (rtx orig_x)
3880 if (GET_CODE (x) != MEM)
3884 if (GET_CODE (x) == PLUS
3885 && GET_CODE (XEXP (x, 1)) == CONST
3886 && GET_CODE (XEXP (x, 0)) == REG
3887 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3889 y = XEXP (XEXP (x, 1), 0);
3890 if (GET_CODE (y) == UNSPEC
3891 && XINT (y, 1) == UNSPEC_GOT)
3892 return XVECEXP (y, 0, 0);
3896 if (GET_CODE (x) == CONST)
3899 if (GET_CODE (y) == UNSPEC
3900 && XINT (y, 1) == UNSPEC_GOTENT)
3901 return XVECEXP (y, 0, 0);
3908 /* Output shift count operand OP to stdio stream FILE. */
3911 print_shift_count_operand (FILE *file, rtx op)
3913 HOST_WIDE_INT offset = 0;
3915 /* We can have an integer constant, an address register,
3916 or a sum of the two. */
3917 if (GET_CODE (op) == CONST_INT)
3919 offset = INTVAL (op);
3922 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3924 offset = INTVAL (XEXP (op, 1));
3927 while (op && GET_CODE (op) == SUBREG)
3928 op = SUBREG_REG (op);
3931 if (op && (GET_CODE (op) != REG
3932 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3933 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3936 /* Shift counts are truncated to the low six bits anyway. */
3937 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3939 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3942 /* Locate some local-dynamic symbol still in use by this function
3943 so that we can print its name in local-dynamic base patterns. */
3946 get_some_local_dynamic_name (void)
3950 if (cfun->machine->some_ld_name)
3951 return cfun->machine->some_ld_name;
3953 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3955 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3956 return cfun->machine->some_ld_name;
3962 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3966 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3968 x = get_pool_constant (x);
3969 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3972 if (GET_CODE (x) == SYMBOL_REF
3973 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3975 cfun->machine->some_ld_name = XSTR (x, 0);
3982 /* Output machine-dependent UNSPECs occurring in address constant X
3983 in assembler syntax to stdio stream FILE. Returns true if the
3984 constant X could be recognized, false otherwise. */
3987 s390_output_addr_const_extra (FILE *file, rtx x)
3989 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3990 switch (XINT (x, 1))
3993 output_addr_const (file, XVECEXP (x, 0, 0));
3994 fprintf (file, "@GOTENT");
3997 output_addr_const (file, XVECEXP (x, 0, 0));
3998 fprintf (file, "@GOT");
4001 output_addr_const (file, XVECEXP (x, 0, 0));
4002 fprintf (file, "@GOTOFF");
4005 output_addr_const (file, XVECEXP (x, 0, 0));
4006 fprintf (file, "@PLT");
4009 output_addr_const (file, XVECEXP (x, 0, 0));
4010 fprintf (file, "@PLTOFF");
4013 output_addr_const (file, XVECEXP (x, 0, 0));
4014 fprintf (file, "@TLSGD");
4017 assemble_name (file, get_some_local_dynamic_name ());
4018 fprintf (file, "@TLSLDM");
4021 output_addr_const (file, XVECEXP (x, 0, 0));
4022 fprintf (file, "@DTPOFF");
4025 output_addr_const (file, XVECEXP (x, 0, 0));
4026 fprintf (file, "@NTPOFF");
4028 case UNSPEC_GOTNTPOFF:
4029 output_addr_const (file, XVECEXP (x, 0, 0));
4030 fprintf (file, "@GOTNTPOFF");
4032 case UNSPEC_INDNTPOFF:
4033 output_addr_const (file, XVECEXP (x, 0, 0));
4034 fprintf (file, "@INDNTPOFF");
4041 /* Output address operand ADDR in assembler syntax to
4042 stdio stream FILE. */
4045 print_operand_address (FILE *file, rtx addr)
4047 struct s390_address ad;
4049 if (!s390_decompose_address (addr, &ad)
4050 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4051 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4052 output_operand_lossage ("Cannot decompose address.");
4055 output_addr_const (file, ad.disp);
4057 fprintf (file, "0");
4059 if (ad.base && ad.indx)
4060 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4061 reg_names[REGNO (ad.base)]);
4063 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4066 /* Output operand X in assembler syntax to stdio stream FILE.
4067 CODE specified the format flag. The following format flags
4070 'C': print opcode suffix for branch condition.
4071 'D': print opcode suffix for inverse branch condition.
4072 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4073 'O': print only the displacement of a memory reference.
4074 'R': print only the base register of a memory reference.
4075 'S': print S-type memory reference (base+displacement).
4076 'N': print the second word of a DImode operand.
4077 'M': print the second word of a TImode operand.
4078 'Y': print shift count operand.
4080 'b': print integer X as if it's an unsigned byte.
4081 'x': print integer X as if it's an unsigned word.
4082 'h': print integer X as if it's a signed word.
4083 'i': print the first nonzero HImode part of X.
4084 'j': print the first HImode part unequal to 0xffff of X. */
4087 print_operand (FILE *file, rtx x, int code)
4092 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4096 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4100 if (GET_CODE (x) == SYMBOL_REF)
4102 fprintf (file, "%s", ":tls_load:");
4103 output_addr_const (file, x);
4105 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4107 fprintf (file, "%s", ":tls_gdcall:");
4108 output_addr_const (file, XVECEXP (x, 0, 0));
4110 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4112 fprintf (file, "%s", ":tls_ldcall:");
4113 assemble_name (file, get_some_local_dynamic_name ());
4121 struct s390_address ad;
4123 if (GET_CODE (x) != MEM
4124 || !s390_decompose_address (XEXP (x, 0), &ad)
4125 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4130 output_addr_const (file, ad.disp);
4132 fprintf (file, "0");
4138 struct s390_address ad;
4140 if (GET_CODE (x) != MEM
4141 || !s390_decompose_address (XEXP (x, 0), &ad)
4142 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4147 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4149 fprintf (file, "0");
4155 struct s390_address ad;
4157 if (GET_CODE (x) != MEM
4158 || !s390_decompose_address (XEXP (x, 0), &ad)
4159 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4164 output_addr_const (file, ad.disp);
4166 fprintf (file, "0");
4169 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4174 if (GET_CODE (x) == REG)
4175 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4176 else if (GET_CODE (x) == MEM)
4177 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4183 if (GET_CODE (x) == REG)
4184 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4185 else if (GET_CODE (x) == MEM)
4186 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4192 print_shift_count_operand (file, x);
4196 switch (GET_CODE (x))
4199 fprintf (file, "%s", reg_names[REGNO (x)]);
4203 output_address (XEXP (x, 0));
4210 output_addr_const (file, x);
4215 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4216 else if (code == 'x')
4217 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4218 else if (code == 'h')
4219 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4220 else if (code == 'i')
4221 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4222 s390_extract_part (x, HImode, 0));
4223 else if (code == 'j')
4224 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4225 s390_extract_part (x, HImode, -1));
4227 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4231 if (GET_MODE (x) != VOIDmode)
4234 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4235 else if (code == 'x')
4236 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4237 else if (code == 'h')
4238 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4244 fatal_insn ("UNKNOWN in print_operand !?", x);
4249 /* Target hook for assembling integer objects. We need to define it
4250 here to work a round a bug in some versions of GAS, which couldn't
4251 handle values smaller than INT_MIN when printed in decimal. */
4254 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4256 if (size == 8 && aligned_p
4257 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4259 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4263 return default_assemble_integer (x, size, aligned_p);
4266 /* Returns true if register REGNO is used for forming
4267 a memory address in expression X. */
4270 reg_used_in_mem_p (int regno, rtx x)
4272 enum rtx_code code = GET_CODE (x);
4278 if (refers_to_regno_p (regno, regno+1,
4282 else if (code == SET
4283 && GET_CODE (SET_DEST (x)) == PC)
4285 if (refers_to_regno_p (regno, regno+1,
4290 fmt = GET_RTX_FORMAT (code);
4291 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4294 && reg_used_in_mem_p (regno, XEXP (x, i)))
4297 else if (fmt[i] == 'E')
4298 for (j = 0; j < XVECLEN (x, i); j++)
4299 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4305 /* Returns true if expression DEP_RTX sets an address register
4306 used by instruction INSN to address memory. */
4309 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4313 if (GET_CODE (dep_rtx) == INSN)
4314 dep_rtx = PATTERN (dep_rtx);
4316 if (GET_CODE (dep_rtx) == SET)
4318 target = SET_DEST (dep_rtx);
4319 if (GET_CODE (target) == STRICT_LOW_PART)
4320 target = XEXP (target, 0);
4321 while (GET_CODE (target) == SUBREG)
4322 target = SUBREG_REG (target);
4324 if (GET_CODE (target) == REG)
4326 int regno = REGNO (target);
4328 if (s390_safe_attr_type (insn) == TYPE_LA)
4330 pat = PATTERN (insn);
4331 if (GET_CODE (pat) == PARALLEL)
4333 if (XVECLEN (pat, 0) != 2)
4335 pat = XVECEXP (pat, 0, 0);
4337 if (GET_CODE (pat) == SET)
4338 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4342 else if (get_attr_atype (insn) == ATYPE_AGEN)
4343 return reg_used_in_mem_p (regno, PATTERN (insn));
4349 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
4352 s390_agen_dep_p (rtx dep_insn, rtx insn)
4354 rtx dep_rtx = PATTERN (dep_insn);
4357 if (GET_CODE (dep_rtx) == SET
4358 && addr_generation_dependency_p (dep_rtx, insn))
4360 else if (GET_CODE (dep_rtx) == PARALLEL)
4362 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4364 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4371 /* A C statement (sans semicolon) to update the integer scheduling priority
4372 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4373 reduce the priority to execute INSN later. Do not define this macro if
4374 you do not need to adjust the scheduling priorities of insns.
4376 A STD instruction should be scheduled earlier,
4377 in order to use the bypass. */
4380 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4382 if (! INSN_P (insn))
4385 if (s390_tune != PROCESSOR_2084_Z990)
4388 switch (s390_safe_attr_type (insn))
4392 priority = priority << 3;
4396 priority = priority << 1;
4404 /* The number of instructions that can be issued per cycle. */
4407 s390_issue_rate (void)
4409 if (s390_tune == PROCESSOR_2084_Z990)
4415 s390_first_cycle_multipass_dfa_lookahead (void)
4421 /* Split all branches that exceed the maximum distance.
4422 Returns true if this created a new literal pool entry. */
4425 s390_split_branches (void)
4427 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4428 int new_literal = 0;
4429 rtx insn, pat, tmp, target;
4432 /* We need correct insn addresses. */
4434 shorten_branches (get_insns ());
4436 /* Find all branches that exceed 64KB, and split them. */
4438 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4440 if (GET_CODE (insn) != JUMP_INSN)
4443 pat = PATTERN (insn);
4444 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4445 pat = XVECEXP (pat, 0, 0);
4446 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
4449 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4451 label = &SET_SRC (pat);
4453 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4455 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4456 label = &XEXP (SET_SRC (pat), 1);
4457 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4458 label = &XEXP (SET_SRC (pat), 2);
4465 if (get_attr_length (insn) <= 4)
4468 /* We are going to use the return register as scratch register,
4469 make sure it will be saved/restored by the prologue/epilogue. */
4470 cfun_frame_layout.save_return_addr_p = 1;
4475 tmp = force_const_mem (Pmode, *label);
4476 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4477 INSN_ADDRESSES_NEW (tmp, -1);
4478 annotate_constant_pool_refs (&PATTERN (tmp));
4485 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4486 UNSPEC_LTREL_OFFSET);
4487 target = gen_rtx_CONST (Pmode, target);
4488 target = force_const_mem (Pmode, target);
4489 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4490 INSN_ADDRESSES_NEW (tmp, -1);
4491 annotate_constant_pool_refs (&PATTERN (tmp));
4493 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4494 cfun->machine->base_reg),
4496 target = gen_rtx_PLUS (Pmode, temp_reg, target);
4499 if (!validate_change (insn, label, target, 0))
4506 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4507 Fix up MEMs as required. */
4510 annotate_constant_pool_refs (rtx *x)
4515 if (GET_CODE (*x) == SYMBOL_REF
4516 && CONSTANT_POOL_ADDRESS_P (*x))
4519 /* Literal pool references can only occur inside a MEM ... */
4520 if (GET_CODE (*x) == MEM)
4522 rtx memref = XEXP (*x, 0);
4524 if (GET_CODE (memref) == SYMBOL_REF
4525 && CONSTANT_POOL_ADDRESS_P (memref))
4527 rtx base = cfun->machine->base_reg;
4528 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4531 *x = replace_equiv_address (*x, addr);
4535 if (GET_CODE (memref) == CONST
4536 && GET_CODE (XEXP (memref, 0)) == PLUS
4537 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4538 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4539 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4541 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4542 rtx sym = XEXP (XEXP (memref, 0), 0);
4543 rtx base = cfun->machine->base_reg;
4544 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4547 *x = replace_equiv_address (*x, plus_constant (addr, off));
4552 /* ... or a load-address type pattern. */
4553 if (GET_CODE (*x) == SET)
4555 rtx addrref = SET_SRC (*x);
4557 if (GET_CODE (addrref) == SYMBOL_REF
4558 && CONSTANT_POOL_ADDRESS_P (addrref))
4560 rtx base = cfun->machine->base_reg;
4561 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4564 SET_SRC (*x) = addr;
4568 if (GET_CODE (addrref) == CONST
4569 && GET_CODE (XEXP (addrref, 0)) == PLUS
4570 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4571 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4572 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4574 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4575 rtx sym = XEXP (XEXP (addrref, 0), 0);
4576 rtx base = cfun->machine->base_reg;
4577 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4580 SET_SRC (*x) = plus_constant (addr, off);
4585 /* Annotate LTREL_BASE as well. */
4586 if (GET_CODE (*x) == UNSPEC
4587 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4589 rtx base = cfun->machine->base_reg;
4590 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
4595 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4596 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4600 annotate_constant_pool_refs (&XEXP (*x, i));
4602 else if (fmt[i] == 'E')
4604 for (j = 0; j < XVECLEN (*x, i); j++)
4605 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4611 /* Find an annotated literal pool symbol referenced in RTX X,
4612 and store it at REF. Will abort if X contains references to
4613 more than one such pool symbol; multiple references to the same
4614 symbol are allowed, however.
4616 The rtx pointed to by REF must be initialized to NULL_RTX
4617 by the caller before calling this routine. */
4620 find_constant_pool_ref (rtx x, rtx *ref)
4625 /* Ignore LTREL_BASE references. */
4626 if (GET_CODE (x) == UNSPEC
4627 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4629 /* Likewise POOL_ENTRY insns. */
4630 if (GET_CODE (x) == UNSPEC_VOLATILE
4631 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
4634 if (GET_CODE (x) == SYMBOL_REF
4635 && CONSTANT_POOL_ADDRESS_P (x))
4638 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4640 rtx sym = XVECEXP (x, 0, 0);
4641 if (GET_CODE (sym) != SYMBOL_REF
4642 || !CONSTANT_POOL_ADDRESS_P (sym))
4645 if (*ref == NULL_RTX)
4647 else if (*ref != sym)
4653 fmt = GET_RTX_FORMAT (GET_CODE (x));
4654 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4658 find_constant_pool_ref (XEXP (x, i), ref);
4660 else if (fmt[i] == 'E')
4662 for (j = 0; j < XVECLEN (x, i); j++)
4663 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4668 /* Replace every reference to the annotated literal pool
4669 symbol REF in X by its base plus OFFSET. */
4672 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
4680 if (GET_CODE (*x) == UNSPEC
4681 && XINT (*x, 1) == UNSPEC_LTREF
4682 && XVECEXP (*x, 0, 0) == ref)
4684 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
4688 if (GET_CODE (*x) == PLUS
4689 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4690 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4691 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4692 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4694 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4695 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
4699 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4700 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4704 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4706 else if (fmt[i] == 'E')
4708 for (j = 0; j < XVECLEN (*x, i); j++)
4709 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4714 /* Check whether X contains an UNSPEC_LTREL_BASE.
4715 Return its constant pool symbol if found, NULL_RTX otherwise. */
4718 find_ltrel_base (rtx x)
4723 if (GET_CODE (x) == UNSPEC
4724 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4725 return XVECEXP (x, 0, 0);
4727 fmt = GET_RTX_FORMAT (GET_CODE (x));
4728 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4732 rtx fnd = find_ltrel_base (XEXP (x, i));
4736 else if (fmt[i] == 'E')
4738 for (j = 0; j < XVECLEN (x, i); j++)
4740 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4750 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4753 replace_ltrel_base (rtx *x)
4758 if (GET_CODE (*x) == UNSPEC
4759 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4761 *x = XVECEXP (*x, 0, 1);
4765 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4766 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4770 replace_ltrel_base (&XEXP (*x, i));
4772 else if (fmt[i] == 'E')
4774 for (j = 0; j < XVECLEN (*x, i); j++)
4775 replace_ltrel_base (&XVECEXP (*x, i, j));
4781 /* We keep a list of constants which we have to add to internal
4782 constant tables in the middle of large functions. */
4784 #define NR_C_MODES 7
4785 enum machine_mode constant_modes[NR_C_MODES] =
4796 struct constant *next;
4801 struct constant_pool
4803 struct constant_pool *next;
4808 struct constant *constants[NR_C_MODES];
4809 struct constant *execute;
4814 static struct constant_pool * s390_mainpool_start (void);
4815 static void s390_mainpool_finish (struct constant_pool *);
4816 static void s390_mainpool_cancel (struct constant_pool *);
4818 static struct constant_pool * s390_chunkify_start (void);
4819 static void s390_chunkify_finish (struct constant_pool *);
4820 static void s390_chunkify_cancel (struct constant_pool *);
4822 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4823 static void s390_end_pool (struct constant_pool *, rtx);
4824 static void s390_add_pool_insn (struct constant_pool *, rtx);
4825 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4826 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4827 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4828 static void s390_add_execute (struct constant_pool *, rtx);
4829 static rtx s390_find_execute (struct constant_pool *, rtx);
4830 static rtx s390_execute_label (rtx);
4831 static rtx s390_execute_target (rtx);
4832 static void s390_dump_pool (struct constant_pool *, bool);
4833 static void s390_dump_execute (struct constant_pool *);
4834 static struct constant_pool *s390_alloc_pool (void);
4835 static void s390_free_pool (struct constant_pool *);
4837 /* Create new constant pool covering instructions starting at INSN
4838 and chain it to the end of POOL_LIST. */
4840 static struct constant_pool *
4841 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4843 struct constant_pool *pool, **prev;
4845 pool = s390_alloc_pool ();
4846 pool->first_insn = insn;
4848 for (prev = pool_list; *prev; prev = &(*prev)->next)
4855 /* End range of instructions covered by POOL at INSN and emit
4856 placeholder insn representing the pool. */
4859 s390_end_pool (struct constant_pool *pool, rtx insn)
4861 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4864 insn = get_last_insn ();
4866 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4867 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4870 /* Add INSN to the list of insns covered by POOL. */
4873 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4875 bitmap_set_bit (pool->insns, INSN_UID (insn));
4878 /* Return pool out of POOL_LIST that covers INSN. */
4880 static struct constant_pool *
4881 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4883 struct constant_pool *pool;
4885 for (pool = pool_list; pool; pool = pool->next)
4886 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4892 /* Add constant VAL of mode MODE to the constant pool POOL. */
4895 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4900 for (i = 0; i < NR_C_MODES; i++)
4901 if (constant_modes[i] == mode)
4903 if (i == NR_C_MODES)
4906 for (c = pool->constants[i]; c != NULL; c = c->next)
4907 if (rtx_equal_p (val, c->value))
4912 c = (struct constant *) xmalloc (sizeof *c);
4914 c->label = gen_label_rtx ();
4915 c->next = pool->constants[i];
4916 pool->constants[i] = c;
4917 pool->size += GET_MODE_SIZE (mode);
4921 /* Find constant VAL of mode MODE in the constant pool POOL.
4922 Return an RTX describing the distance from the start of
4923 the pool to the location of the new constant. */
4926 s390_find_constant (struct constant_pool *pool, rtx val,
4927 enum machine_mode mode)
4933 for (i = 0; i < NR_C_MODES; i++)
4934 if (constant_modes[i] == mode)
4936 if (i == NR_C_MODES)
4939 for (c = pool->constants[i]; c != NULL; c = c->next)
4940 if (rtx_equal_p (val, c->value))
4946 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4947 gen_rtx_LABEL_REF (Pmode, pool->label));
4948 offset = gen_rtx_CONST (Pmode, offset);
4952 /* Add execute target for INSN to the constant pool POOL. */
4955 s390_add_execute (struct constant_pool *pool, rtx insn)
4959 for (c = pool->execute; c != NULL; c = c->next)
4960 if (INSN_UID (insn) == INSN_UID (c->value))
4965 rtx label = s390_execute_label (insn);
4968 c = (struct constant *) xmalloc (sizeof *c);
4970 c->label = label == const0_rtx ? gen_label_rtx () : XEXP (label, 0);
4971 c->next = pool->execute;
4973 pool->size += label == const0_rtx ? 6 : 0;
4977 /* Find execute target for INSN in the constant pool POOL.
4978 Return an RTX describing the distance from the start of
4979 the pool to the location of the execute target. */
4982 s390_find_execute (struct constant_pool *pool, rtx insn)
4987 for (c = pool->execute; c != NULL; c = c->next)
4988 if (INSN_UID (insn) == INSN_UID (c->value))
4994 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4995 gen_rtx_LABEL_REF (Pmode, pool->label));
4996 offset = gen_rtx_CONST (Pmode, offset);
5000 /* Check whether INSN is an execute. Return the label_ref to its
5001 execute target template if so, NULL_RTX otherwise. */
5004 s390_execute_label (rtx insn)
5006 if (GET_CODE (insn) == INSN
5007 && GET_CODE (PATTERN (insn)) == PARALLEL
5008 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5009 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5010 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5015 /* For an execute INSN, extract the execute target template. */
5018 s390_execute_target (rtx insn)
5020 rtx pattern = PATTERN (insn);
5021 gcc_assert (s390_execute_label (insn));
5023 if (XVECLEN (pattern, 0) == 2)
5025 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5029 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5032 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5033 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5035 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5041 /* Indicate that INSN cannot be duplicated. This is the case for
5042 execute insns that carry a unique label. */
5045 s390_cannot_copy_insn_p (rtx insn)
5047 rtx label = s390_execute_label (insn);
5048 return label && label != const0_rtx;
5051 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5052 do not emit the pool base label. */
5055 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5058 rtx insn = pool->pool_insn;
5061 /* Switch to rodata section. */
5062 if (TARGET_CPU_ZARCH)
5064 insn = emit_insn_after (gen_pool_section_start (), insn);
5065 INSN_ADDRESSES_NEW (insn, -1);
5068 /* Ensure minimum pool alignment. */
5069 if (TARGET_CPU_ZARCH)
5070 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5072 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5073 INSN_ADDRESSES_NEW (insn, -1);
5075 /* Emit pool base label. */
5078 insn = emit_label_after (pool->label, insn);
5079 INSN_ADDRESSES_NEW (insn, -1);
5082 /* Dump constants in descending alignment requirement order,
5083 ensuring proper alignment for every constant. */
5084 for (i = 0; i < NR_C_MODES; i++)
5085 for (c = pool->constants[i]; c; c = c->next)
5087 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5088 rtx value = c->value;
5089 if (GET_CODE (value) == CONST
5090 && GET_CODE (XEXP (value, 0)) == UNSPEC
5091 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5092 && XVECLEN (XEXP (value, 0), 0) == 1)
5094 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5095 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5096 value = gen_rtx_CONST (VOIDmode, value);
5099 insn = emit_label_after (c->label, insn);
5100 INSN_ADDRESSES_NEW (insn, -1);
5102 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5103 gen_rtvec (1, value),
5104 UNSPECV_POOL_ENTRY);
5105 insn = emit_insn_after (value, insn);
5106 INSN_ADDRESSES_NEW (insn, -1);
5109 /* Ensure minimum alignment for instructions. */
5110 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5111 INSN_ADDRESSES_NEW (insn, -1);
5113 /* Output in-pool execute template insns. */
5114 for (c = pool->execute; c; c = c->next)
5116 if (s390_execute_label (c->value) != const0_rtx)
5119 insn = emit_label_after (c->label, insn);
5120 INSN_ADDRESSES_NEW (insn, -1);
5122 insn = emit_insn_after (s390_execute_target (c->value), insn);
5123 INSN_ADDRESSES_NEW (insn, -1);
5126 /* Switch back to previous section. */
5127 if (TARGET_CPU_ZARCH)
5129 insn = emit_insn_after (gen_pool_section_end (), insn);
5130 INSN_ADDRESSES_NEW (insn, -1);
5133 insn = emit_barrier_after (insn);
5134 INSN_ADDRESSES_NEW (insn, -1);
5136 /* Remove placeholder insn. */
5137 remove_insn (pool->pool_insn);
5139 /* Output out-of-pool execute template isns. */
5140 s390_dump_execute (pool);
5143 /* Dump out the out-of-pool execute template insns in POOL
5144 at the end of the instruction stream. */
5147 s390_dump_execute (struct constant_pool *pool)
5152 for (c = pool->execute; c; c = c->next)
5154 if (s390_execute_label (c->value) == const0_rtx)
5157 insn = emit_label (c->label);
5158 INSN_ADDRESSES_NEW (insn, -1);
5160 insn = emit_insn (s390_execute_target (c->value));
5161 INSN_ADDRESSES_NEW (insn, -1);
5165 /* Allocate new constant_pool structure. */
5167 static struct constant_pool *
5168 s390_alloc_pool (void)
5170 struct constant_pool *pool;
5173 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5175 for (i = 0; i < NR_C_MODES; i++)
5176 pool->constants[i] = NULL;
5178 pool->execute = NULL;
5179 pool->label = gen_label_rtx ();
5180 pool->first_insn = NULL_RTX;
5181 pool->pool_insn = NULL_RTX;
5182 pool->insns = BITMAP_XMALLOC ();
5188 /* Free all memory used by POOL. */
5191 s390_free_pool (struct constant_pool *pool)
5193 struct constant *c, *next;
5196 for (i = 0; i < NR_C_MODES; i++)
5197 for (c = pool->constants[i]; c; c = next)
5203 for (c = pool->execute; c; c = next)
5209 BITMAP_XFREE (pool->insns);
5214 /* Collect main literal pool. Return NULL on overflow. */
5216 static struct constant_pool *
5217 s390_mainpool_start (void)
5219 struct constant_pool *pool;
5222 pool = s390_alloc_pool ();
5224 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5226 if (GET_CODE (insn) == INSN
5227 && GET_CODE (PATTERN (insn)) == SET
5228 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5229 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5231 if (pool->pool_insn)
5233 pool->pool_insn = insn;
5236 if (s390_execute_label (insn))
5238 s390_add_execute (pool, insn);
5240 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5242 rtx pool_ref = NULL_RTX;
5243 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5246 rtx constant = get_pool_constant (pool_ref);
5247 enum machine_mode mode = get_pool_mode (pool_ref);
5248 s390_add_constant (pool, constant, mode);
5253 if (!pool->pool_insn && pool->size > 0)
5256 if (pool->size >= 4096)
5258 /* We're going to chunkify the pool, so remove the main
5259 pool placeholder insn. */
5260 remove_insn (pool->pool_insn);
5262 s390_free_pool (pool);
5269 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5270 Modify the current function to output the pool constants as well as
5271 the pool register setup instruction. */
5274 s390_mainpool_finish (struct constant_pool *pool)
5276 rtx base_reg = cfun->machine->base_reg;
5279 /* If the pool is empty, we're done. */
5280 if (pool->size == 0)
5282 /* However, we may have out-of-pool execute templates. */
5283 s390_dump_execute (pool);
5285 /* We don't actually need a base register after all. */
5286 cfun->machine->base_reg = NULL_RTX;
5288 if (pool->pool_insn)
5289 remove_insn (pool->pool_insn);
5290 s390_free_pool (pool);
5294 /* We need correct insn addresses. */
5295 shorten_branches (get_insns ());
5297 /* On zSeries, we use a LARL to load the pool register. The pool is
5298 located in the .rodata section, so we emit it after the function. */
5299 if (TARGET_CPU_ZARCH)
5301 insn = gen_main_base_64 (base_reg, pool->label);
5302 insn = emit_insn_after (insn, pool->pool_insn);
5303 INSN_ADDRESSES_NEW (insn, -1);
5304 remove_insn (pool->pool_insn);
5306 insn = get_last_insn ();
5307 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5308 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5310 s390_dump_pool (pool, 0);
5313 /* On S/390, if the total size of the function's code plus literal pool
5314 does not exceed 4096 bytes, we use BASR to set up a function base
5315 pointer, and emit the literal pool at the end of the function. */
5316 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5317 + pool->size + 8 /* alignment slop */ < 4096)
5319 insn = gen_main_base_31_small (base_reg, pool->label);
5320 insn = emit_insn_after (insn, pool->pool_insn);
5321 INSN_ADDRESSES_NEW (insn, -1);
5322 remove_insn (pool->pool_insn);
5324 insn = emit_label_after (pool->label, insn);
5325 INSN_ADDRESSES_NEW (insn, -1);
5327 insn = get_last_insn ();
5328 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5329 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5331 s390_dump_pool (pool, 1);
5334 /* Otherwise, we emit an inline literal pool and use BASR to branch
5335 over it, setting up the pool register at the same time. */
5338 rtx pool_end = gen_label_rtx ();
5340 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5341 insn = emit_insn_after (insn, pool->pool_insn);
5342 INSN_ADDRESSES_NEW (insn, -1);
5343 remove_insn (pool->pool_insn);
5345 insn = emit_label_after (pool->label, insn);
5346 INSN_ADDRESSES_NEW (insn, -1);
5348 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5349 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5351 insn = emit_label_after (pool_end, pool->pool_insn);
5352 INSN_ADDRESSES_NEW (insn, -1);
5354 s390_dump_pool (pool, 1);
5358 /* Replace all literal pool references. */
5360 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5363 replace_ltrel_base (&PATTERN (insn));
5365 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5367 rtx addr, pool_ref = NULL_RTX;
5368 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5371 if (s390_execute_label (insn))
5372 addr = s390_find_execute (pool, insn);
5374 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5375 get_pool_mode (pool_ref));
5377 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5378 INSN_CODE (insn) = -1;
5384 /* Free the pool. */
5385 s390_free_pool (pool);
5388 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5389 We have decided we cannot use this pool, so revert all changes
5390 to the current function that were done by s390_mainpool_start. */
5392 s390_mainpool_cancel (struct constant_pool *pool)
5394 /* We didn't actually change the instruction stream, so simply
5395 free the pool memory. */
5396 s390_free_pool (pool);
5400 /* Chunkify the literal pool. */
5402 #define S390_POOL_CHUNK_MIN 0xc00
5403 #define S390_POOL_CHUNK_MAX 0xe00
5405 static struct constant_pool *
5406 s390_chunkify_start (void)
5408 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5411 rtx pending_ltrel = NULL_RTX;
5414 rtx (*gen_reload_base) (rtx, rtx) =
5415 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5418 /* We need correct insn addresses. */
5420 shorten_branches (get_insns ());
5422 /* Scan all insns and move literals to pool chunks. */
5424 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5426 /* Check for pending LTREL_BASE. */
5429 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5432 if (ltrel_base == pending_ltrel)
5433 pending_ltrel = NULL_RTX;
5439 if (s390_execute_label (insn))
5442 curr_pool = s390_start_pool (&pool_list, insn);
5444 s390_add_execute (curr_pool, insn);
5445 s390_add_pool_insn (curr_pool, insn);
5447 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5449 rtx pool_ref = NULL_RTX;
5450 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5453 rtx constant = get_pool_constant (pool_ref);
5454 enum machine_mode mode = get_pool_mode (pool_ref);
5457 curr_pool = s390_start_pool (&pool_list, insn);
5459 s390_add_constant (curr_pool, constant, mode);
5460 s390_add_pool_insn (curr_pool, insn);
5462 /* Don't split the pool chunk between a LTREL_OFFSET load
5463 and the corresponding LTREL_BASE. */
5464 if (GET_CODE (constant) == CONST
5465 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5466 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5470 pending_ltrel = pool_ref;
5475 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5478 s390_add_pool_insn (curr_pool, insn);
5479 /* An LTREL_BASE must follow within the same basic block. */
5485 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5486 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5489 if (TARGET_CPU_ZARCH)
5491 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5494 s390_end_pool (curr_pool, NULL_RTX);
5499 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5500 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5503 /* We will later have to insert base register reload insns.
5504 Those will have an effect on code size, which we need to
5505 consider here. This calculation makes rather pessimistic
5506 worst-case assumptions. */
5507 if (GET_CODE (insn) == CODE_LABEL)
5510 if (chunk_size < S390_POOL_CHUNK_MIN
5511 && curr_pool->size < S390_POOL_CHUNK_MIN)
5514 /* Pool chunks can only be inserted after BARRIERs ... */
5515 if (GET_CODE (insn) == BARRIER)
5517 s390_end_pool (curr_pool, insn);
5522 /* ... so if we don't find one in time, create one. */
5523 else if ((chunk_size > S390_POOL_CHUNK_MAX
5524 || curr_pool->size > S390_POOL_CHUNK_MAX))
5526 rtx label, jump, barrier;
5528 /* We can insert the barrier only after a 'real' insn. */
5529 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5531 if (get_attr_length (insn) == 0)
5534 /* Don't separate LTREL_BASE from the corresponding
5535 LTREL_OFFSET load. */
5539 label = gen_label_rtx ();
5540 jump = emit_jump_insn_after (gen_jump (label), insn);
5541 barrier = emit_barrier_after (jump);
5542 insn = emit_label_after (label, barrier);
5543 JUMP_LABEL (jump) = label;
5544 LABEL_NUSES (label) = 1;
5546 INSN_ADDRESSES_NEW (jump, -1);
5547 INSN_ADDRESSES_NEW (barrier, -1);
5548 INSN_ADDRESSES_NEW (insn, -1);
5550 s390_end_pool (curr_pool, barrier);
5558 s390_end_pool (curr_pool, NULL_RTX);
5563 /* Find all labels that are branched into
5564 from an insn belonging to a different chunk. */
5566 far_labels = BITMAP_XMALLOC ();
5568 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5570 /* Labels marked with LABEL_PRESERVE_P can be target
5571 of non-local jumps, so we have to mark them.
5572 The same holds for named labels.
5574 Don't do that, however, if it is the label before
5577 if (GET_CODE (insn) == CODE_LABEL
5578 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5580 rtx vec_insn = next_real_insn (insn);
5581 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5582 PATTERN (vec_insn) : NULL_RTX;
5584 || !(GET_CODE (vec_pat) == ADDR_VEC
5585 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5586 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5589 /* If we have a direct jump (conditional or unconditional)
5590 or a casesi jump, check all potential targets. */
5591 else if (GET_CODE (insn) == JUMP_INSN)
5593 rtx pat = PATTERN (insn);
5594 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5595 pat = XVECEXP (pat, 0, 0);
5597 if (GET_CODE (pat) == SET)
5599 rtx label = JUMP_LABEL (insn);
5602 if (s390_find_pool (pool_list, label)
5603 != s390_find_pool (pool_list, insn))
5604 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5607 else if (GET_CODE (pat) == PARALLEL
5608 && XVECLEN (pat, 0) == 2
5609 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5610 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5611 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5613 /* Find the jump table used by this casesi jump. */
5614 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5615 rtx vec_insn = next_real_insn (vec_label);
5616 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5617 PATTERN (vec_insn) : NULL_RTX;
5619 && (GET_CODE (vec_pat) == ADDR_VEC
5620 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5622 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
5624 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5626 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5628 if (s390_find_pool (pool_list, label)
5629 != s390_find_pool (pool_list, insn))
5630 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5637 /* Insert base register reload insns before every pool. */
5639 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5641 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5643 rtx insn = curr_pool->first_insn;
5644 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5647 /* Insert base register reload insns at every far label. */
5649 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5650 if (GET_CODE (insn) == CODE_LABEL
5651 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5653 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5656 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5658 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5663 BITMAP_XFREE (far_labels);
5666 /* Recompute insn addresses. */
5668 init_insn_lengths ();
5669 shorten_branches (get_insns ());
5674 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5675 After we have decided to use this list, finish implementing
5676 all changes to the current function as required. */
5679 s390_chunkify_finish (struct constant_pool *pool_list)
5681 struct constant_pool *curr_pool = NULL;
5685 /* Replace all literal pool references. */
5687 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5690 replace_ltrel_base (&PATTERN (insn));
5692 curr_pool = s390_find_pool (pool_list, insn);
5696 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5698 rtx addr, pool_ref = NULL_RTX;
5699 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5702 if (s390_execute_label (insn))
5703 addr = s390_find_execute (curr_pool, insn);
5705 addr = s390_find_constant (curr_pool,
5706 get_pool_constant (pool_ref),
5707 get_pool_mode (pool_ref));
5709 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5710 INSN_CODE (insn) = -1;
5715 /* Dump out all literal pools. */
5717 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5718 s390_dump_pool (curr_pool, 0);
5720 /* Free pool list. */
5724 struct constant_pool *next = pool_list->next;
5725 s390_free_pool (pool_list);
5730 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5731 We have decided we cannot use this list, so revert all changes
5732 to the current function that were done by s390_chunkify_start. */
5735 s390_chunkify_cancel (struct constant_pool *pool_list)
5737 struct constant_pool *curr_pool = NULL;
5740 /* Remove all pool placeholder insns. */
5742 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5744 /* Did we insert an extra barrier? Remove it. */
5745 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5746 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5747 rtx label = NEXT_INSN (curr_pool->pool_insn);
5749 if (jump && GET_CODE (jump) == JUMP_INSN
5750 && barrier && GET_CODE (barrier) == BARRIER
5751 && label && GET_CODE (label) == CODE_LABEL
5752 && GET_CODE (PATTERN (jump)) == SET
5753 && SET_DEST (PATTERN (jump)) == pc_rtx
5754 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5755 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5758 remove_insn (barrier);
5759 remove_insn (label);
5762 remove_insn (curr_pool->pool_insn);
5765 /* Remove all base register reload insns. */
5767 for (insn = get_insns (); insn; )
5769 rtx next_insn = NEXT_INSN (insn);
5771 if (GET_CODE (insn) == INSN
5772 && GET_CODE (PATTERN (insn)) == SET
5773 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5774 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5780 /* Free pool list. */
5784 struct constant_pool *next = pool_list->next;
5785 s390_free_pool (pool_list);
5791 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5794 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
5798 switch (GET_MODE_CLASS (mode))
5801 if (GET_CODE (exp) != CONST_DOUBLE)
5804 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5805 assemble_real (r, mode, align);
5809 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5818 /* Rework the prologue/epilogue to avoid saving/restoring
5819 registers unnecessarily. */
5822 s390_optimize_prologue (void)
5824 rtx insn, new_insn, next_insn;
5826 /* Do a final recompute of the frame-related data. */
5828 s390_update_frame_layout ();
5830 /* If all special registers are in fact used, there's nothing we
5831 can do, so no point in walking the insn list. */
5833 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
5834 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
5835 && (TARGET_CPU_ZARCH
5836 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
5837 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
5840 /* Search for prologue/epilogue insns and replace them. */
5842 for (insn = get_insns (); insn; insn = next_insn)
5844 int first, last, off;
5845 rtx set, base, offset;
5847 next_insn = NEXT_INSN (insn);
5849 if (GET_CODE (insn) != INSN)
5852 if (GET_CODE (PATTERN (insn)) == PARALLEL
5853 && store_multiple_operation (PATTERN (insn), VOIDmode))
5855 set = XVECEXP (PATTERN (insn), 0, 0);
5856 first = REGNO (SET_SRC (set));
5857 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5858 offset = const0_rtx;
5859 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5860 off = INTVAL (offset);
5862 if (GET_CODE (base) != REG || off < 0)
5864 if (REGNO (base) != STACK_POINTER_REGNUM
5865 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5867 if (first > BASE_REGNUM || last < BASE_REGNUM)
5870 if (cfun_frame_layout.first_save_gpr != -1)
5872 new_insn = save_gprs (base,
5873 off + (cfun_frame_layout.first_save_gpr
5874 - first) * UNITS_PER_WORD,
5875 cfun_frame_layout.first_save_gpr,
5876 cfun_frame_layout.last_save_gpr);
5877 new_insn = emit_insn_before (new_insn, insn);
5878 INSN_ADDRESSES_NEW (new_insn, -1);
5885 if (GET_CODE (PATTERN (insn)) == SET
5886 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
5887 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5888 || (!TARGET_CPU_ZARCH
5889 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
5890 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5892 set = PATTERN (insn);
5893 first = REGNO (SET_SRC (set));
5894 offset = const0_rtx;
5895 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5896 off = INTVAL (offset);
5898 if (GET_CODE (base) != REG || off < 0)
5900 if (REGNO (base) != STACK_POINTER_REGNUM
5901 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5903 if (cfun_frame_layout.first_save_gpr != -1)
5905 new_insn = save_gprs (base,
5906 off + (cfun_frame_layout.first_save_gpr
5907 - first) * UNITS_PER_WORD,
5908 cfun_frame_layout.first_save_gpr,
5909 cfun_frame_layout.last_save_gpr);
5910 new_insn = emit_insn_before (new_insn, insn);
5911 INSN_ADDRESSES_NEW (new_insn, -1);
5918 if (GET_CODE (PATTERN (insn)) == PARALLEL
5919 && load_multiple_operation (PATTERN (insn), VOIDmode))
5921 set = XVECEXP (PATTERN (insn), 0, 0);
5922 first = REGNO (SET_DEST (set));
5923 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5924 offset = const0_rtx;
5925 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5926 off = INTVAL (offset);
5928 if (GET_CODE (base) != REG || off < 0)
5930 if (REGNO (base) != STACK_POINTER_REGNUM
5931 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5933 if (first > BASE_REGNUM || last < BASE_REGNUM)
5936 if (cfun_frame_layout.first_restore_gpr != -1)
5938 new_insn = restore_gprs (base,
5939 off + (cfun_frame_layout.first_restore_gpr
5940 - first) * UNITS_PER_WORD,
5941 cfun_frame_layout.first_restore_gpr,
5942 cfun_frame_layout.last_restore_gpr);
5943 new_insn = emit_insn_before (new_insn, insn);
5944 INSN_ADDRESSES_NEW (new_insn, -1);
5951 if (GET_CODE (PATTERN (insn)) == SET
5952 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5953 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5954 || (!TARGET_CPU_ZARCH
5955 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
5956 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5958 set = PATTERN (insn);
5959 first = REGNO (SET_DEST (set));
5960 offset = const0_rtx;
5961 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5962 off = INTVAL (offset);
5964 if (GET_CODE (base) != REG || off < 0)
5966 if (REGNO (base) != STACK_POINTER_REGNUM
5967 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5969 if (cfun_frame_layout.first_restore_gpr != -1)
5971 new_insn = restore_gprs (base,
5972 off + (cfun_frame_layout.first_restore_gpr
5973 - first) * UNITS_PER_WORD,
5974 cfun_frame_layout.first_restore_gpr,
5975 cfun_frame_layout.last_restore_gpr);
5976 new_insn = emit_insn_before (new_insn, insn);
5977 INSN_ADDRESSES_NEW (new_insn, -1);
5986 /* Perform machine-dependent processing. */
5991 bool pool_overflow = false;
5993 /* Make sure all splits have been performed; splits after
5994 machine_dependent_reorg might confuse insn length counts. */
5995 split_all_insns_noflow ();
5998 /* Install the main literal pool and the associated base
5999 register load insns.
6001 In addition, there are two problematic situations we need
6004 - the literal pool might be > 4096 bytes in size, so that
6005 some of its elements cannot be directly accessed
6007 - a branch target might be > 64K away from the branch, so that
6008 it is not possible to use a PC-relative instruction.
6010 To fix those, we split the single literal pool into multiple
6011 pool chunks, reloading the pool base register at various
6012 points throughout the function to ensure it always points to
6013 the pool chunk the following code expects, and / or replace
6014 PC-relative branches by absolute branches.
6016 However, the two problems are interdependent: splitting the
6017 literal pool can move a branch further away from its target,
6018 causing the 64K limit to overflow, and on the other hand,
6019 replacing a PC-relative branch by an absolute branch means
6020 we need to put the branch target address into the literal
6021 pool, possibly causing it to overflow.
6023 So, we loop trying to fix up both problems until we manage
6024 to satisfy both conditions at the same time. Note that the
6025 loop is guaranteed to terminate as every pass of the loop
6026 strictly decreases the total number of PC-relative branches
6027 in the function. (This is not completely true as there
6028 might be branch-over-pool insns introduced by chunkify_start.
6029 Those never need to be split however.) */
6033 struct constant_pool *pool = NULL;
6035 /* Collect the literal pool. */
6038 pool = s390_mainpool_start ();
6040 pool_overflow = true;
6043 /* If literal pool overflowed, start to chunkify it. */
6045 pool = s390_chunkify_start ();
6047 /* Split out-of-range branches. If this has created new
6048 literal pool entries, cancel current chunk list and
6049 recompute it. zSeries machines have large branch
6050 instructions, so we never need to split a branch. */
6051 if (!TARGET_CPU_ZARCH && s390_split_branches ())
6054 s390_chunkify_cancel (pool);
6056 s390_mainpool_cancel (pool);
6061 /* If we made it up to here, both conditions are satisfied.
6062 Finish up literal pool related changes. */
6064 s390_chunkify_finish (pool);
6066 s390_mainpool_finish (pool);
6068 /* We're done splitting branches. */
6069 cfun->machine->split_branches_pending_p = false;
6073 s390_optimize_prologue ();
6077 /* Return an RTL expression representing the value of the return address
6078 for the frame COUNT steps up from the current frame. FRAME is the
6079 frame pointer of that frame. */
6082 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6087 /* Without backchain, we fail for all but the current frame. */
6089 if (!TARGET_BACKCHAIN && !TARGET_KERNEL_BACKCHAIN && count > 0)
6092 /* For the current frame, we need to make sure the initial
6093 value of RETURN_REGNUM is actually saved. */
6097 cfun_frame_layout.save_return_addr_p = true;
6098 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6101 if (TARGET_BACKCHAIN)
6102 offset = RETURN_REGNUM * UNITS_PER_WORD;
6104 offset = -2 * UNITS_PER_WORD;
6106 addr = plus_constant (frame, offset);
6107 addr = memory_address (Pmode, addr);
6108 return gen_rtx_MEM (Pmode, addr);
6111 /* Return an RTL expression representing the back chain stored in
6112 the current stack frame. */
6115 s390_back_chain_rtx (void)
6119 gcc_assert (TARGET_BACKCHAIN || TARGET_KERNEL_BACKCHAIN);
6121 if (TARGET_BACKCHAIN)
6122 chain = stack_pointer_rtx;
6124 chain = plus_constant (stack_pointer_rtx,
6125 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6127 chain = gen_rtx_MEM (Pmode, chain);
6131 /* Find first call clobbered register unused in a function.
6132 This could be used as base register in a leaf function
6133 or for holding the return address before epilogue. */
6136 find_unused_clobbered_reg (void)
6139 for (i = 0; i < 6; i++)
6140 if (!regs_ever_live[i])
6145 /* Determine the frame area which actually has to be accessed
6146 in the function epilogue. The values are stored at the
6147 given pointers AREA_BOTTOM (address of the lowest used stack
6148 address) and AREA_TOP (address of the first item which does
6149 not belong to the stack frame). */
6152 s390_frame_area (int *area_bottom, int *area_top)
6160 if (cfun_frame_layout.first_restore_gpr != -1)
6162 b = (cfun_frame_layout.gprs_offset
6163 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6164 t = b + (cfun_frame_layout.last_restore_gpr
6165 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6168 if (TARGET_64BIT && cfun_save_high_fprs_p)
6170 b = MIN (b, cfun_frame_layout.f8_offset);
6171 t = MAX (t, (cfun_frame_layout.f8_offset
6172 + cfun_frame_layout.high_fprs * 8));
6176 for (i = 2; i < 4; i++)
6177 if (cfun_fpr_bit_p (i))
6179 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6180 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6187 /* Fill cfun->machine with info about register usage of current function.
6188 Return in LIVE_REGS which GPRs are currently considered live. */
6191 s390_register_info (int live_regs[])
6195 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6196 cfun_frame_layout.fpr_bitmap = 0;
6197 cfun_frame_layout.high_fprs = 0;
6199 for (i = 24; i < 32; i++)
6200 if (regs_ever_live[i] && !global_regs[i])
6202 cfun_set_fpr_bit (i - 16);
6203 cfun_frame_layout.high_fprs++;
6206 /* Find first and last gpr to be saved. We trust regs_ever_live
6207 data, except that we don't save and restore global registers.
6209 Also, all registers with special meaning to the compiler need
6210 to be handled extra. */
6212 for (i = 0; i < 16; i++)
6213 live_regs[i] = regs_ever_live[i] && !global_regs[i];
6216 live_regs[PIC_OFFSET_TABLE_REGNUM]
6217 = regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
6219 live_regs[BASE_REGNUM]
6220 = cfun->machine->base_reg
6221 && REGNO (cfun->machine->base_reg) == BASE_REGNUM;
6223 live_regs[RETURN_REGNUM]
6224 = cfun->machine->split_branches_pending_p
6225 || cfun_frame_layout.save_return_addr_p;
6227 live_regs[STACK_POINTER_REGNUM]
6228 = !current_function_is_leaf
6229 || TARGET_TPF_PROFILING
6230 || cfun_save_high_fprs_p
6231 || get_frame_size () > 0
6232 || current_function_calls_alloca
6233 || current_function_stdarg;
6235 for (i = 6; i < 16; i++)
6238 for (j = 15; j > i; j--)
6244 /* Nothing to save/restore. */
6245 cfun_frame_layout.first_save_gpr = -1;
6246 cfun_frame_layout.first_restore_gpr = -1;
6247 cfun_frame_layout.last_save_gpr = -1;
6248 cfun_frame_layout.last_restore_gpr = -1;
6252 /* Save / Restore from gpr i to j. */
6253 cfun_frame_layout.first_save_gpr = i;
6254 cfun_frame_layout.first_restore_gpr = i;
6255 cfun_frame_layout.last_save_gpr = j;
6256 cfun_frame_layout.last_restore_gpr = j;
6259 if (current_function_stdarg)
6261 /* Varargs functions need to save gprs 2 to 6. */
6262 if (cfun_frame_layout.first_save_gpr == -1
6263 || cfun_frame_layout.first_save_gpr > 2)
6264 cfun_frame_layout.first_save_gpr = 2;
6266 if (cfun_frame_layout.last_save_gpr == -1
6267 || cfun_frame_layout.last_save_gpr < 6)
6268 cfun_frame_layout.last_save_gpr = 6;
6270 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6271 if (TARGET_HARD_FLOAT)
6272 for (i = 0; i < (TARGET_64BIT ? 4 : 2); i++)
6273 cfun_set_fpr_bit (i);
6277 for (i = 2; i < 4; i++)
6278 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6279 cfun_set_fpr_bit (i);
6282 /* Fill cfun->machine with info about frame of current function. */
6285 s390_frame_info (void)
6289 cfun_frame_layout.frame_size = get_frame_size ();
6290 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6291 fatal_error ("Total size of local variables exceeds architecture limit.");
6293 cfun_frame_layout.save_backchain_p = (TARGET_BACKCHAIN
6294 || TARGET_KERNEL_BACKCHAIN);
6296 if (TARGET_BACKCHAIN)
6298 cfun_frame_layout.backchain_offset = 0;
6299 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6300 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6301 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6302 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6305 else if (TARGET_KERNEL_BACKCHAIN)
6307 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6309 cfun_frame_layout.gprs_offset
6310 = (cfun_frame_layout.backchain_offset
6311 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6316 cfun_frame_layout.f4_offset
6317 = (cfun_frame_layout.gprs_offset
6318 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6320 cfun_frame_layout.f0_offset
6321 = (cfun_frame_layout.f4_offset
6322 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6326 /* On 31 bit we have to care about alignment of the
6327 floating point regs to provide fastest access. */
6328 cfun_frame_layout.f0_offset
6329 = ((cfun_frame_layout.gprs_offset
6330 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6331 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6333 cfun_frame_layout.f4_offset
6334 = (cfun_frame_layout.f0_offset
6335 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6338 else /* no backchain */
6340 cfun_frame_layout.f4_offset
6341 = (STACK_POINTER_OFFSET
6342 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6344 cfun_frame_layout.f0_offset
6345 = (cfun_frame_layout.f4_offset
6346 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6348 cfun_frame_layout.gprs_offset
6349 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6352 if (current_function_is_leaf
6353 && !TARGET_TPF_PROFILING
6354 && cfun_frame_layout.frame_size == 0
6355 && !cfun_save_high_fprs_p
6356 && !current_function_calls_alloca
6357 && !current_function_stdarg)
6360 if (TARGET_BACKCHAIN)
6361 cfun_frame_layout.frame_size += (STARTING_FRAME_OFFSET
6362 + cfun_frame_layout.high_fprs * 8);
6365 cfun_frame_layout.frame_size += (cfun_frame_layout.save_backchain_p
6368 /* No alignment trouble here because f8-f15 are only saved under
6370 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6371 cfun_frame_layout.f4_offset),
6372 cfun_frame_layout.gprs_offset)
6373 - cfun_frame_layout.high_fprs * 8);
6375 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6377 for (i = 0; i < 8; i++)
6378 if (cfun_fpr_bit_p (i))
6379 cfun_frame_layout.frame_size += 8;
6381 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6383 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6384 the frame size to sustain 8 byte alignment of stack frames. */
6385 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6386 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6387 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6389 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6393 /* Generate frame layout. Fills in register and frame data for the current
6394 function in cfun->machine. This routine can be called multiple times;
6395 it will re-do the complete frame layout every time. */
6398 s390_init_frame_layout (void)
6400 HOST_WIDE_INT frame_size;
6404 /* If return address register is explicitly used, we need to save it. */
6405 if (regs_ever_live[RETURN_REGNUM]
6406 || !current_function_is_leaf
6407 || TARGET_TPF_PROFILING
6408 || current_function_stdarg
6409 || current_function_calls_eh_return)
6410 cfun_frame_layout.save_return_addr_p = true;
6412 /* On S/390 machines, we may need to perform branch splitting, which
6413 will require both base and return address register. We have no
6414 choice but to assume we're going to need them until right at the
6415 end of the machine dependent reorg phase. */
6416 if (!TARGET_CPU_ZARCH)
6417 cfun->machine->split_branches_pending_p = true;
6421 frame_size = cfun_frame_layout.frame_size;
6423 /* Try to predict whether we'll need the base register. */
6424 base_used = cfun->machine->split_branches_pending_p
6425 || current_function_uses_const_pool
6426 || (!DISP_IN_RANGE (-frame_size)
6427 && !CONST_OK_FOR_CONSTRAINT_P (-frame_size, 'K', "K"));
6429 /* Decide which register to use as literal pool base. In small
6430 leaf functions, try to use an unused call-clobbered register
6431 as base register to avoid save/restore overhead. */
6433 cfun->machine->base_reg = NULL_RTX;
6434 else if (current_function_is_leaf && !regs_ever_live[5])
6435 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6437 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6439 s390_register_info (live_regs);
6442 while (frame_size != cfun_frame_layout.frame_size);
6445 /* Update frame layout. Recompute actual register save data based on
6446 current info and update regs_ever_live for the special registers.
6447 May be called multiple times, but may never cause *more* registers
6448 to be saved than s390_init_frame_layout allocated room for. */
6451 s390_update_frame_layout (void)
6455 s390_register_info (live_regs);
6457 regs_ever_live[BASE_REGNUM] = live_regs[BASE_REGNUM];
6458 regs_ever_live[RETURN_REGNUM] = live_regs[RETURN_REGNUM];
6459 regs_ever_live[STACK_POINTER_REGNUM] = live_regs[STACK_POINTER_REGNUM];
6461 if (cfun->machine->base_reg)
6462 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6465 /* Return true if register FROM can be eliminated via register TO. */
6468 s390_can_eliminate (int from, int to)
6470 gcc_assert (to == STACK_POINTER_REGNUM
6471 || to == HARD_FRAME_POINTER_REGNUM);
6473 gcc_assert (from == FRAME_POINTER_REGNUM
6474 || from == ARG_POINTER_REGNUM
6475 || from == RETURN_ADDRESS_POINTER_REGNUM);
6477 /* Make sure we actually saved the return address. */
6478 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6479 if (!current_function_calls_eh_return
6480 && !current_function_stdarg
6481 && !cfun_frame_layout.save_return_addr_p)
6487 /* Return offset between register FROM and TO initially after prolog. */
6490 s390_initial_elimination_offset (int from, int to)
6492 HOST_WIDE_INT offset;
6495 /* ??? Why are we called for non-eliminable pairs? */
6496 if (!s390_can_eliminate (from, to))
6501 case FRAME_POINTER_REGNUM:
6505 case ARG_POINTER_REGNUM:
6506 s390_init_frame_layout ();
6507 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6510 case RETURN_ADDRESS_POINTER_REGNUM:
6511 s390_init_frame_layout ();
6512 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
6513 gcc_assert (index >= 0);
6514 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6515 offset += index * UNITS_PER_WORD;
6525 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6526 to register BASE. Return generated insn. */
6529 save_fpr (rtx base, int offset, int regnum)
6532 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6533 set_mem_alias_set (addr, s390_sr_alias_set);
6535 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6538 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6539 to register BASE. Return generated insn. */
6542 restore_fpr (rtx base, int offset, int regnum)
6545 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6546 set_mem_alias_set (addr, s390_sr_alias_set);
6548 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6551 /* Generate insn to save registers FIRST to LAST into
6552 the register save area located at offset OFFSET
6553 relative to register BASE. */
6556 save_gprs (rtx base, int offset, int first, int last)
6558 rtx addr, insn, note;
6561 addr = plus_constant (base, offset);
6562 addr = gen_rtx_MEM (Pmode, addr);
6563 set_mem_alias_set (addr, s390_sr_alias_set);
6565 /* Special-case single register. */
6569 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6571 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6573 RTX_FRAME_RELATED_P (insn) = 1;
6578 insn = gen_store_multiple (addr,
6579 gen_rtx_REG (Pmode, first),
6580 GEN_INT (last - first + 1));
6583 /* We need to set the FRAME_RELATED flag on all SETs
6584 inside the store-multiple pattern.
6586 However, we must not emit DWARF records for registers 2..5
6587 if they are stored for use by variable arguments ...
6589 ??? Unfortunately, it is not enough to simply not the the
6590 FRAME_RELATED flags for those SETs, because the first SET
6591 of the PARALLEL is always treated as if it had the flag
6592 set, even if it does not. Therefore we emit a new pattern
6593 without those registers as REG_FRAME_RELATED_EXPR note. */
6597 rtx pat = PATTERN (insn);
6599 for (i = 0; i < XVECLEN (pat, 0); i++)
6600 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
6601 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
6603 RTX_FRAME_RELATED_P (insn) = 1;
6607 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
6608 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
6609 gen_rtx_REG (Pmode, 6),
6610 GEN_INT (last - 6 + 1));
6611 note = PATTERN (note);
6614 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6615 note, REG_NOTES (insn));
6617 for (i = 0; i < XVECLEN (note, 0); i++)
6618 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
6619 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
6621 RTX_FRAME_RELATED_P (insn) = 1;
6627 /* Generate insn to restore registers FIRST to LAST from
6628 the register save area located at offset OFFSET
6629 relative to register BASE. */
6632 restore_gprs (rtx base, int offset, int first, int last)
6636 addr = plus_constant (base, offset);
6637 addr = gen_rtx_MEM (Pmode, addr);
6638 set_mem_alias_set (addr, s390_sr_alias_set);
6640 /* Special-case single register. */
6644 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6646 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
6651 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6653 GEN_INT (last - first + 1));
6657 /* Return insn sequence to load the GOT register. */
6659 static GTY(()) rtx got_symbol;
6661 s390_load_got (void)
6667 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6668 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
6673 if (TARGET_CPU_ZARCH)
6675 emit_move_insn (pic_offset_table_rtx, got_symbol);
6681 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
6682 UNSPEC_LTREL_OFFSET);
6683 offset = gen_rtx_CONST (Pmode, offset);
6684 offset = force_const_mem (Pmode, offset);
6686 emit_move_insn (pic_offset_table_rtx, offset);
6688 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
6690 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
6692 emit_move_insn (pic_offset_table_rtx, offset);
6695 insns = get_insns ();
6700 /* Expand the prologue into a bunch of separate insns. */
6703 s390_emit_prologue (void)
6711 /* Complete frame layout. */
6713 s390_update_frame_layout ();
6715 /* Annotate all constant pool references to let the scheduler know
6716 they implicitly use the base register. */
6718 push_topmost_sequence ();
6720 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6722 annotate_constant_pool_refs (&PATTERN (insn));
6724 pop_topmost_sequence ();
6726 /* Choose best register to use for temp use within prologue.
6727 See below for why TPF must use the register 1. */
6729 if (!current_function_is_leaf && !TARGET_TPF_PROFILING)
6730 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6732 temp_reg = gen_rtx_REG (Pmode, 1);
6734 /* Save call saved gprs. */
6735 if (cfun_frame_layout.first_save_gpr != -1)
6737 insn = save_gprs (stack_pointer_rtx,
6738 cfun_frame_layout.gprs_offset,
6739 cfun_frame_layout.first_save_gpr,
6740 cfun_frame_layout.last_save_gpr);
6744 /* Dummy insn to mark literal pool slot. */
6746 if (cfun->machine->base_reg)
6747 emit_insn (gen_main_pool (cfun->machine->base_reg));
6749 offset = cfun_frame_layout.f0_offset;
6751 /* Save f0 and f2. */
6752 for (i = 0; i < 2; i++)
6754 if (cfun_fpr_bit_p (i))
6756 save_fpr (stack_pointer_rtx, offset, i + 16);
6759 else if (TARGET_BACKCHAIN)
6763 /* Save f4 and f6. */
6764 offset = cfun_frame_layout.f4_offset;
6765 for (i = 2; i < 4; i++)
6767 if (cfun_fpr_bit_p (i))
6769 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6772 /* If f4 and f6 are call clobbered they are saved due to stdargs and
6773 therefore are not frame related. */
6774 if (!call_really_used_regs[i + 16])
6775 RTX_FRAME_RELATED_P (insn) = 1;
6777 else if (TARGET_BACKCHAIN)
6781 if (!TARGET_BACKCHAIN
6782 && cfun_save_high_fprs_p
6783 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
6785 offset = (cfun_frame_layout.f8_offset
6786 + (cfun_frame_layout.high_fprs - 1) * 8);
6788 for (i = 15; i > 7 && offset >= 0; i--)
6789 if (cfun_fpr_bit_p (i))
6791 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6793 RTX_FRAME_RELATED_P (insn) = 1;
6796 if (offset >= cfun_frame_layout.f8_offset)
6800 if (TARGET_BACKCHAIN)
6801 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
6803 /* Decrement stack pointer. */
6805 if (cfun_frame_layout.frame_size > 0)
6807 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
6809 if (s390_stack_size)
6811 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
6812 & ~(s390_stack_guard - 1));
6813 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
6814 GEN_INT (stack_check_mask));
6817 gen_cmpdi (t, const0_rtx);
6819 gen_cmpsi (t, const0_rtx);
6821 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
6822 gen_rtx_REG (CCmode,
6828 if (s390_warn_framesize > 0
6829 && cfun_frame_layout.frame_size >= s390_warn_framesize)
6830 warning ("frame size of `%s' is " HOST_WIDE_INT_PRINT_DEC " bytes",
6831 current_function_name (), cfun_frame_layout.frame_size);
6833 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
6834 warning ("`%s' uses dynamic stack allocation", current_function_name ());
6836 /* Save incoming stack pointer into temp reg. */
6837 if (cfun_frame_layout.save_backchain_p || next_fpr)
6838 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
6840 /* Subtract frame size from stack pointer. */
6842 if (DISP_IN_RANGE (INTVAL (frame_off)))
6844 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6845 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6847 insn = emit_insn (insn);
6851 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6852 frame_off = force_const_mem (Pmode, frame_off);
6854 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
6855 annotate_constant_pool_refs (&PATTERN (insn));
6858 RTX_FRAME_RELATED_P (insn) = 1;
6860 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6861 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6862 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6863 GEN_INT (-cfun_frame_layout.frame_size))),
6866 /* Set backchain. */
6868 if (cfun_frame_layout.save_backchain_p)
6870 if (cfun_frame_layout.backchain_offset)
6871 addr = gen_rtx_MEM (Pmode,
6872 plus_constant (stack_pointer_rtx,
6873 cfun_frame_layout.backchain_offset));
6875 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
6876 set_mem_alias_set (addr, s390_sr_alias_set);
6877 insn = emit_insn (gen_move_insn (addr, temp_reg));
6880 /* If we support asynchronous exceptions (e.g. for Java),
6881 we need to make sure the backchain pointer is set up
6882 before any possibly trapping memory access. */
6884 if (cfun_frame_layout.save_backchain_p && flag_non_call_exceptions)
6886 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
6887 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
6891 /* Save fprs 8 - 15 (64 bit ABI). */
6893 if (cfun_save_high_fprs_p && next_fpr)
6895 insn = emit_insn (gen_add2_insn (temp_reg,
6896 GEN_INT (cfun_frame_layout.f8_offset)));
6900 for (i = 24; i <= next_fpr; i++)
6901 if (cfun_fpr_bit_p (i - 16))
6903 rtx addr = plus_constant (stack_pointer_rtx,
6904 cfun_frame_layout.frame_size
6905 + cfun_frame_layout.f8_offset
6908 insn = save_fpr (temp_reg, offset, i);
6910 RTX_FRAME_RELATED_P (insn) = 1;
6912 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6913 gen_rtx_SET (VOIDmode,
6914 gen_rtx_MEM (DFmode, addr),
6915 gen_rtx_REG (DFmode, i)),
6920 /* Set frame pointer, if needed. */
6922 if (frame_pointer_needed)
6924 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6925 RTX_FRAME_RELATED_P (insn) = 1;
6928 /* Set up got pointer, if needed. */
6930 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6932 rtx insns = s390_load_got ();
6934 for (insn = insns; insn; insn = NEXT_INSN (insn))
6936 annotate_constant_pool_refs (&PATTERN (insn));
6938 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6945 if (TARGET_TPF_PROFILING)
6947 /* Generate a BAS instruction to serve as a function
6948 entry intercept to facilitate the use of tracing
6949 algorithms located at the branch target. */
6950 emit_insn (gen_prologue_tpf ());
6952 /* Emit a blockage here so that all code
6953 lies between the profiling mechanisms. */
6954 emit_insn (gen_blockage ());
6958 /* Expand the epilogue into a bunch of separate insns. */
6961 s390_emit_epilogue (bool sibcall)
6963 rtx frame_pointer, return_reg;
6964 int area_bottom, area_top, offset = 0;
6969 if (TARGET_TPF_PROFILING)
6972 /* Generate a BAS instruction to serve as a function
6973 entry intercept to facilitate the use of tracing
6974 algorithms located at the branch target. */
6976 /* Emit a blockage here so that all code
6977 lies between the profiling mechanisms. */
6978 emit_insn (gen_blockage ());
6980 emit_insn (gen_epilogue_tpf ());
6983 /* Check whether to use frame or stack pointer for restore. */
6985 frame_pointer = (frame_pointer_needed
6986 ? hard_frame_pointer_rtx : stack_pointer_rtx);
6988 s390_frame_area (&area_bottom, &area_top);
6990 /* Check whether we can access the register save area.
6991 If not, increment the frame pointer as required. */
6993 if (area_top <= area_bottom)
6995 /* Nothing to restore. */
6997 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
6998 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
7000 /* Area is in range. */
7001 offset = cfun_frame_layout.frame_size;
7005 rtx insn, frame_off;
7007 offset = area_bottom < 0 ? -area_bottom : 0;
7008 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7010 if (DISP_IN_RANGE (INTVAL (frame_off)))
7012 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7013 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7014 insn = emit_insn (insn);
7018 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
7019 frame_off = force_const_mem (Pmode, frame_off);
7021 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7022 annotate_constant_pool_refs (&PATTERN (insn));
7026 /* Restore call saved fprs. */
7030 if (cfun_save_high_fprs_p)
7032 next_offset = cfun_frame_layout.f8_offset;
7033 for (i = 24; i < 32; i++)
7035 if (cfun_fpr_bit_p (i - 16))
7037 restore_fpr (frame_pointer,
7038 offset + next_offset, i);
7047 next_offset = cfun_frame_layout.f4_offset;
7048 for (i = 18; i < 20; i++)
7050 if (cfun_fpr_bit_p (i - 16))
7052 restore_fpr (frame_pointer,
7053 offset + next_offset, i);
7056 else if (TARGET_BACKCHAIN)
7062 /* Return register. */
7064 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7066 /* Restore call saved gprs. */
7068 if (cfun_frame_layout.first_restore_gpr != -1)
7073 /* Check for global register and save them
7074 to stack location from where they get restored. */
7076 for (i = cfun_frame_layout.first_restore_gpr;
7077 i <= cfun_frame_layout.last_restore_gpr;
7080 /* These registers are special and need to be
7081 restored in any case. */
7082 if (i == STACK_POINTER_REGNUM
7083 || i == RETURN_REGNUM
7085 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7090 addr = plus_constant (frame_pointer,
7091 offset + cfun_frame_layout.gprs_offset
7092 + (i - cfun_frame_layout.first_save_gpr)
7094 addr = gen_rtx_MEM (Pmode, addr);
7095 set_mem_alias_set (addr, s390_sr_alias_set);
7096 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7102 /* Fetch return address from stack before load multiple,
7103 this will do good for scheduling. */
7105 if (cfun_frame_layout.save_return_addr_p
7106 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7107 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7109 int return_regnum = find_unused_clobbered_reg();
7112 return_reg = gen_rtx_REG (Pmode, return_regnum);
7114 addr = plus_constant (frame_pointer,
7115 offset + cfun_frame_layout.gprs_offset
7117 - cfun_frame_layout.first_save_gpr)
7119 addr = gen_rtx_MEM (Pmode, addr);
7120 set_mem_alias_set (addr, s390_sr_alias_set);
7121 emit_move_insn (return_reg, addr);
7125 insn = restore_gprs (frame_pointer,
7126 offset + cfun_frame_layout.gprs_offset
7127 + (cfun_frame_layout.first_restore_gpr
7128 - cfun_frame_layout.first_save_gpr)
7130 cfun_frame_layout.first_restore_gpr,
7131 cfun_frame_layout.last_restore_gpr);
7138 /* Return to caller. */
7140 p = rtvec_alloc (2);
7142 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7143 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7144 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7149 /* Return the size in bytes of a function argument of
7150 type TYPE and/or mode MODE. At least one of TYPE or
7151 MODE must be specified. */
7154 s390_function_arg_size (enum machine_mode mode, tree type)
7157 return int_size_in_bytes (type);
7159 /* No type info available for some library calls ... */
7160 if (mode != BLKmode)
7161 return GET_MODE_SIZE (mode);
7163 /* If we have neither type nor mode, abort */
7167 /* Return true if a function argument of type TYPE and mode MODE
7168 is to be passed in a floating-point register, if available. */
7171 s390_function_arg_float (enum machine_mode mode, tree type)
7173 int size = s390_function_arg_size (mode, type);
7177 /* Soft-float changes the ABI: no floating-point registers are used. */
7178 if (TARGET_SOFT_FLOAT)
7181 /* No type info available for some library calls ... */
7183 return mode == SFmode || mode == DFmode;
7185 /* The ABI says that record types with a single member are treated
7186 just like that member would be. */
7187 while (TREE_CODE (type) == RECORD_TYPE)
7189 tree field, single = NULL_TREE;
7191 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7193 if (TREE_CODE (field) != FIELD_DECL)
7196 if (single == NULL_TREE)
7197 single = TREE_TYPE (field);
7202 if (single == NULL_TREE)
7208 return TREE_CODE (type) == REAL_TYPE;
7211 /* Return true if a function argument of type TYPE and mode MODE
7212 is to be passed in an integer register, or a pair of integer
7213 registers, if available. */
7216 s390_function_arg_integer (enum machine_mode mode, tree type)
7218 int size = s390_function_arg_size (mode, type);
7222 /* No type info available for some library calls ... */
7224 return GET_MODE_CLASS (mode) == MODE_INT
7225 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7227 /* We accept small integral (and similar) types. */
7228 if (INTEGRAL_TYPE_P (type)
7229 || POINTER_TYPE_P (type)
7230 || TREE_CODE (type) == OFFSET_TYPE
7231 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7234 /* We also accept structs of size 1, 2, 4, 8 that are not
7235 passed in floating-point registers. */
7236 if (AGGREGATE_TYPE_P (type)
7237 && exact_log2 (size) >= 0
7238 && !s390_function_arg_float (mode, type))
7244 /* Return 1 if a function argument of type TYPE and mode MODE
7245 is to be passed by reference. The ABI specifies that only
7246 structures of size 1, 2, 4, or 8 bytes are passed by value,
7247 all other structures (and complex numbers) are passed by
7251 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7252 enum machine_mode mode, tree type,
7253 bool named ATTRIBUTE_UNUSED)
7255 int size = s390_function_arg_size (mode, type);
7261 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7264 if (TREE_CODE (type) == COMPLEX_TYPE
7265 || TREE_CODE (type) == VECTOR_TYPE)
7272 /* Update the data in CUM to advance over an argument of mode MODE and
7273 data type TYPE. (TYPE is null for libcalls where that information
7274 may not be available.). The boolean NAMED specifies whether the
7275 argument is a named argument (as opposed to an unnamed argument
7276 matching an ellipsis). */
7279 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7280 tree type, int named ATTRIBUTE_UNUSED)
7282 if (s390_function_arg_float (mode, type))
7286 else if (s390_function_arg_integer (mode, type))
7288 int size = s390_function_arg_size (mode, type);
7289 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7295 /* Define where to put the arguments to a function.
7296 Value is zero to push the argument on the stack,
7297 or a hard register in which to store the argument.
7299 MODE is the argument's machine mode.
7300 TYPE is the data type of the argument (as a tree).
7301 This is null for libcalls where that information may
7303 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7304 the preceding args and about the function being called.
7305 NAMED is nonzero if this argument is a named parameter
7306 (otherwise it is an extra parameter matching an ellipsis).
7308 On S/390, we use general purpose registers 2 through 6 to
7309 pass integer, pointer, and certain structure arguments, and
7310 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7311 to pass floating point arguments. All remaining arguments
7312 are pushed to the stack. */
7315 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7316 int named ATTRIBUTE_UNUSED)
7318 if (s390_function_arg_float (mode, type))
7320 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
7323 return gen_rtx_REG (mode, cum->fprs + 16);
7325 else if (s390_function_arg_integer (mode, type))
7327 int size = s390_function_arg_size (mode, type);
7328 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7330 if (cum->gprs + n_gprs > 5)
7333 return gen_rtx_REG (mode, cum->gprs + 2);
7336 /* After the real arguments, expand_call calls us once again
7337 with a void_type_node type. Whatever we return here is
7338 passed as operand 2 to the call expanders.
7340 We don't need this feature ... */
7341 else if (type == void_type_node)
7347 /* Return true if return values of type TYPE should be returned
7348 in a memory buffer whose address is passed by the caller as
7349 hidden first argument. */
7352 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7354 /* We accept small integral (and similar) types. */
7355 if (INTEGRAL_TYPE_P (type)
7356 || POINTER_TYPE_P (type)
7357 || TREE_CODE (type) == OFFSET_TYPE
7358 || TREE_CODE (type) == REAL_TYPE)
7359 return int_size_in_bytes (type) > 8;
7361 /* Aggregates and similar constructs are always returned
7363 if (AGGREGATE_TYPE_P (type)
7364 || TREE_CODE (type) == COMPLEX_TYPE
7365 || TREE_CODE (type) == VECTOR_TYPE)
7368 /* ??? We get called on all sorts of random stuff from
7369 aggregate_value_p. We can't abort, but it's not clear
7370 what's safe to return. Pretend it's a struct I guess. */
7374 /* Define where to return a (scalar) value of type TYPE.
7375 If TYPE is null, define where to return a (scalar)
7376 value of mode MODE from a libcall. */
7379 s390_function_value (tree type, enum machine_mode mode)
7383 int unsignedp = TYPE_UNSIGNED (type);
7384 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7387 if (GET_MODE_CLASS (mode) != MODE_INT
7388 && GET_MODE_CLASS (mode) != MODE_FLOAT)
7390 if (GET_MODE_SIZE (mode) > 8)
7393 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7394 return gen_rtx_REG (mode, 16);
7396 return gen_rtx_REG (mode, 2);
7400 /* Create and return the va_list datatype.
7402 On S/390, va_list is an array type equivalent to
7404 typedef struct __va_list_tag
7408 void *__overflow_arg_area;
7409 void *__reg_save_area;
7412 where __gpr and __fpr hold the number of general purpose
7413 or floating point arguments used up to now, respectively,
7414 __overflow_arg_area points to the stack location of the
7415 next argument passed on the stack, and __reg_save_area
7416 always points to the start of the register area in the
7417 call frame of the current function. The function prologue
7418 saves all registers used for argument passing into this
7419 area if the function uses variable arguments. */
7422 s390_build_builtin_va_list (void)
7424 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7426 record = lang_hooks.types.make_type (RECORD_TYPE);
7429 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7431 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7432 long_integer_type_node);
7433 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7434 long_integer_type_node);
7435 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7437 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7440 DECL_FIELD_CONTEXT (f_gpr) = record;
7441 DECL_FIELD_CONTEXT (f_fpr) = record;
7442 DECL_FIELD_CONTEXT (f_ovf) = record;
7443 DECL_FIELD_CONTEXT (f_sav) = record;
7445 TREE_CHAIN (record) = type_decl;
7446 TYPE_NAME (record) = type_decl;
7447 TYPE_FIELDS (record) = f_gpr;
7448 TREE_CHAIN (f_gpr) = f_fpr;
7449 TREE_CHAIN (f_fpr) = f_ovf;
7450 TREE_CHAIN (f_ovf) = f_sav;
7452 layout_type (record);
7454 /* The correct type is an array type of one element. */
7455 return build_array_type (record, build_index_type (size_zero_node));
7458 /* Implement va_start by filling the va_list structure VALIST.
7459 STDARG_P is always true, and ignored.
7460 NEXTARG points to the first anonymous stack argument.
7462 The following global variables are used to initialize
7463 the va_list structure:
7465 current_function_args_info:
7466 holds number of gprs and fprs used for named arguments.
7467 current_function_arg_offset_rtx:
7468 holds the offset of the first anonymous stack argument
7469 (relative to the virtual arg pointer). */
7472 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7474 HOST_WIDE_INT n_gpr, n_fpr;
7476 tree f_gpr, f_fpr, f_ovf, f_sav;
7477 tree gpr, fpr, ovf, sav, t;
7479 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7480 f_fpr = TREE_CHAIN (f_gpr);
7481 f_ovf = TREE_CHAIN (f_fpr);
7482 f_sav = TREE_CHAIN (f_ovf);
7484 valist = build_va_arg_indirect_ref (valist);
7485 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7486 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7487 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7488 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7490 /* Count number of gp and fp argument registers used. */
7492 n_gpr = current_function_args_info.gprs;
7493 n_fpr = current_function_args_info.fprs;
7495 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
7496 build_int_cst (NULL_TREE, n_gpr));
7497 TREE_SIDE_EFFECTS (t) = 1;
7498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7500 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
7501 build_int_cst (NULL_TREE, n_fpr));
7502 TREE_SIDE_EFFECTS (t) = 1;
7503 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7505 /* Find the overflow area. */
7506 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7508 off = INTVAL (current_function_arg_offset_rtx);
7509 off = off < 0 ? 0 : off;
7510 if (TARGET_DEBUG_ARG)
7511 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7512 (int)n_gpr, (int)n_fpr, off);
7514 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7516 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
7517 TREE_SIDE_EFFECTS (t) = 1;
7518 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7520 /* Find the register save area. */
7521 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7522 if (TARGET_KERNEL_BACKCHAIN)
7523 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7524 build_int_cst (NULL_TREE,
7525 -(RETURN_REGNUM - 2) * UNITS_PER_WORD
7526 - (TARGET_64BIT ? 4 : 2) * 8));
7528 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7529 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7531 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
7532 TREE_SIDE_EFFECTS (t) = 1;
7533 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7536 /* Implement va_arg by updating the va_list structure
7537 VALIST as required to retrieve an argument of type
7538 TYPE, and returning that argument.
7540 Generates code equivalent to:
7542 if (integral value) {
7543 if (size <= 4 && args.gpr < 5 ||
7544 size > 4 && args.gpr < 4 )
7545 ret = args.reg_save_area[args.gpr+8]
7547 ret = *args.overflow_arg_area++;
7548 } else if (float value) {
7550 ret = args.reg_save_area[args.fpr+64]
7552 ret = *args.overflow_arg_area++;
7553 } else if (aggregate value) {
7555 ret = *args.reg_save_area[args.gpr]
7557 ret = **args.overflow_arg_area++;
7561 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
7562 tree *post_p ATTRIBUTE_UNUSED)
7564 tree f_gpr, f_fpr, f_ovf, f_sav;
7565 tree gpr, fpr, ovf, sav, reg, t, u;
7566 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
7567 tree lab_false, lab_over, addr;
7569 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7570 f_fpr = TREE_CHAIN (f_gpr);
7571 f_ovf = TREE_CHAIN (f_fpr);
7572 f_sav = TREE_CHAIN (f_ovf);
7574 valist = build_va_arg_indirect_ref (valist);
7575 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7576 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7577 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7578 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7580 size = int_size_in_bytes (type);
7582 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7584 if (TARGET_DEBUG_ARG)
7586 fprintf (stderr, "va_arg: aggregate type");
7590 /* Aggregates are passed by reference. */
7595 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7596 will be added by s390_frame_info because for va_args always an even
7597 number of gprs has to be saved r15-r2 = 14 regs. */
7598 sav_ofs = (TARGET_KERNEL_BACKCHAIN
7599 ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
7600 sav_scale = UNITS_PER_WORD;
7601 size = UNITS_PER_WORD;
7604 else if (s390_function_arg_float (TYPE_MODE (type), type))
7606 if (TARGET_DEBUG_ARG)
7608 fprintf (stderr, "va_arg: float type");
7612 /* FP args go in FP registers, if present. */
7616 sav_ofs = TARGET_KERNEL_BACKCHAIN ? 0 : 16 * UNITS_PER_WORD;
7618 /* TARGET_64BIT has up to 4 parameter in fprs */
7619 max_reg = TARGET_64BIT ? 3 : 1;
7623 if (TARGET_DEBUG_ARG)
7625 fprintf (stderr, "va_arg: other type");
7629 /* Otherwise into GP registers. */
7632 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7634 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7635 will be added by s390_frame_info because for va_args always an even
7636 number of gprs has to be saved r15-r2 = 14 regs. */
7637 sav_ofs = TARGET_KERNEL_BACKCHAIN ?
7638 (TARGET_64BIT ? 4 : 2) * 8 : 2*UNITS_PER_WORD;
7640 if (size < UNITS_PER_WORD)
7641 sav_ofs += UNITS_PER_WORD - size;
7643 sav_scale = UNITS_PER_WORD;
7650 /* Pull the value out of the saved registers ... */
7652 lab_false = create_artificial_label ();
7653 lab_over = create_artificial_label ();
7654 addr = create_tmp_var (ptr_type_node, "addr");
7656 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
7657 t = build2 (GT_EXPR, boolean_type_node, reg, t);
7658 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7659 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7660 gimplify_and_add (t, pre_p);
7662 t = build2 (PLUS_EXPR, ptr_type_node, sav,
7663 fold_convert (ptr_type_node, size_int (sav_ofs)));
7664 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
7665 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
7666 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
7668 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
7669 gimplify_and_add (t, pre_p);
7671 t = build1 (GOTO_EXPR, void_type_node, lab_over);
7672 gimplify_and_add (t, pre_p);
7674 t = build1 (LABEL_EXPR, void_type_node, lab_false);
7675 append_to_statement_list (t, pre_p);
7678 /* ... Otherwise out of the overflow area. */
7681 if (size < UNITS_PER_WORD)
7682 t = build2 (PLUS_EXPR, ptr_type_node, t,
7683 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
7685 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7687 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
7688 gimplify_and_add (u, pre_p);
7690 t = build2 (PLUS_EXPR, ptr_type_node, t,
7691 fold_convert (ptr_type_node, size_int (size)));
7692 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
7693 gimplify_and_add (t, pre_p);
7695 t = build1 (LABEL_EXPR, void_type_node, lab_over);
7696 append_to_statement_list (t, pre_p);
7699 /* Increment register save count. */
7701 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
7702 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
7703 gimplify_and_add (u, pre_p);
7707 t = build_pointer_type (build_pointer_type (type));
7708 addr = fold_convert (t, addr);
7709 addr = build_va_arg_indirect_ref (addr);
7713 t = build_pointer_type (type);
7714 addr = fold_convert (t, addr);
7717 return build_va_arg_indirect_ref (addr);
7725 S390_BUILTIN_THREAD_POINTER,
7726 S390_BUILTIN_SET_THREAD_POINTER,
7731 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
7736 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
7742 s390_init_builtins (void)
7746 ftype = build_function_type (ptr_type_node, void_list_node);
7747 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7748 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7751 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
7752 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7753 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7757 /* Expand an expression EXP that calls a built-in function,
7758 with result going to TARGET if that's convenient
7759 (and in mode MODE if that's convenient).
7760 SUBTARGET may be used as the target for computing one of EXP's operands.
7761 IGNORE is nonzero if the value is to be ignored. */
7764 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7765 enum machine_mode mode ATTRIBUTE_UNUSED,
7766 int ignore ATTRIBUTE_UNUSED)
7770 unsigned int const *code_for_builtin =
7771 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7773 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7774 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7775 tree arglist = TREE_OPERAND (exp, 1);
7776 enum insn_code icode;
7777 rtx op[MAX_ARGS], pat;
7781 if (fcode >= S390_BUILTIN_max)
7782 internal_error ("bad builtin fcode");
7783 icode = code_for_builtin[fcode];
7785 internal_error ("bad builtin fcode");
7787 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7789 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7791 arglist = TREE_CHAIN (arglist), arity++)
7793 const struct insn_operand_data *insn_op;
7795 tree arg = TREE_VALUE (arglist);
7796 if (arg == error_mark_node)
7798 if (arity > MAX_ARGS)
7801 insn_op = &insn_data[icode].operand[arity + nonvoid];
7803 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7805 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7806 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
7811 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7813 || GET_MODE (target) != tmode
7814 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7815 target = gen_reg_rtx (tmode);
7821 pat = GEN_FCN (icode) (target);
7825 pat = GEN_FCN (icode) (target, op[0]);
7827 pat = GEN_FCN (icode) (op[0]);
7830 pat = GEN_FCN (icode) (target, op[0], op[1]);
7846 /* Output assembly code for the trampoline template to
7849 On S/390, we use gpr 1 internally in the trampoline code;
7850 gpr 0 is used to hold the static chain. */
7853 s390_trampoline_template (FILE *file)
7856 op[0] = gen_rtx_REG (Pmode, 0);
7857 op[1] = gen_rtx_REG (Pmode, 1);
7861 output_asm_insn ("basr\t%1,0", op);
7862 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7863 output_asm_insn ("br\t%1", op);
7864 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
7868 output_asm_insn ("basr\t%1,0", op);
7869 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7870 output_asm_insn ("br\t%1", op);
7871 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
7875 /* Emit RTL insns to initialize the variable parts of a trampoline.
7876 FNADDR is an RTX for the address of the function's pure code.
7877 CXT is an RTX for the static chain value for the function. */
7880 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
7882 emit_move_insn (gen_rtx_MEM (Pmode,
7883 memory_address (Pmode,
7884 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
7885 emit_move_insn (gen_rtx_MEM (Pmode,
7886 memory_address (Pmode,
7887 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
7890 /* Return rtx for 64-bit constant formed from the 32-bit subwords
7891 LOW and HIGH, independent of the host word size. */
7894 s390_gen_rtx_const_DI (int high, int low)
7896 #if HOST_BITS_PER_WIDE_INT >= 64
7898 val = (HOST_WIDE_INT)high;
7900 val |= (HOST_WIDE_INT)low;
7902 return GEN_INT (val);
7904 #if HOST_BITS_PER_WIDE_INT >= 32
7905 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
7912 /* Output assembler code to FILE to increment profiler label # LABELNO
7913 for profiling a function entry. */
7916 s390_function_profiler (FILE *file, int labelno)
7921 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
7923 fprintf (file, "# function profiler \n");
7925 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
7926 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7927 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
7929 op[2] = gen_rtx_REG (Pmode, 1);
7930 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
7931 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
7933 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
7936 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
7937 op[4] = gen_rtx_CONST (Pmode, op[4]);
7942 output_asm_insn ("stg\t%0,%1", op);
7943 output_asm_insn ("larl\t%2,%3", op);
7944 output_asm_insn ("brasl\t%0,%4", op);
7945 output_asm_insn ("lg\t%0,%1", op);
7949 op[6] = gen_label_rtx ();
7951 output_asm_insn ("st\t%0,%1", op);
7952 output_asm_insn ("bras\t%2,%l6", op);
7953 output_asm_insn (".long\t%4", op);
7954 output_asm_insn (".long\t%3", op);
7955 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7956 output_asm_insn ("l\t%0,0(%2)", op);
7957 output_asm_insn ("l\t%2,4(%2)", op);
7958 output_asm_insn ("basr\t%0,%0", op);
7959 output_asm_insn ("l\t%0,%1", op);
7963 op[5] = gen_label_rtx ();
7964 op[6] = gen_label_rtx ();
7966 output_asm_insn ("st\t%0,%1", op);
7967 output_asm_insn ("bras\t%2,%l6", op);
7968 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
7969 output_asm_insn (".long\t%4-%l5", op);
7970 output_asm_insn (".long\t%3-%l5", op);
7971 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7972 output_asm_insn ("lr\t%0,%2", op);
7973 output_asm_insn ("a\t%0,0(%2)", op);
7974 output_asm_insn ("a\t%2,4(%2)", op);
7975 output_asm_insn ("basr\t%0,%0", op);
7976 output_asm_insn ("l\t%0,%1", op);
7980 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7981 into its SYMBOL_REF_FLAGS. */
7984 s390_encode_section_info (tree decl, rtx rtl, int first)
7986 default_encode_section_info (decl, rtl, first);
7988 /* If a variable has a forced alignment to < 2 bytes, mark it with
7989 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
7990 if (TREE_CODE (decl) == VAR_DECL
7991 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
7992 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
7995 /* Output thunk to FILE that implements a C++ virtual function call (with
7996 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7997 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7998 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7999 relative to the resulting this pointer. */
8002 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
8003 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8009 /* Operand 0 is the target function. */
8010 op[0] = XEXP (DECL_RTL (function), 0);
8011 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8014 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8015 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8016 op[0] = gen_rtx_CONST (Pmode, op[0]);
8019 /* Operand 1 is the 'this' pointer. */
8020 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8021 op[1] = gen_rtx_REG (Pmode, 3);
8023 op[1] = gen_rtx_REG (Pmode, 2);
8025 /* Operand 2 is the delta. */
8026 op[2] = GEN_INT (delta);
8028 /* Operand 3 is the vcall_offset. */
8029 op[3] = GEN_INT (vcall_offset);
8031 /* Operand 4 is the temporary register. */
8032 op[4] = gen_rtx_REG (Pmode, 1);
8034 /* Operands 5 to 8 can be used as labels. */
8040 /* Operand 9 can be used for temporary register. */
8043 /* Generate code. */
8046 /* Setup literal pool pointer if required. */
8047 if ((!DISP_IN_RANGE (delta)
8048 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8049 || (!DISP_IN_RANGE (vcall_offset)
8050 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8052 op[5] = gen_label_rtx ();
8053 output_asm_insn ("larl\t%4,%5", op);
8056 /* Add DELTA to this pointer. */
8059 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8060 output_asm_insn ("la\t%1,%2(%1)", op);
8061 else if (DISP_IN_RANGE (delta))
8062 output_asm_insn ("lay\t%1,%2(%1)", op);
8063 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8064 output_asm_insn ("aghi\t%1,%2", op);
8067 op[6] = gen_label_rtx ();
8068 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8072 /* Perform vcall adjustment. */
8075 if (DISP_IN_RANGE (vcall_offset))
8077 output_asm_insn ("lg\t%4,0(%1)", op);
8078 output_asm_insn ("ag\t%1,%3(%4)", op);
8080 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8082 output_asm_insn ("lghi\t%4,%3", op);
8083 output_asm_insn ("ag\t%4,0(%1)", op);
8084 output_asm_insn ("ag\t%1,0(%4)", op);
8088 op[7] = gen_label_rtx ();
8089 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8090 output_asm_insn ("ag\t%4,0(%1)", op);
8091 output_asm_insn ("ag\t%1,0(%4)", op);
8095 /* Jump to target. */
8096 output_asm_insn ("jg\t%0", op);
8098 /* Output literal pool if required. */
8101 output_asm_insn (".align\t4", op);
8102 targetm.asm_out.internal_label (file, "L",
8103 CODE_LABEL_NUMBER (op[5]));
8107 targetm.asm_out.internal_label (file, "L",
8108 CODE_LABEL_NUMBER (op[6]));
8109 output_asm_insn (".long\t%2", op);
8113 targetm.asm_out.internal_label (file, "L",
8114 CODE_LABEL_NUMBER (op[7]));
8115 output_asm_insn (".long\t%3", op);
8120 /* Setup base pointer if required. */
8122 || (!DISP_IN_RANGE (delta)
8123 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8124 || (!DISP_IN_RANGE (delta)
8125 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8127 op[5] = gen_label_rtx ();
8128 output_asm_insn ("basr\t%4,0", op);
8129 targetm.asm_out.internal_label (file, "L",
8130 CODE_LABEL_NUMBER (op[5]));
8133 /* Add DELTA to this pointer. */
8136 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8137 output_asm_insn ("la\t%1,%2(%1)", op);
8138 else if (DISP_IN_RANGE (delta))
8139 output_asm_insn ("lay\t%1,%2(%1)", op);
8140 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8141 output_asm_insn ("ahi\t%1,%2", op);
8144 op[6] = gen_label_rtx ();
8145 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8149 /* Perform vcall adjustment. */
8152 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
8154 output_asm_insn ("lg\t%4,0(%1)", op);
8155 output_asm_insn ("a\t%1,%3(%4)", op);
8157 else if (DISP_IN_RANGE (vcall_offset))
8159 output_asm_insn ("lg\t%4,0(%1)", op);
8160 output_asm_insn ("ay\t%1,%3(%4)", op);
8162 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8164 output_asm_insn ("lhi\t%4,%3", op);
8165 output_asm_insn ("a\t%4,0(%1)", op);
8166 output_asm_insn ("a\t%1,0(%4)", op);
8170 op[7] = gen_label_rtx ();
8171 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8172 output_asm_insn ("a\t%4,0(%1)", op);
8173 output_asm_insn ("a\t%1,0(%4)", op);
8176 /* We had to clobber the base pointer register.
8177 Re-setup the base pointer (with a different base). */
8178 op[5] = gen_label_rtx ();
8179 output_asm_insn ("basr\t%4,0", op);
8180 targetm.asm_out.internal_label (file, "L",
8181 CODE_LABEL_NUMBER (op[5]));
8184 /* Jump to target. */
8185 op[8] = gen_label_rtx ();
8188 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8190 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8191 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8192 else if (flag_pic == 1)
8194 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8195 output_asm_insn ("l\t%4,%0(%4)", op);
8197 else if (flag_pic == 2)
8199 op[9] = gen_rtx_REG (Pmode, 0);
8200 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8201 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8202 output_asm_insn ("ar\t%4,%9", op);
8203 output_asm_insn ("l\t%4,0(%4)", op);
8206 output_asm_insn ("br\t%4", op);
8208 /* Output literal pool. */
8209 output_asm_insn (".align\t4", op);
8211 if (nonlocal && flag_pic == 2)
8212 output_asm_insn (".long\t%0", op);
8215 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8216 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8219 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8221 output_asm_insn (".long\t%0", op);
8223 output_asm_insn (".long\t%0-%5", op);
8227 targetm.asm_out.internal_label (file, "L",
8228 CODE_LABEL_NUMBER (op[6]));
8229 output_asm_insn (".long\t%2", op);
8233 targetm.asm_out.internal_label (file, "L",
8234 CODE_LABEL_NUMBER (op[7]));
8235 output_asm_insn (".long\t%3", op);
8241 s390_valid_pointer_mode (enum machine_mode mode)
8243 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8246 /* How to allocate a 'struct machine_function'. */
8248 static struct machine_function *
8249 s390_init_machine_status (void)
8251 return ggc_alloc_cleared (sizeof (struct machine_function));
8254 /* Checks whether the given ARGUMENT_LIST would use a caller
8255 saved register. This is used to decide whether sibling call
8256 optimization could be performed on the respective function
8260 s390_call_saved_register_used (tree argument_list)
8262 CUMULATIVE_ARGS cum;
8264 enum machine_mode mode;
8269 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8271 while (argument_list)
8273 parameter = TREE_VALUE (argument_list);
8274 argument_list = TREE_CHAIN (argument_list);
8279 /* For an undeclared variable passed as parameter we will get
8280 an ERROR_MARK node here. */
8281 if (TREE_CODE (parameter) == ERROR_MARK)
8284 if (! (type = TREE_TYPE (parameter)))
8287 if (! (mode = TYPE_MODE (TREE_TYPE (parameter))))
8290 if (pass_by_reference (&cum, mode, type, true))
8293 type = build_pointer_type (type);
8296 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8298 s390_function_arg_advance (&cum, mode, type, 0);
8300 if (parm_rtx && REG_P (parm_rtx))
8303 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8305 if (! call_used_regs[reg + REGNO (parm_rtx)])
8312 /* Return true if the given call expression can be
8313 turned into a sibling call.
8314 DECL holds the declaration of the function to be called whereas
8315 EXP is the call expression itself. */
8318 s390_function_ok_for_sibcall (tree decl, tree exp)
8320 /* The TPF epilogue uses register 1. */
8321 if (TARGET_TPF_PROFILING)
8324 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8325 which would have to be restored before the sibcall. */
8326 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
8329 /* Register 6 on s390 is available as an argument register but unfortunately
8330 "caller saved". This makes functions needing this register for arguments
8331 not suitable for sibcalls. */
8332 if (TREE_OPERAND (exp, 1)
8333 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
8339 /* Return the fixed registers used for condition codes. */
8342 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
8345 *p2 = INVALID_REGNUM;
8350 /* This function is used by the call expanders of the machine description.
8351 It emits the call insn itself together with the necessary operations
8352 to adjust the target address and returns the emitted insn.
8353 ADDR_LOCATION is the target address rtx
8354 TLS_CALL the location of the thread-local symbol
8355 RESULT_REG the register where the result of the call should be stored
8356 RETADDR_REG the register where the return address should be stored
8357 If this parameter is NULL_RTX the call is considered
8358 to be a sibling call. */
8361 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8364 bool plt_call = false;
8370 /* Direct function calls need special treatment. */
8371 if (GET_CODE (addr_location) == SYMBOL_REF)
8373 /* When calling a global routine in PIC mode, we must
8374 replace the symbol itself with the PLT stub. */
8375 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8377 addr_location = gen_rtx_UNSPEC (Pmode,
8378 gen_rtvec (1, addr_location),
8380 addr_location = gen_rtx_CONST (Pmode, addr_location);
8384 /* Unless we can use the bras(l) insn, force the
8385 routine address into a register. */
8386 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8389 addr_location = legitimize_pic_address (addr_location, 0);
8391 addr_location = force_reg (Pmode, addr_location);
8395 /* If it is already an indirect call or the code above moved the
8396 SYMBOL_REF to somewhere else make sure the address can be found in
8398 if (retaddr_reg == NULL_RTX
8399 && GET_CODE (addr_location) != SYMBOL_REF
8402 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8403 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
8406 addr_location = gen_rtx_MEM (QImode, addr_location);
8407 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8409 if (result_reg != NULL_RTX)
8410 call = gen_rtx_SET (VOIDmode, result_reg, call);
8412 if (retaddr_reg != NULL_RTX)
8414 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8416 if (tls_call != NULL_RTX)
8417 vec = gen_rtvec (3, call, clobber,
8418 gen_rtx_USE (VOIDmode, tls_call));
8420 vec = gen_rtvec (2, call, clobber);
8422 call = gen_rtx_PARALLEL (VOIDmode, vec);
8425 insn = emit_call_insn (call);
8427 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8428 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8430 /* s390_function_ok_for_sibcall should
8431 have denied sibcalls in this case. */
8432 if (retaddr_reg == NULL_RTX)
8435 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8440 /* Implement CONDITIONAL_REGISTER_USAGE. */
8443 s390_conditional_register_usage (void)
8449 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8450 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8452 if (TARGET_CPU_ZARCH)
8454 fixed_regs[RETURN_REGNUM] = 0;
8455 call_used_regs[RETURN_REGNUM] = 0;
8459 for (i = 24; i < 32; i++)
8460 call_used_regs[i] = call_really_used_regs[i] = 0;
8464 for (i = 18; i < 20; i++)
8465 call_used_regs[i] = call_really_used_regs[i] = 0;
8468 if (TARGET_SOFT_FLOAT)
8470 for (i = 16; i < 32; i++)
8471 call_used_regs[i] = fixed_regs[i] = 1;
8475 /* Corresponding function to eh_return expander. */
8477 static GTY(()) rtx s390_tpf_eh_return_symbol;
8479 s390_emit_tpf_eh_return (rtx target)
8483 if (!s390_tpf_eh_return_symbol)
8484 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8486 reg = gen_rtx_REG (Pmode, 2);
8488 emit_move_insn (reg, target);
8489 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8490 gen_rtx_REG (Pmode, RETURN_REGNUM));
8491 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8493 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
8496 #include "gt-s390.h"