1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
59 static bool s390_assemble_integer (rtx, unsigned int, int);
60 static void s390_encode_section_info (tree, rtx, int);
61 static bool s390_cannot_force_const_mem (rtx);
62 static rtx s390_delegitimize_address (rtx);
63 static bool s390_return_in_memory (tree, tree);
64 static void s390_init_builtins (void);
65 static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
66 static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
68 static enum attr_type s390_safe_attr_type (rtx);
70 static int s390_adjust_priority (rtx, int);
71 static int s390_issue_rate (void);
72 static int s390_first_cycle_multipass_dfa_lookahead (void);
73 static bool s390_cannot_copy_insn_p (rtx);
74 static bool s390_rtx_costs (rtx, int, int, int *);
75 static int s390_address_cost (rtx);
76 static void s390_reorg (void);
77 static bool s390_valid_pointer_mode (enum machine_mode);
78 static tree s390_build_builtin_va_list (void);
79 static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
80 static bool s390_function_ok_for_sibcall (tree, tree);
81 static bool s390_call_saved_register_used (tree);
82 static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
85 #undef TARGET_ASM_ALIGNED_HI_OP
86 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
87 #undef TARGET_ASM_ALIGNED_DI_OP
88 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
89 #undef TARGET_ASM_INTEGER
90 #define TARGET_ASM_INTEGER s390_assemble_integer
92 #undef TARGET_ASM_OPEN_PAREN
93 #define TARGET_ASM_OPEN_PAREN ""
95 #undef TARGET_ASM_CLOSE_PAREN
96 #define TARGET_ASM_CLOSE_PAREN ""
98 #undef TARGET_ENCODE_SECTION_INFO
99 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
102 #undef TARGET_HAVE_TLS
103 #define TARGET_HAVE_TLS true
105 #undef TARGET_CANNOT_FORCE_CONST_MEM
106 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
108 #undef TARGET_DELEGITIMIZE_ADDRESS
109 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
111 #undef TARGET_RETURN_IN_MEMORY
112 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
114 #undef TARGET_INIT_BUILTINS
115 #define TARGET_INIT_BUILTINS s390_init_builtins
116 #undef TARGET_EXPAND_BUILTIN
117 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
119 #undef TARGET_ASM_OUTPUT_MI_THUNK
120 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
121 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
122 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
124 #undef TARGET_SCHED_ADJUST_PRIORITY
125 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
126 #undef TARGET_SCHED_ISSUE_RATE
127 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
128 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
129 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
131 #undef TARGET_CANNOT_COPY_INSN_P
132 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
133 #undef TARGET_RTX_COSTS
134 #define TARGET_RTX_COSTS s390_rtx_costs
135 #undef TARGET_ADDRESS_COST
136 #define TARGET_ADDRESS_COST s390_address_cost
138 #undef TARGET_MACHINE_DEPENDENT_REORG
139 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
141 #undef TARGET_VALID_POINTER_MODE
142 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
144 #undef TARGET_BUILD_BUILTIN_VA_LIST
145 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
146 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
147 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
149 #undef TARGET_PROMOTE_FUNCTION_ARGS
150 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
151 #undef TARGET_PROMOTE_FUNCTION_RETURN
152 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
153 #undef TARGET_PASS_BY_REFERENCE
154 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
156 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
157 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
159 struct gcc_target targetm = TARGET_INITIALIZER;
161 extern int reload_completed;
163 /* The alias set for prologue/epilogue register save/restore. */
164 static int s390_sr_alias_set = 0;
166 /* Save information from a "cmpxx" operation until the branch or scc is
168 rtx s390_compare_op0, s390_compare_op1;
170 /* Structure used to hold the components of a S/390 memory
171 address. A legitimate address on S/390 is of the general
173 base + index + displacement
174 where any of the components is optional.
176 base and index are registers of the class ADDR_REGS,
177 displacement is an unsigned 12-bit immediate constant. */
187 /* Which cpu are we tuning for. */
188 enum processor_type s390_tune;
189 enum processor_flags s390_tune_flags;
190 /* Which instruction set architecture to use. */
191 enum processor_type s390_arch;
192 enum processor_flags s390_arch_flags;
194 /* Strings to hold which cpu and instruction set architecture to use. */
195 const char *s390_tune_string; /* for -mtune=<xxx> */
196 const char *s390_arch_string; /* for -march=<xxx> */
198 /* String to specify backchain mode:
199 "" no-backchain, "1" backchain, "2" kernel-backchain. */
200 const char *s390_backchain_string = TARGET_DEFAULT_BACKCHAIN;
202 const char *s390_warn_framesize_string;
203 const char *s390_warn_dynamicstack_string;
204 const char *s390_stack_size_string;
205 const char *s390_stack_guard_string;
207 HOST_WIDE_INT s390_warn_framesize = 0;
208 bool s390_warn_dynamicstack_p = 0;
209 HOST_WIDE_INT s390_stack_size = 0;
210 HOST_WIDE_INT s390_stack_guard = 0;
212 /* The following structure is embedded in the machine
213 specific part of struct function. */
215 struct s390_frame_layout GTY (())
217 /* Offset within stack frame. */
218 HOST_WIDE_INT gprs_offset;
219 HOST_WIDE_INT f0_offset;
220 HOST_WIDE_INT f4_offset;
221 HOST_WIDE_INT f8_offset;
222 HOST_WIDE_INT backchain_offset;
224 /* Number of first and last gpr to be saved, restored. */
226 int first_restore_gpr;
228 int last_restore_gpr;
230 /* Bits standing for floating point registers. Set, if the
231 respective register has to be saved. Starting with reg 16 (f0)
232 at the rightmost bit.
233 Bit 15 - 8 7 6 5 4 3 2 1 0
234 fpr 15 - 8 7 5 3 1 6 4 2 0
235 reg 31 - 24 23 22 21 20 19 18 17 16 */
236 unsigned int fpr_bitmap;
238 /* Number of floating point registers f8-f15 which must be saved. */
241 /* Set if return address needs to be saved. */
242 bool save_return_addr_p;
244 /* Set if backchain needs to be saved. */
245 bool save_backchain_p;
247 /* Size of stack frame. */
248 HOST_WIDE_INT frame_size;
251 /* Define the structure for the machine field in struct function. */
253 struct machine_function GTY(())
255 struct s390_frame_layout frame_layout;
257 /* Literal pool base register. */
260 /* True if we may need to perform branch splitting. */
261 bool split_branches_pending_p;
263 /* Some local-dynamic TLS symbol name. */
264 const char *some_ld_name;
267 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
269 #define cfun_frame_layout (cfun->machine->frame_layout)
270 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
271 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
272 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
273 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
275 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
278 static int s390_match_ccmode_set (rtx, enum machine_mode);
279 static int s390_branch_condition_mask (rtx);
280 static const char *s390_branch_condition_mnemonic (rtx, int);
281 static int check_mode (rtx, enum machine_mode *);
282 static int s390_short_displacement (rtx);
283 static int s390_decompose_address (rtx, struct s390_address *);
284 static rtx get_thread_pointer (void);
285 static rtx legitimize_tls_address (rtx, rtx);
286 static void print_shift_count_operand (FILE *, rtx);
287 static const char *get_some_local_dynamic_name (void);
288 static int get_some_local_dynamic_name_1 (rtx *, void *);
289 static int reg_used_in_mem_p (int, rtx);
290 static int addr_generation_dependency_p (rtx, rtx);
291 static int s390_split_branches (void);
292 static void annotate_constant_pool_refs (rtx *x);
293 static void find_constant_pool_ref (rtx, rtx *);
294 static void replace_constant_pool_ref (rtx *, rtx, rtx);
295 static rtx find_ltrel_base (rtx);
296 static void replace_ltrel_base (rtx *);
297 static void s390_optimize_prologue (void);
298 static int find_unused_clobbered_reg (void);
299 static void s390_frame_area (int *, int *);
300 static void s390_register_info (int []);
301 static void s390_frame_info (void);
302 static void s390_init_frame_layout (void);
303 static void s390_update_frame_layout (void);
304 static rtx save_fpr (rtx, int, int);
305 static rtx restore_fpr (rtx, int, int);
306 static rtx save_gprs (rtx, int, int, int);
307 static rtx restore_gprs (rtx, int, int, int);
308 static int s390_function_arg_size (enum machine_mode, tree);
309 static bool s390_function_arg_float (enum machine_mode, tree);
310 static struct machine_function * s390_init_machine_status (void);
312 /* Check whether integer displacement is in range. */
313 #define DISP_IN_RANGE(d) \
314 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
315 : ((d) >= 0 && (d) <= 4095))
317 /* Return true if SET either doesn't set the CC register, or else
318 the source and destination have matching CC modes and that
319 CC mode is at least as constrained as REQ_MODE. */
322 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
324 enum machine_mode set_mode;
326 if (GET_CODE (set) != SET)
329 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
332 set_mode = GET_MODE (SET_DEST (set));
346 if (req_mode != set_mode)
351 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
352 && req_mode != CCSRmode && req_mode != CCURmode)
358 if (req_mode != CCAmode)
366 return (GET_MODE (SET_SRC (set)) == set_mode);
369 /* Return true if every SET in INSN that sets the CC register
370 has source and destination with matching CC modes and that
371 CC mode is at least as constrained as REQ_MODE.
372 If REQ_MODE is VOIDmode, always return false. */
375 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
379 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
380 if (req_mode == VOIDmode)
383 if (GET_CODE (PATTERN (insn)) == SET)
384 return s390_match_ccmode_set (PATTERN (insn), req_mode);
386 if (GET_CODE (PATTERN (insn)) == PARALLEL)
387 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
389 rtx set = XVECEXP (PATTERN (insn), 0, i);
390 if (GET_CODE (set) == SET)
391 if (!s390_match_ccmode_set (set, req_mode))
398 /* If a test-under-mask instruction can be used to implement
399 (compare (and ... OP1) OP2), return the CC mode required
400 to do that. Otherwise, return VOIDmode.
401 MIXED is true if the instruction can distinguish between
402 CC1 and CC2 for mixed selected bits (TMxx), it is false
403 if the instruction cannot (TM). */
406 s390_tm_ccmode (rtx op1, rtx op2, int mixed)
410 /* ??? Fixme: should work on CONST_DOUBLE as well. */
411 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
414 /* Selected bits all zero: CC0. */
415 if (INTVAL (op2) == 0)
418 /* Selected bits all one: CC3. */
419 if (INTVAL (op2) == INTVAL (op1))
422 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
425 bit1 = exact_log2 (INTVAL (op2));
426 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
427 if (bit0 != -1 && bit1 != -1)
428 return bit0 > bit1 ? CCT1mode : CCT2mode;
434 /* Given a comparison code OP (EQ, NE, etc.) and the operands
435 OP0 and OP1 of a COMPARE, return the mode to be used for the
439 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
445 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
446 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
448 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
449 || GET_CODE (op1) == NEG)
450 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
453 if (GET_CODE (op0) == AND)
455 /* Check whether we can potentially do it via TM. */
456 enum machine_mode ccmode;
457 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
458 if (ccmode != VOIDmode)
460 /* Relax CCTmode to CCZmode to allow fall-back to AND
461 if that turns out to be beneficial. */
462 return ccmode == CCTmode ? CCZmode : ccmode;
466 if (register_operand (op0, HImode)
467 && GET_CODE (op1) == CONST_INT
468 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
470 if (register_operand (op0, QImode)
471 && GET_CODE (op1) == CONST_INT
472 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
481 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
482 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
484 if (INTVAL (XEXP((op0), 1)) < 0)
497 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
498 && GET_CODE (op1) != CONST_INT)
504 if (GET_CODE (op0) == PLUS
505 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
508 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
509 && GET_CODE (op1) != CONST_INT)
515 if (GET_CODE (op0) == MINUS
516 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
519 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
520 && GET_CODE (op1) != CONST_INT)
529 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
530 that we can implement more efficiently. */
533 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
535 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
536 if ((*code == EQ || *code == NE)
537 && *op1 == const0_rtx
538 && GET_CODE (*op0) == ZERO_EXTRACT
539 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
540 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
541 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
543 rtx inner = XEXP (*op0, 0);
544 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
545 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
546 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
548 if (len > 0 && len < modesize
549 && pos >= 0 && pos + len <= modesize
550 && modesize <= HOST_BITS_PER_WIDE_INT)
552 unsigned HOST_WIDE_INT block;
553 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
554 block <<= modesize - pos - len;
556 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
557 gen_int_mode (block, GET_MODE (inner)));
561 /* Narrow AND of memory against immediate to enable TM. */
562 if ((*code == EQ || *code == NE)
563 && *op1 == const0_rtx
564 && GET_CODE (*op0) == AND
565 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
566 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
568 rtx inner = XEXP (*op0, 0);
569 rtx mask = XEXP (*op0, 1);
571 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
572 if (GET_CODE (inner) == SUBREG
573 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
574 && (GET_MODE_SIZE (GET_MODE (inner))
575 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
577 & GET_MODE_MASK (GET_MODE (inner))
578 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
580 inner = SUBREG_REG (inner);
582 /* Do not change volatile MEMs. */
583 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
585 int part = s390_single_part (XEXP (*op0, 1),
586 GET_MODE (inner), QImode, 0);
589 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
590 inner = adjust_address_nv (inner, QImode, part);
591 *op0 = gen_rtx_AND (QImode, inner, mask);
596 /* Narrow comparisons against 0xffff to HImode if possible. */
597 if ((*code == EQ || *code == NE)
598 && GET_CODE (*op1) == CONST_INT
599 && INTVAL (*op1) == 0xffff
600 && SCALAR_INT_MODE_P (GET_MODE (*op0))
601 && (nonzero_bits (*op0, GET_MODE (*op0))
602 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
604 *op0 = gen_lowpart (HImode, *op0);
609 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
610 if (GET_CODE (*op0) == UNSPEC
611 && XINT (*op0, 1) == UNSPEC_CMPINT
612 && XVECLEN (*op0, 0) == 1
613 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
614 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
615 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
616 && *op1 == const0_rtx)
618 enum rtx_code new_code = UNKNOWN;
621 case EQ: new_code = EQ; break;
622 case NE: new_code = NE; break;
623 case LT: new_code = LTU; break;
624 case GT: new_code = GTU; break;
625 case LE: new_code = LEU; break;
626 case GE: new_code = GEU; break;
630 if (new_code != UNKNOWN)
632 *op0 = XVECEXP (*op0, 0, 0);
638 /* Emit a compare instruction suitable to implement the comparison
639 OP0 CODE OP1. Return the correct condition RTL to be placed in
640 the IF_THEN_ELSE of the conditional branch testing the result. */
643 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
645 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
646 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
648 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
649 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
652 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
653 unconditional jump, else a conditional jump under condition COND. */
656 s390_emit_jump (rtx target, rtx cond)
660 target = gen_rtx_LABEL_REF (VOIDmode, target);
662 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
664 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
665 emit_jump_insn (insn);
668 /* Return nonzero if OP is a valid comparison operator
669 for a branch condition in mode MODE. */
672 s390_comparison (rtx op, enum machine_mode mode)
674 if (mode != VOIDmode && mode != GET_MODE (op))
677 if (!COMPARISON_P (op))
680 if (GET_CODE (XEXP (op, 0)) != REG
681 || REGNO (XEXP (op, 0)) != CC_REGNUM
682 || XEXP (op, 1) != const0_rtx)
685 return s390_branch_condition_mask (op) >= 0;
688 /* Return nonzero if OP is a valid comparison operator
689 for an ALC condition in mode MODE. */
692 s390_alc_comparison (rtx op, enum machine_mode mode)
694 if (mode != VOIDmode && mode != GET_MODE (op))
697 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
700 if (!COMPARISON_P (op))
703 if (GET_CODE (XEXP (op, 0)) != REG
704 || REGNO (XEXP (op, 0)) != CC_REGNUM
705 || XEXP (op, 1) != const0_rtx)
708 switch (GET_MODE (XEXP (op, 0)))
711 return GET_CODE (op) == LTU;
714 return GET_CODE (op) == LEU;
717 return GET_CODE (op) == GEU;
720 return GET_CODE (op) == GTU;
723 return GET_CODE (op) == LTU;
726 return GET_CODE (op) == UNGT;
729 return GET_CODE (op) == UNLT;
736 /* Return nonzero if OP is a valid comparison operator
737 for an SLB condition in mode MODE. */
740 s390_slb_comparison (rtx op, enum machine_mode mode)
742 if (mode != VOIDmode && mode != GET_MODE (op))
745 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
748 if (!COMPARISON_P (op))
751 if (GET_CODE (XEXP (op, 0)) != REG
752 || REGNO (XEXP (op, 0)) != CC_REGNUM
753 || XEXP (op, 1) != const0_rtx)
756 switch (GET_MODE (XEXP (op, 0)))
759 return GET_CODE (op) == GEU;
762 return GET_CODE (op) == GTU;
765 return GET_CODE (op) == LTU;
768 return GET_CODE (op) == LEU;
771 return GET_CODE (op) == GEU;
774 return GET_CODE (op) == LE;
777 return GET_CODE (op) == GE;
784 /* Return branch condition mask to implement a branch
785 specified by CODE. Return -1 for invalid comparisons. */
788 s390_branch_condition_mask (rtx code)
790 const int CC0 = 1 << 3;
791 const int CC1 = 1 << 2;
792 const int CC2 = 1 << 1;
793 const int CC3 = 1 << 0;
795 if (GET_CODE (XEXP (code, 0)) != REG
796 || REGNO (XEXP (code, 0)) != CC_REGNUM
797 || XEXP (code, 1) != const0_rtx)
800 switch (GET_MODE (XEXP (code, 0)))
803 switch (GET_CODE (code))
806 case NE: return CC1 | CC2 | CC3;
812 switch (GET_CODE (code))
815 case NE: return CC0 | CC2 | CC3;
821 switch (GET_CODE (code))
824 case NE: return CC0 | CC1 | CC3;
830 switch (GET_CODE (code))
833 case NE: return CC0 | CC1 | CC2;
839 switch (GET_CODE (code))
841 case EQ: return CC0 | CC2;
842 case NE: return CC1 | CC3;
848 switch (GET_CODE (code))
850 case LTU: return CC2 | CC3; /* carry */
851 case GEU: return CC0 | CC1; /* no carry */
857 switch (GET_CODE (code))
859 case GTU: return CC0 | CC1; /* borrow */
860 case LEU: return CC2 | CC3; /* no borrow */
866 switch (GET_CODE (code))
868 case EQ: return CC0 | CC2;
869 case NE: return CC1 | CC3;
870 case LTU: return CC1;
871 case GTU: return CC3;
872 case LEU: return CC1 | CC2;
873 case GEU: return CC2 | CC3;
878 switch (GET_CODE (code))
881 case NE: return CC1 | CC2 | CC3;
882 case LTU: return CC1;
883 case GTU: return CC2;
884 case LEU: return CC0 | CC1;
885 case GEU: return CC0 | CC2;
891 switch (GET_CODE (code))
894 case NE: return CC2 | CC1 | CC3;
895 case LTU: return CC2;
896 case GTU: return CC1;
897 case LEU: return CC0 | CC2;
898 case GEU: return CC0 | CC1;
904 switch (GET_CODE (code))
907 case NE: return CC1 | CC2 | CC3;
908 case LT: return CC1 | CC3;
910 case LE: return CC0 | CC1 | CC3;
911 case GE: return CC0 | CC2;
917 switch (GET_CODE (code))
920 case NE: return CC1 | CC2 | CC3;
922 case GT: return CC2 | CC3;
923 case LE: return CC0 | CC1;
924 case GE: return CC0 | CC2 | CC3;
930 switch (GET_CODE (code))
933 case NE: return CC1 | CC2 | CC3;
936 case LE: return CC0 | CC1;
937 case GE: return CC0 | CC2;
938 case UNORDERED: return CC3;
939 case ORDERED: return CC0 | CC1 | CC2;
940 case UNEQ: return CC0 | CC3;
941 case UNLT: return CC1 | CC3;
942 case UNGT: return CC2 | CC3;
943 case UNLE: return CC0 | CC1 | CC3;
944 case UNGE: return CC0 | CC2 | CC3;
945 case LTGT: return CC1 | CC2;
951 switch (GET_CODE (code))
954 case NE: return CC2 | CC1 | CC3;
957 case LE: return CC0 | CC2;
958 case GE: return CC0 | CC1;
959 case UNORDERED: return CC3;
960 case ORDERED: return CC0 | CC2 | CC1;
961 case UNEQ: return CC0 | CC3;
962 case UNLT: return CC2 | CC3;
963 case UNGT: return CC1 | CC3;
964 case UNLE: return CC0 | CC2 | CC3;
965 case UNGE: return CC0 | CC1 | CC3;
966 case LTGT: return CC2 | CC1;
976 /* If INV is false, return assembler mnemonic string to implement
977 a branch specified by CODE. If INV is true, return mnemonic
978 for the corresponding inverted branch. */
981 s390_branch_condition_mnemonic (rtx code, int inv)
983 static const char *const mnemonic[16] =
985 NULL, "o", "h", "nle",
986 "l", "nhe", "lh", "ne",
987 "e", "nlh", "he", "nl",
988 "le", "nh", "no", NULL
991 int mask = s390_branch_condition_mask (code);
992 gcc_assert (mask >= 0);
997 if (mask < 1 || mask > 14)
1000 return mnemonic[mask];
1003 /* Return the part of op which has a value different from def.
1004 The size of the part is determined by mode.
1005 Use this function only if you already know that op really
1006 contains such a part. */
1008 unsigned HOST_WIDE_INT
1009 s390_extract_part (rtx op, enum machine_mode mode, int def)
1011 unsigned HOST_WIDE_INT value = 0;
1012 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1013 int part_bits = GET_MODE_BITSIZE (mode);
1014 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
1017 for (i = 0; i < max_parts; i++)
1020 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1022 value >>= part_bits;
1024 if ((value & part_mask) != (def & part_mask))
1025 return value & part_mask;
1031 /* If OP is an integer constant of mode MODE with exactly one
1032 part of mode PART_MODE unequal to DEF, return the number of that
1033 part. Otherwise, return -1. */
1036 s390_single_part (rtx op,
1037 enum machine_mode mode,
1038 enum machine_mode part_mode,
1041 unsigned HOST_WIDE_INT value = 0;
1042 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1043 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
1046 if (GET_CODE (op) != CONST_INT)
1049 for (i = 0; i < n_parts; i++)
1052 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1054 value >>= GET_MODE_BITSIZE (part_mode);
1056 if ((value & part_mask) != (def & part_mask))
1064 return part == -1 ? -1 : n_parts - 1 - part;
1067 /* Check whether we can (and want to) split a double-word
1068 move in mode MODE from SRC to DST into two single-word
1069 moves, moving the subword FIRST_SUBWORD first. */
1072 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1074 /* Floating point registers cannot be split. */
1075 if (FP_REG_P (src) || FP_REG_P (dst))
1078 /* We don't need to split if operands are directly accessible. */
1079 if (s_operand (src, mode) || s_operand (dst, mode))
1082 /* Non-offsettable memory references cannot be split. */
1083 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1084 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1087 /* Moving the first subword must not clobber a register
1088 needed to move the second subword. */
1089 if (register_operand (dst, mode))
1091 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1092 if (reg_overlap_mentioned_p (subreg, src))
1099 /* Check whether the address of memory reference MEM2 equals exactly
1100 the address of memory reference MEM1 plus DELTA. Return true if
1101 we can prove this to be the case, false otherwise. */
1104 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1106 rtx addr1, addr2, addr_delta;
1108 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1111 addr1 = XEXP (mem1, 0);
1112 addr2 = XEXP (mem2, 0);
1114 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1115 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1121 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1124 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1127 enum machine_mode wmode = mode;
1128 rtx dst = operands[0];
1129 rtx src1 = operands[1];
1130 rtx src2 = operands[2];
1133 /* If we cannot handle the operation directly, use a temp register. */
1134 if (!s390_logical_operator_ok_p (operands))
1135 dst = gen_reg_rtx (mode);
1137 /* QImode and HImode patterns make sense only if we have a destination
1138 in memory. Otherwise perform the operation in SImode. */
1139 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1142 /* Widen operands if required. */
1145 if (GET_CODE (dst) == SUBREG
1146 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1148 else if (REG_P (dst))
1149 dst = gen_rtx_SUBREG (wmode, dst, 0);
1151 dst = gen_reg_rtx (wmode);
1153 if (GET_CODE (src1) == SUBREG
1154 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1156 else if (GET_MODE (src1) != VOIDmode)
1157 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1159 if (GET_CODE (src2) == SUBREG
1160 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1162 else if (GET_MODE (src2) != VOIDmode)
1163 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1166 /* Emit the instruction. */
1167 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1168 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1169 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1171 /* Fix up the destination if needed. */
1172 if (dst != operands[0])
1173 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1176 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1179 s390_logical_operator_ok_p (rtx *operands)
1181 /* If the destination operand is in memory, it needs to coincide
1182 with one of the source operands. After reload, it has to be
1183 the first source operand. */
1184 if (GET_CODE (operands[0]) == MEM)
1185 return rtx_equal_p (operands[0], operands[1])
1186 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1191 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1192 operand IMMOP to switch from SS to SI type instructions. */
1195 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1197 int def = code == AND ? -1 : 0;
1201 gcc_assert (GET_CODE (*memop) == MEM);
1202 gcc_assert (!MEM_VOLATILE_P (*memop));
1204 mask = s390_extract_part (*immop, QImode, def);
1205 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1206 gcc_assert (part >= 0);
1208 *memop = adjust_address (*memop, QImode, part);
1209 *immop = gen_int_mode (mask, QImode);
1213 /* Change optimizations to be performed, depending on the
1216 LEVEL is the optimization level specified; 2 if `-O2' is
1217 specified, 1 if `-O' is specified, and 0 if neither is specified.
1219 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1222 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1224 /* ??? There are apparently still problems with -fcaller-saves. */
1225 flag_caller_saves = 0;
1227 /* By default, always emit DWARF-2 unwind info. This allows debugging
1228 without maintaining a stack frame back-chain. */
1229 flag_asynchronous_unwind_tables = 1;
1233 override_options (void)
1238 const char *const name; /* processor name or nickname. */
1239 const enum processor_type processor;
1240 const enum processor_flags flags;
1242 const processor_alias_table[] =
1244 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1245 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1246 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1247 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1248 | PF_LONG_DISPLACEMENT},
1251 int const pta_size = ARRAY_SIZE (processor_alias_table);
1253 /* Acquire a unique set number for our register saves and restores. */
1254 s390_sr_alias_set = new_alias_set ();
1256 /* Set up function hooks. */
1257 init_machine_status = s390_init_machine_status;
1259 /* Architecture mode defaults according to ABI. */
1260 if (!(target_flags_explicit & MASK_ZARCH))
1263 target_flags |= MASK_ZARCH;
1265 target_flags &= ~MASK_ZARCH;
1268 /* Determine processor architectural level. */
1269 if (!s390_arch_string)
1270 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1272 for (i = 0; i < pta_size; i++)
1273 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
1275 s390_arch = processor_alias_table[i].processor;
1276 s390_arch_flags = processor_alias_table[i].flags;
1280 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1282 /* Determine processor to tune for. */
1283 if (!s390_tune_string)
1285 s390_tune = s390_arch;
1286 s390_tune_flags = s390_arch_flags;
1287 s390_tune_string = s390_arch_string;
1291 for (i = 0; i < pta_size; i++)
1292 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1294 s390_tune = processor_alias_table[i].processor;
1295 s390_tune_flags = processor_alias_table[i].flags;
1299 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1302 /* Sanity checks. */
1303 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1304 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1305 if (TARGET_64BIT && !TARGET_ZARCH)
1306 error ("64-bit ABI not supported in ESA/390 mode.");
1308 if (s390_warn_framesize_string)
1310 if (sscanf (s390_warn_framesize_string, HOST_WIDE_INT_PRINT_DEC,
1311 &s390_warn_framesize) != 1)
1312 error ("invalid value for -mwarn-framesize");
1315 if (s390_warn_dynamicstack_string)
1316 s390_warn_dynamicstack_p = 1;
1318 if (s390_stack_size_string)
1320 if (sscanf (s390_stack_size_string, HOST_WIDE_INT_PRINT_DEC,
1321 &s390_stack_size) != 1)
1322 error ("invalid value for -mstack-size");
1324 if (exact_log2 (s390_stack_size) == -1)
1325 error ("stack size must be an exact power of 2");
1327 if (s390_stack_guard_string)
1329 if (sscanf (s390_stack_guard_string, HOST_WIDE_INT_PRINT_DEC,
1330 &s390_stack_guard) != 1)
1331 error ("invalid value for -mstack-guard");
1333 if (s390_stack_guard >= s390_stack_size)
1334 error ("stack size must be greater than the stack guard value");
1336 if (exact_log2 (s390_stack_guard) == -1)
1337 error ("stack guard value must be an exact power of 2");
1340 error ("-mstack-size implies use of -mstack-guard");
1343 if (s390_stack_guard_string && !s390_stack_size_string)
1344 error ("-mstack-guard implies use of -mstack-size");
1347 /* Map for smallest class containing reg regno. */
1349 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1350 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1351 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1352 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1353 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1354 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1355 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1356 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1357 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1358 ADDR_REGS, NO_REGS, ADDR_REGS, ADDR_REGS
1361 /* Return attribute type of insn. */
1363 static enum attr_type
1364 s390_safe_attr_type (rtx insn)
1366 if (recog_memoized (insn) >= 0)
1367 return get_attr_type (insn);
1372 /* Return true if OP a (const_int 0) operand.
1373 OP is the current operation.
1374 MODE is the current operation mode. */
1377 const0_operand (register rtx op, enum machine_mode mode)
1379 return op == CONST0_RTX (mode);
1382 /* Return true if OP is constant.
1383 OP is the current operation.
1384 MODE is the current operation mode. */
1387 consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1389 return CONSTANT_P (op);
1392 /* Return true if the mode of operand OP matches MODE.
1393 If MODE is set to VOIDmode, set it to the mode of OP. */
1396 check_mode (register rtx op, enum machine_mode *mode)
1398 if (*mode == VOIDmode)
1399 *mode = GET_MODE (op);
1402 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1408 /* Return true if OP a valid operand for the LARL instruction.
1409 OP is the current operation.
1410 MODE is the current operation mode. */
1413 larl_operand (register rtx op, enum machine_mode mode)
1415 if (! check_mode (op, &mode))
1418 /* Allow labels and local symbols. */
1419 if (GET_CODE (op) == LABEL_REF)
1421 if (GET_CODE (op) == SYMBOL_REF)
1422 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1423 && SYMBOL_REF_TLS_MODEL (op) == 0
1424 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1426 /* Everything else must have a CONST, so strip it. */
1427 if (GET_CODE (op) != CONST)
1431 /* Allow adding *even* in-range constants. */
1432 if (GET_CODE (op) == PLUS)
1434 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1435 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1437 #if HOST_BITS_PER_WIDE_INT > 32
1438 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1439 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1445 /* Labels and local symbols allowed here as well. */
1446 if (GET_CODE (op) == LABEL_REF)
1448 if (GET_CODE (op) == SYMBOL_REF)
1449 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1450 && SYMBOL_REF_TLS_MODEL (op) == 0
1451 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1453 /* Now we must have a @GOTENT offset or @PLT stub
1454 or an @INDNTPOFF TLS offset. */
1455 if (GET_CODE (op) == UNSPEC
1456 && XINT (op, 1) == UNSPEC_GOTENT)
1458 if (GET_CODE (op) == UNSPEC
1459 && XINT (op, 1) == UNSPEC_PLT)
1461 if (GET_CODE (op) == UNSPEC
1462 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1468 /* Return true if OP is a valid S-type operand.
1469 OP is the current operation.
1470 MODE is the current operation mode. */
1473 s_operand (rtx op, enum machine_mode mode)
1475 struct s390_address addr;
1477 /* Call general_operand first, so that we don't have to
1478 check for many special cases. */
1479 if (!general_operand (op, mode))
1482 /* Just like memory_operand, allow (subreg (mem ...))
1484 if (reload_completed
1485 && GET_CODE (op) == SUBREG
1486 && GET_CODE (SUBREG_REG (op)) == MEM)
1487 op = SUBREG_REG (op);
1489 if (GET_CODE (op) != MEM)
1491 if (!s390_decompose_address (XEXP (op, 0), &addr))
1499 /* Return true if OP is a memory operand pointing to the
1500 literal pool, or an immediate operand. */
1503 s390_pool_operand (rtx op)
1505 struct s390_address addr;
1507 /* Just like memory_operand, allow (subreg (mem ...))
1509 if (reload_completed
1510 && GET_CODE (op) == SUBREG
1511 && GET_CODE (SUBREG_REG (op)) == MEM)
1512 op = SUBREG_REG (op);
1514 switch (GET_CODE (op))
1521 if (!s390_decompose_address (XEXP (op, 0), &addr))
1523 if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
1525 if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
1534 /* Return true if OP a valid shift count operand.
1535 OP is the current operation.
1536 MODE is the current operation mode. */
1539 shift_count_operand (rtx op, enum machine_mode mode)
1541 HOST_WIDE_INT offset = 0;
1543 if (! check_mode (op, &mode))
1546 /* We can have an integer constant, an address register,
1547 or a sum of the two. Note that reload already checks
1548 that any register present is an address register, so
1549 we just check for any register here. */
1550 if (GET_CODE (op) == CONST_INT)
1552 offset = INTVAL (op);
1555 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1557 offset = INTVAL (XEXP (op, 1));
1560 while (op && GET_CODE (op) == SUBREG)
1561 op = SUBREG_REG (op);
1562 if (op && GET_CODE (op) != REG)
1565 /* Unfortunately we have to reject constants that are invalid
1566 for an address, or else reload will get confused. */
1567 if (!DISP_IN_RANGE (offset))
1573 /* Return true if DISP is a valid short displacement. */
1576 s390_short_displacement (rtx disp)
1578 /* No displacement is OK. */
1582 /* Integer displacement in range. */
1583 if (GET_CODE (disp) == CONST_INT)
1584 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1586 /* GOT offset is not OK, the GOT can be large. */
1587 if (GET_CODE (disp) == CONST
1588 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1589 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1592 /* All other symbolic constants are literal pool references,
1593 which are OK as the literal pool must be small. */
1594 if (GET_CODE (disp) == CONST)
1600 /* Return true if OP is a valid operand for a C constraint. */
1603 s390_extra_constraint_str (rtx op, int c, const char * str)
1605 struct s390_address addr;
1610 /* Check for offsettable variants of memory constraints. */
1613 /* Only accept non-volatile MEMs. */
1614 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1617 if ((reload_completed || reload_in_progress)
1618 ? !offsettable_memref_p (op)
1619 : !offsettable_nonstrict_memref_p (op))
1628 if (GET_CODE (op) != MEM)
1630 if (!s390_decompose_address (XEXP (op, 0), &addr))
1635 if (TARGET_LONG_DISPLACEMENT)
1637 if (!s390_short_displacement (addr.disp))
1643 if (GET_CODE (op) != MEM)
1646 if (TARGET_LONG_DISPLACEMENT)
1648 if (!s390_decompose_address (XEXP (op, 0), &addr))
1650 if (!s390_short_displacement (addr.disp))
1656 if (!TARGET_LONG_DISPLACEMENT)
1658 if (GET_CODE (op) != MEM)
1660 if (!s390_decompose_address (XEXP (op, 0), &addr))
1664 if (s390_short_displacement (addr.disp))
1669 if (!TARGET_LONG_DISPLACEMENT)
1671 if (GET_CODE (op) != MEM)
1673 /* Any invalid address here will be fixed up by reload,
1674 so accept it for the most generic constraint. */
1675 if (s390_decompose_address (XEXP (op, 0), &addr)
1676 && s390_short_displacement (addr.disp))
1681 if (TARGET_LONG_DISPLACEMENT)
1683 if (!s390_decompose_address (op, &addr))
1685 if (!s390_short_displacement (addr.disp))
1691 if (!TARGET_LONG_DISPLACEMENT)
1693 /* Any invalid address here will be fixed up by reload,
1694 so accept it for the most generic constraint. */
1695 if (s390_decompose_address (op, &addr)
1696 && s390_short_displacement (addr.disp))
1701 return shift_count_operand (op, VOIDmode);
1710 /* Return true if VALUE matches the constraint STR. */
1713 s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1717 enum machine_mode mode, part_mode;
1719 int part, part_goal;
1727 return (unsigned int)value < 256;
1730 return (unsigned int)value < 4096;
1733 return value >= -32768 && value < 32768;
1736 return (TARGET_LONG_DISPLACEMENT ?
1737 (value >= -524288 && value <= 524287)
1738 : (value >= 0 && value <= 4095));
1740 return value == 2147483647;
1746 part_goal = str[1] - '0';
1750 case 'H': part_mode = HImode; break;
1751 case 'Q': part_mode = QImode; break;
1757 case 'H': mode = HImode; break;
1758 case 'S': mode = SImode; break;
1759 case 'D': mode = DImode; break;
1765 case '0': def = 0; break;
1766 case 'F': def = -1; break;
1770 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
1773 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
1776 if (part_goal != -1 && part_goal != part)
1788 /* Compute a (partial) cost for rtx X. Return true if the complete
1789 cost has been computed, and false if subexpressions should be
1790 scanned. In either case, *TOTAL contains the cost result. */
1793 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
1798 if (GET_CODE (XEXP (x, 0)) == MINUS
1799 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1806 /* Force_const_mem does not work out of reload, because the
1807 saveable_obstack is set to reload_obstack, which does not
1808 live long enough. Because of this we cannot use force_const_mem
1809 in addsi3. This leads to problems with gen_add2_insn with a
1810 constant greater than a short. Because of that we give an
1811 addition of greater constants a cost of 3 (reload1.c 10096). */
1812 /* ??? saveable_obstack no longer exists. */
1813 if (outer_code == PLUS
1814 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1815 *total = COSTS_N_INSNS (3);
1836 *total = COSTS_N_INSNS (1);
1840 if (GET_MODE (XEXP (x, 0)) == DImode)
1841 *total = COSTS_N_INSNS (40);
1843 *total = COSTS_N_INSNS (7);
1850 *total = COSTS_N_INSNS (33);
1858 /* Return the cost of an address rtx ADDR. */
1861 s390_address_cost (rtx addr)
1863 struct s390_address ad;
1864 if (!s390_decompose_address (addr, &ad))
1867 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1870 /* Return true if OP is a valid operand for the BRAS instruction.
1871 OP is the current operation.
1872 MODE is the current operation mode. */
1875 bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1877 register enum rtx_code code = GET_CODE (op);
1879 /* Allow SYMBOL_REFs. */
1880 if (code == SYMBOL_REF)
1883 /* Allow @PLT stubs. */
1885 && GET_CODE (XEXP (op, 0)) == UNSPEC
1886 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1891 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1892 otherwise return 0. */
1895 tls_symbolic_operand (register rtx op)
1897 if (GET_CODE (op) != SYMBOL_REF)
1899 return SYMBOL_REF_TLS_MODEL (op);
1902 /* Return true if OP is a load multiple operation. It is known to be a
1903 PARALLEL and the first section will be tested.
1904 OP is the current operation.
1905 MODE is the current operation mode. */
1908 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1910 enum machine_mode elt_mode;
1911 int count = XVECLEN (op, 0);
1912 unsigned int dest_regno;
1917 /* Perform a quick check so we don't blow up below. */
1919 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1920 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1921 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1924 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1925 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1926 elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));
1928 /* Check, is base, or base + displacement. */
1930 if (GET_CODE (src_addr) == REG)
1932 else if (GET_CODE (src_addr) == PLUS
1933 && GET_CODE (XEXP (src_addr, 0)) == REG
1934 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1936 off = INTVAL (XEXP (src_addr, 1));
1937 src_addr = XEXP (src_addr, 0);
1942 for (i = 1; i < count; i++)
1944 rtx elt = XVECEXP (op, 0, i);
1946 if (GET_CODE (elt) != SET
1947 || GET_CODE (SET_DEST (elt)) != REG
1948 || GET_MODE (SET_DEST (elt)) != elt_mode
1949 || REGNO (SET_DEST (elt)) != dest_regno + i
1950 || GET_CODE (SET_SRC (elt)) != MEM
1951 || GET_MODE (SET_SRC (elt)) != elt_mode
1952 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1953 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1954 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1955 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1956 != off + i * GET_MODE_SIZE (elt_mode))
1963 /* Return true if OP is a store multiple operation. It is known to be a
1964 PARALLEL and the first section will be tested.
1965 OP is the current operation.
1966 MODE is the current operation mode. */
1969 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1971 enum machine_mode elt_mode;
1972 int count = XVECLEN (op, 0);
1973 unsigned int src_regno;
1977 /* Perform a quick check so we don't blow up below. */
1979 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1980 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1981 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1984 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1985 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1986 elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));
1988 /* Check, is base, or base + displacement. */
1990 if (GET_CODE (dest_addr) == REG)
1992 else if (GET_CODE (dest_addr) == PLUS
1993 && GET_CODE (XEXP (dest_addr, 0)) == REG
1994 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1996 off = INTVAL (XEXP (dest_addr, 1));
1997 dest_addr = XEXP (dest_addr, 0);
2002 for (i = 1; i < count; i++)
2004 rtx elt = XVECEXP (op, 0, i);
2006 if (GET_CODE (elt) != SET
2007 || GET_CODE (SET_SRC (elt)) != REG
2008 || GET_MODE (SET_SRC (elt)) != elt_mode
2009 || REGNO (SET_SRC (elt)) != src_regno + i
2010 || GET_CODE (SET_DEST (elt)) != MEM
2011 || GET_MODE (SET_DEST (elt)) != elt_mode
2012 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2013 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2014 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2015 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
2016 != off + i * GET_MODE_SIZE (elt_mode))
2023 /* Return true if OP contains a symbol reference */
2026 symbolic_reference_mentioned_p (rtx op)
2028 register const char *fmt;
2031 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2034 fmt = GET_RTX_FORMAT (GET_CODE (op));
2035 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2041 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2042 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2046 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2053 /* Return true if OP contains a reference to a thread-local symbol. */
2056 tls_symbolic_reference_mentioned_p (rtx op)
2058 register const char *fmt;
2061 if (GET_CODE (op) == SYMBOL_REF)
2062 return tls_symbolic_operand (op);
2064 fmt = GET_RTX_FORMAT (GET_CODE (op));
2065 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2071 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2072 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2076 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2084 /* Return true if OP is a legitimate general operand when
2085 generating PIC code. It is given that flag_pic is on
2086 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2089 legitimate_pic_operand_p (register rtx op)
2091 /* Accept all non-symbolic constants. */
2092 if (!SYMBOLIC_CONST (op))
2095 /* Reject everything else; must be handled
2096 via emit_symbolic_move. */
2100 /* Returns true if the constant value OP is a legitimate general operand.
2101 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2104 legitimate_constant_p (register rtx op)
2106 /* Accept all non-symbolic constants. */
2107 if (!SYMBOLIC_CONST (op))
2110 /* Accept immediate LARL operands. */
2111 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2114 /* Thread-local symbols are never legal constants. This is
2115 so that emit_call knows that computing such addresses
2116 might require a function call. */
2117 if (TLS_SYMBOLIC_CONST (op))
2120 /* In the PIC case, symbolic constants must *not* be
2121 forced into the literal pool. We accept them here,
2122 so that they will be handled by emit_symbolic_move. */
2126 /* All remaining non-PIC symbolic constants are
2127 forced into the literal pool. */
2131 /* Determine if it's legal to put X into the constant pool. This
2132 is not possible if X contains the address of a symbol that is
2133 not constant (TLS) or not known at final link time (PIC). */
2136 s390_cannot_force_const_mem (rtx x)
2138 switch (GET_CODE (x))
2142 /* Accept all non-symbolic constants. */
2146 /* Labels are OK iff we are non-PIC. */
2147 return flag_pic != 0;
2150 /* 'Naked' TLS symbol references are never OK,
2151 non-TLS symbols are OK iff we are non-PIC. */
2152 if (tls_symbolic_operand (x))
2155 return flag_pic != 0;
2158 return s390_cannot_force_const_mem (XEXP (x, 0));
2161 return s390_cannot_force_const_mem (XEXP (x, 0))
2162 || s390_cannot_force_const_mem (XEXP (x, 1));
2165 switch (XINT (x, 1))
2167 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2168 case UNSPEC_LTREL_OFFSET:
2176 case UNSPEC_GOTNTPOFF:
2177 case UNSPEC_INDNTPOFF:
2180 /* If the literal pool shares the code section, be put
2181 execute template placeholders into the pool as well. */
2183 return TARGET_CPU_ZARCH;
2195 /* Returns true if the constant value OP is a legitimate general
2196 operand during and after reload. The difference to
2197 legitimate_constant_p is that this function will not accept
2198 a constant that would need to be forced to the literal pool
2199 before it can be used as operand. */
2202 legitimate_reload_constant_p (register rtx op)
2204 /* Accept la(y) operands. */
2205 if (GET_CODE (op) == CONST_INT
2206 && DISP_IN_RANGE (INTVAL (op)))
2209 /* Accept l(g)hi operands. */
2210 if (GET_CODE (op) == CONST_INT
2211 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
2214 /* Accept lliXX operands. */
2216 && s390_single_part (op, DImode, HImode, 0) >= 0)
2219 /* Accept larl operands. */
2220 if (TARGET_CPU_ZARCH
2221 && larl_operand (op, VOIDmode))
2224 /* Everything else cannot be handled without reload. */
2228 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2229 return the class of reg to actually use. */
2232 s390_preferred_reload_class (rtx op, enum reg_class class)
2234 switch (GET_CODE (op))
2236 /* Constants we cannot reload must be forced into the
2241 if (legitimate_reload_constant_p (op))
2246 /* If a symbolic constant or a PLUS is reloaded,
2247 it is most likely being used as an address, so
2248 prefer ADDR_REGS. If 'class' is not a superset
2249 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2254 if (reg_class_subset_p (ADDR_REGS, class))
2266 /* Return the register class of a scratch register needed to
2267 load IN into a register of class CLASS in MODE.
2269 We need a temporary when loading a PLUS expression which
2270 is not a legitimate operand of the LOAD ADDRESS instruction. */
2273 s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
2274 enum machine_mode mode, rtx in)
2276 if (s390_plus_operand (in, mode))
2278 /* ??? Reload sometimes pushes a PLUS reload with a too-large constant.
2279 Until reload is fixed, we need to force_const_mem while emitting the
2280 secondary reload insn -- thus we need to make sure here that we do
2281 have a literal pool for the current function. */
2282 if (CONSTANT_P (XEXP (in, 1))
2283 && !legitimate_reload_constant_p (XEXP (in, 1)))
2284 current_function_uses_const_pool = true;
2292 /* Return the register class of a scratch register needed to
2293 store a register of class CLASS in MODE into OUT:
2295 We need a temporary when storing a double-word to a
2296 non-offsettable memory address. */
2299 s390_secondary_output_reload_class (enum reg_class class,
2300 enum machine_mode mode, rtx out)
2302 if ((TARGET_64BIT ? mode == TImode
2303 : (mode == DImode || mode == DFmode))
2304 && reg_classes_intersect_p (GENERAL_REGS, class)
2305 && GET_CODE (out) == MEM
2306 && !offsettable_memref_p (out)
2307 && !s_operand (out, VOIDmode))
2313 /* Return true if OP is a PLUS that is not a legitimate
2314 operand for the LA instruction.
2315 OP is the current operation.
2316 MODE is the current operation mode. */
2319 s390_plus_operand (register rtx op, enum machine_mode mode)
2321 if (!check_mode (op, &mode) || mode != Pmode)
2324 if (GET_CODE (op) != PLUS)
2327 if (legitimate_la_operand_p (op))
2333 /* Generate code to load SRC, which is PLUS that is not a
2334 legitimate operand for the LA instruction, into TARGET.
2335 SCRATCH may be used as scratch register. */
2338 s390_expand_plus_operand (register rtx target, register rtx src,
2339 register rtx scratch)
2342 struct s390_address ad;
2344 /* src must be a PLUS; get its two operands. */
2345 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2348 /* Check if any of the two operands is already scheduled
2349 for replacement by reload. This can happen e.g. when
2350 float registers occur in an address. */
2351 sum1 = find_replacement (&XEXP (src, 0));
2352 sum2 = find_replacement (&XEXP (src, 1));
2353 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2355 /* If the address is already strictly valid, there's nothing to do. */
2356 if (!s390_decompose_address (src, &ad)
2357 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2358 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2360 /* Otherwise, one of the operands cannot be an address register;
2361 we reload its value into the scratch register. */
2362 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2364 emit_move_insn (scratch, sum1);
2367 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2369 /* ??? See comment in s390_secondary_input_reload_class. */
2370 if (CONSTANT_P (sum2) && !legitimate_reload_constant_p (sum2))
2371 sum2 = force_const_mem (Pmode, sum2);
2373 emit_move_insn (scratch, sum2);
2377 /* According to the way these invalid addresses are generated
2378 in reload.c, it should never happen (at least on s390) that
2379 *neither* of the PLUS components, after find_replacements
2380 was applied, is an address register. */
2381 if (sum1 == scratch && sum2 == scratch)
2387 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2390 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2391 is only ever performed on addresses, so we can mark the
2392 sum as legitimate for LA in any case. */
2393 s390_load_address (target, src);
2397 /* Decompose a RTL expression ADDR for a memory address into
2398 its components, returned in OUT.
2400 Returns 0 if ADDR is not a valid memory address, nonzero
2401 otherwise. If OUT is NULL, don't return the components,
2402 but check for validity only.
2404 Note: Only addresses in canonical form are recognized.
2405 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2406 canonical form so that they will be recognized. */
2409 s390_decompose_address (register rtx addr, struct s390_address *out)
2411 HOST_WIDE_INT offset = 0;
2412 rtx base = NULL_RTX;
2413 rtx indx = NULL_RTX;
2414 rtx disp = NULL_RTX;
2416 int pointer = FALSE;
2417 int base_ptr = FALSE;
2418 int indx_ptr = FALSE;
2420 /* Decompose address into base + index + displacement. */
2422 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2425 else if (GET_CODE (addr) == PLUS)
2427 rtx op0 = XEXP (addr, 0);
2428 rtx op1 = XEXP (addr, 1);
2429 enum rtx_code code0 = GET_CODE (op0);
2430 enum rtx_code code1 = GET_CODE (op1);
2432 if (code0 == REG || code0 == UNSPEC)
2434 if (code1 == REG || code1 == UNSPEC)
2436 indx = op0; /* index + base */
2442 base = op0; /* base + displacement */
2447 else if (code0 == PLUS)
2449 indx = XEXP (op0, 0); /* index + base + disp */
2450 base = XEXP (op0, 1);
2461 disp = addr; /* displacement */
2463 /* Extract integer part of displacement. */
2467 if (GET_CODE (disp) == CONST_INT)
2469 offset = INTVAL (disp);
2472 else if (GET_CODE (disp) == CONST
2473 && GET_CODE (XEXP (disp, 0)) == PLUS
2474 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2476 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2477 disp = XEXP (XEXP (disp, 0), 0);
2481 /* Strip off CONST here to avoid special case tests later. */
2482 if (disp && GET_CODE (disp) == CONST)
2483 disp = XEXP (disp, 0);
2485 /* We can convert literal pool addresses to
2486 displacements by basing them off the base register. */
2487 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2489 /* Either base or index must be free to hold the base register. */
2491 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2493 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2497 /* Mark up the displacement. */
2498 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2499 UNSPEC_LTREL_OFFSET);
2502 /* Validate base register. */
2505 if (GET_CODE (base) == UNSPEC)
2506 switch (XINT (base, 1))
2510 disp = gen_rtx_UNSPEC (Pmode,
2511 gen_rtvec (1, XVECEXP (base, 0, 0)),
2512 UNSPEC_LTREL_OFFSET);
2516 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2519 case UNSPEC_LTREL_BASE:
2520 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2527 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
2530 if (REGNO (base) == BASE_REGNUM
2531 || REGNO (base) == STACK_POINTER_REGNUM
2532 || REGNO (base) == FRAME_POINTER_REGNUM
2533 || ((reload_completed || reload_in_progress)
2534 && frame_pointer_needed
2535 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2536 || REGNO (base) == ARG_POINTER_REGNUM
2538 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2539 pointer = base_ptr = TRUE;
2542 /* Validate index register. */
2545 if (GET_CODE (indx) == UNSPEC)
2546 switch (XINT (indx, 1))
2550 disp = gen_rtx_UNSPEC (Pmode,
2551 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2552 UNSPEC_LTREL_OFFSET);
2556 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2559 case UNSPEC_LTREL_BASE:
2560 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2567 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2570 if (REGNO (indx) == BASE_REGNUM
2571 || REGNO (indx) == STACK_POINTER_REGNUM
2572 || REGNO (indx) == FRAME_POINTER_REGNUM
2573 || ((reload_completed || reload_in_progress)
2574 && frame_pointer_needed
2575 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2576 || REGNO (indx) == ARG_POINTER_REGNUM
2578 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2579 pointer = indx_ptr = TRUE;
2582 /* Prefer to use pointer as base, not index. */
2583 if (base && indx && !base_ptr
2584 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2591 /* Validate displacement. */
2594 /* If the argument pointer or the return address pointer are involved,
2595 the displacement will change later anyway as the virtual registers get
2596 eliminated. This could make a valid displacement invalid, but it is
2597 more likely to make an invalid displacement valid, because we sometimes
2598 access the register save area via negative offsets to one of those
2600 Thus we don't check the displacement for validity here. If after
2601 elimination the displacement turns out to be invalid after all,
2602 this is fixed up by reload in any case. */
2603 if (base != arg_pointer_rtx
2604 && indx != arg_pointer_rtx
2605 && base != return_address_pointer_rtx
2606 && indx != return_address_pointer_rtx)
2607 if (!DISP_IN_RANGE (offset))
2612 /* All the special cases are pointers. */
2615 /* In the small-PIC case, the linker converts @GOT
2616 and @GOTNTPOFF offsets to possible displacements. */
2617 if (GET_CODE (disp) == UNSPEC
2618 && (XINT (disp, 1) == UNSPEC_GOT
2619 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2626 /* Accept chunkified literal pool symbol references. */
2627 else if (GET_CODE (disp) == MINUS
2628 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2629 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
2634 /* Accept literal pool references. */
2635 else if (GET_CODE (disp) == UNSPEC
2636 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2638 orig_disp = gen_rtx_CONST (Pmode, disp);
2641 /* If we have an offset, make sure it does not
2642 exceed the size of the constant pool entry. */
2643 rtx sym = XVECEXP (disp, 0, 0);
2644 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2647 orig_disp = plus_constant (orig_disp, offset);
2662 out->disp = orig_disp;
2663 out->pointer = pointer;
2669 /* Return nonzero if ADDR is a valid memory address.
2670 STRICT specifies whether strict register checking applies. */
2673 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2674 register rtx addr, int strict)
2676 struct s390_address ad;
2677 if (!s390_decompose_address (addr, &ad))
2682 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2684 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2689 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2691 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2698 /* Return 1 if OP is a valid operand for the LA instruction.
2699 In 31-bit, we need to prove that the result is used as an
2700 address, as LA performs only a 31-bit addition. */
2703 legitimate_la_operand_p (register rtx op)
2705 struct s390_address addr;
2706 if (!s390_decompose_address (op, &addr))
2709 if (TARGET_64BIT || addr.pointer)
2715 /* Return 1 if it is valid *and* preferable to use LA to
2716 compute the sum of OP1 and OP2. */
2719 preferred_la_operand_p (rtx op1, rtx op2)
2721 struct s390_address addr;
2723 if (op2 != const0_rtx)
2724 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2726 if (!s390_decompose_address (op1, &addr))
2728 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2730 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
2733 if (!TARGET_64BIT && !addr.pointer)
2739 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2740 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2746 /* Emit a forced load-address operation to load SRC into DST.
2747 This will use the LOAD ADDRESS instruction even in situations
2748 where legitimate_la_operand_p (SRC) returns false. */
2751 s390_load_address (rtx dst, rtx src)
2754 emit_move_insn (dst, src);
2756 emit_insn (gen_force_la_31 (dst, src));
2759 /* Return a legitimate reference for ORIG (an address) using the
2760 register REG. If REG is 0, a new pseudo is generated.
2762 There are two types of references that must be handled:
2764 1. Global data references must load the address from the GOT, via
2765 the PIC reg. An insn is emitted to do this load, and the reg is
2768 2. Static data references, constant pool addresses, and code labels
2769 compute the address as an offset from the GOT, whose base is in
2770 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2771 differentiate them from global data objects. The returned
2772 address is the PIC reg + an unspec constant.
2774 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2775 reg also appears in the address. */
2778 legitimize_pic_address (rtx orig, rtx reg)
2784 if (GET_CODE (addr) == LABEL_REF
2785 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2787 /* This is a local symbol. */
2788 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2790 /* Access local symbols PC-relative via LARL.
2791 This is the same as in the non-PIC case, so it is
2792 handled automatically ... */
2796 /* Access local symbols relative to the GOT. */
2798 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2800 if (reload_in_progress || reload_completed)
2801 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2803 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2804 addr = gen_rtx_CONST (Pmode, addr);
2805 addr = force_const_mem (Pmode, addr);
2806 emit_move_insn (temp, addr);
2808 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2811 emit_move_insn (reg, new);
2816 else if (GET_CODE (addr) == SYMBOL_REF)
2819 reg = gen_reg_rtx (Pmode);
2823 /* Assume GOT offset < 4k. This is handled the same way
2824 in both 31- and 64-bit code (@GOT). */
2826 if (reload_in_progress || reload_completed)
2827 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2829 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2830 new = gen_rtx_CONST (Pmode, new);
2831 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2832 new = gen_const_mem (Pmode, new);
2833 emit_move_insn (reg, new);
2836 else if (TARGET_CPU_ZARCH)
2838 /* If the GOT offset might be >= 4k, we determine the position
2839 of the GOT entry via a PC-relative LARL (@GOTENT). */
2841 rtx temp = gen_reg_rtx (Pmode);
2843 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2844 new = gen_rtx_CONST (Pmode, new);
2845 emit_move_insn (temp, new);
2847 new = gen_const_mem (Pmode, temp);
2848 emit_move_insn (reg, new);
2853 /* If the GOT offset might be >= 4k, we have to load it
2854 from the literal pool (@GOT). */
2856 rtx temp = gen_reg_rtx (Pmode);
2858 if (reload_in_progress || reload_completed)
2859 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2861 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2862 addr = gen_rtx_CONST (Pmode, addr);
2863 addr = force_const_mem (Pmode, addr);
2864 emit_move_insn (temp, addr);
2866 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2867 new = gen_const_mem (Pmode, new);
2868 emit_move_insn (reg, new);
2874 if (GET_CODE (addr) == CONST)
2876 addr = XEXP (addr, 0);
2877 if (GET_CODE (addr) == UNSPEC)
2879 if (XVECLEN (addr, 0) != 1)
2881 switch (XINT (addr, 1))
2883 /* If someone moved a GOT-relative UNSPEC
2884 out of the literal pool, force them back in. */
2887 new = force_const_mem (Pmode, orig);
2890 /* @GOT is OK as is if small. */
2893 new = force_const_mem (Pmode, orig);
2896 /* @GOTENT is OK as is. */
2900 /* @PLT is OK as is on 64-bit, must be converted to
2901 GOT-relative @PLTOFF on 31-bit. */
2903 if (!TARGET_CPU_ZARCH)
2905 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2907 if (reload_in_progress || reload_completed)
2908 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2910 addr = XVECEXP (addr, 0, 0);
2911 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2913 addr = gen_rtx_CONST (Pmode, addr);
2914 addr = force_const_mem (Pmode, addr);
2915 emit_move_insn (temp, addr);
2917 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2920 emit_move_insn (reg, new);
2926 /* Everything else cannot happen. */
2931 else if (GET_CODE (addr) != PLUS)
2934 if (GET_CODE (addr) == PLUS)
2936 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2937 /* Check first to see if this is a constant offset
2938 from a local symbol reference. */
2939 if ((GET_CODE (op0) == LABEL_REF
2940 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2941 && GET_CODE (op1) == CONST_INT)
2943 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2945 if (INTVAL (op1) & 1)
2947 /* LARL can't handle odd offsets, so emit a
2948 pair of LARL and LA. */
2949 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2951 if (!DISP_IN_RANGE (INTVAL (op1)))
2953 int even = INTVAL (op1) - 1;
2954 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2955 op0 = gen_rtx_CONST (Pmode, op0);
2959 emit_move_insn (temp, op0);
2960 new = gen_rtx_PLUS (Pmode, temp, op1);
2964 emit_move_insn (reg, new);
2970 /* If the offset is even, we can just use LARL.
2971 This will happen automatically. */
2976 /* Access local symbols relative to the GOT. */
2978 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2980 if (reload_in_progress || reload_completed)
2981 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2983 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2985 addr = gen_rtx_PLUS (Pmode, addr, op1);
2986 addr = gen_rtx_CONST (Pmode, addr);
2987 addr = force_const_mem (Pmode, addr);
2988 emit_move_insn (temp, addr);
2990 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2993 emit_move_insn (reg, new);
2999 /* Now, check whether it is a GOT relative symbol plus offset
3000 that was pulled out of the literal pool. Force it back in. */
3002 else if (GET_CODE (op0) == UNSPEC
3003 && GET_CODE (op1) == CONST_INT
3004 && XINT (op0, 1) == UNSPEC_GOTOFF)
3006 if (XVECLEN (op0, 0) != 1)
3009 new = force_const_mem (Pmode, orig);
3012 /* Otherwise, compute the sum. */
3015 base = legitimize_pic_address (XEXP (addr, 0), reg);
3016 new = legitimize_pic_address (XEXP (addr, 1),
3017 base == reg ? NULL_RTX : reg);
3018 if (GET_CODE (new) == CONST_INT)
3019 new = plus_constant (base, INTVAL (new));
3022 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3024 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3025 new = XEXP (new, 1);
3027 new = gen_rtx_PLUS (Pmode, base, new);
3030 if (GET_CODE (new) == CONST)
3031 new = XEXP (new, 0);
3032 new = force_operand (new, 0);
3039 /* Load the thread pointer into a register. */
3042 get_thread_pointer (void)
3046 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
3047 tp = force_reg (Pmode, tp);
3048 mark_reg_pointer (tp, BITS_PER_WORD);
3053 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3054 in s390_tls_symbol which always refers to __tls_get_offset.
3055 The returned offset is written to RESULT_REG and an USE rtx is
3056 generated for TLS_CALL. */
3058 static GTY(()) rtx s390_tls_symbol;
3061 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3068 if (!s390_tls_symbol)
3069 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3071 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3072 gen_rtx_REG (Pmode, RETURN_REGNUM));
3074 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3075 CONST_OR_PURE_CALL_P (insn) = 1;
3078 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3079 this (thread-local) address. REG may be used as temporary. */
3082 legitimize_tls_address (rtx addr, rtx reg)
3084 rtx new, tls_call, temp, base, r2, insn;
3086 if (GET_CODE (addr) == SYMBOL_REF)
3087 switch (tls_symbolic_operand (addr))
3089 case TLS_MODEL_GLOBAL_DYNAMIC:
3091 r2 = gen_rtx_REG (Pmode, 2);
3092 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3093 new = gen_rtx_CONST (Pmode, tls_call);
3094 new = force_const_mem (Pmode, new);
3095 emit_move_insn (r2, new);
3096 s390_emit_tls_call_insn (r2, tls_call);
3097 insn = get_insns ();
3100 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3101 temp = gen_reg_rtx (Pmode);
3102 emit_libcall_block (insn, temp, r2, new);
3104 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3107 s390_load_address (reg, new);
3112 case TLS_MODEL_LOCAL_DYNAMIC:
3114 r2 = gen_rtx_REG (Pmode, 2);
3115 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3116 new = gen_rtx_CONST (Pmode, tls_call);
3117 new = force_const_mem (Pmode, new);
3118 emit_move_insn (r2, new);
3119 s390_emit_tls_call_insn (r2, tls_call);
3120 insn = get_insns ();
3123 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3124 temp = gen_reg_rtx (Pmode);
3125 emit_libcall_block (insn, temp, r2, new);
3127 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3128 base = gen_reg_rtx (Pmode);
3129 s390_load_address (base, new);
3131 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3132 new = gen_rtx_CONST (Pmode, new);
3133 new = force_const_mem (Pmode, new);
3134 temp = gen_reg_rtx (Pmode);
3135 emit_move_insn (temp, new);
3137 new = gen_rtx_PLUS (Pmode, base, temp);
3140 s390_load_address (reg, new);
3145 case TLS_MODEL_INITIAL_EXEC:
3148 /* Assume GOT offset < 4k. This is handled the same way
3149 in both 31- and 64-bit code. */
3151 if (reload_in_progress || reload_completed)
3152 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3154 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3155 new = gen_rtx_CONST (Pmode, new);
3156 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3157 new = gen_const_mem (Pmode, new);
3158 temp = gen_reg_rtx (Pmode);
3159 emit_move_insn (temp, new);
3161 else if (TARGET_CPU_ZARCH)
3163 /* If the GOT offset might be >= 4k, we determine the position
3164 of the GOT entry via a PC-relative LARL. */
3166 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3167 new = gen_rtx_CONST (Pmode, new);
3168 temp = gen_reg_rtx (Pmode);
3169 emit_move_insn (temp, new);
3171 new = gen_const_mem (Pmode, temp);
3172 temp = gen_reg_rtx (Pmode);
3173 emit_move_insn (temp, new);
3177 /* If the GOT offset might be >= 4k, we have to load it
3178 from the literal pool. */
3180 if (reload_in_progress || reload_completed)
3181 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3183 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3184 new = gen_rtx_CONST (Pmode, new);
3185 new = force_const_mem (Pmode, new);
3186 temp = gen_reg_rtx (Pmode);
3187 emit_move_insn (temp, new);
3189 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3190 new = gen_const_mem (Pmode, new);
3192 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3193 temp = gen_reg_rtx (Pmode);
3194 emit_insn (gen_rtx_SET (Pmode, temp, new));
3198 /* In position-dependent code, load the absolute address of
3199 the GOT entry from the literal pool. */
3201 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3202 new = gen_rtx_CONST (Pmode, new);
3203 new = force_const_mem (Pmode, new);
3204 temp = gen_reg_rtx (Pmode);
3205 emit_move_insn (temp, new);
3208 new = gen_const_mem (Pmode, new);
3209 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3210 temp = gen_reg_rtx (Pmode);
3211 emit_insn (gen_rtx_SET (Pmode, temp, new));
3214 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3217 s390_load_address (reg, new);
3222 case TLS_MODEL_LOCAL_EXEC:
3223 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3224 new = gen_rtx_CONST (Pmode, new);
3225 new = force_const_mem (Pmode, new);
3226 temp = gen_reg_rtx (Pmode);
3227 emit_move_insn (temp, new);
3229 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3232 s390_load_address (reg, new);
3241 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3243 switch (XINT (XEXP (addr, 0), 1))
3245 case UNSPEC_INDNTPOFF:
3246 if (TARGET_CPU_ZARCH)
3257 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3258 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3260 new = XEXP (XEXP (addr, 0), 0);
3261 if (GET_CODE (new) != SYMBOL_REF)
3262 new = gen_rtx_CONST (Pmode, new);
3264 new = legitimize_tls_address (new, reg);
3265 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3266 new = force_operand (new, 0);
3270 abort (); /* for now ... */
3275 /* Emit insns to move operands[1] into operands[0]. */
3278 emit_symbolic_move (rtx *operands)
3280 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
3282 if (GET_CODE (operands[0]) == MEM)
3283 operands[1] = force_reg (Pmode, operands[1]);
3284 else if (TLS_SYMBOLIC_CONST (operands[1]))
3285 operands[1] = legitimize_tls_address (operands[1], temp);
3287 operands[1] = legitimize_pic_address (operands[1], temp);
3290 /* Try machine-dependent ways of modifying an illegitimate address X
3291 to be legitimate. If we find one, return the new, valid address.
3293 OLDX is the address as it was before break_out_memory_refs was called.
3294 In some cases it is useful to look at this to decide what needs to be done.
3296 MODE is the mode of the operand pointed to by X.
3298 When -fpic is used, special handling is needed for symbolic references.
3299 See comments by legitimize_pic_address for details. */
3302 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
3303 enum machine_mode mode ATTRIBUTE_UNUSED)
3305 rtx constant_term = const0_rtx;
3307 if (TLS_SYMBOLIC_CONST (x))
3309 x = legitimize_tls_address (x, 0);
3311 if (legitimate_address_p (mode, x, FALSE))
3316 if (SYMBOLIC_CONST (x)
3317 || (GET_CODE (x) == PLUS
3318 && (SYMBOLIC_CONST (XEXP (x, 0))
3319 || SYMBOLIC_CONST (XEXP (x, 1)))))
3320 x = legitimize_pic_address (x, 0);
3322 if (legitimate_address_p (mode, x, FALSE))
3326 x = eliminate_constant_term (x, &constant_term);
3328 /* Optimize loading of large displacements by splitting them
3329 into the multiple of 4K and the rest; this allows the
3330 former to be CSE'd if possible.
3332 Don't do this if the displacement is added to a register
3333 pointing into the stack frame, as the offsets will
3334 change later anyway. */
3336 if (GET_CODE (constant_term) == CONST_INT
3337 && !TARGET_LONG_DISPLACEMENT
3338 && !DISP_IN_RANGE (INTVAL (constant_term))
3339 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3341 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3342 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3344 rtx temp = gen_reg_rtx (Pmode);
3345 rtx val = force_operand (GEN_INT (upper), temp);
3347 emit_move_insn (temp, val);
3349 x = gen_rtx_PLUS (Pmode, x, temp);
3350 constant_term = GEN_INT (lower);
3353 if (GET_CODE (x) == PLUS)
3355 if (GET_CODE (XEXP (x, 0)) == REG)
3357 register rtx temp = gen_reg_rtx (Pmode);
3358 register rtx val = force_operand (XEXP (x, 1), temp);
3360 emit_move_insn (temp, val);
3362 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3365 else if (GET_CODE (XEXP (x, 1)) == REG)
3367 register rtx temp = gen_reg_rtx (Pmode);
3368 register rtx val = force_operand (XEXP (x, 0), temp);
3370 emit_move_insn (temp, val);
3372 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3376 if (constant_term != const0_rtx)
3377 x = gen_rtx_PLUS (Pmode, x, constant_term);
3382 /* Try a machine-dependent way of reloading an illegitimate address AD
3383 operand. If we find one, push the reload and and return the new address.
3385 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3386 and TYPE is the reload type of the current reload. */
3389 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3390 int opnum, int type)
3392 if (!optimize || TARGET_LONG_DISPLACEMENT)
3395 if (GET_CODE (ad) == PLUS)
3397 rtx tem = simplify_binary_operation (PLUS, Pmode,
3398 XEXP (ad, 0), XEXP (ad, 1));
3403 if (GET_CODE (ad) == PLUS
3404 && GET_CODE (XEXP (ad, 0)) == REG
3405 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3406 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3408 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3409 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3412 cst = GEN_INT (upper);
3413 if (!legitimate_reload_constant_p (cst))
3414 cst = force_const_mem (Pmode, cst);
3416 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3417 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3419 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3420 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3421 opnum, (enum reload_type) type);
3428 /* Emit code to move LEN bytes from DST to SRC. */
3431 s390_expand_movmem (rtx dst, rtx src, rtx len)
3433 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3435 if (INTVAL (len) > 0)
3436 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3439 else if (TARGET_MVCLE)
3441 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3446 rtx dst_addr, src_addr, count, blocks, temp;
3447 rtx loop_start_label = gen_label_rtx ();
3448 rtx loop_end_label = gen_label_rtx ();
3449 rtx end_label = gen_label_rtx ();
3450 enum machine_mode mode;
3452 mode = GET_MODE (len);
3453 if (mode == VOIDmode)
3456 dst_addr = gen_reg_rtx (Pmode);
3457 src_addr = gen_reg_rtx (Pmode);
3458 count = gen_reg_rtx (mode);
3459 blocks = gen_reg_rtx (mode);
3461 convert_move (count, len, 1);
3462 emit_cmp_and_jump_insns (count, const0_rtx,
3463 EQ, NULL_RTX, mode, 1, end_label);
3465 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3466 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3467 dst = change_address (dst, VOIDmode, dst_addr);
3468 src = change_address (src, VOIDmode, src_addr);
3470 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3472 emit_move_insn (count, temp);
3474 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3476 emit_move_insn (blocks, temp);
3478 emit_cmp_and_jump_insns (blocks, const0_rtx,
3479 EQ, NULL_RTX, mode, 1, loop_end_label);
3481 emit_label (loop_start_label);
3483 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3484 s390_load_address (dst_addr,
3485 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3486 s390_load_address (src_addr,
3487 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3489 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3491 emit_move_insn (blocks, temp);
3493 emit_cmp_and_jump_insns (blocks, const0_rtx,
3494 EQ, NULL_RTX, mode, 1, loop_end_label);
3496 emit_jump (loop_start_label);
3497 emit_label (loop_end_label);
3499 emit_insn (gen_movmem_short (dst, src,
3500 convert_to_mode (Pmode, count, 1)));
3501 emit_label (end_label);
3505 /* Emit code to clear LEN bytes at DST. */
3508 s390_expand_clrmem (rtx dst, rtx len)
3510 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3512 if (INTVAL (len) > 0)
3513 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3516 else if (TARGET_MVCLE)
3518 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
3523 rtx dst_addr, src_addr, count, blocks, temp;
3524 rtx loop_start_label = gen_label_rtx ();
3525 rtx loop_end_label = gen_label_rtx ();
3526 rtx end_label = gen_label_rtx ();
3527 enum machine_mode mode;
3529 mode = GET_MODE (len);
3530 if (mode == VOIDmode)
3533 dst_addr = gen_reg_rtx (Pmode);
3534 src_addr = gen_reg_rtx (Pmode);
3535 count = gen_reg_rtx (mode);
3536 blocks = gen_reg_rtx (mode);
3538 convert_move (count, len, 1);
3539 emit_cmp_and_jump_insns (count, const0_rtx,
3540 EQ, NULL_RTX, mode, 1, end_label);
3542 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3543 dst = change_address (dst, VOIDmode, dst_addr);
3545 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3547 emit_move_insn (count, temp);
3549 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3551 emit_move_insn (blocks, temp);
3553 emit_cmp_and_jump_insns (blocks, const0_rtx,
3554 EQ, NULL_RTX, mode, 1, loop_end_label);
3556 emit_label (loop_start_label);
3558 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3559 s390_load_address (dst_addr,
3560 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3562 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3564 emit_move_insn (blocks, temp);
3566 emit_cmp_and_jump_insns (blocks, const0_rtx,
3567 EQ, NULL_RTX, mode, 1, loop_end_label);
3569 emit_jump (loop_start_label);
3570 emit_label (loop_end_label);
3572 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3573 emit_label (end_label);
3577 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3578 and return the result in TARGET. */
3581 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3583 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3584 rtx result = gen_rtx_UNSPEC (SImode, gen_rtvec (1, ccreg), UNSPEC_CMPINT);
3586 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3588 if (INTVAL (len) > 0)
3590 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3591 emit_move_insn (target, result);
3594 emit_move_insn (target, const0_rtx);
3597 else /* if (TARGET_MVCLE) */
3599 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3600 emit_move_insn (target, result);
3604 /* Deactivate for now as profile code cannot cope with
3605 CC being live across basic block boundaries. */
3608 rtx addr0, addr1, count, blocks, temp;
3609 rtx loop_start_label = gen_label_rtx ();
3610 rtx loop_end_label = gen_label_rtx ();
3611 rtx end_label = gen_label_rtx ();
3612 enum machine_mode mode;
3614 mode = GET_MODE (len);
3615 if (mode == VOIDmode)
3618 addr0 = gen_reg_rtx (Pmode);
3619 addr1 = gen_reg_rtx (Pmode);
3620 count = gen_reg_rtx (mode);
3621 blocks = gen_reg_rtx (mode);
3623 convert_move (count, len, 1);
3624 emit_cmp_and_jump_insns (count, const0_rtx,
3625 EQ, NULL_RTX, mode, 1, end_label);
3627 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3628 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3629 op0 = change_address (op0, VOIDmode, addr0);
3630 op1 = change_address (op1, VOIDmode, addr1);
3632 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3634 emit_move_insn (count, temp);
3636 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3638 emit_move_insn (blocks, temp);
3640 emit_cmp_and_jump_insns (blocks, const0_rtx,
3641 EQ, NULL_RTX, mode, 1, loop_end_label);
3643 emit_label (loop_start_label);
3645 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3646 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3647 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3648 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3649 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3650 emit_jump_insn (temp);
3652 s390_load_address (addr0,
3653 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3654 s390_load_address (addr1,
3655 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3657 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3659 emit_move_insn (blocks, temp);
3661 emit_cmp_and_jump_insns (blocks, const0_rtx,
3662 EQ, NULL_RTX, mode, 1, loop_end_label);
3664 emit_jump (loop_start_label);
3665 emit_label (loop_end_label);
3667 emit_insn (gen_cmpmem_short (op0, op1,
3668 convert_to_mode (Pmode, count, 1)));
3669 emit_label (end_label);
3671 emit_move_insn (target, result);
3677 /* Expand conditional increment or decrement using alc/slb instructions.
3678 Should generate code setting DST to either SRC or SRC + INCREMENT,
3679 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3680 Returns true if successful, false otherwise. */
3683 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3684 rtx dst, rtx src, rtx increment)
3686 enum machine_mode cmp_mode;
3687 enum machine_mode cc_mode;
3692 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3693 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3695 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3696 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3701 /* Try ADD LOGICAL WITH CARRY. */
3702 if (increment == const1_rtx)
3704 /* Determine CC mode to use. */
3705 if (cmp_code == EQ || cmp_code == NE)
3707 if (cmp_op1 != const0_rtx)
3709 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3710 NULL_RTX, 0, OPTAB_WIDEN);
3711 cmp_op1 = const0_rtx;
3714 cmp_code = cmp_code == EQ ? LEU : GTU;
3717 if (cmp_code == LTU || cmp_code == LEU)
3722 cmp_code = swap_condition (cmp_code);
3739 /* Emit comparison instruction pattern. */
3740 if (!register_operand (cmp_op0, cmp_mode))
3741 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3743 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3744 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3745 /* We use insn_invalid_p here to add clobbers if required. */
3746 if (insn_invalid_p (emit_insn (insn)))
3749 /* Emit ALC instruction pattern. */
3750 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3751 gen_rtx_REG (cc_mode, CC_REGNUM),
3754 if (src != const0_rtx)
3756 if (!register_operand (src, GET_MODE (dst)))
3757 src = force_reg (GET_MODE (dst), src);
3759 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3760 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3763 p = rtvec_alloc (2);
3765 gen_rtx_SET (VOIDmode, dst, op_res);
3767 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3768 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3773 /* Try SUBTRACT LOGICAL WITH BORROW. */
3774 if (increment == constm1_rtx)
3776 /* Determine CC mode to use. */
3777 if (cmp_code == EQ || cmp_code == NE)
3779 if (cmp_op1 != const0_rtx)
3781 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3782 NULL_RTX, 0, OPTAB_WIDEN);
3783 cmp_op1 = const0_rtx;
3786 cmp_code = cmp_code == EQ ? LEU : GTU;
3789 if (cmp_code == GTU || cmp_code == GEU)
3794 cmp_code = swap_condition (cmp_code);
3811 /* Emit comparison instruction pattern. */
3812 if (!register_operand (cmp_op0, cmp_mode))
3813 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3815 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3816 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3817 /* We use insn_invalid_p here to add clobbers if required. */
3818 if (insn_invalid_p (emit_insn (insn)))
3821 /* Emit SLB instruction pattern. */
3822 if (!register_operand (src, GET_MODE (dst)))
3823 src = force_reg (GET_MODE (dst), src);
3825 op_res = gen_rtx_MINUS (GET_MODE (dst),
3826 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3827 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3828 gen_rtx_REG (cc_mode, CC_REGNUM),
3830 p = rtvec_alloc (2);
3832 gen_rtx_SET (VOIDmode, dst, op_res);
3834 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3835 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3844 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3845 We need to emit DTP-relative relocations. */
3848 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3853 fputs ("\t.long\t", file);
3856 fputs ("\t.quad\t", file);
3861 output_addr_const (file, x);
3862 fputs ("@DTPOFF", file);
3865 /* In the name of slightly smaller debug output, and to cater to
3866 general assembler losage, recognize various UNSPEC sequences
3867 and turn them back into a direct symbol reference. */
3870 s390_delegitimize_address (rtx orig_x)
3874 if (GET_CODE (x) != MEM)
3878 if (GET_CODE (x) == PLUS
3879 && GET_CODE (XEXP (x, 1)) == CONST
3880 && GET_CODE (XEXP (x, 0)) == REG
3881 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3883 y = XEXP (XEXP (x, 1), 0);
3884 if (GET_CODE (y) == UNSPEC
3885 && XINT (y, 1) == UNSPEC_GOT)
3886 return XVECEXP (y, 0, 0);
3890 if (GET_CODE (x) == CONST)
3893 if (GET_CODE (y) == UNSPEC
3894 && XINT (y, 1) == UNSPEC_GOTENT)
3895 return XVECEXP (y, 0, 0);
3902 /* Output shift count operand OP to stdio stream FILE. */
3905 print_shift_count_operand (FILE *file, rtx op)
3907 HOST_WIDE_INT offset = 0;
3909 /* We can have an integer constant, an address register,
3910 or a sum of the two. */
3911 if (GET_CODE (op) == CONST_INT)
3913 offset = INTVAL (op);
3916 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3918 offset = INTVAL (XEXP (op, 1));
3921 while (op && GET_CODE (op) == SUBREG)
3922 op = SUBREG_REG (op);
3925 if (op && (GET_CODE (op) != REG
3926 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3927 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3930 /* Shift counts are truncated to the low six bits anyway. */
3931 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3933 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3936 /* Locate some local-dynamic symbol still in use by this function
3937 so that we can print its name in local-dynamic base patterns. */
3940 get_some_local_dynamic_name (void)
3944 if (cfun->machine->some_ld_name)
3945 return cfun->machine->some_ld_name;
3947 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3949 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3950 return cfun->machine->some_ld_name;
3956 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3960 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3962 x = get_pool_constant (x);
3963 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3966 if (GET_CODE (x) == SYMBOL_REF
3967 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3969 cfun->machine->some_ld_name = XSTR (x, 0);
3976 /* Output machine-dependent UNSPECs occurring in address constant X
3977 in assembler syntax to stdio stream FILE. Returns true if the
3978 constant X could be recognized, false otherwise. */
3981 s390_output_addr_const_extra (FILE *file, rtx x)
3983 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3984 switch (XINT (x, 1))
3987 output_addr_const (file, XVECEXP (x, 0, 0));
3988 fprintf (file, "@GOTENT");
3991 output_addr_const (file, XVECEXP (x, 0, 0));
3992 fprintf (file, "@GOT");
3995 output_addr_const (file, XVECEXP (x, 0, 0));
3996 fprintf (file, "@GOTOFF");
3999 output_addr_const (file, XVECEXP (x, 0, 0));
4000 fprintf (file, "@PLT");
4003 output_addr_const (file, XVECEXP (x, 0, 0));
4004 fprintf (file, "@PLTOFF");
4007 output_addr_const (file, XVECEXP (x, 0, 0));
4008 fprintf (file, "@TLSGD");
4011 assemble_name (file, get_some_local_dynamic_name ());
4012 fprintf (file, "@TLSLDM");
4015 output_addr_const (file, XVECEXP (x, 0, 0));
4016 fprintf (file, "@DTPOFF");
4019 output_addr_const (file, XVECEXP (x, 0, 0));
4020 fprintf (file, "@NTPOFF");
4022 case UNSPEC_GOTNTPOFF:
4023 output_addr_const (file, XVECEXP (x, 0, 0));
4024 fprintf (file, "@GOTNTPOFF");
4026 case UNSPEC_INDNTPOFF:
4027 output_addr_const (file, XVECEXP (x, 0, 0));
4028 fprintf (file, "@INDNTPOFF");
4035 /* Output address operand ADDR in assembler syntax to
4036 stdio stream FILE. */
4039 print_operand_address (FILE *file, rtx addr)
4041 struct s390_address ad;
4043 if (!s390_decompose_address (addr, &ad)
4044 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4045 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4046 output_operand_lossage ("Cannot decompose address.");
4049 output_addr_const (file, ad.disp);
4051 fprintf (file, "0");
4053 if (ad.base && ad.indx)
4054 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4055 reg_names[REGNO (ad.base)]);
4057 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4060 /* Output operand X in assembler syntax to stdio stream FILE.
4061 CODE specified the format flag. The following format flags
4064 'C': print opcode suffix for branch condition.
4065 'D': print opcode suffix for inverse branch condition.
4066 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4067 'O': print only the displacement of a memory reference.
4068 'R': print only the base register of a memory reference.
4069 'S': print S-type memory reference (base+displacement).
4070 'N': print the second word of a DImode operand.
4071 'M': print the second word of a TImode operand.
4072 'Y': print shift count operand.
4074 'b': print integer X as if it's an unsigned byte.
4075 'x': print integer X as if it's an unsigned word.
4076 'h': print integer X as if it's a signed word.
4077 'i': print the first nonzero HImode part of X.
4078 'j': print the first HImode part unequal to 0xffff of X. */
4081 print_operand (FILE *file, rtx x, int code)
4086 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4090 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4094 if (GET_CODE (x) == SYMBOL_REF)
4096 fprintf (file, "%s", ":tls_load:");
4097 output_addr_const (file, x);
4099 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4101 fprintf (file, "%s", ":tls_gdcall:");
4102 output_addr_const (file, XVECEXP (x, 0, 0));
4104 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4106 fprintf (file, "%s", ":tls_ldcall:");
4107 assemble_name (file, get_some_local_dynamic_name ());
4115 struct s390_address ad;
4117 if (GET_CODE (x) != MEM
4118 || !s390_decompose_address (XEXP (x, 0), &ad)
4119 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4124 output_addr_const (file, ad.disp);
4126 fprintf (file, "0");
4132 struct s390_address ad;
4134 if (GET_CODE (x) != MEM
4135 || !s390_decompose_address (XEXP (x, 0), &ad)
4136 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4141 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4143 fprintf (file, "0");
4149 struct s390_address ad;
4151 if (GET_CODE (x) != MEM
4152 || !s390_decompose_address (XEXP (x, 0), &ad)
4153 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4158 output_addr_const (file, ad.disp);
4160 fprintf (file, "0");
4163 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4168 if (GET_CODE (x) == REG)
4169 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4170 else if (GET_CODE (x) == MEM)
4171 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4177 if (GET_CODE (x) == REG)
4178 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4179 else if (GET_CODE (x) == MEM)
4180 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4186 print_shift_count_operand (file, x);
4190 switch (GET_CODE (x))
4193 fprintf (file, "%s", reg_names[REGNO (x)]);
4197 output_address (XEXP (x, 0));
4204 output_addr_const (file, x);
4209 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4210 else if (code == 'x')
4211 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4212 else if (code == 'h')
4213 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4214 else if (code == 'i')
4215 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4216 s390_extract_part (x, HImode, 0));
4217 else if (code == 'j')
4218 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4219 s390_extract_part (x, HImode, -1));
4221 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4225 if (GET_MODE (x) != VOIDmode)
4228 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4229 else if (code == 'x')
4230 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4231 else if (code == 'h')
4232 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4238 fatal_insn ("UNKNOWN in print_operand !?", x);
4243 /* Target hook for assembling integer objects. We need to define it
4244 here to work a round a bug in some versions of GAS, which couldn't
4245 handle values smaller than INT_MIN when printed in decimal. */
4248 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4250 if (size == 8 && aligned_p
4251 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4253 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4257 return default_assemble_integer (x, size, aligned_p);
4260 /* Returns true if register REGNO is used for forming
4261 a memory address in expression X. */
4264 reg_used_in_mem_p (int regno, rtx x)
4266 enum rtx_code code = GET_CODE (x);
4272 if (refers_to_regno_p (regno, regno+1,
4276 else if (code == SET
4277 && GET_CODE (SET_DEST (x)) == PC)
4279 if (refers_to_regno_p (regno, regno+1,
4284 fmt = GET_RTX_FORMAT (code);
4285 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4288 && reg_used_in_mem_p (regno, XEXP (x, i)))
4291 else if (fmt[i] == 'E')
4292 for (j = 0; j < XVECLEN (x, i); j++)
4293 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4299 /* Returns true if expression DEP_RTX sets an address register
4300 used by instruction INSN to address memory. */
4303 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4307 if (GET_CODE (dep_rtx) == INSN)
4308 dep_rtx = PATTERN (dep_rtx);
4310 if (GET_CODE (dep_rtx) == SET)
4312 target = SET_DEST (dep_rtx);
4313 if (GET_CODE (target) == STRICT_LOW_PART)
4314 target = XEXP (target, 0);
4315 while (GET_CODE (target) == SUBREG)
4316 target = SUBREG_REG (target);
4318 if (GET_CODE (target) == REG)
4320 int regno = REGNO (target);
4322 if (s390_safe_attr_type (insn) == TYPE_LA)
4324 pat = PATTERN (insn);
4325 if (GET_CODE (pat) == PARALLEL)
4327 if (XVECLEN (pat, 0) != 2)
4329 pat = XVECEXP (pat, 0, 0);
4331 if (GET_CODE (pat) == SET)
4332 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4336 else if (get_attr_atype (insn) == ATYPE_AGEN)
4337 return reg_used_in_mem_p (regno, PATTERN (insn));
4343 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
4346 s390_agen_dep_p (rtx dep_insn, rtx insn)
4348 rtx dep_rtx = PATTERN (dep_insn);
4351 if (GET_CODE (dep_rtx) == SET
4352 && addr_generation_dependency_p (dep_rtx, insn))
4354 else if (GET_CODE (dep_rtx) == PARALLEL)
4356 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4358 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4365 /* A C statement (sans semicolon) to update the integer scheduling priority
4366 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4367 reduce the priority to execute INSN later. Do not define this macro if
4368 you do not need to adjust the scheduling priorities of insns.
4370 A STD instruction should be scheduled earlier,
4371 in order to use the bypass. */
4374 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4376 if (! INSN_P (insn))
4379 if (s390_tune != PROCESSOR_2084_Z990)
4382 switch (s390_safe_attr_type (insn))
4386 priority = priority << 3;
4390 priority = priority << 1;
4398 /* The number of instructions that can be issued per cycle. */
4401 s390_issue_rate (void)
4403 if (s390_tune == PROCESSOR_2084_Z990)
4409 s390_first_cycle_multipass_dfa_lookahead (void)
4415 /* Split all branches that exceed the maximum distance.
4416 Returns true if this created a new literal pool entry. */
4419 s390_split_branches (void)
4421 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4422 int new_literal = 0;
4423 rtx insn, pat, tmp, target;
4426 /* We need correct insn addresses. */
4428 shorten_branches (get_insns ());
4430 /* Find all branches that exceed 64KB, and split them. */
4432 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4434 if (GET_CODE (insn) != JUMP_INSN)
4437 pat = PATTERN (insn);
4438 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4439 pat = XVECEXP (pat, 0, 0);
4440 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
4443 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4445 label = &SET_SRC (pat);
4447 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4449 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4450 label = &XEXP (SET_SRC (pat), 1);
4451 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4452 label = &XEXP (SET_SRC (pat), 2);
4459 if (get_attr_length (insn) <= 4)
4462 /* We are going to use the return register as scratch register,
4463 make sure it will be saved/restored by the prologue/epilogue. */
4464 cfun_frame_layout.save_return_addr_p = 1;
4469 tmp = force_const_mem (Pmode, *label);
4470 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4471 INSN_ADDRESSES_NEW (tmp, -1);
4472 annotate_constant_pool_refs (&PATTERN (tmp));
4479 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4480 UNSPEC_LTREL_OFFSET);
4481 target = gen_rtx_CONST (Pmode, target);
4482 target = force_const_mem (Pmode, target);
4483 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4484 INSN_ADDRESSES_NEW (tmp, -1);
4485 annotate_constant_pool_refs (&PATTERN (tmp));
4487 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4488 cfun->machine->base_reg),
4490 target = gen_rtx_PLUS (Pmode, temp_reg, target);
4493 if (!validate_change (insn, label, target, 0))
4500 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4501 Fix up MEMs as required. */
4504 annotate_constant_pool_refs (rtx *x)
4509 if (GET_CODE (*x) == SYMBOL_REF
4510 && CONSTANT_POOL_ADDRESS_P (*x))
4513 /* Literal pool references can only occur inside a MEM ... */
4514 if (GET_CODE (*x) == MEM)
4516 rtx memref = XEXP (*x, 0);
4518 if (GET_CODE (memref) == SYMBOL_REF
4519 && CONSTANT_POOL_ADDRESS_P (memref))
4521 rtx base = cfun->machine->base_reg;
4522 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4525 *x = replace_equiv_address (*x, addr);
4529 if (GET_CODE (memref) == CONST
4530 && GET_CODE (XEXP (memref, 0)) == PLUS
4531 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4532 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4533 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4535 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4536 rtx sym = XEXP (XEXP (memref, 0), 0);
4537 rtx base = cfun->machine->base_reg;
4538 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4541 *x = replace_equiv_address (*x, plus_constant (addr, off));
4546 /* ... or a load-address type pattern. */
4547 if (GET_CODE (*x) == SET)
4549 rtx addrref = SET_SRC (*x);
4551 if (GET_CODE (addrref) == SYMBOL_REF
4552 && CONSTANT_POOL_ADDRESS_P (addrref))
4554 rtx base = cfun->machine->base_reg;
4555 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4558 SET_SRC (*x) = addr;
4562 if (GET_CODE (addrref) == CONST
4563 && GET_CODE (XEXP (addrref, 0)) == PLUS
4564 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4565 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4566 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4568 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4569 rtx sym = XEXP (XEXP (addrref, 0), 0);
4570 rtx base = cfun->machine->base_reg;
4571 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4574 SET_SRC (*x) = plus_constant (addr, off);
4579 /* Annotate LTREL_BASE as well. */
4580 if (GET_CODE (*x) == UNSPEC
4581 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4583 rtx base = cfun->machine->base_reg;
4584 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
4589 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4590 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4594 annotate_constant_pool_refs (&XEXP (*x, i));
4596 else if (fmt[i] == 'E')
4598 for (j = 0; j < XVECLEN (*x, i); j++)
4599 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4605 /* Find an annotated literal pool symbol referenced in RTX X,
4606 and store it at REF. Will abort if X contains references to
4607 more than one such pool symbol; multiple references to the same
4608 symbol are allowed, however.
4610 The rtx pointed to by REF must be initialized to NULL_RTX
4611 by the caller before calling this routine. */
4614 find_constant_pool_ref (rtx x, rtx *ref)
4619 /* Ignore LTREL_BASE references. */
4620 if (GET_CODE (x) == UNSPEC
4621 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4623 /* Likewise POOL_ENTRY insns. */
4624 if (GET_CODE (x) == UNSPEC_VOLATILE
4625 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
4628 if (GET_CODE (x) == SYMBOL_REF
4629 && CONSTANT_POOL_ADDRESS_P (x))
4632 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4634 rtx sym = XVECEXP (x, 0, 0);
4635 if (GET_CODE (sym) != SYMBOL_REF
4636 || !CONSTANT_POOL_ADDRESS_P (sym))
4639 if (*ref == NULL_RTX)
4641 else if (*ref != sym)
4647 fmt = GET_RTX_FORMAT (GET_CODE (x));
4648 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4652 find_constant_pool_ref (XEXP (x, i), ref);
4654 else if (fmt[i] == 'E')
4656 for (j = 0; j < XVECLEN (x, i); j++)
4657 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4662 /* Replace every reference to the annotated literal pool
4663 symbol REF in X by its base plus OFFSET. */
4666 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
4674 if (GET_CODE (*x) == UNSPEC
4675 && XINT (*x, 1) == UNSPEC_LTREF
4676 && XVECEXP (*x, 0, 0) == ref)
4678 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
4682 if (GET_CODE (*x) == PLUS
4683 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4684 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4685 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4686 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4688 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4689 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
4693 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4694 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4698 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4700 else if (fmt[i] == 'E')
4702 for (j = 0; j < XVECLEN (*x, i); j++)
4703 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4708 /* Check whether X contains an UNSPEC_LTREL_BASE.
4709 Return its constant pool symbol if found, NULL_RTX otherwise. */
4712 find_ltrel_base (rtx x)
4717 if (GET_CODE (x) == UNSPEC
4718 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4719 return XVECEXP (x, 0, 0);
4721 fmt = GET_RTX_FORMAT (GET_CODE (x));
4722 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4726 rtx fnd = find_ltrel_base (XEXP (x, i));
4730 else if (fmt[i] == 'E')
4732 for (j = 0; j < XVECLEN (x, i); j++)
4734 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4744 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4747 replace_ltrel_base (rtx *x)
4752 if (GET_CODE (*x) == UNSPEC
4753 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4755 *x = XVECEXP (*x, 0, 1);
4759 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4760 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4764 replace_ltrel_base (&XEXP (*x, i));
4766 else if (fmt[i] == 'E')
4768 for (j = 0; j < XVECLEN (*x, i); j++)
4769 replace_ltrel_base (&XVECEXP (*x, i, j));
4775 /* We keep a list of constants which we have to add to internal
4776 constant tables in the middle of large functions. */
4778 #define NR_C_MODES 7
4779 enum machine_mode constant_modes[NR_C_MODES] =
4790 struct constant *next;
4795 struct constant_pool
4797 struct constant_pool *next;
4802 struct constant *constants[NR_C_MODES];
4803 struct constant *execute;
4808 static struct constant_pool * s390_mainpool_start (void);
4809 static void s390_mainpool_finish (struct constant_pool *);
4810 static void s390_mainpool_cancel (struct constant_pool *);
4812 static struct constant_pool * s390_chunkify_start (void);
4813 static void s390_chunkify_finish (struct constant_pool *);
4814 static void s390_chunkify_cancel (struct constant_pool *);
4816 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4817 static void s390_end_pool (struct constant_pool *, rtx);
4818 static void s390_add_pool_insn (struct constant_pool *, rtx);
4819 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4820 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4821 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4822 static void s390_add_execute (struct constant_pool *, rtx);
4823 static rtx s390_find_execute (struct constant_pool *, rtx);
4824 static rtx s390_execute_label (rtx);
4825 static rtx s390_execute_target (rtx);
4826 static void s390_dump_pool (struct constant_pool *, bool);
4827 static void s390_dump_execute (struct constant_pool *);
4828 static struct constant_pool *s390_alloc_pool (void);
4829 static void s390_free_pool (struct constant_pool *);
4831 /* Create new constant pool covering instructions starting at INSN
4832 and chain it to the end of POOL_LIST. */
4834 static struct constant_pool *
4835 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4837 struct constant_pool *pool, **prev;
4839 pool = s390_alloc_pool ();
4840 pool->first_insn = insn;
4842 for (prev = pool_list; *prev; prev = &(*prev)->next)
4849 /* End range of instructions covered by POOL at INSN and emit
4850 placeholder insn representing the pool. */
4853 s390_end_pool (struct constant_pool *pool, rtx insn)
4855 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4858 insn = get_last_insn ();
4860 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4861 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4864 /* Add INSN to the list of insns covered by POOL. */
4867 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4869 bitmap_set_bit (pool->insns, INSN_UID (insn));
4872 /* Return pool out of POOL_LIST that covers INSN. */
4874 static struct constant_pool *
4875 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4877 struct constant_pool *pool;
4879 for (pool = pool_list; pool; pool = pool->next)
4880 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4886 /* Add constant VAL of mode MODE to the constant pool POOL. */
4889 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4894 for (i = 0; i < NR_C_MODES; i++)
4895 if (constant_modes[i] == mode)
4897 if (i == NR_C_MODES)
4900 for (c = pool->constants[i]; c != NULL; c = c->next)
4901 if (rtx_equal_p (val, c->value))
4906 c = (struct constant *) xmalloc (sizeof *c);
4908 c->label = gen_label_rtx ();
4909 c->next = pool->constants[i];
4910 pool->constants[i] = c;
4911 pool->size += GET_MODE_SIZE (mode);
4915 /* Find constant VAL of mode MODE in the constant pool POOL.
4916 Return an RTX describing the distance from the start of
4917 the pool to the location of the new constant. */
4920 s390_find_constant (struct constant_pool *pool, rtx val,
4921 enum machine_mode mode)
4927 for (i = 0; i < NR_C_MODES; i++)
4928 if (constant_modes[i] == mode)
4930 if (i == NR_C_MODES)
4933 for (c = pool->constants[i]; c != NULL; c = c->next)
4934 if (rtx_equal_p (val, c->value))
4940 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4941 gen_rtx_LABEL_REF (Pmode, pool->label));
4942 offset = gen_rtx_CONST (Pmode, offset);
4946 /* Add execute target for INSN to the constant pool POOL. */
4949 s390_add_execute (struct constant_pool *pool, rtx insn)
4953 for (c = pool->execute; c != NULL; c = c->next)
4954 if (INSN_UID (insn) == INSN_UID (c->value))
4959 rtx label = s390_execute_label (insn);
4962 c = (struct constant *) xmalloc (sizeof *c);
4964 c->label = label == const0_rtx ? gen_label_rtx () : XEXP (label, 0);
4965 c->next = pool->execute;
4967 pool->size += label == const0_rtx ? 6 : 0;
4971 /* Find execute target for INSN in the constant pool POOL.
4972 Return an RTX describing the distance from the start of
4973 the pool to the location of the execute target. */
4976 s390_find_execute (struct constant_pool *pool, rtx insn)
4981 for (c = pool->execute; c != NULL; c = c->next)
4982 if (INSN_UID (insn) == INSN_UID (c->value))
4988 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4989 gen_rtx_LABEL_REF (Pmode, pool->label));
4990 offset = gen_rtx_CONST (Pmode, offset);
4994 /* Check whether INSN is an execute. Return the label_ref to its
4995 execute target template if so, NULL_RTX otherwise. */
4998 s390_execute_label (rtx insn)
5000 if (GET_CODE (insn) == INSN
5001 && GET_CODE (PATTERN (insn)) == PARALLEL
5002 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5003 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5004 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5009 /* For an execute INSN, extract the execute target template. */
5012 s390_execute_target (rtx insn)
5014 rtx pattern = PATTERN (insn);
5015 gcc_assert (s390_execute_label (insn));
5017 if (XVECLEN (pattern, 0) == 2)
5019 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5023 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5026 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5027 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5029 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5035 /* Indicate that INSN cannot be duplicated. This is the case for
5036 execute insns that carry a unique label. */
5039 s390_cannot_copy_insn_p (rtx insn)
5041 rtx label = s390_execute_label (insn);
5042 return label && label != const0_rtx;
5045 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5046 do not emit the pool base label. */
5049 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5052 rtx insn = pool->pool_insn;
5055 /* Switch to rodata section. */
5056 if (TARGET_CPU_ZARCH)
5058 insn = emit_insn_after (gen_pool_section_start (), insn);
5059 INSN_ADDRESSES_NEW (insn, -1);
5062 /* Ensure minimum pool alignment. */
5063 if (TARGET_CPU_ZARCH)
5064 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5066 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5067 INSN_ADDRESSES_NEW (insn, -1);
5069 /* Emit pool base label. */
5072 insn = emit_label_after (pool->label, insn);
5073 INSN_ADDRESSES_NEW (insn, -1);
5076 /* Dump constants in descending alignment requirement order,
5077 ensuring proper alignment for every constant. */
5078 for (i = 0; i < NR_C_MODES; i++)
5079 for (c = pool->constants[i]; c; c = c->next)
5081 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5082 rtx value = c->value;
5083 if (GET_CODE (value) == CONST
5084 && GET_CODE (XEXP (value, 0)) == UNSPEC
5085 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5086 && XVECLEN (XEXP (value, 0), 0) == 1)
5088 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5089 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5090 value = gen_rtx_CONST (VOIDmode, value);
5093 insn = emit_label_after (c->label, insn);
5094 INSN_ADDRESSES_NEW (insn, -1);
5096 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5097 gen_rtvec (1, value),
5098 UNSPECV_POOL_ENTRY);
5099 insn = emit_insn_after (value, insn);
5100 INSN_ADDRESSES_NEW (insn, -1);
5103 /* Ensure minimum alignment for instructions. */
5104 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5105 INSN_ADDRESSES_NEW (insn, -1);
5107 /* Output in-pool execute template insns. */
5108 for (c = pool->execute; c; c = c->next)
5110 if (s390_execute_label (c->value) != const0_rtx)
5113 insn = emit_label_after (c->label, insn);
5114 INSN_ADDRESSES_NEW (insn, -1);
5116 insn = emit_insn_after (s390_execute_target (c->value), insn);
5117 INSN_ADDRESSES_NEW (insn, -1);
5120 /* Switch back to previous section. */
5121 if (TARGET_CPU_ZARCH)
5123 insn = emit_insn_after (gen_pool_section_end (), insn);
5124 INSN_ADDRESSES_NEW (insn, -1);
5127 insn = emit_barrier_after (insn);
5128 INSN_ADDRESSES_NEW (insn, -1);
5130 /* Remove placeholder insn. */
5131 remove_insn (pool->pool_insn);
5133 /* Output out-of-pool execute template isns. */
5134 s390_dump_execute (pool);
5137 /* Dump out the out-of-pool execute template insns in POOL
5138 at the end of the instruction stream. */
5141 s390_dump_execute (struct constant_pool *pool)
5146 for (c = pool->execute; c; c = c->next)
5148 if (s390_execute_label (c->value) == const0_rtx)
5151 insn = emit_label (c->label);
5152 INSN_ADDRESSES_NEW (insn, -1);
5154 insn = emit_insn (s390_execute_target (c->value));
5155 INSN_ADDRESSES_NEW (insn, -1);
5159 /* Allocate new constant_pool structure. */
5161 static struct constant_pool *
5162 s390_alloc_pool (void)
5164 struct constant_pool *pool;
5167 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5169 for (i = 0; i < NR_C_MODES; i++)
5170 pool->constants[i] = NULL;
5172 pool->execute = NULL;
5173 pool->label = gen_label_rtx ();
5174 pool->first_insn = NULL_RTX;
5175 pool->pool_insn = NULL_RTX;
5176 pool->insns = BITMAP_XMALLOC ();
5182 /* Free all memory used by POOL. */
5185 s390_free_pool (struct constant_pool *pool)
5187 struct constant *c, *next;
5190 for (i = 0; i < NR_C_MODES; i++)
5191 for (c = pool->constants[i]; c; c = next)
5197 for (c = pool->execute; c; c = next)
5203 BITMAP_XFREE (pool->insns);
5208 /* Collect main literal pool. Return NULL on overflow. */
5210 static struct constant_pool *
5211 s390_mainpool_start (void)
5213 struct constant_pool *pool;
5216 pool = s390_alloc_pool ();
5218 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5220 if (GET_CODE (insn) == INSN
5221 && GET_CODE (PATTERN (insn)) == SET
5222 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5223 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5225 if (pool->pool_insn)
5227 pool->pool_insn = insn;
5230 if (s390_execute_label (insn))
5232 s390_add_execute (pool, insn);
5234 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5236 rtx pool_ref = NULL_RTX;
5237 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5240 rtx constant = get_pool_constant (pool_ref);
5241 enum machine_mode mode = get_pool_mode (pool_ref);
5242 s390_add_constant (pool, constant, mode);
5247 if (!pool->pool_insn && pool->size > 0)
5250 if (pool->size >= 4096)
5252 /* We're going to chunkify the pool, so remove the main
5253 pool placeholder insn. */
5254 remove_insn (pool->pool_insn);
5256 s390_free_pool (pool);
5263 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5264 Modify the current function to output the pool constants as well as
5265 the pool register setup instruction. */
5268 s390_mainpool_finish (struct constant_pool *pool)
5270 rtx base_reg = cfun->machine->base_reg;
5273 /* If the pool is empty, we're done. */
5274 if (pool->size == 0)
5276 /* However, we may have out-of-pool execute templates. */
5277 s390_dump_execute (pool);
5279 /* We don't actually need a base register after all. */
5280 cfun->machine->base_reg = NULL_RTX;
5282 if (pool->pool_insn)
5283 remove_insn (pool->pool_insn);
5284 s390_free_pool (pool);
5288 /* We need correct insn addresses. */
5289 shorten_branches (get_insns ());
5291 /* On zSeries, we use a LARL to load the pool register. The pool is
5292 located in the .rodata section, so we emit it after the function. */
5293 if (TARGET_CPU_ZARCH)
5295 insn = gen_main_base_64 (base_reg, pool->label);
5296 insn = emit_insn_after (insn, pool->pool_insn);
5297 INSN_ADDRESSES_NEW (insn, -1);
5298 remove_insn (pool->pool_insn);
5300 insn = get_last_insn ();
5301 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5302 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5304 s390_dump_pool (pool, 0);
5307 /* On S/390, if the total size of the function's code plus literal pool
5308 does not exceed 4096 bytes, we use BASR to set up a function base
5309 pointer, and emit the literal pool at the end of the function. */
5310 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5311 + pool->size + 8 /* alignment slop */ < 4096)
5313 insn = gen_main_base_31_small (base_reg, pool->label);
5314 insn = emit_insn_after (insn, pool->pool_insn);
5315 INSN_ADDRESSES_NEW (insn, -1);
5316 remove_insn (pool->pool_insn);
5318 insn = emit_label_after (pool->label, insn);
5319 INSN_ADDRESSES_NEW (insn, -1);
5321 insn = get_last_insn ();
5322 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5323 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5325 s390_dump_pool (pool, 1);
5328 /* Otherwise, we emit an inline literal pool and use BASR to branch
5329 over it, setting up the pool register at the same time. */
5332 rtx pool_end = gen_label_rtx ();
5334 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5335 insn = emit_insn_after (insn, pool->pool_insn);
5336 INSN_ADDRESSES_NEW (insn, -1);
5337 remove_insn (pool->pool_insn);
5339 insn = emit_label_after (pool->label, insn);
5340 INSN_ADDRESSES_NEW (insn, -1);
5342 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5343 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5345 insn = emit_label_after (pool_end, pool->pool_insn);
5346 INSN_ADDRESSES_NEW (insn, -1);
5348 s390_dump_pool (pool, 1);
5352 /* Replace all literal pool references. */
5354 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5357 replace_ltrel_base (&PATTERN (insn));
5359 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5361 rtx addr, pool_ref = NULL_RTX;
5362 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5365 if (s390_execute_label (insn))
5366 addr = s390_find_execute (pool, insn);
5368 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5369 get_pool_mode (pool_ref));
5371 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5372 INSN_CODE (insn) = -1;
5378 /* Free the pool. */
5379 s390_free_pool (pool);
5382 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5383 We have decided we cannot use this pool, so revert all changes
5384 to the current function that were done by s390_mainpool_start. */
5386 s390_mainpool_cancel (struct constant_pool *pool)
5388 /* We didn't actually change the instruction stream, so simply
5389 free the pool memory. */
5390 s390_free_pool (pool);
5394 /* Chunkify the literal pool. */
5396 #define S390_POOL_CHUNK_MIN 0xc00
5397 #define S390_POOL_CHUNK_MAX 0xe00
5399 static struct constant_pool *
5400 s390_chunkify_start (void)
5402 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5405 rtx pending_ltrel = NULL_RTX;
5408 rtx (*gen_reload_base) (rtx, rtx) =
5409 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5412 /* We need correct insn addresses. */
5414 shorten_branches (get_insns ());
5416 /* Scan all insns and move literals to pool chunks. */
5418 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5420 /* Check for pending LTREL_BASE. */
5423 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5426 if (ltrel_base == pending_ltrel)
5427 pending_ltrel = NULL_RTX;
5433 if (s390_execute_label (insn))
5436 curr_pool = s390_start_pool (&pool_list, insn);
5438 s390_add_execute (curr_pool, insn);
5439 s390_add_pool_insn (curr_pool, insn);
5441 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5443 rtx pool_ref = NULL_RTX;
5444 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5447 rtx constant = get_pool_constant (pool_ref);
5448 enum machine_mode mode = get_pool_mode (pool_ref);
5451 curr_pool = s390_start_pool (&pool_list, insn);
5453 s390_add_constant (curr_pool, constant, mode);
5454 s390_add_pool_insn (curr_pool, insn);
5456 /* Don't split the pool chunk between a LTREL_OFFSET load
5457 and the corresponding LTREL_BASE. */
5458 if (GET_CODE (constant) == CONST
5459 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5460 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5464 pending_ltrel = pool_ref;
5469 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5472 s390_add_pool_insn (curr_pool, insn);
5473 /* An LTREL_BASE must follow within the same basic block. */
5479 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5480 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5483 if (TARGET_CPU_ZARCH)
5485 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5488 s390_end_pool (curr_pool, NULL_RTX);
5493 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5494 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5497 /* We will later have to insert base register reload insns.
5498 Those will have an effect on code size, which we need to
5499 consider here. This calculation makes rather pessimistic
5500 worst-case assumptions. */
5501 if (GET_CODE (insn) == CODE_LABEL)
5504 if (chunk_size < S390_POOL_CHUNK_MIN
5505 && curr_pool->size < S390_POOL_CHUNK_MIN)
5508 /* Pool chunks can only be inserted after BARRIERs ... */
5509 if (GET_CODE (insn) == BARRIER)
5511 s390_end_pool (curr_pool, insn);
5516 /* ... so if we don't find one in time, create one. */
5517 else if ((chunk_size > S390_POOL_CHUNK_MAX
5518 || curr_pool->size > S390_POOL_CHUNK_MAX))
5520 rtx label, jump, barrier;
5522 /* We can insert the barrier only after a 'real' insn. */
5523 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5525 if (get_attr_length (insn) == 0)
5528 /* Don't separate LTREL_BASE from the corresponding
5529 LTREL_OFFSET load. */
5533 label = gen_label_rtx ();
5534 jump = emit_jump_insn_after (gen_jump (label), insn);
5535 barrier = emit_barrier_after (jump);
5536 insn = emit_label_after (label, barrier);
5537 JUMP_LABEL (jump) = label;
5538 LABEL_NUSES (label) = 1;
5540 INSN_ADDRESSES_NEW (jump, -1);
5541 INSN_ADDRESSES_NEW (barrier, -1);
5542 INSN_ADDRESSES_NEW (insn, -1);
5544 s390_end_pool (curr_pool, barrier);
5552 s390_end_pool (curr_pool, NULL_RTX);
5557 /* Find all labels that are branched into
5558 from an insn belonging to a different chunk. */
5560 far_labels = BITMAP_XMALLOC ();
5562 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5564 /* Labels marked with LABEL_PRESERVE_P can be target
5565 of non-local jumps, so we have to mark them.
5566 The same holds for named labels.
5568 Don't do that, however, if it is the label before
5571 if (GET_CODE (insn) == CODE_LABEL
5572 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5574 rtx vec_insn = next_real_insn (insn);
5575 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5576 PATTERN (vec_insn) : NULL_RTX;
5578 || !(GET_CODE (vec_pat) == ADDR_VEC
5579 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5580 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5583 /* If we have a direct jump (conditional or unconditional)
5584 or a casesi jump, check all potential targets. */
5585 else if (GET_CODE (insn) == JUMP_INSN)
5587 rtx pat = PATTERN (insn);
5588 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5589 pat = XVECEXP (pat, 0, 0);
5591 if (GET_CODE (pat) == SET)
5593 rtx label = JUMP_LABEL (insn);
5596 if (s390_find_pool (pool_list, label)
5597 != s390_find_pool (pool_list, insn))
5598 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5601 else if (GET_CODE (pat) == PARALLEL
5602 && XVECLEN (pat, 0) == 2
5603 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5604 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5605 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5607 /* Find the jump table used by this casesi jump. */
5608 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5609 rtx vec_insn = next_real_insn (vec_label);
5610 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5611 PATTERN (vec_insn) : NULL_RTX;
5613 && (GET_CODE (vec_pat) == ADDR_VEC
5614 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5616 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
5618 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5620 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5622 if (s390_find_pool (pool_list, label)
5623 != s390_find_pool (pool_list, insn))
5624 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5631 /* Insert base register reload insns before every pool. */
5633 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5635 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5637 rtx insn = curr_pool->first_insn;
5638 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5641 /* Insert base register reload insns at every far label. */
5643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5644 if (GET_CODE (insn) == CODE_LABEL
5645 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5647 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5650 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5652 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5657 BITMAP_XFREE (far_labels);
5660 /* Recompute insn addresses. */
5662 init_insn_lengths ();
5663 shorten_branches (get_insns ());
5668 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5669 After we have decided to use this list, finish implementing
5670 all changes to the current function as required. */
5673 s390_chunkify_finish (struct constant_pool *pool_list)
5675 struct constant_pool *curr_pool = NULL;
5679 /* Replace all literal pool references. */
5681 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5684 replace_ltrel_base (&PATTERN (insn));
5686 curr_pool = s390_find_pool (pool_list, insn);
5690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5692 rtx addr, pool_ref = NULL_RTX;
5693 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5696 if (s390_execute_label (insn))
5697 addr = s390_find_execute (curr_pool, insn);
5699 addr = s390_find_constant (curr_pool,
5700 get_pool_constant (pool_ref),
5701 get_pool_mode (pool_ref));
5703 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5704 INSN_CODE (insn) = -1;
5709 /* Dump out all literal pools. */
5711 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5712 s390_dump_pool (curr_pool, 0);
5714 /* Free pool list. */
5718 struct constant_pool *next = pool_list->next;
5719 s390_free_pool (pool_list);
5724 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5725 We have decided we cannot use this list, so revert all changes
5726 to the current function that were done by s390_chunkify_start. */
5729 s390_chunkify_cancel (struct constant_pool *pool_list)
5731 struct constant_pool *curr_pool = NULL;
5734 /* Remove all pool placeholder insns. */
5736 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5738 /* Did we insert an extra barrier? Remove it. */
5739 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5740 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5741 rtx label = NEXT_INSN (curr_pool->pool_insn);
5743 if (jump && GET_CODE (jump) == JUMP_INSN
5744 && barrier && GET_CODE (barrier) == BARRIER
5745 && label && GET_CODE (label) == CODE_LABEL
5746 && GET_CODE (PATTERN (jump)) == SET
5747 && SET_DEST (PATTERN (jump)) == pc_rtx
5748 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5749 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5752 remove_insn (barrier);
5753 remove_insn (label);
5756 remove_insn (curr_pool->pool_insn);
5759 /* Remove all base register reload insns. */
5761 for (insn = get_insns (); insn; )
5763 rtx next_insn = NEXT_INSN (insn);
5765 if (GET_CODE (insn) == INSN
5766 && GET_CODE (PATTERN (insn)) == SET
5767 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5768 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5774 /* Free pool list. */
5778 struct constant_pool *next = pool_list->next;
5779 s390_free_pool (pool_list);
5785 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5788 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
5792 switch (GET_MODE_CLASS (mode))
5795 if (GET_CODE (exp) != CONST_DOUBLE)
5798 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5799 assemble_real (r, mode, align);
5803 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5812 /* Rework the prologue/epilogue to avoid saving/restoring
5813 registers unnecessarily. */
5816 s390_optimize_prologue (void)
5818 rtx insn, new_insn, next_insn;
5820 /* Do a final recompute of the frame-related data. */
5822 s390_update_frame_layout ();
5824 /* If all special registers are in fact used, there's nothing we
5825 can do, so no point in walking the insn list. */
5827 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
5828 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
5829 && (TARGET_CPU_ZARCH
5830 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
5831 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
5834 /* Search for prologue/epilogue insns and replace them. */
5836 for (insn = get_insns (); insn; insn = next_insn)
5838 int first, last, off;
5839 rtx set, base, offset;
5841 next_insn = NEXT_INSN (insn);
5843 if (GET_CODE (insn) != INSN)
5846 if (GET_CODE (PATTERN (insn)) == PARALLEL
5847 && store_multiple_operation (PATTERN (insn), VOIDmode))
5849 set = XVECEXP (PATTERN (insn), 0, 0);
5850 first = REGNO (SET_SRC (set));
5851 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5852 offset = const0_rtx;
5853 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5854 off = INTVAL (offset);
5856 if (GET_CODE (base) != REG || off < 0)
5858 if (REGNO (base) != STACK_POINTER_REGNUM
5859 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5861 if (first > BASE_REGNUM || last < BASE_REGNUM)
5864 if (cfun_frame_layout.first_save_gpr != -1)
5866 new_insn = save_gprs (base,
5867 off + (cfun_frame_layout.first_save_gpr
5868 - first) * UNITS_PER_WORD,
5869 cfun_frame_layout.first_save_gpr,
5870 cfun_frame_layout.last_save_gpr);
5871 new_insn = emit_insn_before (new_insn, insn);
5872 INSN_ADDRESSES_NEW (new_insn, -1);
5879 if (GET_CODE (PATTERN (insn)) == SET
5880 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
5881 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5882 || (!TARGET_CPU_ZARCH
5883 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
5884 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5886 set = PATTERN (insn);
5887 first = REGNO (SET_SRC (set));
5888 offset = const0_rtx;
5889 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5890 off = INTVAL (offset);
5892 if (GET_CODE (base) != REG || off < 0)
5894 if (REGNO (base) != STACK_POINTER_REGNUM
5895 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5897 if (cfun_frame_layout.first_save_gpr != -1)
5899 new_insn = save_gprs (base,
5900 off + (cfun_frame_layout.first_save_gpr
5901 - first) * UNITS_PER_WORD,
5902 cfun_frame_layout.first_save_gpr,
5903 cfun_frame_layout.last_save_gpr);
5904 new_insn = emit_insn_before (new_insn, insn);
5905 INSN_ADDRESSES_NEW (new_insn, -1);
5912 if (GET_CODE (PATTERN (insn)) == PARALLEL
5913 && load_multiple_operation (PATTERN (insn), VOIDmode))
5915 set = XVECEXP (PATTERN (insn), 0, 0);
5916 first = REGNO (SET_DEST (set));
5917 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5918 offset = const0_rtx;
5919 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5920 off = INTVAL (offset);
5922 if (GET_CODE (base) != REG || off < 0)
5924 if (REGNO (base) != STACK_POINTER_REGNUM
5925 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5927 if (first > BASE_REGNUM || last < BASE_REGNUM)
5930 if (cfun_frame_layout.first_restore_gpr != -1)
5932 new_insn = restore_gprs (base,
5933 off + (cfun_frame_layout.first_restore_gpr
5934 - first) * UNITS_PER_WORD,
5935 cfun_frame_layout.first_restore_gpr,
5936 cfun_frame_layout.last_restore_gpr);
5937 new_insn = emit_insn_before (new_insn, insn);
5938 INSN_ADDRESSES_NEW (new_insn, -1);
5945 if (GET_CODE (PATTERN (insn)) == SET
5946 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5947 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5948 || (!TARGET_CPU_ZARCH
5949 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
5950 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5952 set = PATTERN (insn);
5953 first = REGNO (SET_DEST (set));
5954 offset = const0_rtx;
5955 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5956 off = INTVAL (offset);
5958 if (GET_CODE (base) != REG || off < 0)
5960 if (REGNO (base) != STACK_POINTER_REGNUM
5961 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5963 if (cfun_frame_layout.first_restore_gpr != -1)
5965 new_insn = restore_gprs (base,
5966 off + (cfun_frame_layout.first_restore_gpr
5967 - first) * UNITS_PER_WORD,
5968 cfun_frame_layout.first_restore_gpr,
5969 cfun_frame_layout.last_restore_gpr);
5970 new_insn = emit_insn_before (new_insn, insn);
5971 INSN_ADDRESSES_NEW (new_insn, -1);
5980 /* Perform machine-dependent processing. */
5985 bool pool_overflow = false;
5987 /* Make sure all splits have been performed; splits after
5988 machine_dependent_reorg might confuse insn length counts. */
5989 split_all_insns_noflow ();
5992 /* Install the main literal pool and the associated base
5993 register load insns.
5995 In addition, there are two problematic situations we need
5998 - the literal pool might be > 4096 bytes in size, so that
5999 some of its elements cannot be directly accessed
6001 - a branch target might be > 64K away from the branch, so that
6002 it is not possible to use a PC-relative instruction.
6004 To fix those, we split the single literal pool into multiple
6005 pool chunks, reloading the pool base register at various
6006 points throughout the function to ensure it always points to
6007 the pool chunk the following code expects, and / or replace
6008 PC-relative branches by absolute branches.
6010 However, the two problems are interdependent: splitting the
6011 literal pool can move a branch further away from its target,
6012 causing the 64K limit to overflow, and on the other hand,
6013 replacing a PC-relative branch by an absolute branch means
6014 we need to put the branch target address into the literal
6015 pool, possibly causing it to overflow.
6017 So, we loop trying to fix up both problems until we manage
6018 to satisfy both conditions at the same time. Note that the
6019 loop is guaranteed to terminate as every pass of the loop
6020 strictly decreases the total number of PC-relative branches
6021 in the function. (This is not completely true as there
6022 might be branch-over-pool insns introduced by chunkify_start.
6023 Those never need to be split however.) */
6027 struct constant_pool *pool = NULL;
6029 /* Collect the literal pool. */
6032 pool = s390_mainpool_start ();
6034 pool_overflow = true;
6037 /* If literal pool overflowed, start to chunkify it. */
6039 pool = s390_chunkify_start ();
6041 /* Split out-of-range branches. If this has created new
6042 literal pool entries, cancel current chunk list and
6043 recompute it. zSeries machines have large branch
6044 instructions, so we never need to split a branch. */
6045 if (!TARGET_CPU_ZARCH && s390_split_branches ())
6048 s390_chunkify_cancel (pool);
6050 s390_mainpool_cancel (pool);
6055 /* If we made it up to here, both conditions are satisfied.
6056 Finish up literal pool related changes. */
6058 s390_chunkify_finish (pool);
6060 s390_mainpool_finish (pool);
6062 /* We're done splitting branches. */
6063 cfun->machine->split_branches_pending_p = false;
6067 s390_optimize_prologue ();
6071 /* Return an RTL expression representing the value of the return address
6072 for the frame COUNT steps up from the current frame. FRAME is the
6073 frame pointer of that frame. */
6076 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6081 /* Without backchain, we fail for all but the current frame. */
6083 if (!TARGET_BACKCHAIN && !TARGET_KERNEL_BACKCHAIN && count > 0)
6086 /* For the current frame, we need to make sure the initial
6087 value of RETURN_REGNUM is actually saved. */
6091 cfun_frame_layout.save_return_addr_p = true;
6092 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6095 if (TARGET_BACKCHAIN)
6096 offset = RETURN_REGNUM * UNITS_PER_WORD;
6098 offset = -2 * UNITS_PER_WORD;
6100 addr = plus_constant (frame, offset);
6101 addr = memory_address (Pmode, addr);
6102 return gen_rtx_MEM (Pmode, addr);
6105 /* Return an RTL expression representing the back chain stored in
6106 the current stack frame. */
6109 s390_back_chain_rtx (void)
6113 gcc_assert (TARGET_BACKCHAIN || TARGET_KERNEL_BACKCHAIN);
6115 if (TARGET_BACKCHAIN)
6116 chain = stack_pointer_rtx;
6118 chain = plus_constant (stack_pointer_rtx,
6119 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6121 chain = gen_rtx_MEM (Pmode, chain);
6125 /* Find first call clobbered register unused in a function.
6126 This could be used as base register in a leaf function
6127 or for holding the return address before epilogue. */
6130 find_unused_clobbered_reg (void)
6133 for (i = 0; i < 6; i++)
6134 if (!regs_ever_live[i])
6139 /* Determine the frame area which actually has to be accessed
6140 in the function epilogue. The values are stored at the
6141 given pointers AREA_BOTTOM (address of the lowest used stack
6142 address) and AREA_TOP (address of the first item which does
6143 not belong to the stack frame). */
6146 s390_frame_area (int *area_bottom, int *area_top)
6154 if (cfun_frame_layout.first_restore_gpr != -1)
6156 b = (cfun_frame_layout.gprs_offset
6157 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6158 t = b + (cfun_frame_layout.last_restore_gpr
6159 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6162 if (TARGET_64BIT && cfun_save_high_fprs_p)
6164 b = MIN (b, cfun_frame_layout.f8_offset);
6165 t = MAX (t, (cfun_frame_layout.f8_offset
6166 + cfun_frame_layout.high_fprs * 8));
6170 for (i = 2; i < 4; i++)
6171 if (cfun_fpr_bit_p (i))
6173 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6174 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6181 /* Fill cfun->machine with info about register usage of current function.
6182 Return in LIVE_REGS which GPRs are currently considered live. */
6185 s390_register_info (int live_regs[])
6189 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6190 cfun_frame_layout.fpr_bitmap = 0;
6191 cfun_frame_layout.high_fprs = 0;
6193 for (i = 24; i < 32; i++)
6194 if (regs_ever_live[i] && !global_regs[i])
6196 cfun_set_fpr_bit (i - 16);
6197 cfun_frame_layout.high_fprs++;
6200 /* Find first and last gpr to be saved. We trust regs_ever_live
6201 data, except that we don't save and restore global registers.
6203 Also, all registers with special meaning to the compiler need
6204 to be handled extra. */
6206 for (i = 0; i < 16; i++)
6207 live_regs[i] = regs_ever_live[i] && !global_regs[i];
6210 live_regs[PIC_OFFSET_TABLE_REGNUM]
6211 = regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
6213 live_regs[BASE_REGNUM]
6214 = cfun->machine->base_reg
6215 && REGNO (cfun->machine->base_reg) == BASE_REGNUM;
6217 live_regs[RETURN_REGNUM]
6218 = cfun->machine->split_branches_pending_p
6219 || cfun_frame_layout.save_return_addr_p;
6221 live_regs[STACK_POINTER_REGNUM]
6222 = !current_function_is_leaf
6223 || TARGET_TPF_PROFILING
6224 || cfun_save_high_fprs_p
6225 || get_frame_size () > 0
6226 || current_function_calls_alloca
6227 || current_function_stdarg;
6229 for (i = 6; i < 16; i++)
6232 for (j = 15; j > i; j--)
6238 /* Nothing to save/restore. */
6239 cfun_frame_layout.first_save_gpr = -1;
6240 cfun_frame_layout.first_restore_gpr = -1;
6241 cfun_frame_layout.last_save_gpr = -1;
6242 cfun_frame_layout.last_restore_gpr = -1;
6246 /* Save / Restore from gpr i to j. */
6247 cfun_frame_layout.first_save_gpr = i;
6248 cfun_frame_layout.first_restore_gpr = i;
6249 cfun_frame_layout.last_save_gpr = j;
6250 cfun_frame_layout.last_restore_gpr = j;
6253 if (current_function_stdarg)
6255 /* Varargs functions need to save gprs 2 to 6. */
6256 if (cfun_frame_layout.first_save_gpr == -1
6257 || cfun_frame_layout.first_save_gpr > 2)
6258 cfun_frame_layout.first_save_gpr = 2;
6260 if (cfun_frame_layout.last_save_gpr == -1
6261 || cfun_frame_layout.last_save_gpr < 6)
6262 cfun_frame_layout.last_save_gpr = 6;
6264 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6265 if (TARGET_HARD_FLOAT)
6266 for (i = 0; i < (TARGET_64BIT ? 4 : 2); i++)
6267 cfun_set_fpr_bit (i);
6271 for (i = 2; i < 4; i++)
6272 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6273 cfun_set_fpr_bit (i);
6276 /* Fill cfun->machine with info about frame of current function. */
6279 s390_frame_info (void)
6283 cfun_frame_layout.frame_size = get_frame_size ();
6284 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6285 fatal_error ("Total size of local variables exceeds architecture limit.");
6287 cfun_frame_layout.save_backchain_p = (TARGET_BACKCHAIN
6288 || TARGET_KERNEL_BACKCHAIN);
6290 if (TARGET_BACKCHAIN)
6292 cfun_frame_layout.backchain_offset = 0;
6293 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6294 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6295 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6296 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6299 else if (TARGET_KERNEL_BACKCHAIN)
6301 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6303 cfun_frame_layout.gprs_offset
6304 = (cfun_frame_layout.backchain_offset
6305 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6310 cfun_frame_layout.f4_offset
6311 = (cfun_frame_layout.gprs_offset
6312 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6314 cfun_frame_layout.f0_offset
6315 = (cfun_frame_layout.f4_offset
6316 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6320 /* On 31 bit we have to care about alignment of the
6321 floating point regs to provide fastest access. */
6322 cfun_frame_layout.f0_offset
6323 = ((cfun_frame_layout.gprs_offset
6324 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6325 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6327 cfun_frame_layout.f4_offset
6328 = (cfun_frame_layout.f0_offset
6329 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6332 else /* no backchain */
6334 cfun_frame_layout.f4_offset
6335 = (STACK_POINTER_OFFSET
6336 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6338 cfun_frame_layout.f0_offset
6339 = (cfun_frame_layout.f4_offset
6340 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6342 cfun_frame_layout.gprs_offset
6343 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6346 if (current_function_is_leaf
6347 && !TARGET_TPF_PROFILING
6348 && cfun_frame_layout.frame_size == 0
6349 && !cfun_save_high_fprs_p
6350 && !current_function_calls_alloca
6351 && !current_function_stdarg)
6354 if (TARGET_BACKCHAIN)
6355 cfun_frame_layout.frame_size += (STARTING_FRAME_OFFSET
6356 + cfun_frame_layout.high_fprs * 8);
6359 cfun_frame_layout.frame_size += (cfun_frame_layout.save_backchain_p
6362 /* No alignment trouble here because f8-f15 are only saved under
6364 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6365 cfun_frame_layout.f4_offset),
6366 cfun_frame_layout.gprs_offset)
6367 - cfun_frame_layout.high_fprs * 8);
6369 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6371 for (i = 0; i < 8; i++)
6372 if (cfun_fpr_bit_p (i))
6373 cfun_frame_layout.frame_size += 8;
6375 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6377 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6378 the frame size to sustain 8 byte alignment of stack frames. */
6379 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6380 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6381 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6383 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6387 /* Generate frame layout. Fills in register and frame data for the current
6388 function in cfun->machine. This routine can be called multiple times;
6389 it will re-do the complete frame layout every time. */
6392 s390_init_frame_layout (void)
6394 HOST_WIDE_INT frame_size;
6398 /* If return address register is explicitly used, we need to save it. */
6399 if (regs_ever_live[RETURN_REGNUM]
6400 || !current_function_is_leaf
6401 || TARGET_TPF_PROFILING
6402 || current_function_stdarg
6403 || current_function_calls_eh_return)
6404 cfun_frame_layout.save_return_addr_p = true;
6406 /* On S/390 machines, we may need to perform branch splitting, which
6407 will require both base and return address register. We have no
6408 choice but to assume we're going to need them until right at the
6409 end of the machine dependent reorg phase. */
6410 if (!TARGET_CPU_ZARCH)
6411 cfun->machine->split_branches_pending_p = true;
6415 frame_size = cfun_frame_layout.frame_size;
6417 /* Try to predict whether we'll need the base register. */
6418 base_used = cfun->machine->split_branches_pending_p
6419 || current_function_uses_const_pool
6420 || (!DISP_IN_RANGE (-frame_size)
6421 && !CONST_OK_FOR_CONSTRAINT_P (-frame_size, 'K', "K"));
6423 /* Decide which register to use as literal pool base. In small
6424 leaf functions, try to use an unused call-clobbered register
6425 as base register to avoid save/restore overhead. */
6427 cfun->machine->base_reg = NULL_RTX;
6428 else if (current_function_is_leaf && !regs_ever_live[5])
6429 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6431 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6433 s390_register_info (live_regs);
6436 while (frame_size != cfun_frame_layout.frame_size);
6439 /* Update frame layout. Recompute actual register save data based on
6440 current info and update regs_ever_live for the special registers.
6441 May be called multiple times, but may never cause *more* registers
6442 to be saved than s390_init_frame_layout allocated room for. */
6445 s390_update_frame_layout (void)
6449 s390_register_info (live_regs);
6451 regs_ever_live[BASE_REGNUM] = live_regs[BASE_REGNUM];
6452 regs_ever_live[RETURN_REGNUM] = live_regs[RETURN_REGNUM];
6453 regs_ever_live[STACK_POINTER_REGNUM] = live_regs[STACK_POINTER_REGNUM];
6455 if (cfun->machine->base_reg)
6456 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6459 /* Return true if register FROM can be eliminated via register TO. */
6462 s390_can_eliminate (int from, int to)
6464 gcc_assert (to == STACK_POINTER_REGNUM
6465 || to == HARD_FRAME_POINTER_REGNUM);
6467 gcc_assert (from == FRAME_POINTER_REGNUM
6468 || from == ARG_POINTER_REGNUM
6469 || from == RETURN_ADDRESS_POINTER_REGNUM);
6471 /* Make sure we actually saved the return address. */
6472 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6473 if (!current_function_calls_eh_return
6474 && !current_function_stdarg
6475 && !cfun_frame_layout.save_return_addr_p)
6481 /* Return offset between register FROM and TO initially after prolog. */
6484 s390_initial_elimination_offset (int from, int to)
6486 HOST_WIDE_INT offset;
6489 /* ??? Why are we called for non-eliminable pairs? */
6490 if (!s390_can_eliminate (from, to))
6495 case FRAME_POINTER_REGNUM:
6499 case ARG_POINTER_REGNUM:
6500 s390_init_frame_layout ();
6501 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6504 case RETURN_ADDRESS_POINTER_REGNUM:
6505 s390_init_frame_layout ();
6506 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
6507 gcc_assert (index >= 0);
6508 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6509 offset += index * UNITS_PER_WORD;
6519 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6520 to register BASE. Return generated insn. */
6523 save_fpr (rtx base, int offset, int regnum)
6526 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6527 set_mem_alias_set (addr, s390_sr_alias_set);
6529 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6532 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6533 to register BASE. Return generated insn. */
6536 restore_fpr (rtx base, int offset, int regnum)
6539 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6540 set_mem_alias_set (addr, s390_sr_alias_set);
6542 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6545 /* Generate insn to save registers FIRST to LAST into
6546 the register save area located at offset OFFSET
6547 relative to register BASE. */
6550 save_gprs (rtx base, int offset, int first, int last)
6552 rtx addr, insn, note;
6555 addr = plus_constant (base, offset);
6556 addr = gen_rtx_MEM (Pmode, addr);
6557 set_mem_alias_set (addr, s390_sr_alias_set);
6559 /* Special-case single register. */
6563 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6565 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6567 RTX_FRAME_RELATED_P (insn) = 1;
6572 insn = gen_store_multiple (addr,
6573 gen_rtx_REG (Pmode, first),
6574 GEN_INT (last - first + 1));
6577 /* We need to set the FRAME_RELATED flag on all SETs
6578 inside the store-multiple pattern.
6580 However, we must not emit DWARF records for registers 2..5
6581 if they are stored for use by variable arguments ...
6583 ??? Unfortunately, it is not enough to simply not the the
6584 FRAME_RELATED flags for those SETs, because the first SET
6585 of the PARALLEL is always treated as if it had the flag
6586 set, even if it does not. Therefore we emit a new pattern
6587 without those registers as REG_FRAME_RELATED_EXPR note. */
6591 rtx pat = PATTERN (insn);
6593 for (i = 0; i < XVECLEN (pat, 0); i++)
6594 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
6595 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
6597 RTX_FRAME_RELATED_P (insn) = 1;
6601 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
6602 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
6603 gen_rtx_REG (Pmode, 6),
6604 GEN_INT (last - 6 + 1));
6605 note = PATTERN (note);
6608 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6609 note, REG_NOTES (insn));
6611 for (i = 0; i < XVECLEN (note, 0); i++)
6612 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
6613 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
6615 RTX_FRAME_RELATED_P (insn) = 1;
6621 /* Generate insn to restore registers FIRST to LAST from
6622 the register save area located at offset OFFSET
6623 relative to register BASE. */
6626 restore_gprs (rtx base, int offset, int first, int last)
6630 addr = plus_constant (base, offset);
6631 addr = gen_rtx_MEM (Pmode, addr);
6632 set_mem_alias_set (addr, s390_sr_alias_set);
6634 /* Special-case single register. */
6638 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6640 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
6645 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6647 GEN_INT (last - first + 1));
6651 /* Return insn sequence to load the GOT register. */
6653 static GTY(()) rtx got_symbol;
6655 s390_load_got (void)
6661 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6662 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
6667 if (TARGET_CPU_ZARCH)
6669 emit_move_insn (pic_offset_table_rtx, got_symbol);
6675 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
6676 UNSPEC_LTREL_OFFSET);
6677 offset = gen_rtx_CONST (Pmode, offset);
6678 offset = force_const_mem (Pmode, offset);
6680 emit_move_insn (pic_offset_table_rtx, offset);
6682 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
6684 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
6686 emit_move_insn (pic_offset_table_rtx, offset);
6689 insns = get_insns ();
6694 /* Expand the prologue into a bunch of separate insns. */
6697 s390_emit_prologue (void)
6705 /* Complete frame layout. */
6707 s390_update_frame_layout ();
6709 /* Annotate all constant pool references to let the scheduler know
6710 they implicitly use the base register. */
6712 push_topmost_sequence ();
6714 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6716 annotate_constant_pool_refs (&PATTERN (insn));
6718 pop_topmost_sequence ();
6720 /* Choose best register to use for temp use within prologue.
6721 See below for why TPF must use the register 1. */
6723 if (!current_function_is_leaf && !TARGET_TPF_PROFILING)
6724 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6726 temp_reg = gen_rtx_REG (Pmode, 1);
6728 /* Save call saved gprs. */
6729 if (cfun_frame_layout.first_save_gpr != -1)
6731 insn = save_gprs (stack_pointer_rtx,
6732 cfun_frame_layout.gprs_offset,
6733 cfun_frame_layout.first_save_gpr,
6734 cfun_frame_layout.last_save_gpr);
6738 /* Dummy insn to mark literal pool slot. */
6740 if (cfun->machine->base_reg)
6741 emit_insn (gen_main_pool (cfun->machine->base_reg));
6743 offset = cfun_frame_layout.f0_offset;
6745 /* Save f0 and f2. */
6746 for (i = 0; i < 2; i++)
6748 if (cfun_fpr_bit_p (i))
6750 save_fpr (stack_pointer_rtx, offset, i + 16);
6753 else if (TARGET_BACKCHAIN)
6757 /* Save f4 and f6. */
6758 offset = cfun_frame_layout.f4_offset;
6759 for (i = 2; i < 4; i++)
6761 if (cfun_fpr_bit_p (i))
6763 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6766 /* If f4 and f6 are call clobbered they are saved due to stdargs and
6767 therefore are not frame related. */
6768 if (!call_really_used_regs[i + 16])
6769 RTX_FRAME_RELATED_P (insn) = 1;
6771 else if (TARGET_BACKCHAIN)
6775 if (!TARGET_BACKCHAIN
6776 && cfun_save_high_fprs_p
6777 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
6779 offset = (cfun_frame_layout.f8_offset
6780 + (cfun_frame_layout.high_fprs - 1) * 8);
6782 for (i = 15; i > 7 && offset >= 0; i--)
6783 if (cfun_fpr_bit_p (i))
6785 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6787 RTX_FRAME_RELATED_P (insn) = 1;
6790 if (offset >= cfun_frame_layout.f8_offset)
6794 if (TARGET_BACKCHAIN)
6795 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
6797 /* Decrement stack pointer. */
6799 if (cfun_frame_layout.frame_size > 0)
6801 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
6803 if (s390_stack_size)
6805 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
6806 & ~(s390_stack_guard - 1));
6807 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
6808 GEN_INT (stack_check_mask));
6811 gen_cmpdi (t, const0_rtx);
6813 gen_cmpsi (t, const0_rtx);
6815 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
6816 gen_rtx_REG (CCmode,
6822 if (s390_warn_framesize > 0
6823 && cfun_frame_layout.frame_size >= s390_warn_framesize)
6824 warning ("frame size of `%s' is " HOST_WIDE_INT_PRINT_DEC " bytes",
6825 current_function_name (), cfun_frame_layout.frame_size);
6827 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
6828 warning ("`%s' uses dynamic stack allocation", current_function_name ());
6830 /* Save incoming stack pointer into temp reg. */
6831 if (cfun_frame_layout.save_backchain_p || next_fpr)
6832 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
6834 /* Subtract frame size from stack pointer. */
6836 if (DISP_IN_RANGE (INTVAL (frame_off)))
6838 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6839 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6841 insn = emit_insn (insn);
6845 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6846 frame_off = force_const_mem (Pmode, frame_off);
6848 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
6849 annotate_constant_pool_refs (&PATTERN (insn));
6852 RTX_FRAME_RELATED_P (insn) = 1;
6854 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6855 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6856 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6857 GEN_INT (-cfun_frame_layout.frame_size))),
6860 /* Set backchain. */
6862 if (cfun_frame_layout.save_backchain_p)
6864 if (cfun_frame_layout.backchain_offset)
6865 addr = gen_rtx_MEM (Pmode,
6866 plus_constant (stack_pointer_rtx,
6867 cfun_frame_layout.backchain_offset));
6869 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
6870 set_mem_alias_set (addr, s390_sr_alias_set);
6871 insn = emit_insn (gen_move_insn (addr, temp_reg));
6874 /* If we support asynchronous exceptions (e.g. for Java),
6875 we need to make sure the backchain pointer is set up
6876 before any possibly trapping memory access. */
6878 if (cfun_frame_layout.save_backchain_p && flag_non_call_exceptions)
6880 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
6881 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
6885 /* Save fprs 8 - 15 (64 bit ABI). */
6887 if (cfun_save_high_fprs_p && next_fpr)
6889 insn = emit_insn (gen_add2_insn (temp_reg,
6890 GEN_INT (cfun_frame_layout.f8_offset)));
6894 for (i = 24; i <= next_fpr; i++)
6895 if (cfun_fpr_bit_p (i - 16))
6897 rtx addr = plus_constant (stack_pointer_rtx,
6898 cfun_frame_layout.frame_size
6899 + cfun_frame_layout.f8_offset
6902 insn = save_fpr (temp_reg, offset, i);
6904 RTX_FRAME_RELATED_P (insn) = 1;
6906 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6907 gen_rtx_SET (VOIDmode,
6908 gen_rtx_MEM (DFmode, addr),
6909 gen_rtx_REG (DFmode, i)),
6914 /* Set frame pointer, if needed. */
6916 if (frame_pointer_needed)
6918 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6919 RTX_FRAME_RELATED_P (insn) = 1;
6922 /* Set up got pointer, if needed. */
6924 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6926 rtx insns = s390_load_got ();
6928 for (insn = insns; insn; insn = NEXT_INSN (insn))
6930 annotate_constant_pool_refs (&PATTERN (insn));
6932 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6939 if (TARGET_TPF_PROFILING)
6941 /* Generate a BAS instruction to serve as a function
6942 entry intercept to facilitate the use of tracing
6943 algorithms located at the branch target. */
6944 emit_insn (gen_prologue_tpf ());
6946 /* Emit a blockage here so that all code
6947 lies between the profiling mechanisms. */
6948 emit_insn (gen_blockage ());
6952 /* Expand the epilogue into a bunch of separate insns. */
6955 s390_emit_epilogue (bool sibcall)
6957 rtx frame_pointer, return_reg;
6958 int area_bottom, area_top, offset = 0;
6963 if (TARGET_TPF_PROFILING)
6966 /* Generate a BAS instruction to serve as a function
6967 entry intercept to facilitate the use of tracing
6968 algorithms located at the branch target. */
6970 /* Emit a blockage here so that all code
6971 lies between the profiling mechanisms. */
6972 emit_insn (gen_blockage ());
6974 emit_insn (gen_epilogue_tpf ());
6977 /* Check whether to use frame or stack pointer for restore. */
6979 frame_pointer = (frame_pointer_needed
6980 ? hard_frame_pointer_rtx : stack_pointer_rtx);
6982 s390_frame_area (&area_bottom, &area_top);
6984 /* Check whether we can access the register save area.
6985 If not, increment the frame pointer as required. */
6987 if (area_top <= area_bottom)
6989 /* Nothing to restore. */
6991 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
6992 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
6994 /* Area is in range. */
6995 offset = cfun_frame_layout.frame_size;
6999 rtx insn, frame_off;
7001 offset = area_bottom < 0 ? -area_bottom : 0;
7002 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
7004 if (DISP_IN_RANGE (INTVAL (frame_off)))
7006 insn = gen_rtx_SET (VOIDmode, frame_pointer,
7007 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
7008 insn = emit_insn (insn);
7012 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
7013 frame_off = force_const_mem (Pmode, frame_off);
7015 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7016 annotate_constant_pool_refs (&PATTERN (insn));
7020 /* Restore call saved fprs. */
7024 if (cfun_save_high_fprs_p)
7026 next_offset = cfun_frame_layout.f8_offset;
7027 for (i = 24; i < 32; i++)
7029 if (cfun_fpr_bit_p (i - 16))
7031 restore_fpr (frame_pointer,
7032 offset + next_offset, i);
7041 next_offset = cfun_frame_layout.f4_offset;
7042 for (i = 18; i < 20; i++)
7044 if (cfun_fpr_bit_p (i - 16))
7046 restore_fpr (frame_pointer,
7047 offset + next_offset, i);
7050 else if (TARGET_BACKCHAIN)
7056 /* Return register. */
7058 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7060 /* Restore call saved gprs. */
7062 if (cfun_frame_layout.first_restore_gpr != -1)
7067 /* Check for global register and save them
7068 to stack location from where they get restored. */
7070 for (i = cfun_frame_layout.first_restore_gpr;
7071 i <= cfun_frame_layout.last_restore_gpr;
7074 /* These registers are special and need to be
7075 restored in any case. */
7076 if (i == STACK_POINTER_REGNUM
7077 || i == RETURN_REGNUM
7079 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7084 addr = plus_constant (frame_pointer,
7085 offset + cfun_frame_layout.gprs_offset
7086 + (i - cfun_frame_layout.first_save_gpr)
7088 addr = gen_rtx_MEM (Pmode, addr);
7089 set_mem_alias_set (addr, s390_sr_alias_set);
7090 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7096 /* Fetch return address from stack before load multiple,
7097 this will do good for scheduling. */
7099 if (cfun_frame_layout.save_return_addr_p
7100 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7101 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7103 int return_regnum = find_unused_clobbered_reg();
7106 return_reg = gen_rtx_REG (Pmode, return_regnum);
7108 addr = plus_constant (frame_pointer,
7109 offset + cfun_frame_layout.gprs_offset
7111 - cfun_frame_layout.first_save_gpr)
7113 addr = gen_rtx_MEM (Pmode, addr);
7114 set_mem_alias_set (addr, s390_sr_alias_set);
7115 emit_move_insn (return_reg, addr);
7119 insn = restore_gprs (frame_pointer,
7120 offset + cfun_frame_layout.gprs_offset
7121 + (cfun_frame_layout.first_restore_gpr
7122 - cfun_frame_layout.first_save_gpr)
7124 cfun_frame_layout.first_restore_gpr,
7125 cfun_frame_layout.last_restore_gpr);
7132 /* Return to caller. */
7134 p = rtvec_alloc (2);
7136 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7137 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7138 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7143 /* Return the size in bytes of a function argument of
7144 type TYPE and/or mode MODE. At least one of TYPE or
7145 MODE must be specified. */
7148 s390_function_arg_size (enum machine_mode mode, tree type)
7151 return int_size_in_bytes (type);
7153 /* No type info available for some library calls ... */
7154 if (mode != BLKmode)
7155 return GET_MODE_SIZE (mode);
7157 /* If we have neither type nor mode, abort */
7161 /* Return true if a function argument of type TYPE and mode MODE
7162 is to be passed in a floating-point register, if available. */
7165 s390_function_arg_float (enum machine_mode mode, tree type)
7167 int size = s390_function_arg_size (mode, type);
7171 /* Soft-float changes the ABI: no floating-point registers are used. */
7172 if (TARGET_SOFT_FLOAT)
7175 /* No type info available for some library calls ... */
7177 return mode == SFmode || mode == DFmode;
7179 /* The ABI says that record types with a single member are treated
7180 just like that member would be. */
7181 while (TREE_CODE (type) == RECORD_TYPE)
7183 tree field, single = NULL_TREE;
7185 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7187 if (TREE_CODE (field) != FIELD_DECL)
7190 if (single == NULL_TREE)
7191 single = TREE_TYPE (field);
7196 if (single == NULL_TREE)
7202 return TREE_CODE (type) == REAL_TYPE;
7205 /* Return true if a function argument of type TYPE and mode MODE
7206 is to be passed in an integer register, or a pair of integer
7207 registers, if available. */
7210 s390_function_arg_integer (enum machine_mode mode, tree type)
7212 int size = s390_function_arg_size (mode, type);
7216 /* No type info available for some library calls ... */
7218 return GET_MODE_CLASS (mode) == MODE_INT
7219 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7221 /* We accept small integral (and similar) types. */
7222 if (INTEGRAL_TYPE_P (type)
7223 || POINTER_TYPE_P (type)
7224 || TREE_CODE (type) == OFFSET_TYPE
7225 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7228 /* We also accept structs of size 1, 2, 4, 8 that are not
7229 passed in floating-point registers. */
7230 if (AGGREGATE_TYPE_P (type)
7231 && exact_log2 (size) >= 0
7232 && !s390_function_arg_float (mode, type))
7238 /* Return 1 if a function argument of type TYPE and mode MODE
7239 is to be passed by reference. The ABI specifies that only
7240 structures of size 1, 2, 4, or 8 bytes are passed by value,
7241 all other structures (and complex numbers) are passed by
7245 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7246 enum machine_mode mode, tree type,
7247 bool named ATTRIBUTE_UNUSED)
7249 int size = s390_function_arg_size (mode, type);
7255 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7258 if (TREE_CODE (type) == COMPLEX_TYPE
7259 || TREE_CODE (type) == VECTOR_TYPE)
7266 /* Update the data in CUM to advance over an argument of mode MODE and
7267 data type TYPE. (TYPE is null for libcalls where that information
7268 may not be available.). The boolean NAMED specifies whether the
7269 argument is a named argument (as opposed to an unnamed argument
7270 matching an ellipsis). */
7273 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7274 tree type, int named ATTRIBUTE_UNUSED)
7276 if (s390_function_arg_float (mode, type))
7280 else if (s390_function_arg_integer (mode, type))
7282 int size = s390_function_arg_size (mode, type);
7283 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7289 /* Define where to put the arguments to a function.
7290 Value is zero to push the argument on the stack,
7291 or a hard register in which to store the argument.
7293 MODE is the argument's machine mode.
7294 TYPE is the data type of the argument (as a tree).
7295 This is null for libcalls where that information may
7297 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7298 the preceding args and about the function being called.
7299 NAMED is nonzero if this argument is a named parameter
7300 (otherwise it is an extra parameter matching an ellipsis).
7302 On S/390, we use general purpose registers 2 through 6 to
7303 pass integer, pointer, and certain structure arguments, and
7304 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7305 to pass floating point arguments. All remaining arguments
7306 are pushed to the stack. */
7309 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7310 int named ATTRIBUTE_UNUSED)
7312 if (s390_function_arg_float (mode, type))
7314 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
7317 return gen_rtx_REG (mode, cum->fprs + 16);
7319 else if (s390_function_arg_integer (mode, type))
7321 int size = s390_function_arg_size (mode, type);
7322 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7324 if (cum->gprs + n_gprs > 5)
7327 return gen_rtx_REG (mode, cum->gprs + 2);
7330 /* After the real arguments, expand_call calls us once again
7331 with a void_type_node type. Whatever we return here is
7332 passed as operand 2 to the call expanders.
7334 We don't need this feature ... */
7335 else if (type == void_type_node)
7341 /* Return true if return values of type TYPE should be returned
7342 in a memory buffer whose address is passed by the caller as
7343 hidden first argument. */
7346 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7348 /* We accept small integral (and similar) types. */
7349 if (INTEGRAL_TYPE_P (type)
7350 || POINTER_TYPE_P (type)
7351 || TREE_CODE (type) == OFFSET_TYPE
7352 || TREE_CODE (type) == REAL_TYPE)
7353 return int_size_in_bytes (type) > 8;
7355 /* Aggregates and similar constructs are always returned
7357 if (AGGREGATE_TYPE_P (type)
7358 || TREE_CODE (type) == COMPLEX_TYPE
7359 || TREE_CODE (type) == VECTOR_TYPE)
7362 /* ??? We get called on all sorts of random stuff from
7363 aggregate_value_p. We can't abort, but it's not clear
7364 what's safe to return. Pretend it's a struct I guess. */
7368 /* Define where to return a (scalar) value of type TYPE.
7369 If TYPE is null, define where to return a (scalar)
7370 value of mode MODE from a libcall. */
7373 s390_function_value (tree type, enum machine_mode mode)
7377 int unsignedp = TYPE_UNSIGNED (type);
7378 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7381 if (GET_MODE_CLASS (mode) != MODE_INT
7382 && GET_MODE_CLASS (mode) != MODE_FLOAT)
7384 if (GET_MODE_SIZE (mode) > 8)
7387 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7388 return gen_rtx_REG (mode, 16);
7390 return gen_rtx_REG (mode, 2);
7394 /* Create and return the va_list datatype.
7396 On S/390, va_list is an array type equivalent to
7398 typedef struct __va_list_tag
7402 void *__overflow_arg_area;
7403 void *__reg_save_area;
7406 where __gpr and __fpr hold the number of general purpose
7407 or floating point arguments used up to now, respectively,
7408 __overflow_arg_area points to the stack location of the
7409 next argument passed on the stack, and __reg_save_area
7410 always points to the start of the register area in the
7411 call frame of the current function. The function prologue
7412 saves all registers used for argument passing into this
7413 area if the function uses variable arguments. */
7416 s390_build_builtin_va_list (void)
7418 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7420 record = lang_hooks.types.make_type (RECORD_TYPE);
7423 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7425 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7426 long_integer_type_node);
7427 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7428 long_integer_type_node);
7429 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7431 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7434 DECL_FIELD_CONTEXT (f_gpr) = record;
7435 DECL_FIELD_CONTEXT (f_fpr) = record;
7436 DECL_FIELD_CONTEXT (f_ovf) = record;
7437 DECL_FIELD_CONTEXT (f_sav) = record;
7439 TREE_CHAIN (record) = type_decl;
7440 TYPE_NAME (record) = type_decl;
7441 TYPE_FIELDS (record) = f_gpr;
7442 TREE_CHAIN (f_gpr) = f_fpr;
7443 TREE_CHAIN (f_fpr) = f_ovf;
7444 TREE_CHAIN (f_ovf) = f_sav;
7446 layout_type (record);
7448 /* The correct type is an array type of one element. */
7449 return build_array_type (record, build_index_type (size_zero_node));
7452 /* Implement va_start by filling the va_list structure VALIST.
7453 STDARG_P is always true, and ignored.
7454 NEXTARG points to the first anonymous stack argument.
7456 The following global variables are used to initialize
7457 the va_list structure:
7459 current_function_args_info:
7460 holds number of gprs and fprs used for named arguments.
7461 current_function_arg_offset_rtx:
7462 holds the offset of the first anonymous stack argument
7463 (relative to the virtual arg pointer). */
7466 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7468 HOST_WIDE_INT n_gpr, n_fpr;
7470 tree f_gpr, f_fpr, f_ovf, f_sav;
7471 tree gpr, fpr, ovf, sav, t;
7473 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7474 f_fpr = TREE_CHAIN (f_gpr);
7475 f_ovf = TREE_CHAIN (f_fpr);
7476 f_sav = TREE_CHAIN (f_ovf);
7478 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
7479 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7480 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7481 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7482 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7484 /* Count number of gp and fp argument registers used. */
7486 n_gpr = current_function_args_info.gprs;
7487 n_fpr = current_function_args_info.fprs;
7489 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
7490 build_int_cst (NULL_TREE, n_gpr));
7491 TREE_SIDE_EFFECTS (t) = 1;
7492 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7494 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
7495 build_int_cst (NULL_TREE, n_fpr));
7496 TREE_SIDE_EFFECTS (t) = 1;
7497 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7499 /* Find the overflow area. */
7500 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7502 off = INTVAL (current_function_arg_offset_rtx);
7503 off = off < 0 ? 0 : off;
7504 if (TARGET_DEBUG_ARG)
7505 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7506 (int)n_gpr, (int)n_fpr, off);
7508 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7510 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
7511 TREE_SIDE_EFFECTS (t) = 1;
7512 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7514 /* Find the register save area. */
7515 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7516 if (TARGET_KERNEL_BACKCHAIN)
7517 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7518 build_int_cst (NULL_TREE,
7519 -(RETURN_REGNUM - 2) * UNITS_PER_WORD
7520 - (TARGET_64BIT ? 4 : 2) * 8));
7522 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7523 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7525 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
7526 TREE_SIDE_EFFECTS (t) = 1;
7527 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7530 /* Implement va_arg by updating the va_list structure
7531 VALIST as required to retrieve an argument of type
7532 TYPE, and returning that argument.
7534 Generates code equivalent to:
7536 if (integral value) {
7537 if (size <= 4 && args.gpr < 5 ||
7538 size > 4 && args.gpr < 4 )
7539 ret = args.reg_save_area[args.gpr+8]
7541 ret = *args.overflow_arg_area++;
7542 } else if (float value) {
7544 ret = args.reg_save_area[args.fpr+64]
7546 ret = *args.overflow_arg_area++;
7547 } else if (aggregate value) {
7549 ret = *args.reg_save_area[args.gpr]
7551 ret = **args.overflow_arg_area++;
7555 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
7556 tree *post_p ATTRIBUTE_UNUSED)
7558 tree f_gpr, f_fpr, f_ovf, f_sav;
7559 tree gpr, fpr, ovf, sav, reg, t, u;
7560 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
7561 tree lab_false, lab_over, addr;
7563 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7564 f_fpr = TREE_CHAIN (f_gpr);
7565 f_ovf = TREE_CHAIN (f_fpr);
7566 f_sav = TREE_CHAIN (f_ovf);
7568 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
7569 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7570 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7571 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7572 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7574 size = int_size_in_bytes (type);
7576 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7578 if (TARGET_DEBUG_ARG)
7580 fprintf (stderr, "va_arg: aggregate type");
7584 /* Aggregates are passed by reference. */
7589 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7590 will be added by s390_frame_info because for va_args always an even
7591 number of gprs has to be saved r15-r2 = 14 regs. */
7592 sav_ofs = (TARGET_KERNEL_BACKCHAIN
7593 ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
7594 sav_scale = UNITS_PER_WORD;
7595 size = UNITS_PER_WORD;
7598 else if (s390_function_arg_float (TYPE_MODE (type), type))
7600 if (TARGET_DEBUG_ARG)
7602 fprintf (stderr, "va_arg: float type");
7606 /* FP args go in FP registers, if present. */
7610 sav_ofs = TARGET_KERNEL_BACKCHAIN ? 0 : 16 * UNITS_PER_WORD;
7612 /* TARGET_64BIT has up to 4 parameter in fprs */
7613 max_reg = TARGET_64BIT ? 3 : 1;
7617 if (TARGET_DEBUG_ARG)
7619 fprintf (stderr, "va_arg: other type");
7623 /* Otherwise into GP registers. */
7626 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7628 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7629 will be added by s390_frame_info because for va_args always an even
7630 number of gprs has to be saved r15-r2 = 14 regs. */
7631 sav_ofs = TARGET_KERNEL_BACKCHAIN ?
7632 (TARGET_64BIT ? 4 : 2) * 8 : 2*UNITS_PER_WORD;
7634 if (size < UNITS_PER_WORD)
7635 sav_ofs += UNITS_PER_WORD - size;
7637 sav_scale = UNITS_PER_WORD;
7644 /* Pull the value out of the saved registers ... */
7646 lab_false = create_artificial_label ();
7647 lab_over = create_artificial_label ();
7648 addr = create_tmp_var (ptr_type_node, "addr");
7650 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
7651 t = build2 (GT_EXPR, boolean_type_node, reg, t);
7652 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7653 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7654 gimplify_and_add (t, pre_p);
7656 t = build2 (PLUS_EXPR, ptr_type_node, sav,
7657 fold_convert (ptr_type_node, size_int (sav_ofs)));
7658 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
7659 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
7660 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
7662 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
7663 gimplify_and_add (t, pre_p);
7665 t = build1 (GOTO_EXPR, void_type_node, lab_over);
7666 gimplify_and_add (t, pre_p);
7668 t = build1 (LABEL_EXPR, void_type_node, lab_false);
7669 append_to_statement_list (t, pre_p);
7672 /* ... Otherwise out of the overflow area. */
7675 if (size < UNITS_PER_WORD)
7676 t = build2 (PLUS_EXPR, ptr_type_node, t,
7677 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
7679 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7681 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
7682 gimplify_and_add (u, pre_p);
7684 t = build2 (PLUS_EXPR, ptr_type_node, t,
7685 fold_convert (ptr_type_node, size_int (size)));
7686 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
7687 gimplify_and_add (t, pre_p);
7689 t = build1 (LABEL_EXPR, void_type_node, lab_over);
7690 append_to_statement_list (t, pre_p);
7693 /* Increment register save count. */
7695 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
7696 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
7697 gimplify_and_add (u, pre_p);
7701 t = build_pointer_type (build_pointer_type (type));
7702 addr = fold_convert (t, addr);
7703 addr = build_fold_indirect_ref (addr);
7707 t = build_pointer_type (type);
7708 addr = fold_convert (t, addr);
7711 return build_fold_indirect_ref (addr);
7719 S390_BUILTIN_THREAD_POINTER,
7720 S390_BUILTIN_SET_THREAD_POINTER,
7725 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
7730 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
7736 s390_init_builtins (void)
7740 ftype = build_function_type (ptr_type_node, void_list_node);
7741 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7742 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7745 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
7746 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7747 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7751 /* Expand an expression EXP that calls a built-in function,
7752 with result going to TARGET if that's convenient
7753 (and in mode MODE if that's convenient).
7754 SUBTARGET may be used as the target for computing one of EXP's operands.
7755 IGNORE is nonzero if the value is to be ignored. */
7758 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7759 enum machine_mode mode ATTRIBUTE_UNUSED,
7760 int ignore ATTRIBUTE_UNUSED)
7764 unsigned int const *code_for_builtin =
7765 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7767 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7768 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7769 tree arglist = TREE_OPERAND (exp, 1);
7770 enum insn_code icode;
7771 rtx op[MAX_ARGS], pat;
7775 if (fcode >= S390_BUILTIN_max)
7776 internal_error ("bad builtin fcode");
7777 icode = code_for_builtin[fcode];
7779 internal_error ("bad builtin fcode");
7781 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7783 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7785 arglist = TREE_CHAIN (arglist), arity++)
7787 const struct insn_operand_data *insn_op;
7789 tree arg = TREE_VALUE (arglist);
7790 if (arg == error_mark_node)
7792 if (arity > MAX_ARGS)
7795 insn_op = &insn_data[icode].operand[arity + nonvoid];
7797 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7799 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7800 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
7805 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7807 || GET_MODE (target) != tmode
7808 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7809 target = gen_reg_rtx (tmode);
7815 pat = GEN_FCN (icode) (target);
7819 pat = GEN_FCN (icode) (target, op[0]);
7821 pat = GEN_FCN (icode) (op[0]);
7824 pat = GEN_FCN (icode) (target, op[0], op[1]);
7840 /* Output assembly code for the trampoline template to
7843 On S/390, we use gpr 1 internally in the trampoline code;
7844 gpr 0 is used to hold the static chain. */
7847 s390_trampoline_template (FILE *file)
7850 op[0] = gen_rtx_REG (Pmode, 0);
7851 op[1] = gen_rtx_REG (Pmode, 1);
7855 output_asm_insn ("basr\t%1,0", op);
7856 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7857 output_asm_insn ("br\t%1", op);
7858 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
7862 output_asm_insn ("basr\t%1,0", op);
7863 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7864 output_asm_insn ("br\t%1", op);
7865 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
7869 /* Emit RTL insns to initialize the variable parts of a trampoline.
7870 FNADDR is an RTX for the address of the function's pure code.
7871 CXT is an RTX for the static chain value for the function. */
7874 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
7876 emit_move_insn (gen_rtx_MEM (Pmode,
7877 memory_address (Pmode,
7878 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
7879 emit_move_insn (gen_rtx_MEM (Pmode,
7880 memory_address (Pmode,
7881 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
7884 /* Return rtx for 64-bit constant formed from the 32-bit subwords
7885 LOW and HIGH, independent of the host word size. */
7888 s390_gen_rtx_const_DI (int high, int low)
7890 #if HOST_BITS_PER_WIDE_INT >= 64
7892 val = (HOST_WIDE_INT)high;
7894 val |= (HOST_WIDE_INT)low;
7896 return GEN_INT (val);
7898 #if HOST_BITS_PER_WIDE_INT >= 32
7899 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
7906 /* Output assembler code to FILE to increment profiler label # LABELNO
7907 for profiling a function entry. */
7910 s390_function_profiler (FILE *file, int labelno)
7915 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
7917 fprintf (file, "# function profiler \n");
7919 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
7920 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7921 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
7923 op[2] = gen_rtx_REG (Pmode, 1);
7924 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
7925 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
7927 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
7930 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
7931 op[4] = gen_rtx_CONST (Pmode, op[4]);
7936 output_asm_insn ("stg\t%0,%1", op);
7937 output_asm_insn ("larl\t%2,%3", op);
7938 output_asm_insn ("brasl\t%0,%4", op);
7939 output_asm_insn ("lg\t%0,%1", op);
7943 op[6] = gen_label_rtx ();
7945 output_asm_insn ("st\t%0,%1", op);
7946 output_asm_insn ("bras\t%2,%l6", op);
7947 output_asm_insn (".long\t%4", op);
7948 output_asm_insn (".long\t%3", op);
7949 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7950 output_asm_insn ("l\t%0,0(%2)", op);
7951 output_asm_insn ("l\t%2,4(%2)", op);
7952 output_asm_insn ("basr\t%0,%0", op);
7953 output_asm_insn ("l\t%0,%1", op);
7957 op[5] = gen_label_rtx ();
7958 op[6] = gen_label_rtx ();
7960 output_asm_insn ("st\t%0,%1", op);
7961 output_asm_insn ("bras\t%2,%l6", op);
7962 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
7963 output_asm_insn (".long\t%4-%l5", op);
7964 output_asm_insn (".long\t%3-%l5", op);
7965 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7966 output_asm_insn ("lr\t%0,%2", op);
7967 output_asm_insn ("a\t%0,0(%2)", op);
7968 output_asm_insn ("a\t%2,4(%2)", op);
7969 output_asm_insn ("basr\t%0,%0", op);
7970 output_asm_insn ("l\t%0,%1", op);
7974 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7975 into its SYMBOL_REF_FLAGS. */
7978 s390_encode_section_info (tree decl, rtx rtl, int first)
7980 default_encode_section_info (decl, rtl, first);
7982 /* If a variable has a forced alignment to < 2 bytes, mark it with
7983 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
7984 if (TREE_CODE (decl) == VAR_DECL
7985 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
7986 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
7989 /* Output thunk to FILE that implements a C++ virtual function call (with
7990 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7991 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7992 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7993 relative to the resulting this pointer. */
7996 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
7997 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8003 /* Operand 0 is the target function. */
8004 op[0] = XEXP (DECL_RTL (function), 0);
8005 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
8008 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
8009 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
8010 op[0] = gen_rtx_CONST (Pmode, op[0]);
8013 /* Operand 1 is the 'this' pointer. */
8014 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8015 op[1] = gen_rtx_REG (Pmode, 3);
8017 op[1] = gen_rtx_REG (Pmode, 2);
8019 /* Operand 2 is the delta. */
8020 op[2] = GEN_INT (delta);
8022 /* Operand 3 is the vcall_offset. */
8023 op[3] = GEN_INT (vcall_offset);
8025 /* Operand 4 is the temporary register. */
8026 op[4] = gen_rtx_REG (Pmode, 1);
8028 /* Operands 5 to 8 can be used as labels. */
8034 /* Operand 9 can be used for temporary register. */
8037 /* Generate code. */
8040 /* Setup literal pool pointer if required. */
8041 if ((!DISP_IN_RANGE (delta)
8042 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8043 || (!DISP_IN_RANGE (vcall_offset)
8044 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8046 op[5] = gen_label_rtx ();
8047 output_asm_insn ("larl\t%4,%5", op);
8050 /* Add DELTA to this pointer. */
8053 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8054 output_asm_insn ("la\t%1,%2(%1)", op);
8055 else if (DISP_IN_RANGE (delta))
8056 output_asm_insn ("lay\t%1,%2(%1)", op);
8057 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8058 output_asm_insn ("aghi\t%1,%2", op);
8061 op[6] = gen_label_rtx ();
8062 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8066 /* Perform vcall adjustment. */
8069 if (DISP_IN_RANGE (vcall_offset))
8071 output_asm_insn ("lg\t%4,0(%1)", op);
8072 output_asm_insn ("ag\t%1,%3(%4)", op);
8074 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8076 output_asm_insn ("lghi\t%4,%3", op);
8077 output_asm_insn ("ag\t%4,0(%1)", op);
8078 output_asm_insn ("ag\t%1,0(%4)", op);
8082 op[7] = gen_label_rtx ();
8083 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8084 output_asm_insn ("ag\t%4,0(%1)", op);
8085 output_asm_insn ("ag\t%1,0(%4)", op);
8089 /* Jump to target. */
8090 output_asm_insn ("jg\t%0", op);
8092 /* Output literal pool if required. */
8095 output_asm_insn (".align\t4", op);
8096 targetm.asm_out.internal_label (file, "L",
8097 CODE_LABEL_NUMBER (op[5]));
8101 targetm.asm_out.internal_label (file, "L",
8102 CODE_LABEL_NUMBER (op[6]));
8103 output_asm_insn (".long\t%2", op);
8107 targetm.asm_out.internal_label (file, "L",
8108 CODE_LABEL_NUMBER (op[7]));
8109 output_asm_insn (".long\t%3", op);
8114 /* Setup base pointer if required. */
8116 || (!DISP_IN_RANGE (delta)
8117 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8118 || (!DISP_IN_RANGE (delta)
8119 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8121 op[5] = gen_label_rtx ();
8122 output_asm_insn ("basr\t%4,0", op);
8123 targetm.asm_out.internal_label (file, "L",
8124 CODE_LABEL_NUMBER (op[5]));
8127 /* Add DELTA to this pointer. */
8130 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8131 output_asm_insn ("la\t%1,%2(%1)", op);
8132 else if (DISP_IN_RANGE (delta))
8133 output_asm_insn ("lay\t%1,%2(%1)", op);
8134 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8135 output_asm_insn ("ahi\t%1,%2", op);
8138 op[6] = gen_label_rtx ();
8139 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8143 /* Perform vcall adjustment. */
8146 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
8148 output_asm_insn ("lg\t%4,0(%1)", op);
8149 output_asm_insn ("a\t%1,%3(%4)", op);
8151 else if (DISP_IN_RANGE (vcall_offset))
8153 output_asm_insn ("lg\t%4,0(%1)", op);
8154 output_asm_insn ("ay\t%1,%3(%4)", op);
8156 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8158 output_asm_insn ("lhi\t%4,%3", op);
8159 output_asm_insn ("a\t%4,0(%1)", op);
8160 output_asm_insn ("a\t%1,0(%4)", op);
8164 op[7] = gen_label_rtx ();
8165 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8166 output_asm_insn ("a\t%4,0(%1)", op);
8167 output_asm_insn ("a\t%1,0(%4)", op);
8170 /* We had to clobber the base pointer register.
8171 Re-setup the base pointer (with a different base). */
8172 op[5] = gen_label_rtx ();
8173 output_asm_insn ("basr\t%4,0", op);
8174 targetm.asm_out.internal_label (file, "L",
8175 CODE_LABEL_NUMBER (op[5]));
8178 /* Jump to target. */
8179 op[8] = gen_label_rtx ();
8182 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8184 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8185 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8186 else if (flag_pic == 1)
8188 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8189 output_asm_insn ("l\t%4,%0(%4)", op);
8191 else if (flag_pic == 2)
8193 op[9] = gen_rtx_REG (Pmode, 0);
8194 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8195 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8196 output_asm_insn ("ar\t%4,%9", op);
8197 output_asm_insn ("l\t%4,0(%4)", op);
8200 output_asm_insn ("br\t%4", op);
8202 /* Output literal pool. */
8203 output_asm_insn (".align\t4", op);
8205 if (nonlocal && flag_pic == 2)
8206 output_asm_insn (".long\t%0", op);
8209 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8210 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8213 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8215 output_asm_insn (".long\t%0", op);
8217 output_asm_insn (".long\t%0-%5", op);
8221 targetm.asm_out.internal_label (file, "L",
8222 CODE_LABEL_NUMBER (op[6]));
8223 output_asm_insn (".long\t%2", op);
8227 targetm.asm_out.internal_label (file, "L",
8228 CODE_LABEL_NUMBER (op[7]));
8229 output_asm_insn (".long\t%3", op);
8235 s390_valid_pointer_mode (enum machine_mode mode)
8237 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8240 /* How to allocate a 'struct machine_function'. */
8242 static struct machine_function *
8243 s390_init_machine_status (void)
8245 return ggc_alloc_cleared (sizeof (struct machine_function));
8248 /* Checks whether the given ARGUMENT_LIST would use a caller
8249 saved register. This is used to decide whether sibling call
8250 optimization could be performed on the respective function
8254 s390_call_saved_register_used (tree argument_list)
8256 CUMULATIVE_ARGS cum;
8258 enum machine_mode mode;
8263 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8265 while (argument_list)
8267 parameter = TREE_VALUE (argument_list);
8268 argument_list = TREE_CHAIN (argument_list);
8273 /* For an undeclared variable passed as parameter we will get
8274 an ERROR_MARK node here. */
8275 if (TREE_CODE (parameter) == ERROR_MARK)
8278 if (! (type = TREE_TYPE (parameter)))
8281 if (! (mode = TYPE_MODE (TREE_TYPE (parameter))))
8284 if (pass_by_reference (&cum, mode, type, true))
8287 type = build_pointer_type (type);
8290 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8292 s390_function_arg_advance (&cum, mode, type, 0);
8294 if (parm_rtx && REG_P (parm_rtx))
8297 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8299 if (! call_used_regs[reg + REGNO (parm_rtx)])
8306 /* Return true if the given call expression can be
8307 turned into a sibling call.
8308 DECL holds the declaration of the function to be called whereas
8309 EXP is the call expression itself. */
8312 s390_function_ok_for_sibcall (tree decl, tree exp)
8314 /* The TPF epilogue uses register 1. */
8315 if (TARGET_TPF_PROFILING)
8318 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8319 which would have to be restored before the sibcall. */
8320 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
8323 /* Register 6 on s390 is available as an argument register but unfortunately
8324 "caller saved". This makes functions needing this register for arguments
8325 not suitable for sibcalls. */
8326 if (TREE_OPERAND (exp, 1)
8327 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
8333 /* This function is used by the call expanders of the machine description.
8334 It emits the call insn itself together with the necessary operations
8335 to adjust the target address and returns the emitted insn.
8336 ADDR_LOCATION is the target address rtx
8337 TLS_CALL the location of the thread-local symbol
8338 RESULT_REG the register where the result of the call should be stored
8339 RETADDR_REG the register where the return address should be stored
8340 If this parameter is NULL_RTX the call is considered
8341 to be a sibling call. */
8344 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8347 bool plt_call = false;
8353 /* Direct function calls need special treatment. */
8354 if (GET_CODE (addr_location) == SYMBOL_REF)
8356 /* When calling a global routine in PIC mode, we must
8357 replace the symbol itself with the PLT stub. */
8358 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8360 addr_location = gen_rtx_UNSPEC (Pmode,
8361 gen_rtvec (1, addr_location),
8363 addr_location = gen_rtx_CONST (Pmode, addr_location);
8367 /* Unless we can use the bras(l) insn, force the
8368 routine address into a register. */
8369 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8372 addr_location = legitimize_pic_address (addr_location, 0);
8374 addr_location = force_reg (Pmode, addr_location);
8378 /* If it is already an indirect call or the code above moved the
8379 SYMBOL_REF to somewhere else make sure the address can be found in
8381 if (retaddr_reg == NULL_RTX
8382 && GET_CODE (addr_location) != SYMBOL_REF
8385 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8386 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
8389 addr_location = gen_rtx_MEM (QImode, addr_location);
8390 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8392 if (result_reg != NULL_RTX)
8393 call = gen_rtx_SET (VOIDmode, result_reg, call);
8395 if (retaddr_reg != NULL_RTX)
8397 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8399 if (tls_call != NULL_RTX)
8400 vec = gen_rtvec (3, call, clobber,
8401 gen_rtx_USE (VOIDmode, tls_call));
8403 vec = gen_rtvec (2, call, clobber);
8405 call = gen_rtx_PARALLEL (VOIDmode, vec);
8408 insn = emit_call_insn (call);
8410 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8411 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8413 /* s390_function_ok_for_sibcall should
8414 have denied sibcalls in this case. */
8415 if (retaddr_reg == NULL_RTX)
8418 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8423 /* Implement CONDITIONAL_REGISTER_USAGE. */
8426 s390_conditional_register_usage (void)
8432 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8433 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8435 if (TARGET_CPU_ZARCH)
8437 fixed_regs[RETURN_REGNUM] = 0;
8438 call_used_regs[RETURN_REGNUM] = 0;
8442 for (i = 24; i < 32; i++)
8443 call_used_regs[i] = call_really_used_regs[i] = 0;
8447 for (i = 18; i < 20; i++)
8448 call_used_regs[i] = call_really_used_regs[i] = 0;
8451 if (TARGET_SOFT_FLOAT)
8453 for (i = 16; i < 32; i++)
8454 call_used_regs[i] = fixed_regs[i] = 1;
8458 /* Corresponding function to eh_return expander. */
8460 static GTY(()) rtx s390_tpf_eh_return_symbol;
8462 s390_emit_tpf_eh_return (rtx target)
8466 if (!s390_tpf_eh_return_symbol)
8467 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8469 reg = gen_rtx_REG (Pmode, 2);
8471 emit_move_insn (reg, target);
8472 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8473 gen_rtx_REG (Pmode, RETURN_REGNUM));
8474 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8476 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
8479 #include "gt-s390.h"