1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
55 /* Machine-specific symbol_ref flags. */
56 #define SYMBOL_FLAG_ALIGN1 (SYMBOL_FLAG_MACH_DEP << 0)
59 static bool s390_assemble_integer (rtx, unsigned int, int);
60 static void s390_encode_section_info (tree, rtx, int);
61 static bool s390_cannot_force_const_mem (rtx);
62 static rtx s390_delegitimize_address (rtx);
63 static bool s390_return_in_memory (tree, tree);
64 static void s390_init_builtins (void);
65 static rtx s390_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
66 static void s390_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
68 static enum attr_type s390_safe_attr_type (rtx);
70 static int s390_adjust_priority (rtx, int);
71 static int s390_issue_rate (void);
72 static int s390_first_cycle_multipass_dfa_lookahead (void);
73 static bool s390_cannot_copy_insn_p (rtx);
74 static bool s390_rtx_costs (rtx, int, int, int *);
75 static int s390_address_cost (rtx);
76 static void s390_reorg (void);
77 static bool s390_valid_pointer_mode (enum machine_mode);
78 static tree s390_build_builtin_va_list (void);
79 static tree s390_gimplify_va_arg (tree, tree, tree *, tree *);
80 static bool s390_function_ok_for_sibcall (tree, tree);
81 static bool s390_call_saved_register_used (tree);
82 static bool s390_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode mode,
85 #undef TARGET_ASM_ALIGNED_HI_OP
86 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
87 #undef TARGET_ASM_ALIGNED_DI_OP
88 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
89 #undef TARGET_ASM_INTEGER
90 #define TARGET_ASM_INTEGER s390_assemble_integer
92 #undef TARGET_ASM_OPEN_PAREN
93 #define TARGET_ASM_OPEN_PAREN ""
95 #undef TARGET_ASM_CLOSE_PAREN
96 #define TARGET_ASM_CLOSE_PAREN ""
98 #undef TARGET_ENCODE_SECTION_INFO
99 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
102 #undef TARGET_HAVE_TLS
103 #define TARGET_HAVE_TLS true
105 #undef TARGET_CANNOT_FORCE_CONST_MEM
106 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
108 #undef TARGET_DELEGITIMIZE_ADDRESS
109 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
111 #undef TARGET_RETURN_IN_MEMORY
112 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
114 #undef TARGET_INIT_BUILTINS
115 #define TARGET_INIT_BUILTINS s390_init_builtins
116 #undef TARGET_EXPAND_BUILTIN
117 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
119 #undef TARGET_ASM_OUTPUT_MI_THUNK
120 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
121 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
122 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
124 #undef TARGET_SCHED_ADJUST_PRIORITY
125 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
126 #undef TARGET_SCHED_ISSUE_RATE
127 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
128 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
129 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
131 #undef TARGET_CANNOT_COPY_INSN_P
132 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
133 #undef TARGET_RTX_COSTS
134 #define TARGET_RTX_COSTS s390_rtx_costs
135 #undef TARGET_ADDRESS_COST
136 #define TARGET_ADDRESS_COST s390_address_cost
138 #undef TARGET_MACHINE_DEPENDENT_REORG
139 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
141 #undef TARGET_VALID_POINTER_MODE
142 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
144 #undef TARGET_BUILD_BUILTIN_VA_LIST
145 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
146 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
147 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
149 #undef TARGET_PROMOTE_FUNCTION_ARGS
150 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
151 #undef TARGET_PROMOTE_FUNCTION_RETURN
152 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
153 #undef TARGET_PASS_BY_REFERENCE
154 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
156 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
157 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
159 struct gcc_target targetm = TARGET_INITIALIZER;
161 extern int reload_completed;
163 /* The alias set for prologue/epilogue register save/restore. */
164 static int s390_sr_alias_set = 0;
166 /* Save information from a "cmpxx" operation until the branch or scc is
168 rtx s390_compare_op0, s390_compare_op1;
170 /* Structure used to hold the components of a S/390 memory
171 address. A legitimate address on S/390 is of the general
173 base + index + displacement
174 where any of the components is optional.
176 base and index are registers of the class ADDR_REGS,
177 displacement is an unsigned 12-bit immediate constant. */
187 /* Which cpu are we tuning for. */
188 enum processor_type s390_tune;
189 enum processor_flags s390_tune_flags;
190 /* Which instruction set architecture to use. */
191 enum processor_type s390_arch;
192 enum processor_flags s390_arch_flags;
194 /* Strings to hold which cpu and instruction set architecture to use. */
195 const char *s390_tune_string; /* for -mtune=<xxx> */
196 const char *s390_arch_string; /* for -march=<xxx> */
198 /* String to specify backchain mode:
199 "" no-backchain, "1" backchain, "2" kernel-backchain. */
200 const char *s390_backchain_string = TARGET_DEFAULT_BACKCHAIN;
202 const char *s390_warn_framesize_string;
203 const char *s390_warn_dynamicstack_string;
204 const char *s390_stack_size_string;
205 const char *s390_stack_guard_string;
207 HOST_WIDE_INT s390_warn_framesize = 0;
208 bool s390_warn_dynamicstack_p = 0;
209 HOST_WIDE_INT s390_stack_size = 0;
210 HOST_WIDE_INT s390_stack_guard = 0;
212 /* The following structure is embedded in the machine
213 specific part of struct function. */
215 struct s390_frame_layout GTY (())
217 /* Offset within stack frame. */
218 HOST_WIDE_INT gprs_offset;
219 HOST_WIDE_INT f0_offset;
220 HOST_WIDE_INT f4_offset;
221 HOST_WIDE_INT f8_offset;
222 HOST_WIDE_INT backchain_offset;
224 /* Number of first and last gpr to be saved, restored. */
226 int first_restore_gpr;
228 int last_restore_gpr;
230 /* Bits standing for floating point registers. Set, if the
231 respective register has to be saved. Starting with reg 16 (f0)
232 at the rightmost bit.
233 Bit 15 - 8 7 6 5 4 3 2 1 0
234 fpr 15 - 8 7 5 3 1 6 4 2 0
235 reg 31 - 24 23 22 21 20 19 18 17 16 */
236 unsigned int fpr_bitmap;
238 /* Number of floating point registers f8-f15 which must be saved. */
241 /* Set if return address needs to be saved. */
242 bool save_return_addr_p;
244 /* Set if backchain needs to be saved. */
245 bool save_backchain_p;
247 /* Size of stack frame. */
248 HOST_WIDE_INT frame_size;
251 /* Define the structure for the machine field in struct function. */
253 struct machine_function GTY(())
255 struct s390_frame_layout frame_layout;
257 /* Literal pool base register. */
260 /* True if we may need to perform branch splitting. */
261 bool split_branches_pending_p;
263 /* Some local-dynamic TLS symbol name. */
264 const char *some_ld_name;
267 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
269 #define cfun_frame_layout (cfun->machine->frame_layout)
270 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
271 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
272 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
273 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
275 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
278 static int s390_match_ccmode_set (rtx, enum machine_mode);
279 static int s390_branch_condition_mask (rtx);
280 static const char *s390_branch_condition_mnemonic (rtx, int);
281 static int check_mode (rtx, enum machine_mode *);
282 static int s390_short_displacement (rtx);
283 static int s390_decompose_address (rtx, struct s390_address *);
284 static rtx get_thread_pointer (void);
285 static rtx legitimize_tls_address (rtx, rtx);
286 static void print_shift_count_operand (FILE *, rtx);
287 static const char *get_some_local_dynamic_name (void);
288 static int get_some_local_dynamic_name_1 (rtx *, void *);
289 static int reg_used_in_mem_p (int, rtx);
290 static int addr_generation_dependency_p (rtx, rtx);
291 static int s390_split_branches (void);
292 static void annotate_constant_pool_refs (rtx *x);
293 static void find_constant_pool_ref (rtx, rtx *);
294 static void replace_constant_pool_ref (rtx *, rtx, rtx);
295 static rtx find_ltrel_base (rtx);
296 static void replace_ltrel_base (rtx *);
297 static void s390_optimize_prologue (void);
298 static int find_unused_clobbered_reg (void);
299 static void s390_frame_area (int *, int *);
300 static void s390_register_info (int []);
301 static void s390_frame_info (void);
302 static void s390_init_frame_layout (void);
303 static void s390_update_frame_layout (void);
304 static rtx save_fpr (rtx, int, int);
305 static rtx restore_fpr (rtx, int, int);
306 static rtx save_gprs (rtx, int, int, int);
307 static rtx restore_gprs (rtx, int, int, int);
308 static int s390_function_arg_size (enum machine_mode, tree);
309 static bool s390_function_arg_float (enum machine_mode, tree);
310 static struct machine_function * s390_init_machine_status (void);
312 /* Check whether integer displacement is in range. */
313 #define DISP_IN_RANGE(d) \
314 (TARGET_LONG_DISPLACEMENT? ((d) >= -524288 && (d) <= 524287) \
315 : ((d) >= 0 && (d) <= 4095))
317 /* Return true if SET either doesn't set the CC register, or else
318 the source and destination have matching CC modes and that
319 CC mode is at least as constrained as REQ_MODE. */
322 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
324 enum machine_mode set_mode;
326 if (GET_CODE (set) != SET)
329 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
332 set_mode = GET_MODE (SET_DEST (set));
346 if (req_mode != set_mode)
351 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
352 && req_mode != CCSRmode && req_mode != CCURmode)
358 if (req_mode != CCAmode)
366 return (GET_MODE (SET_SRC (set)) == set_mode);
369 /* Return true if every SET in INSN that sets the CC register
370 has source and destination with matching CC modes and that
371 CC mode is at least as constrained as REQ_MODE.
372 If REQ_MODE is VOIDmode, always return false. */
375 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
379 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
380 if (req_mode == VOIDmode)
383 if (GET_CODE (PATTERN (insn)) == SET)
384 return s390_match_ccmode_set (PATTERN (insn), req_mode);
386 if (GET_CODE (PATTERN (insn)) == PARALLEL)
387 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
389 rtx set = XVECEXP (PATTERN (insn), 0, i);
390 if (GET_CODE (set) == SET)
391 if (!s390_match_ccmode_set (set, req_mode))
398 /* If a test-under-mask instruction can be used to implement
399 (compare (and ... OP1) OP2), return the CC mode required
400 to do that. Otherwise, return VOIDmode.
401 MIXED is true if the instruction can distinguish between
402 CC1 and CC2 for mixed selected bits (TMxx), it is false
403 if the instruction cannot (TM). */
406 s390_tm_ccmode (rtx op1, rtx op2, int mixed)
410 /* ??? Fixme: should work on CONST_DOUBLE as well. */
411 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
414 /* Selected bits all zero: CC0. */
415 if (INTVAL (op2) == 0)
418 /* Selected bits all one: CC3. */
419 if (INTVAL (op2) == INTVAL (op1))
422 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
425 bit1 = exact_log2 (INTVAL (op2));
426 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
427 if (bit0 != -1 && bit1 != -1)
428 return bit0 > bit1 ? CCT1mode : CCT2mode;
434 /* Given a comparison code OP (EQ, NE, etc.) and the operands
435 OP0 and OP1 of a COMPARE, return the mode to be used for the
439 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
445 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
446 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
448 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
449 || GET_CODE (op1) == NEG)
450 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
453 if (GET_CODE (op0) == AND)
455 /* Check whether we can potentially do it via TM. */
456 enum machine_mode ccmode;
457 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
458 if (ccmode != VOIDmode)
460 /* Relax CCTmode to CCZmode to allow fall-back to AND
461 if that turns out to be beneficial. */
462 return ccmode == CCTmode ? CCZmode : ccmode;
466 if (register_operand (op0, HImode)
467 && GET_CODE (op1) == CONST_INT
468 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
470 if (register_operand (op0, QImode)
471 && GET_CODE (op1) == CONST_INT
472 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
481 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
482 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (XEXP (op0, 1)), 'K', "K"))
484 if (INTVAL (XEXP((op0), 1)) < 0)
497 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
498 && GET_CODE (op1) != CONST_INT)
504 if (GET_CODE (op0) == PLUS
505 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
508 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
509 && GET_CODE (op1) != CONST_INT)
515 if (GET_CODE (op0) == MINUS
516 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
519 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
520 && GET_CODE (op1) != CONST_INT)
529 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
530 that we can implement more efficiently. */
533 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
535 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
536 if ((*code == EQ || *code == NE)
537 && *op1 == const0_rtx
538 && GET_CODE (*op0) == ZERO_EXTRACT
539 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
540 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
541 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
543 rtx inner = XEXP (*op0, 0);
544 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
545 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
546 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
548 if (len > 0 && len < modesize
549 && pos >= 0 && pos + len <= modesize
550 && modesize <= HOST_BITS_PER_WIDE_INT)
552 unsigned HOST_WIDE_INT block;
553 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
554 block <<= modesize - pos - len;
556 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
557 gen_int_mode (block, GET_MODE (inner)));
561 /* Narrow AND of memory against immediate to enable TM. */
562 if ((*code == EQ || *code == NE)
563 && *op1 == const0_rtx
564 && GET_CODE (*op0) == AND
565 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
566 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
568 rtx inner = XEXP (*op0, 0);
569 rtx mask = XEXP (*op0, 1);
571 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
572 if (GET_CODE (inner) == SUBREG
573 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
574 && (GET_MODE_SIZE (GET_MODE (inner))
575 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
577 & GET_MODE_MASK (GET_MODE (inner))
578 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
580 inner = SUBREG_REG (inner);
582 /* Do not change volatile MEMs. */
583 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
585 int part = s390_single_part (XEXP (*op0, 1),
586 GET_MODE (inner), QImode, 0);
589 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
590 inner = adjust_address_nv (inner, QImode, part);
591 *op0 = gen_rtx_AND (QImode, inner, mask);
596 /* Narrow comparisons against 0xffff to HImode if possible. */
597 if ((*code == EQ || *code == NE)
598 && GET_CODE (*op1) == CONST_INT
599 && INTVAL (*op1) == 0xffff
600 && SCALAR_INT_MODE_P (GET_MODE (*op0))
601 && (nonzero_bits (*op0, GET_MODE (*op0))
602 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
604 *op0 = gen_lowpart (HImode, *op0);
609 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
610 if (GET_CODE (*op0) == UNSPEC
611 && XINT (*op0, 1) == UNSPEC_CMPINT
612 && XVECLEN (*op0, 0) == 1
613 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
614 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
615 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
616 && *op1 == const0_rtx)
618 enum rtx_code new_code = UNKNOWN;
621 case EQ: new_code = EQ; break;
622 case NE: new_code = NE; break;
623 case LT: new_code = LTU; break;
624 case GT: new_code = GTU; break;
625 case LE: new_code = LEU; break;
626 case GE: new_code = GEU; break;
630 if (new_code != UNKNOWN)
632 *op0 = XVECEXP (*op0, 0, 0);
638 /* Emit a compare instruction suitable to implement the comparison
639 OP0 CODE OP1. Return the correct condition RTL to be placed in
640 the IF_THEN_ELSE of the conditional branch testing the result. */
643 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
645 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
646 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
648 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
649 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
652 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
653 unconditional jump, else a conditional jump under condition COND. */
656 s390_emit_jump (rtx target, rtx cond)
660 target = gen_rtx_LABEL_REF (VOIDmode, target);
662 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
664 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
665 emit_jump_insn (insn);
668 /* Return nonzero if OP is a valid comparison operator
669 for a branch condition in mode MODE. */
672 s390_comparison (rtx op, enum machine_mode mode)
674 if (mode != VOIDmode && mode != GET_MODE (op))
677 if (!COMPARISON_P (op))
680 if (GET_CODE (XEXP (op, 0)) != REG
681 || REGNO (XEXP (op, 0)) != CC_REGNUM
682 || XEXP (op, 1) != const0_rtx)
685 return s390_branch_condition_mask (op) >= 0;
688 /* Return nonzero if OP is a valid comparison operator
689 for an ALC condition in mode MODE. */
692 s390_alc_comparison (rtx op, enum machine_mode mode)
694 if (mode != VOIDmode && mode != GET_MODE (op))
697 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
700 if (!COMPARISON_P (op))
703 if (GET_CODE (XEXP (op, 0)) != REG
704 || REGNO (XEXP (op, 0)) != CC_REGNUM
705 || XEXP (op, 1) != const0_rtx)
708 switch (GET_MODE (XEXP (op, 0)))
711 return GET_CODE (op) == LTU;
714 return GET_CODE (op) == LEU;
717 return GET_CODE (op) == GEU;
720 return GET_CODE (op) == GTU;
723 return GET_CODE (op) == LTU;
726 return GET_CODE (op) == UNGT;
729 return GET_CODE (op) == UNLT;
736 /* Return nonzero if OP is a valid comparison operator
737 for an SLB condition in mode MODE. */
740 s390_slb_comparison (rtx op, enum machine_mode mode)
742 if (mode != VOIDmode && mode != GET_MODE (op))
745 while (GET_CODE (op) == ZERO_EXTEND || GET_CODE (op) == SIGN_EXTEND)
748 if (!COMPARISON_P (op))
751 if (GET_CODE (XEXP (op, 0)) != REG
752 || REGNO (XEXP (op, 0)) != CC_REGNUM
753 || XEXP (op, 1) != const0_rtx)
756 switch (GET_MODE (XEXP (op, 0)))
759 return GET_CODE (op) == GEU;
762 return GET_CODE (op) == GTU;
765 return GET_CODE (op) == LTU;
768 return GET_CODE (op) == LEU;
771 return GET_CODE (op) == GEU;
774 return GET_CODE (op) == LE;
777 return GET_CODE (op) == GE;
784 /* Return branch condition mask to implement a branch
785 specified by CODE. Return -1 for invalid comparisons. */
788 s390_branch_condition_mask (rtx code)
790 const int CC0 = 1 << 3;
791 const int CC1 = 1 << 2;
792 const int CC2 = 1 << 1;
793 const int CC3 = 1 << 0;
795 if (GET_CODE (XEXP (code, 0)) != REG
796 || REGNO (XEXP (code, 0)) != CC_REGNUM
797 || XEXP (code, 1) != const0_rtx)
800 switch (GET_MODE (XEXP (code, 0)))
803 switch (GET_CODE (code))
806 case NE: return CC1 | CC2 | CC3;
812 switch (GET_CODE (code))
815 case NE: return CC0 | CC2 | CC3;
821 switch (GET_CODE (code))
824 case NE: return CC0 | CC1 | CC3;
830 switch (GET_CODE (code))
833 case NE: return CC0 | CC1 | CC2;
839 switch (GET_CODE (code))
841 case EQ: return CC0 | CC2;
842 case NE: return CC1 | CC3;
848 switch (GET_CODE (code))
850 case LTU: return CC2 | CC3; /* carry */
851 case GEU: return CC0 | CC1; /* no carry */
857 switch (GET_CODE (code))
859 case GTU: return CC0 | CC1; /* borrow */
860 case LEU: return CC2 | CC3; /* no borrow */
866 switch (GET_CODE (code))
868 case EQ: return CC0 | CC2;
869 case NE: return CC1 | CC3;
870 case LTU: return CC1;
871 case GTU: return CC3;
872 case LEU: return CC1 | CC2;
873 case GEU: return CC2 | CC3;
878 switch (GET_CODE (code))
881 case NE: return CC1 | CC2 | CC3;
882 case LTU: return CC1;
883 case GTU: return CC2;
884 case LEU: return CC0 | CC1;
885 case GEU: return CC0 | CC2;
891 switch (GET_CODE (code))
894 case NE: return CC2 | CC1 | CC3;
895 case LTU: return CC2;
896 case GTU: return CC1;
897 case LEU: return CC0 | CC2;
898 case GEU: return CC0 | CC1;
904 switch (GET_CODE (code))
907 case NE: return CC1 | CC2 | CC3;
908 case LT: return CC1 | CC3;
910 case LE: return CC0 | CC1 | CC3;
911 case GE: return CC0 | CC2;
917 switch (GET_CODE (code))
920 case NE: return CC1 | CC2 | CC3;
922 case GT: return CC2 | CC3;
923 case LE: return CC0 | CC1;
924 case GE: return CC0 | CC2 | CC3;
930 switch (GET_CODE (code))
933 case NE: return CC1 | CC2 | CC3;
936 case LE: return CC0 | CC1;
937 case GE: return CC0 | CC2;
938 case UNORDERED: return CC3;
939 case ORDERED: return CC0 | CC1 | CC2;
940 case UNEQ: return CC0 | CC3;
941 case UNLT: return CC1 | CC3;
942 case UNGT: return CC2 | CC3;
943 case UNLE: return CC0 | CC1 | CC3;
944 case UNGE: return CC0 | CC2 | CC3;
945 case LTGT: return CC1 | CC2;
951 switch (GET_CODE (code))
954 case NE: return CC2 | CC1 | CC3;
957 case LE: return CC0 | CC2;
958 case GE: return CC0 | CC1;
959 case UNORDERED: return CC3;
960 case ORDERED: return CC0 | CC2 | CC1;
961 case UNEQ: return CC0 | CC3;
962 case UNLT: return CC2 | CC3;
963 case UNGT: return CC1 | CC3;
964 case UNLE: return CC0 | CC2 | CC3;
965 case UNGE: return CC0 | CC1 | CC3;
966 case LTGT: return CC2 | CC1;
976 /* If INV is false, return assembler mnemonic string to implement
977 a branch specified by CODE. If INV is true, return mnemonic
978 for the corresponding inverted branch. */
981 s390_branch_condition_mnemonic (rtx code, int inv)
983 static const char *const mnemonic[16] =
985 NULL, "o", "h", "nle",
986 "l", "nhe", "lh", "ne",
987 "e", "nlh", "he", "nl",
988 "le", "nh", "no", NULL
991 int mask = s390_branch_condition_mask (code);
992 gcc_assert (mask >= 0);
997 if (mask < 1 || mask > 14)
1000 return mnemonic[mask];
1003 /* Return the part of op which has a value different from def.
1004 The size of the part is determined by mode.
1005 Use this function only if you already know that op really
1006 contains such a part. */
1008 unsigned HOST_WIDE_INT
1009 s390_extract_part (rtx op, enum machine_mode mode, int def)
1011 unsigned HOST_WIDE_INT value = 0;
1012 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1013 int part_bits = GET_MODE_BITSIZE (mode);
1014 unsigned HOST_WIDE_INT part_mask = (1 << part_bits) - 1;
1017 for (i = 0; i < max_parts; i++)
1020 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1022 value >>= part_bits;
1024 if ((value & part_mask) != (def & part_mask))
1025 return value & part_mask;
1031 /* If OP is an integer constant of mode MODE with exactly one
1032 part of mode PART_MODE unequal to DEF, return the number of that
1033 part. Otherwise, return -1. */
1036 s390_single_part (rtx op,
1037 enum machine_mode mode,
1038 enum machine_mode part_mode,
1041 unsigned HOST_WIDE_INT value = 0;
1042 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1043 unsigned HOST_WIDE_INT part_mask = (1 << GET_MODE_BITSIZE (part_mode)) - 1;
1046 if (GET_CODE (op) != CONST_INT)
1049 for (i = 0; i < n_parts; i++)
1052 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1054 value >>= GET_MODE_BITSIZE (part_mode);
1056 if ((value & part_mask) != (def & part_mask))
1064 return part == -1 ? -1 : n_parts - 1 - part;
1067 /* Check whether we can (and want to) split a double-word
1068 move in mode MODE from SRC to DST into two single-word
1069 moves, moving the subword FIRST_SUBWORD first. */
1072 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1074 /* Floating point registers cannot be split. */
1075 if (FP_REG_P (src) || FP_REG_P (dst))
1078 /* We don't need to split if operands are directly accessible. */
1079 if (s_operand (src, mode) || s_operand (dst, mode))
1082 /* Non-offsettable memory references cannot be split. */
1083 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1084 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1087 /* Moving the first subword must not clobber a register
1088 needed to move the second subword. */
1089 if (register_operand (dst, mode))
1091 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1092 if (reg_overlap_mentioned_p (subreg, src))
1099 /* Check whether the address of memory reference MEM2 equals exactly
1100 the address of memory reference MEM1 plus DELTA. Return true if
1101 we can prove this to be the case, false otherwise. */
1104 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1106 rtx addr1, addr2, addr_delta;
1108 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1111 addr1 = XEXP (mem1, 0);
1112 addr2 = XEXP (mem2, 0);
1114 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1115 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1121 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1124 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1127 enum machine_mode wmode = mode;
1128 rtx dst = operands[0];
1129 rtx src1 = operands[1];
1130 rtx src2 = operands[2];
1133 /* If we cannot handle the operation directly, use a temp register. */
1134 if (!s390_logical_operator_ok_p (operands))
1135 dst = gen_reg_rtx (mode);
1137 /* QImode and HImode patterns make sense only if we have a destination
1138 in memory. Otherwise perform the operation in SImode. */
1139 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1142 /* Widen operands if required. */
1145 if (GET_CODE (dst) == SUBREG
1146 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1148 else if (REG_P (dst))
1149 dst = gen_rtx_SUBREG (wmode, dst, 0);
1151 dst = gen_reg_rtx (wmode);
1153 if (GET_CODE (src1) == SUBREG
1154 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1156 else if (GET_MODE (src1) != VOIDmode)
1157 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1159 if (GET_CODE (src2) == SUBREG
1160 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1162 else if (GET_MODE (src2) != VOIDmode)
1163 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1166 /* Emit the instruction. */
1167 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1168 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1169 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1171 /* Fix up the destination if needed. */
1172 if (dst != operands[0])
1173 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1176 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1179 s390_logical_operator_ok_p (rtx *operands)
1181 /* If the destination operand is in memory, it needs to coincide
1182 with one of the source operands. After reload, it has to be
1183 the first source operand. */
1184 if (GET_CODE (operands[0]) == MEM)
1185 return rtx_equal_p (operands[0], operands[1])
1186 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1191 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1192 operand IMMOP to switch from SS to SI type instructions. */
1195 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1197 int def = code == AND ? -1 : 0;
1201 gcc_assert (GET_CODE (*memop) == MEM);
1202 gcc_assert (!MEM_VOLATILE_P (*memop));
1204 mask = s390_extract_part (*immop, QImode, def);
1205 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1206 gcc_assert (part >= 0);
1208 *memop = adjust_address (*memop, QImode, part);
1209 *immop = gen_int_mode (mask, QImode);
1213 /* Change optimizations to be performed, depending on the
1216 LEVEL is the optimization level specified; 2 if `-O2' is
1217 specified, 1 if `-O' is specified, and 0 if neither is specified.
1219 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1222 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1224 /* ??? There are apparently still problems with -fcaller-saves. */
1225 flag_caller_saves = 0;
1227 /* By default, always emit DWARF-2 unwind info. This allows debugging
1228 without maintaining a stack frame back-chain. */
1229 flag_asynchronous_unwind_tables = 1;
1233 override_options (void)
1238 const char *const name; /* processor name or nickname. */
1239 const enum processor_type processor;
1240 const enum processor_flags flags;
1242 const processor_alias_table[] =
1244 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1245 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1246 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1247 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1248 | PF_LONG_DISPLACEMENT},
1251 int const pta_size = ARRAY_SIZE (processor_alias_table);
1253 /* Acquire a unique set number for our register saves and restores. */
1254 s390_sr_alias_set = new_alias_set ();
1256 /* Set up function hooks. */
1257 init_machine_status = s390_init_machine_status;
1259 /* Architecture mode defaults according to ABI. */
1260 if (!(target_flags_explicit & MASK_ZARCH))
1263 target_flags |= MASK_ZARCH;
1265 target_flags &= ~MASK_ZARCH;
1268 /* Determine processor architectural level. */
1269 if (!s390_arch_string)
1270 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1272 for (i = 0; i < pta_size; i++)
1273 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
1275 s390_arch = processor_alias_table[i].processor;
1276 s390_arch_flags = processor_alias_table[i].flags;
1280 error ("Unknown cpu used in -march=%s.", s390_arch_string);
1282 /* Determine processor to tune for. */
1283 if (!s390_tune_string)
1285 s390_tune = s390_arch;
1286 s390_tune_flags = s390_arch_flags;
1287 s390_tune_string = s390_arch_string;
1291 for (i = 0; i < pta_size; i++)
1292 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
1294 s390_tune = processor_alias_table[i].processor;
1295 s390_tune_flags = processor_alias_table[i].flags;
1299 error ("Unknown cpu used in -mtune=%s.", s390_tune_string);
1302 /* Sanity checks. */
1303 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1304 error ("z/Architecture mode not supported on %s.", s390_arch_string);
1305 if (TARGET_64BIT && !TARGET_ZARCH)
1306 error ("64-bit ABI not supported in ESA/390 mode.");
1308 if (s390_warn_framesize_string)
1310 if (sscanf (s390_warn_framesize_string, HOST_WIDE_INT_PRINT_DEC,
1311 &s390_warn_framesize) != 1)
1312 error ("invalid value for -mwarn-framesize");
1315 if (s390_warn_dynamicstack_string)
1316 s390_warn_dynamicstack_p = 1;
1318 if (s390_stack_size_string)
1320 if (sscanf (s390_stack_size_string, HOST_WIDE_INT_PRINT_DEC,
1321 &s390_stack_size) != 1)
1322 error ("invalid value for -mstack-size");
1324 if (exact_log2 (s390_stack_size) == -1)
1325 error ("stack size must be an exact power of 2");
1327 if (s390_stack_guard_string)
1329 if (sscanf (s390_stack_guard_string, HOST_WIDE_INT_PRINT_DEC,
1330 &s390_stack_guard) != 1)
1331 error ("invalid value for -mstack-guard");
1333 if (s390_stack_guard >= s390_stack_size)
1334 error ("stack size must be greater than the stack guard value");
1336 if (exact_log2 (s390_stack_guard) == -1)
1337 error ("stack guard value must be an exact power of 2");
1340 error ("-mstack-size implies use of -mstack-guard");
1343 if (s390_stack_guard_string && !s390_stack_size_string)
1344 error ("-mstack-guard implies use of -mstack-size");
1347 /* Map for smallest class containing reg regno. */
1349 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1350 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1351 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1352 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1353 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1354 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1355 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1356 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1357 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1358 ADDR_REGS, NO_REGS, ADDR_REGS, ADDR_REGS
1361 /* Return attribute type of insn. */
1363 static enum attr_type
1364 s390_safe_attr_type (rtx insn)
1366 if (recog_memoized (insn) >= 0)
1367 return get_attr_type (insn);
1372 /* Return true if OP a (const_int 0) operand.
1373 OP is the current operation.
1374 MODE is the current operation mode. */
1377 const0_operand (register rtx op, enum machine_mode mode)
1379 return op == CONST0_RTX (mode);
1382 /* Return true if OP is constant.
1383 OP is the current operation.
1384 MODE is the current operation mode. */
1387 consttable_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1389 return CONSTANT_P (op);
1392 /* Return true if the mode of operand OP matches MODE.
1393 If MODE is set to VOIDmode, set it to the mode of OP. */
1396 check_mode (register rtx op, enum machine_mode *mode)
1398 if (*mode == VOIDmode)
1399 *mode = GET_MODE (op);
1402 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1408 /* Return true if OP a valid operand for the LARL instruction.
1409 OP is the current operation.
1410 MODE is the current operation mode. */
1413 larl_operand (register rtx op, enum machine_mode mode)
1415 if (! check_mode (op, &mode))
1418 /* Allow labels and local symbols. */
1419 if (GET_CODE (op) == LABEL_REF)
1421 if (GET_CODE (op) == SYMBOL_REF)
1422 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1423 && SYMBOL_REF_TLS_MODEL (op) == 0
1424 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1426 /* Everything else must have a CONST, so strip it. */
1427 if (GET_CODE (op) != CONST)
1431 /* Allow adding *even* in-range constants. */
1432 if (GET_CODE (op) == PLUS)
1434 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1435 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1437 #if HOST_BITS_PER_WIDE_INT > 32
1438 if (INTVAL (XEXP (op, 1)) >= (HOST_WIDE_INT)1 << 32
1439 || INTVAL (XEXP (op, 1)) < -((HOST_WIDE_INT)1 << 32))
1445 /* Labels and local symbols allowed here as well. */
1446 if (GET_CODE (op) == LABEL_REF)
1448 if (GET_CODE (op) == SYMBOL_REF)
1449 return ((SYMBOL_REF_FLAGS (op) & SYMBOL_FLAG_ALIGN1) == 0
1450 && SYMBOL_REF_TLS_MODEL (op) == 0
1451 && (!flag_pic || SYMBOL_REF_LOCAL_P (op)));
1453 /* Now we must have a @GOTENT offset or @PLT stub
1454 or an @INDNTPOFF TLS offset. */
1455 if (GET_CODE (op) == UNSPEC
1456 && XINT (op, 1) == UNSPEC_GOTENT)
1458 if (GET_CODE (op) == UNSPEC
1459 && XINT (op, 1) == UNSPEC_PLT)
1461 if (GET_CODE (op) == UNSPEC
1462 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1468 /* Return true if OP is a valid S-type operand.
1469 OP is the current operation.
1470 MODE is the current operation mode. */
1473 s_operand (rtx op, enum machine_mode mode)
1475 struct s390_address addr;
1477 /* Call general_operand first, so that we don't have to
1478 check for many special cases. */
1479 if (!general_operand (op, mode))
1482 /* Just like memory_operand, allow (subreg (mem ...))
1484 if (reload_completed
1485 && GET_CODE (op) == SUBREG
1486 && GET_CODE (SUBREG_REG (op)) == MEM)
1487 op = SUBREG_REG (op);
1489 if (GET_CODE (op) != MEM)
1491 if (!s390_decompose_address (XEXP (op, 0), &addr))
1499 /* Return true if OP is a memory operand pointing to the
1500 literal pool, or an immediate operand. */
1503 s390_pool_operand (rtx op)
1505 struct s390_address addr;
1507 /* Just like memory_operand, allow (subreg (mem ...))
1509 if (reload_completed
1510 && GET_CODE (op) == SUBREG
1511 && GET_CODE (SUBREG_REG (op)) == MEM)
1512 op = SUBREG_REG (op);
1514 switch (GET_CODE (op))
1521 if (!s390_decompose_address (XEXP (op, 0), &addr))
1523 if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
1525 if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
1534 /* Return true if OP a valid shift count operand.
1535 OP is the current operation.
1536 MODE is the current operation mode. */
1539 shift_count_operand (rtx op, enum machine_mode mode)
1541 HOST_WIDE_INT offset = 0;
1543 if (! check_mode (op, &mode))
1546 /* We can have an integer constant, an address register,
1547 or a sum of the two. Note that reload already checks
1548 that any register present is an address register, so
1549 we just check for any register here. */
1550 if (GET_CODE (op) == CONST_INT)
1552 offset = INTVAL (op);
1555 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1557 offset = INTVAL (XEXP (op, 1));
1560 while (op && GET_CODE (op) == SUBREG)
1561 op = SUBREG_REG (op);
1562 if (op && GET_CODE (op) != REG)
1565 /* Unfortunately we have to reject constants that are invalid
1566 for an address, or else reload will get confused. */
1567 if (!DISP_IN_RANGE (offset))
1573 /* Return true if DISP is a valid short displacement. */
1576 s390_short_displacement (rtx disp)
1578 /* No displacement is OK. */
1582 /* Integer displacement in range. */
1583 if (GET_CODE (disp) == CONST_INT)
1584 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1586 /* GOT offset is not OK, the GOT can be large. */
1587 if (GET_CODE (disp) == CONST
1588 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1589 && XINT (XEXP (disp, 0), 1) == UNSPEC_GOT)
1592 /* All other symbolic constants are literal pool references,
1593 which are OK as the literal pool must be small. */
1594 if (GET_CODE (disp) == CONST)
1600 /* Return true if OP is a valid operand for a C constraint. */
1603 s390_extra_constraint_str (rtx op, int c, const char * str)
1605 struct s390_address addr;
1610 /* Check for offsettable variants of memory constraints. */
1613 /* Only accept non-volatile MEMs. */
1614 if (!MEM_P (op) || MEM_VOLATILE_P (op))
1617 if ((reload_completed || reload_in_progress)
1618 ? !offsettable_memref_p (op)
1619 : !offsettable_nonstrict_memref_p (op))
1628 if (GET_CODE (op) != MEM)
1630 if (!s390_decompose_address (XEXP (op, 0), &addr))
1635 if (TARGET_LONG_DISPLACEMENT)
1637 if (!s390_short_displacement (addr.disp))
1643 if (GET_CODE (op) != MEM)
1646 if (TARGET_LONG_DISPLACEMENT)
1648 if (!s390_decompose_address (XEXP (op, 0), &addr))
1650 if (!s390_short_displacement (addr.disp))
1656 if (!TARGET_LONG_DISPLACEMENT)
1658 if (GET_CODE (op) != MEM)
1660 if (!s390_decompose_address (XEXP (op, 0), &addr))
1664 if (s390_short_displacement (addr.disp))
1669 if (!TARGET_LONG_DISPLACEMENT)
1671 if (GET_CODE (op) != MEM)
1673 /* Any invalid address here will be fixed up by reload,
1674 so accept it for the most generic constraint. */
1675 if (s390_decompose_address (XEXP (op, 0), &addr)
1676 && s390_short_displacement (addr.disp))
1681 if (TARGET_LONG_DISPLACEMENT)
1683 if (!s390_decompose_address (op, &addr))
1685 if (!s390_short_displacement (addr.disp))
1691 if (!TARGET_LONG_DISPLACEMENT)
1693 /* Any invalid address here will be fixed up by reload,
1694 so accept it for the most generic constraint. */
1695 if (s390_decompose_address (op, &addr)
1696 && s390_short_displacement (addr.disp))
1701 return shift_count_operand (op, VOIDmode);
1710 /* Return true if VALUE matches the constraint STR. */
1713 s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1717 enum machine_mode mode, part_mode;
1719 int part, part_goal;
1727 return (unsigned int)value < 256;
1730 return (unsigned int)value < 4096;
1733 return value >= -32768 && value < 32768;
1736 return (TARGET_LONG_DISPLACEMENT ?
1737 (value >= -524288 && value <= 524287)
1738 : (value >= 0 && value <= 4095));
1740 return value == 2147483647;
1746 part_goal = str[1] - '0';
1750 case 'H': part_mode = HImode; break;
1751 case 'Q': part_mode = QImode; break;
1757 case 'H': mode = HImode; break;
1758 case 'S': mode = SImode; break;
1759 case 'D': mode = DImode; break;
1765 case '0': def = 0; break;
1766 case 'F': def = -1; break;
1770 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
1773 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
1776 if (part_goal != -1 && part_goal != part)
1788 /* Compute a (partial) cost for rtx X. Return true if the complete
1789 cost has been computed, and false if subexpressions should be
1790 scanned. In either case, *TOTAL contains the cost result. */
1793 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
1798 if (GET_CODE (XEXP (x, 0)) == MINUS
1799 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1806 /* Force_const_mem does not work out of reload, because the
1807 saveable_obstack is set to reload_obstack, which does not
1808 live long enough. Because of this we cannot use force_const_mem
1809 in addsi3. This leads to problems with gen_add2_insn with a
1810 constant greater than a short. Because of that we give an
1811 addition of greater constants a cost of 3 (reload1.c 10096). */
1812 /* ??? saveable_obstack no longer exists. */
1813 if (outer_code == PLUS
1814 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1815 *total = COSTS_N_INSNS (3);
1836 *total = COSTS_N_INSNS (1);
1840 if (GET_MODE (XEXP (x, 0)) == DImode)
1841 *total = COSTS_N_INSNS (40);
1843 *total = COSTS_N_INSNS (7);
1850 *total = COSTS_N_INSNS (33);
1858 /* Return the cost of an address rtx ADDR. */
1861 s390_address_cost (rtx addr)
1863 struct s390_address ad;
1864 if (!s390_decompose_address (addr, &ad))
1867 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1870 /* Return true if OP is a valid operand for the BRAS instruction.
1871 OP is the current operation.
1872 MODE is the current operation mode. */
1875 bras_sym_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1877 register enum rtx_code code = GET_CODE (op);
1879 /* Allow SYMBOL_REFs. */
1880 if (code == SYMBOL_REF)
1883 /* Allow @PLT stubs. */
1885 && GET_CODE (XEXP (op, 0)) == UNSPEC
1886 && XINT (XEXP (op, 0), 1) == UNSPEC_PLT)
1891 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1892 otherwise return 0. */
1895 tls_symbolic_operand (register rtx op)
1897 if (GET_CODE (op) != SYMBOL_REF)
1899 return SYMBOL_REF_TLS_MODEL (op);
1902 /* Return true if OP is a load multiple operation. It is known to be a
1903 PARALLEL and the first section will be tested.
1904 OP is the current operation.
1905 MODE is the current operation mode. */
1908 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1910 enum machine_mode elt_mode;
1911 int count = XVECLEN (op, 0);
1912 unsigned int dest_regno;
1917 /* Perform a quick check so we don't blow up below. */
1919 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1920 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1921 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1924 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1925 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1926 elt_mode = GET_MODE (SET_DEST (XVECEXP (op, 0, 0)));
1928 /* Check, is base, or base + displacement. */
1930 if (GET_CODE (src_addr) == REG)
1932 else if (GET_CODE (src_addr) == PLUS
1933 && GET_CODE (XEXP (src_addr, 0)) == REG
1934 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1936 off = INTVAL (XEXP (src_addr, 1));
1937 src_addr = XEXP (src_addr, 0);
1942 for (i = 1; i < count; i++)
1944 rtx elt = XVECEXP (op, 0, i);
1946 if (GET_CODE (elt) != SET
1947 || GET_CODE (SET_DEST (elt)) != REG
1948 || GET_MODE (SET_DEST (elt)) != elt_mode
1949 || REGNO (SET_DEST (elt)) != dest_regno + i
1950 || GET_CODE (SET_SRC (elt)) != MEM
1951 || GET_MODE (SET_SRC (elt)) != elt_mode
1952 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1953 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1954 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1955 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1956 != off + i * GET_MODE_SIZE (elt_mode))
1963 /* Return true if OP is a store multiple operation. It is known to be a
1964 PARALLEL and the first section will be tested.
1965 OP is the current operation.
1966 MODE is the current operation mode. */
1969 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1971 enum machine_mode elt_mode;
1972 int count = XVECLEN (op, 0);
1973 unsigned int src_regno;
1977 /* Perform a quick check so we don't blow up below. */
1979 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1980 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1981 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1984 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1985 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1986 elt_mode = GET_MODE (SET_SRC (XVECEXP (op, 0, 0)));
1988 /* Check, is base, or base + displacement. */
1990 if (GET_CODE (dest_addr) == REG)
1992 else if (GET_CODE (dest_addr) == PLUS
1993 && GET_CODE (XEXP (dest_addr, 0)) == REG
1994 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1996 off = INTVAL (XEXP (dest_addr, 1));
1997 dest_addr = XEXP (dest_addr, 0);
2002 for (i = 1; i < count; i++)
2004 rtx elt = XVECEXP (op, 0, i);
2006 if (GET_CODE (elt) != SET
2007 || GET_CODE (SET_SRC (elt)) != REG
2008 || GET_MODE (SET_SRC (elt)) != elt_mode
2009 || REGNO (SET_SRC (elt)) != src_regno + i
2010 || GET_CODE (SET_DEST (elt)) != MEM
2011 || GET_MODE (SET_DEST (elt)) != elt_mode
2012 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
2013 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
2014 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
2015 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
2016 != off + i * GET_MODE_SIZE (elt_mode))
2023 /* Return true if OP contains a symbol reference */
2026 symbolic_reference_mentioned_p (rtx op)
2028 register const char *fmt;
2031 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2034 fmt = GET_RTX_FORMAT (GET_CODE (op));
2035 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2041 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2042 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2046 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2053 /* Return true if OP contains a reference to a thread-local symbol. */
2056 tls_symbolic_reference_mentioned_p (rtx op)
2058 register const char *fmt;
2061 if (GET_CODE (op) == SYMBOL_REF)
2062 return tls_symbolic_operand (op);
2064 fmt = GET_RTX_FORMAT (GET_CODE (op));
2065 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2071 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2072 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2076 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2084 /* Return true if OP is a legitimate general operand when
2085 generating PIC code. It is given that flag_pic is on
2086 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2089 legitimate_pic_operand_p (register rtx op)
2091 /* Accept all non-symbolic constants. */
2092 if (!SYMBOLIC_CONST (op))
2095 /* Reject everything else; must be handled
2096 via emit_symbolic_move. */
2100 /* Returns true if the constant value OP is a legitimate general operand.
2101 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2104 legitimate_constant_p (register rtx op)
2106 /* Accept all non-symbolic constants. */
2107 if (!SYMBOLIC_CONST (op))
2110 /* Accept immediate LARL operands. */
2111 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2114 /* Thread-local symbols are never legal constants. This is
2115 so that emit_call knows that computing such addresses
2116 might require a function call. */
2117 if (TLS_SYMBOLIC_CONST (op))
2120 /* In the PIC case, symbolic constants must *not* be
2121 forced into the literal pool. We accept them here,
2122 so that they will be handled by emit_symbolic_move. */
2126 /* All remaining non-PIC symbolic constants are
2127 forced into the literal pool. */
2131 /* Determine if it's legal to put X into the constant pool. This
2132 is not possible if X contains the address of a symbol that is
2133 not constant (TLS) or not known at final link time (PIC). */
2136 s390_cannot_force_const_mem (rtx x)
2138 switch (GET_CODE (x))
2142 /* Accept all non-symbolic constants. */
2146 /* Labels are OK iff we are non-PIC. */
2147 return flag_pic != 0;
2150 /* 'Naked' TLS symbol references are never OK,
2151 non-TLS symbols are OK iff we are non-PIC. */
2152 if (tls_symbolic_operand (x))
2155 return flag_pic != 0;
2158 return s390_cannot_force_const_mem (XEXP (x, 0));
2161 return s390_cannot_force_const_mem (XEXP (x, 0))
2162 || s390_cannot_force_const_mem (XEXP (x, 1));
2165 switch (XINT (x, 1))
2167 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2168 case UNSPEC_LTREL_OFFSET:
2176 case UNSPEC_GOTNTPOFF:
2177 case UNSPEC_INDNTPOFF:
2180 /* If the literal pool shares the code section, be put
2181 execute template placeholders into the pool as well. */
2183 return TARGET_CPU_ZARCH;
2195 /* Returns true if the constant value OP is a legitimate general
2196 operand during and after reload. The difference to
2197 legitimate_constant_p is that this function will not accept
2198 a constant that would need to be forced to the literal pool
2199 before it can be used as operand. */
2202 legitimate_reload_constant_p (register rtx op)
2204 /* Accept la(y) operands. */
2205 if (GET_CODE (op) == CONST_INT
2206 && DISP_IN_RANGE (INTVAL (op)))
2209 /* Accept l(g)hi operands. */
2210 if (GET_CODE (op) == CONST_INT
2211 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', "K"))
2214 /* Accept lliXX operands. */
2216 && s390_single_part (op, DImode, HImode, 0) >= 0)
2219 /* Accept larl operands. */
2220 if (TARGET_CPU_ZARCH
2221 && larl_operand (op, VOIDmode))
2224 /* Everything else cannot be handled without reload. */
2228 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2229 return the class of reg to actually use. */
2232 s390_preferred_reload_class (rtx op, enum reg_class class)
2234 switch (GET_CODE (op))
2236 /* Constants we cannot reload must be forced into the
2241 if (legitimate_reload_constant_p (op))
2246 /* If a symbolic constant or a PLUS is reloaded,
2247 it is most likely being used as an address, so
2248 prefer ADDR_REGS. If 'class' is not a superset
2249 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2254 if (reg_class_subset_p (ADDR_REGS, class))
2266 /* Return the register class of a scratch register needed to
2267 load IN into a register of class CLASS in MODE.
2269 We need a temporary when loading a PLUS expression which
2270 is not a legitimate operand of the LOAD ADDRESS instruction. */
2273 s390_secondary_input_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
2274 enum machine_mode mode, rtx in)
2276 if (s390_plus_operand (in, mode))
2282 /* Return the register class of a scratch register needed to
2283 store a register of class CLASS in MODE into OUT:
2285 We need a temporary when storing a double-word to a
2286 non-offsettable memory address. */
2289 s390_secondary_output_reload_class (enum reg_class class,
2290 enum machine_mode mode, rtx out)
2292 if ((TARGET_64BIT ? mode == TImode
2293 : (mode == DImode || mode == DFmode))
2294 && reg_classes_intersect_p (GENERAL_REGS, class)
2295 && GET_CODE (out) == MEM
2296 && !offsettable_memref_p (out)
2297 && !s_operand (out, VOIDmode))
2303 /* Return true if OP is a PLUS that is not a legitimate
2304 operand for the LA instruction.
2305 OP is the current operation.
2306 MODE is the current operation mode. */
2309 s390_plus_operand (register rtx op, enum machine_mode mode)
2311 if (!check_mode (op, &mode) || mode != Pmode)
2314 if (GET_CODE (op) != PLUS)
2317 if (legitimate_la_operand_p (op))
2323 /* Generate code to load SRC, which is PLUS that is not a
2324 legitimate operand for the LA instruction, into TARGET.
2325 SCRATCH may be used as scratch register. */
2328 s390_expand_plus_operand (register rtx target, register rtx src,
2329 register rtx scratch)
2332 struct s390_address ad;
2334 /* src must be a PLUS; get its two operands. */
2335 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
2338 /* Check if any of the two operands is already scheduled
2339 for replacement by reload. This can happen e.g. when
2340 float registers occur in an address. */
2341 sum1 = find_replacement (&XEXP (src, 0));
2342 sum2 = find_replacement (&XEXP (src, 1));
2343 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2345 /* If the address is already strictly valid, there's nothing to do. */
2346 if (!s390_decompose_address (src, &ad)
2347 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2348 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2350 /* Otherwise, one of the operands cannot be an address register;
2351 we reload its value into the scratch register. */
2352 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2354 emit_move_insn (scratch, sum1);
2357 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2359 emit_move_insn (scratch, sum2);
2363 /* According to the way these invalid addresses are generated
2364 in reload.c, it should never happen (at least on s390) that
2365 *neither* of the PLUS components, after find_replacements
2366 was applied, is an address register. */
2367 if (sum1 == scratch && sum2 == scratch)
2373 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2376 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2377 is only ever performed on addresses, so we can mark the
2378 sum as legitimate for LA in any case. */
2379 s390_load_address (target, src);
2383 /* Decompose a RTL expression ADDR for a memory address into
2384 its components, returned in OUT.
2386 Returns 0 if ADDR is not a valid memory address, nonzero
2387 otherwise. If OUT is NULL, don't return the components,
2388 but check for validity only.
2390 Note: Only addresses in canonical form are recognized.
2391 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
2392 canonical form so that they will be recognized. */
2395 s390_decompose_address (register rtx addr, struct s390_address *out)
2397 HOST_WIDE_INT offset = 0;
2398 rtx base = NULL_RTX;
2399 rtx indx = NULL_RTX;
2400 rtx disp = NULL_RTX;
2402 int pointer = FALSE;
2403 int base_ptr = FALSE;
2404 int indx_ptr = FALSE;
2406 /* Decompose address into base + index + displacement. */
2408 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
2411 else if (GET_CODE (addr) == PLUS)
2413 rtx op0 = XEXP (addr, 0);
2414 rtx op1 = XEXP (addr, 1);
2415 enum rtx_code code0 = GET_CODE (op0);
2416 enum rtx_code code1 = GET_CODE (op1);
2418 if (code0 == REG || code0 == UNSPEC)
2420 if (code1 == REG || code1 == UNSPEC)
2422 indx = op0; /* index + base */
2428 base = op0; /* base + displacement */
2433 else if (code0 == PLUS)
2435 indx = XEXP (op0, 0); /* index + base + disp */
2436 base = XEXP (op0, 1);
2447 disp = addr; /* displacement */
2449 /* Extract integer part of displacement. */
2453 if (GET_CODE (disp) == CONST_INT)
2455 offset = INTVAL (disp);
2458 else if (GET_CODE (disp) == CONST
2459 && GET_CODE (XEXP (disp, 0)) == PLUS
2460 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2462 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2463 disp = XEXP (XEXP (disp, 0), 0);
2467 /* Strip off CONST here to avoid special case tests later. */
2468 if (disp && GET_CODE (disp) == CONST)
2469 disp = XEXP (disp, 0);
2471 /* We can convert literal pool addresses to
2472 displacements by basing them off the base register. */
2473 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
2475 /* Either base or index must be free to hold the base register. */
2477 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2479 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2483 /* Mark up the displacement. */
2484 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
2485 UNSPEC_LTREL_OFFSET);
2488 /* Validate base register. */
2491 if (GET_CODE (base) == UNSPEC)
2492 switch (XINT (base, 1))
2496 disp = gen_rtx_UNSPEC (Pmode,
2497 gen_rtvec (1, XVECEXP (base, 0, 0)),
2498 UNSPEC_LTREL_OFFSET);
2502 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2505 case UNSPEC_LTREL_BASE:
2506 base = gen_rtx_REG (Pmode, BASE_REGNUM);
2513 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
2516 if (REGNO (base) == BASE_REGNUM
2517 || REGNO (base) == STACK_POINTER_REGNUM
2518 || REGNO (base) == FRAME_POINTER_REGNUM
2519 || ((reload_completed || reload_in_progress)
2520 && frame_pointer_needed
2521 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
2522 || REGNO (base) == ARG_POINTER_REGNUM
2524 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
2525 pointer = base_ptr = TRUE;
2528 /* Validate index register. */
2531 if (GET_CODE (indx) == UNSPEC)
2532 switch (XINT (indx, 1))
2536 disp = gen_rtx_UNSPEC (Pmode,
2537 gen_rtvec (1, XVECEXP (indx, 0, 0)),
2538 UNSPEC_LTREL_OFFSET);
2542 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2545 case UNSPEC_LTREL_BASE:
2546 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
2553 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
2556 if (REGNO (indx) == BASE_REGNUM
2557 || REGNO (indx) == STACK_POINTER_REGNUM
2558 || REGNO (indx) == FRAME_POINTER_REGNUM
2559 || ((reload_completed || reload_in_progress)
2560 && frame_pointer_needed
2561 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
2562 || REGNO (indx) == ARG_POINTER_REGNUM
2564 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
2565 pointer = indx_ptr = TRUE;
2568 /* Prefer to use pointer as base, not index. */
2569 if (base && indx && !base_ptr
2570 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
2577 /* Validate displacement. */
2580 /* If the argument pointer or the return address pointer are involved,
2581 the displacement will change later anyway as the virtual registers get
2582 eliminated. This could make a valid displacement invalid, but it is
2583 more likely to make an invalid displacement valid, because we sometimes
2584 access the register save area via negative offsets to one of those
2586 Thus we don't check the displacement for validity here. If after
2587 elimination the displacement turns out to be invalid after all,
2588 this is fixed up by reload in any case. */
2589 if (base != arg_pointer_rtx
2590 && indx != arg_pointer_rtx
2591 && base != return_address_pointer_rtx
2592 && indx != return_address_pointer_rtx)
2593 if (!DISP_IN_RANGE (offset))
2598 /* All the special cases are pointers. */
2601 /* In the small-PIC case, the linker converts @GOT
2602 and @GOTNTPOFF offsets to possible displacements. */
2603 if (GET_CODE (disp) == UNSPEC
2604 && (XINT (disp, 1) == UNSPEC_GOT
2605 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
2612 /* Accept chunkified literal pool symbol references. */
2613 else if (GET_CODE (disp) == MINUS
2614 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
2615 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
2620 /* Accept literal pool references. */
2621 else if (GET_CODE (disp) == UNSPEC
2622 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
2624 orig_disp = gen_rtx_CONST (Pmode, disp);
2627 /* If we have an offset, make sure it does not
2628 exceed the size of the constant pool entry. */
2629 rtx sym = XVECEXP (disp, 0, 0);
2630 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
2633 orig_disp = plus_constant (orig_disp, offset);
2648 out->disp = orig_disp;
2649 out->pointer = pointer;
2655 /* Return nonzero if ADDR is a valid memory address.
2656 STRICT specifies whether strict register checking applies. */
2659 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2660 register rtx addr, int strict)
2662 struct s390_address ad;
2663 if (!s390_decompose_address (addr, &ad))
2668 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2670 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2675 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2677 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2684 /* Return 1 if OP is a valid operand for the LA instruction.
2685 In 31-bit, we need to prove that the result is used as an
2686 address, as LA performs only a 31-bit addition. */
2689 legitimate_la_operand_p (register rtx op)
2691 struct s390_address addr;
2692 if (!s390_decompose_address (op, &addr))
2695 if (TARGET_64BIT || addr.pointer)
2701 /* Return 1 if it is valid *and* preferable to use LA to
2702 compute the sum of OP1 and OP2. */
2705 preferred_la_operand_p (rtx op1, rtx op2)
2707 struct s390_address addr;
2709 if (op2 != const0_rtx)
2710 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2712 if (!s390_decompose_address (op1, &addr))
2714 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2716 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
2719 if (!TARGET_64BIT && !addr.pointer)
2725 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2726 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2732 /* Emit a forced load-address operation to load SRC into DST.
2733 This will use the LOAD ADDRESS instruction even in situations
2734 where legitimate_la_operand_p (SRC) returns false. */
2737 s390_load_address (rtx dst, rtx src)
2740 emit_move_insn (dst, src);
2742 emit_insn (gen_force_la_31 (dst, src));
2745 /* Return a legitimate reference for ORIG (an address) using the
2746 register REG. If REG is 0, a new pseudo is generated.
2748 There are two types of references that must be handled:
2750 1. Global data references must load the address from the GOT, via
2751 the PIC reg. An insn is emitted to do this load, and the reg is
2754 2. Static data references, constant pool addresses, and code labels
2755 compute the address as an offset from the GOT, whose base is in
2756 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2757 differentiate them from global data objects. The returned
2758 address is the PIC reg + an unspec constant.
2760 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2761 reg also appears in the address. */
2764 legitimize_pic_address (rtx orig, rtx reg)
2770 if (GET_CODE (addr) == LABEL_REF
2771 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2773 /* This is a local symbol. */
2774 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2776 /* Access local symbols PC-relative via LARL.
2777 This is the same as in the non-PIC case, so it is
2778 handled automatically ... */
2782 /* Access local symbols relative to the GOT. */
2784 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2786 if (reload_in_progress || reload_completed)
2787 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2789 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2790 addr = gen_rtx_CONST (Pmode, addr);
2791 addr = force_const_mem (Pmode, addr);
2792 emit_move_insn (temp, addr);
2794 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2797 emit_move_insn (reg, new);
2802 else if (GET_CODE (addr) == SYMBOL_REF)
2805 reg = gen_reg_rtx (Pmode);
2809 /* Assume GOT offset < 4k. This is handled the same way
2810 in both 31- and 64-bit code (@GOT). */
2812 if (reload_in_progress || reload_completed)
2813 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2815 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2816 new = gen_rtx_CONST (Pmode, new);
2817 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2818 new = gen_const_mem (Pmode, new);
2819 emit_move_insn (reg, new);
2822 else if (TARGET_CPU_ZARCH)
2824 /* If the GOT offset might be >= 4k, we determine the position
2825 of the GOT entry via a PC-relative LARL (@GOTENT). */
2827 rtx temp = gen_reg_rtx (Pmode);
2829 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2830 new = gen_rtx_CONST (Pmode, new);
2831 emit_move_insn (temp, new);
2833 new = gen_const_mem (Pmode, temp);
2834 emit_move_insn (reg, new);
2839 /* If the GOT offset might be >= 4k, we have to load it
2840 from the literal pool (@GOT). */
2842 rtx temp = gen_reg_rtx (Pmode);
2844 if (reload_in_progress || reload_completed)
2845 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2847 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2848 addr = gen_rtx_CONST (Pmode, addr);
2849 addr = force_const_mem (Pmode, addr);
2850 emit_move_insn (temp, addr);
2852 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2853 new = gen_const_mem (Pmode, new);
2854 emit_move_insn (reg, new);
2860 if (GET_CODE (addr) == CONST)
2862 addr = XEXP (addr, 0);
2863 if (GET_CODE (addr) == UNSPEC)
2865 if (XVECLEN (addr, 0) != 1)
2867 switch (XINT (addr, 1))
2869 /* If someone moved a GOT-relative UNSPEC
2870 out of the literal pool, force them back in. */
2873 new = force_const_mem (Pmode, orig);
2876 /* @GOT is OK as is if small. */
2879 new = force_const_mem (Pmode, orig);
2882 /* @GOTENT is OK as is. */
2886 /* @PLT is OK as is on 64-bit, must be converted to
2887 GOT-relative @PLTOFF on 31-bit. */
2889 if (!TARGET_CPU_ZARCH)
2891 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2893 if (reload_in_progress || reload_completed)
2894 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2896 addr = XVECEXP (addr, 0, 0);
2897 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2899 addr = gen_rtx_CONST (Pmode, addr);
2900 addr = force_const_mem (Pmode, addr);
2901 emit_move_insn (temp, addr);
2903 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2906 emit_move_insn (reg, new);
2912 /* Everything else cannot happen. */
2917 else if (GET_CODE (addr) != PLUS)
2920 if (GET_CODE (addr) == PLUS)
2922 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2923 /* Check first to see if this is a constant offset
2924 from a local symbol reference. */
2925 if ((GET_CODE (op0) == LABEL_REF
2926 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2927 && GET_CODE (op1) == CONST_INT)
2929 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2931 if (INTVAL (op1) & 1)
2933 /* LARL can't handle odd offsets, so emit a
2934 pair of LARL and LA. */
2935 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2937 if (!DISP_IN_RANGE (INTVAL (op1)))
2939 int even = INTVAL (op1) - 1;
2940 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2941 op0 = gen_rtx_CONST (Pmode, op0);
2945 emit_move_insn (temp, op0);
2946 new = gen_rtx_PLUS (Pmode, temp, op1);
2950 emit_move_insn (reg, new);
2956 /* If the offset is even, we can just use LARL.
2957 This will happen automatically. */
2962 /* Access local symbols relative to the GOT. */
2964 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2966 if (reload_in_progress || reload_completed)
2967 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2969 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2971 addr = gen_rtx_PLUS (Pmode, addr, op1);
2972 addr = gen_rtx_CONST (Pmode, addr);
2973 addr = force_const_mem (Pmode, addr);
2974 emit_move_insn (temp, addr);
2976 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2979 emit_move_insn (reg, new);
2985 /* Now, check whether it is a GOT relative symbol plus offset
2986 that was pulled out of the literal pool. Force it back in. */
2988 else if (GET_CODE (op0) == UNSPEC
2989 && GET_CODE (op1) == CONST_INT
2990 && XINT (op0, 1) == UNSPEC_GOTOFF)
2992 if (XVECLEN (op0, 0) != 1)
2995 new = force_const_mem (Pmode, orig);
2998 /* Otherwise, compute the sum. */
3001 base = legitimize_pic_address (XEXP (addr, 0), reg);
3002 new = legitimize_pic_address (XEXP (addr, 1),
3003 base == reg ? NULL_RTX : reg);
3004 if (GET_CODE (new) == CONST_INT)
3005 new = plus_constant (base, INTVAL (new));
3008 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3010 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3011 new = XEXP (new, 1);
3013 new = gen_rtx_PLUS (Pmode, base, new);
3016 if (GET_CODE (new) == CONST)
3017 new = XEXP (new, 0);
3018 new = force_operand (new, 0);
3025 /* Load the thread pointer into a register. */
3028 get_thread_pointer (void)
3032 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
3033 tp = force_reg (Pmode, tp);
3034 mark_reg_pointer (tp, BITS_PER_WORD);
3039 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3040 in s390_tls_symbol which always refers to __tls_get_offset.
3041 The returned offset is written to RESULT_REG and an USE rtx is
3042 generated for TLS_CALL. */
3044 static GTY(()) rtx s390_tls_symbol;
3047 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3054 if (!s390_tls_symbol)
3055 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3057 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3058 gen_rtx_REG (Pmode, RETURN_REGNUM));
3060 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3061 CONST_OR_PURE_CALL_P (insn) = 1;
3064 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3065 this (thread-local) address. REG may be used as temporary. */
3068 legitimize_tls_address (rtx addr, rtx reg)
3070 rtx new, tls_call, temp, base, r2, insn;
3072 if (GET_CODE (addr) == SYMBOL_REF)
3073 switch (tls_symbolic_operand (addr))
3075 case TLS_MODEL_GLOBAL_DYNAMIC:
3077 r2 = gen_rtx_REG (Pmode, 2);
3078 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3079 new = gen_rtx_CONST (Pmode, tls_call);
3080 new = force_const_mem (Pmode, new);
3081 emit_move_insn (r2, new);
3082 s390_emit_tls_call_insn (r2, tls_call);
3083 insn = get_insns ();
3086 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3087 temp = gen_reg_rtx (Pmode);
3088 emit_libcall_block (insn, temp, r2, new);
3090 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3093 s390_load_address (reg, new);
3098 case TLS_MODEL_LOCAL_DYNAMIC:
3100 r2 = gen_rtx_REG (Pmode, 2);
3101 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3102 new = gen_rtx_CONST (Pmode, tls_call);
3103 new = force_const_mem (Pmode, new);
3104 emit_move_insn (r2, new);
3105 s390_emit_tls_call_insn (r2, tls_call);
3106 insn = get_insns ();
3109 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3110 temp = gen_reg_rtx (Pmode);
3111 emit_libcall_block (insn, temp, r2, new);
3113 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3114 base = gen_reg_rtx (Pmode);
3115 s390_load_address (base, new);
3117 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3118 new = gen_rtx_CONST (Pmode, new);
3119 new = force_const_mem (Pmode, new);
3120 temp = gen_reg_rtx (Pmode);
3121 emit_move_insn (temp, new);
3123 new = gen_rtx_PLUS (Pmode, base, temp);
3126 s390_load_address (reg, new);
3131 case TLS_MODEL_INITIAL_EXEC:
3134 /* Assume GOT offset < 4k. This is handled the same way
3135 in both 31- and 64-bit code. */
3137 if (reload_in_progress || reload_completed)
3138 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3140 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3141 new = gen_rtx_CONST (Pmode, new);
3142 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3143 new = gen_const_mem (Pmode, new);
3144 temp = gen_reg_rtx (Pmode);
3145 emit_move_insn (temp, new);
3147 else if (TARGET_CPU_ZARCH)
3149 /* If the GOT offset might be >= 4k, we determine the position
3150 of the GOT entry via a PC-relative LARL. */
3152 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3153 new = gen_rtx_CONST (Pmode, new);
3154 temp = gen_reg_rtx (Pmode);
3155 emit_move_insn (temp, new);
3157 new = gen_const_mem (Pmode, temp);
3158 temp = gen_reg_rtx (Pmode);
3159 emit_move_insn (temp, new);
3163 /* If the GOT offset might be >= 4k, we have to load it
3164 from the literal pool. */
3166 if (reload_in_progress || reload_completed)
3167 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3169 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3170 new = gen_rtx_CONST (Pmode, new);
3171 new = force_const_mem (Pmode, new);
3172 temp = gen_reg_rtx (Pmode);
3173 emit_move_insn (temp, new);
3175 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3176 new = gen_const_mem (Pmode, new);
3178 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3179 temp = gen_reg_rtx (Pmode);
3180 emit_insn (gen_rtx_SET (Pmode, temp, new));
3184 /* In position-dependent code, load the absolute address of
3185 the GOT entry from the literal pool. */
3187 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3188 new = gen_rtx_CONST (Pmode, new);
3189 new = force_const_mem (Pmode, new);
3190 temp = gen_reg_rtx (Pmode);
3191 emit_move_insn (temp, new);
3194 new = gen_const_mem (Pmode, new);
3195 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3196 temp = gen_reg_rtx (Pmode);
3197 emit_insn (gen_rtx_SET (Pmode, temp, new));
3200 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3203 s390_load_address (reg, new);
3208 case TLS_MODEL_LOCAL_EXEC:
3209 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3210 new = gen_rtx_CONST (Pmode, new);
3211 new = force_const_mem (Pmode, new);
3212 temp = gen_reg_rtx (Pmode);
3213 emit_move_insn (temp, new);
3215 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
3218 s390_load_address (reg, new);
3227 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3229 switch (XINT (XEXP (addr, 0), 1))
3231 case UNSPEC_INDNTPOFF:
3232 if (TARGET_CPU_ZARCH)
3243 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3244 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3246 new = XEXP (XEXP (addr, 0), 0);
3247 if (GET_CODE (new) != SYMBOL_REF)
3248 new = gen_rtx_CONST (Pmode, new);
3250 new = legitimize_tls_address (new, reg);
3251 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3252 new = force_operand (new, 0);
3256 abort (); /* for now ... */
3261 /* Emit insns to move operands[1] into operands[0]. */
3264 emit_symbolic_move (rtx *operands)
3266 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
3268 if (GET_CODE (operands[0]) == MEM)
3269 operands[1] = force_reg (Pmode, operands[1]);
3270 else if (TLS_SYMBOLIC_CONST (operands[1]))
3271 operands[1] = legitimize_tls_address (operands[1], temp);
3273 operands[1] = legitimize_pic_address (operands[1], temp);
3276 /* Try machine-dependent ways of modifying an illegitimate address X
3277 to be legitimate. If we find one, return the new, valid address.
3279 OLDX is the address as it was before break_out_memory_refs was called.
3280 In some cases it is useful to look at this to decide what needs to be done.
3282 MODE is the mode of the operand pointed to by X.
3284 When -fpic is used, special handling is needed for symbolic references.
3285 See comments by legitimize_pic_address for details. */
3288 legitimize_address (register rtx x, register rtx oldx ATTRIBUTE_UNUSED,
3289 enum machine_mode mode ATTRIBUTE_UNUSED)
3291 rtx constant_term = const0_rtx;
3293 if (TLS_SYMBOLIC_CONST (x))
3295 x = legitimize_tls_address (x, 0);
3297 if (legitimate_address_p (mode, x, FALSE))
3302 if (SYMBOLIC_CONST (x)
3303 || (GET_CODE (x) == PLUS
3304 && (SYMBOLIC_CONST (XEXP (x, 0))
3305 || SYMBOLIC_CONST (XEXP (x, 1)))))
3306 x = legitimize_pic_address (x, 0);
3308 if (legitimate_address_p (mode, x, FALSE))
3312 x = eliminate_constant_term (x, &constant_term);
3314 /* Optimize loading of large displacements by splitting them
3315 into the multiple of 4K and the rest; this allows the
3316 former to be CSE'd if possible.
3318 Don't do this if the displacement is added to a register
3319 pointing into the stack frame, as the offsets will
3320 change later anyway. */
3322 if (GET_CODE (constant_term) == CONST_INT
3323 && !TARGET_LONG_DISPLACEMENT
3324 && !DISP_IN_RANGE (INTVAL (constant_term))
3325 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
3327 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3328 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3330 rtx temp = gen_reg_rtx (Pmode);
3331 rtx val = force_operand (GEN_INT (upper), temp);
3333 emit_move_insn (temp, val);
3335 x = gen_rtx_PLUS (Pmode, x, temp);
3336 constant_term = GEN_INT (lower);
3339 if (GET_CODE (x) == PLUS)
3341 if (GET_CODE (XEXP (x, 0)) == REG)
3343 register rtx temp = gen_reg_rtx (Pmode);
3344 register rtx val = force_operand (XEXP (x, 1), temp);
3346 emit_move_insn (temp, val);
3348 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3351 else if (GET_CODE (XEXP (x, 1)) == REG)
3353 register rtx temp = gen_reg_rtx (Pmode);
3354 register rtx val = force_operand (XEXP (x, 0), temp);
3356 emit_move_insn (temp, val);
3358 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3362 if (constant_term != const0_rtx)
3363 x = gen_rtx_PLUS (Pmode, x, constant_term);
3368 /* Try a machine-dependent way of reloading an illegitimate address AD
3369 operand. If we find one, push the reload and and return the new address.
3371 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3372 and TYPE is the reload type of the current reload. */
3375 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3376 int opnum, int type)
3378 if (!optimize || TARGET_LONG_DISPLACEMENT)
3381 if (GET_CODE (ad) == PLUS)
3383 rtx tem = simplify_binary_operation (PLUS, Pmode,
3384 XEXP (ad, 0), XEXP (ad, 1));
3389 if (GET_CODE (ad) == PLUS
3390 && GET_CODE (XEXP (ad, 0)) == REG
3391 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3392 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3394 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3395 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3398 cst = GEN_INT (upper);
3399 if (!legitimate_reload_constant_p (cst))
3400 cst = force_const_mem (Pmode, cst);
3402 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3403 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3405 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3406 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3407 opnum, (enum reload_type) type);
3414 /* Emit code to move LEN bytes from DST to SRC. */
3417 s390_expand_movmem (rtx dst, rtx src, rtx len)
3419 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3421 if (INTVAL (len) > 0)
3422 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3425 else if (TARGET_MVCLE)
3427 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3432 rtx dst_addr, src_addr, count, blocks, temp;
3433 rtx loop_start_label = gen_label_rtx ();
3434 rtx loop_end_label = gen_label_rtx ();
3435 rtx end_label = gen_label_rtx ();
3436 enum machine_mode mode;
3438 mode = GET_MODE (len);
3439 if (mode == VOIDmode)
3442 dst_addr = gen_reg_rtx (Pmode);
3443 src_addr = gen_reg_rtx (Pmode);
3444 count = gen_reg_rtx (mode);
3445 blocks = gen_reg_rtx (mode);
3447 convert_move (count, len, 1);
3448 emit_cmp_and_jump_insns (count, const0_rtx,
3449 EQ, NULL_RTX, mode, 1, end_label);
3451 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3452 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3453 dst = change_address (dst, VOIDmode, dst_addr);
3454 src = change_address (src, VOIDmode, src_addr);
3456 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3458 emit_move_insn (count, temp);
3460 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3462 emit_move_insn (blocks, temp);
3464 emit_cmp_and_jump_insns (blocks, const0_rtx,
3465 EQ, NULL_RTX, mode, 1, loop_end_label);
3467 emit_label (loop_start_label);
3469 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3470 s390_load_address (dst_addr,
3471 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3472 s390_load_address (src_addr,
3473 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3475 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3477 emit_move_insn (blocks, temp);
3479 emit_cmp_and_jump_insns (blocks, const0_rtx,
3480 EQ, NULL_RTX, mode, 1, loop_end_label);
3482 emit_jump (loop_start_label);
3483 emit_label (loop_end_label);
3485 emit_insn (gen_movmem_short (dst, src,
3486 convert_to_mode (Pmode, count, 1)));
3487 emit_label (end_label);
3491 /* Emit code to clear LEN bytes at DST. */
3494 s390_expand_clrmem (rtx dst, rtx len)
3496 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3498 if (INTVAL (len) > 0)
3499 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3502 else if (TARGET_MVCLE)
3504 emit_insn (gen_clrmem_long (dst, convert_to_mode (Pmode, len, 1)));
3509 rtx dst_addr, src_addr, count, blocks, temp;
3510 rtx loop_start_label = gen_label_rtx ();
3511 rtx loop_end_label = gen_label_rtx ();
3512 rtx end_label = gen_label_rtx ();
3513 enum machine_mode mode;
3515 mode = GET_MODE (len);
3516 if (mode == VOIDmode)
3519 dst_addr = gen_reg_rtx (Pmode);
3520 src_addr = gen_reg_rtx (Pmode);
3521 count = gen_reg_rtx (mode);
3522 blocks = gen_reg_rtx (mode);
3524 convert_move (count, len, 1);
3525 emit_cmp_and_jump_insns (count, const0_rtx,
3526 EQ, NULL_RTX, mode, 1, end_label);
3528 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3529 dst = change_address (dst, VOIDmode, dst_addr);
3531 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3533 emit_move_insn (count, temp);
3535 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3537 emit_move_insn (blocks, temp);
3539 emit_cmp_and_jump_insns (blocks, const0_rtx,
3540 EQ, NULL_RTX, mode, 1, loop_end_label);
3542 emit_label (loop_start_label);
3544 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3545 s390_load_address (dst_addr,
3546 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3548 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3550 emit_move_insn (blocks, temp);
3552 emit_cmp_and_jump_insns (blocks, const0_rtx,
3553 EQ, NULL_RTX, mode, 1, loop_end_label);
3555 emit_jump (loop_start_label);
3556 emit_label (loop_end_label);
3558 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3559 emit_label (end_label);
3563 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3564 and return the result in TARGET. */
3567 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
3569 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3570 rtx result = gen_rtx_UNSPEC (SImode, gen_rtvec (1, ccreg), UNSPEC_CMPINT);
3572 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3574 if (INTVAL (len) > 0)
3576 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3577 emit_move_insn (target, result);
3580 emit_move_insn (target, const0_rtx);
3583 else /* if (TARGET_MVCLE) */
3585 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3586 emit_move_insn (target, result);
3590 /* Deactivate for now as profile code cannot cope with
3591 CC being live across basic block boundaries. */
3594 rtx addr0, addr1, count, blocks, temp;
3595 rtx loop_start_label = gen_label_rtx ();
3596 rtx loop_end_label = gen_label_rtx ();
3597 rtx end_label = gen_label_rtx ();
3598 enum machine_mode mode;
3600 mode = GET_MODE (len);
3601 if (mode == VOIDmode)
3604 addr0 = gen_reg_rtx (Pmode);
3605 addr1 = gen_reg_rtx (Pmode);
3606 count = gen_reg_rtx (mode);
3607 blocks = gen_reg_rtx (mode);
3609 convert_move (count, len, 1);
3610 emit_cmp_and_jump_insns (count, const0_rtx,
3611 EQ, NULL_RTX, mode, 1, end_label);
3613 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3614 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3615 op0 = change_address (op0, VOIDmode, addr0);
3616 op1 = change_address (op1, VOIDmode, addr1);
3618 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3620 emit_move_insn (count, temp);
3622 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3624 emit_move_insn (blocks, temp);
3626 emit_cmp_and_jump_insns (blocks, const0_rtx,
3627 EQ, NULL_RTX, mode, 1, loop_end_label);
3629 emit_label (loop_start_label);
3631 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
3632 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3633 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3634 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3635 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3636 emit_jump_insn (temp);
3638 s390_load_address (addr0,
3639 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3640 s390_load_address (addr1,
3641 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3643 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3645 emit_move_insn (blocks, temp);
3647 emit_cmp_and_jump_insns (blocks, const0_rtx,
3648 EQ, NULL_RTX, mode, 1, loop_end_label);
3650 emit_jump (loop_start_label);
3651 emit_label (loop_end_label);
3653 emit_insn (gen_cmpmem_short (op0, op1,
3654 convert_to_mode (Pmode, count, 1)));
3655 emit_label (end_label);
3657 emit_move_insn (target, result);
3663 /* Expand conditional increment or decrement using alc/slb instructions.
3664 Should generate code setting DST to either SRC or SRC + INCREMENT,
3665 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3666 Returns true if successful, false otherwise. */
3669 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3670 rtx dst, rtx src, rtx increment)
3672 enum machine_mode cmp_mode;
3673 enum machine_mode cc_mode;
3678 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3679 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3681 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3682 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3687 /* Try ADD LOGICAL WITH CARRY. */
3688 if (increment == const1_rtx)
3690 /* Determine CC mode to use. */
3691 if (cmp_code == EQ || cmp_code == NE)
3693 if (cmp_op1 != const0_rtx)
3695 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3696 NULL_RTX, 0, OPTAB_WIDEN);
3697 cmp_op1 = const0_rtx;
3700 cmp_code = cmp_code == EQ ? LEU : GTU;
3703 if (cmp_code == LTU || cmp_code == LEU)
3708 cmp_code = swap_condition (cmp_code);
3725 /* Emit comparison instruction pattern. */
3726 if (!register_operand (cmp_op0, cmp_mode))
3727 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3729 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3730 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3731 /* We use insn_invalid_p here to add clobbers if required. */
3732 if (insn_invalid_p (emit_insn (insn)))
3735 /* Emit ALC instruction pattern. */
3736 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3737 gen_rtx_REG (cc_mode, CC_REGNUM),
3740 if (src != const0_rtx)
3742 if (!register_operand (src, GET_MODE (dst)))
3743 src = force_reg (GET_MODE (dst), src);
3745 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3746 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
3749 p = rtvec_alloc (2);
3751 gen_rtx_SET (VOIDmode, dst, op_res);
3753 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3754 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3759 /* Try SUBTRACT LOGICAL WITH BORROW. */
3760 if (increment == constm1_rtx)
3762 /* Determine CC mode to use. */
3763 if (cmp_code == EQ || cmp_code == NE)
3765 if (cmp_op1 != const0_rtx)
3767 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3768 NULL_RTX, 0, OPTAB_WIDEN);
3769 cmp_op1 = const0_rtx;
3772 cmp_code = cmp_code == EQ ? LEU : GTU;
3775 if (cmp_code == GTU || cmp_code == GEU)
3780 cmp_code = swap_condition (cmp_code);
3797 /* Emit comparison instruction pattern. */
3798 if (!register_operand (cmp_op0, cmp_mode))
3799 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3801 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3802 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3803 /* We use insn_invalid_p here to add clobbers if required. */
3804 if (insn_invalid_p (emit_insn (insn)))
3807 /* Emit SLB instruction pattern. */
3808 if (!register_operand (src, GET_MODE (dst)))
3809 src = force_reg (GET_MODE (dst), src);
3811 op_res = gen_rtx_MINUS (GET_MODE (dst),
3812 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3813 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3814 gen_rtx_REG (cc_mode, CC_REGNUM),
3816 p = rtvec_alloc (2);
3818 gen_rtx_SET (VOIDmode, dst, op_res);
3820 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3821 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3830 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3831 We need to emit DTP-relative relocations. */
3834 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3839 fputs ("\t.long\t", file);
3842 fputs ("\t.quad\t", file);
3847 output_addr_const (file, x);
3848 fputs ("@DTPOFF", file);
3851 /* In the name of slightly smaller debug output, and to cater to
3852 general assembler losage, recognize various UNSPEC sequences
3853 and turn them back into a direct symbol reference. */
3856 s390_delegitimize_address (rtx orig_x)
3860 if (GET_CODE (x) != MEM)
3864 if (GET_CODE (x) == PLUS
3865 && GET_CODE (XEXP (x, 1)) == CONST
3866 && GET_CODE (XEXP (x, 0)) == REG
3867 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3869 y = XEXP (XEXP (x, 1), 0);
3870 if (GET_CODE (y) == UNSPEC
3871 && XINT (y, 1) == UNSPEC_GOT)
3872 return XVECEXP (y, 0, 0);
3876 if (GET_CODE (x) == CONST)
3879 if (GET_CODE (y) == UNSPEC
3880 && XINT (y, 1) == UNSPEC_GOTENT)
3881 return XVECEXP (y, 0, 0);
3888 /* Output shift count operand OP to stdio stream FILE. */
3891 print_shift_count_operand (FILE *file, rtx op)
3893 HOST_WIDE_INT offset = 0;
3895 /* We can have an integer constant, an address register,
3896 or a sum of the two. */
3897 if (GET_CODE (op) == CONST_INT)
3899 offset = INTVAL (op);
3902 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
3904 offset = INTVAL (XEXP (op, 1));
3907 while (op && GET_CODE (op) == SUBREG)
3908 op = SUBREG_REG (op);
3911 if (op && (GET_CODE (op) != REG
3912 || REGNO (op) >= FIRST_PSEUDO_REGISTER
3913 || REGNO_REG_CLASS (REGNO (op)) != ADDR_REGS))
3916 /* Shift counts are truncated to the low six bits anyway. */
3917 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & 63);
3919 fprintf (file, "(%s)", reg_names[REGNO (op)]);
3922 /* Locate some local-dynamic symbol still in use by this function
3923 so that we can print its name in local-dynamic base patterns. */
3926 get_some_local_dynamic_name (void)
3930 if (cfun->machine->some_ld_name)
3931 return cfun->machine->some_ld_name;
3933 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3935 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3936 return cfun->machine->some_ld_name;
3942 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
3946 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3948 x = get_pool_constant (x);
3949 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3952 if (GET_CODE (x) == SYMBOL_REF
3953 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3955 cfun->machine->some_ld_name = XSTR (x, 0);
3962 /* Output machine-dependent UNSPECs occurring in address constant X
3963 in assembler syntax to stdio stream FILE. Returns true if the
3964 constant X could be recognized, false otherwise. */
3967 s390_output_addr_const_extra (FILE *file, rtx x)
3969 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
3970 switch (XINT (x, 1))
3973 output_addr_const (file, XVECEXP (x, 0, 0));
3974 fprintf (file, "@GOTENT");
3977 output_addr_const (file, XVECEXP (x, 0, 0));
3978 fprintf (file, "@GOT");
3981 output_addr_const (file, XVECEXP (x, 0, 0));
3982 fprintf (file, "@GOTOFF");
3985 output_addr_const (file, XVECEXP (x, 0, 0));
3986 fprintf (file, "@PLT");
3989 output_addr_const (file, XVECEXP (x, 0, 0));
3990 fprintf (file, "@PLTOFF");
3993 output_addr_const (file, XVECEXP (x, 0, 0));
3994 fprintf (file, "@TLSGD");
3997 assemble_name (file, get_some_local_dynamic_name ());
3998 fprintf (file, "@TLSLDM");
4001 output_addr_const (file, XVECEXP (x, 0, 0));
4002 fprintf (file, "@DTPOFF");
4005 output_addr_const (file, XVECEXP (x, 0, 0));
4006 fprintf (file, "@NTPOFF");
4008 case UNSPEC_GOTNTPOFF:
4009 output_addr_const (file, XVECEXP (x, 0, 0));
4010 fprintf (file, "@GOTNTPOFF");
4012 case UNSPEC_INDNTPOFF:
4013 output_addr_const (file, XVECEXP (x, 0, 0));
4014 fprintf (file, "@INDNTPOFF");
4021 /* Output address operand ADDR in assembler syntax to
4022 stdio stream FILE. */
4025 print_operand_address (FILE *file, rtx addr)
4027 struct s390_address ad;
4029 if (!s390_decompose_address (addr, &ad)
4030 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4031 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4032 output_operand_lossage ("Cannot decompose address.");
4035 output_addr_const (file, ad.disp);
4037 fprintf (file, "0");
4039 if (ad.base && ad.indx)
4040 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4041 reg_names[REGNO (ad.base)]);
4043 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4046 /* Output operand X in assembler syntax to stdio stream FILE.
4047 CODE specified the format flag. The following format flags
4050 'C': print opcode suffix for branch condition.
4051 'D': print opcode suffix for inverse branch condition.
4052 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4053 'O': print only the displacement of a memory reference.
4054 'R': print only the base register of a memory reference.
4055 'S': print S-type memory reference (base+displacement).
4056 'N': print the second word of a DImode operand.
4057 'M': print the second word of a TImode operand.
4058 'Y': print shift count operand.
4060 'b': print integer X as if it's an unsigned byte.
4061 'x': print integer X as if it's an unsigned word.
4062 'h': print integer X as if it's a signed word.
4063 'i': print the first nonzero HImode part of X.
4064 'j': print the first HImode part unequal to 0xffff of X. */
4067 print_operand (FILE *file, rtx x, int code)
4072 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4076 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
4080 if (GET_CODE (x) == SYMBOL_REF)
4082 fprintf (file, "%s", ":tls_load:");
4083 output_addr_const (file, x);
4085 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4087 fprintf (file, "%s", ":tls_gdcall:");
4088 output_addr_const (file, XVECEXP (x, 0, 0));
4090 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4092 fprintf (file, "%s", ":tls_ldcall:");
4093 assemble_name (file, get_some_local_dynamic_name ());
4101 struct s390_address ad;
4103 if (GET_CODE (x) != MEM
4104 || !s390_decompose_address (XEXP (x, 0), &ad)
4105 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4110 output_addr_const (file, ad.disp);
4112 fprintf (file, "0");
4118 struct s390_address ad;
4120 if (GET_CODE (x) != MEM
4121 || !s390_decompose_address (XEXP (x, 0), &ad)
4122 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4127 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4129 fprintf (file, "0");
4135 struct s390_address ad;
4137 if (GET_CODE (x) != MEM
4138 || !s390_decompose_address (XEXP (x, 0), &ad)
4139 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4144 output_addr_const (file, ad.disp);
4146 fprintf (file, "0");
4149 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4154 if (GET_CODE (x) == REG)
4155 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4156 else if (GET_CODE (x) == MEM)
4157 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
4163 if (GET_CODE (x) == REG)
4164 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4165 else if (GET_CODE (x) == MEM)
4166 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4172 print_shift_count_operand (file, x);
4176 switch (GET_CODE (x))
4179 fprintf (file, "%s", reg_names[REGNO (x)]);
4183 output_address (XEXP (x, 0));
4190 output_addr_const (file, x);
4195 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4196 else if (code == 'x')
4197 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4198 else if (code == 'h')
4199 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4200 else if (code == 'i')
4201 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4202 s390_extract_part (x, HImode, 0));
4203 else if (code == 'j')
4204 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4205 s390_extract_part (x, HImode, -1));
4207 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
4211 if (GET_MODE (x) != VOIDmode)
4214 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4215 else if (code == 'x')
4216 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4217 else if (code == 'h')
4218 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4224 fatal_insn ("UNKNOWN in print_operand !?", x);
4229 /* Target hook for assembling integer objects. We need to define it
4230 here to work a round a bug in some versions of GAS, which couldn't
4231 handle values smaller than INT_MIN when printed in decimal. */
4234 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
4236 if (size == 8 && aligned_p
4237 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4239 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4243 return default_assemble_integer (x, size, aligned_p);
4246 /* Returns true if register REGNO is used for forming
4247 a memory address in expression X. */
4250 reg_used_in_mem_p (int regno, rtx x)
4252 enum rtx_code code = GET_CODE (x);
4258 if (refers_to_regno_p (regno, regno+1,
4262 else if (code == SET
4263 && GET_CODE (SET_DEST (x)) == PC)
4265 if (refers_to_regno_p (regno, regno+1,
4270 fmt = GET_RTX_FORMAT (code);
4271 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4274 && reg_used_in_mem_p (regno, XEXP (x, i)))
4277 else if (fmt[i] == 'E')
4278 for (j = 0; j < XVECLEN (x, i); j++)
4279 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4285 /* Returns true if expression DEP_RTX sets an address register
4286 used by instruction INSN to address memory. */
4289 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4293 if (GET_CODE (dep_rtx) == INSN)
4294 dep_rtx = PATTERN (dep_rtx);
4296 if (GET_CODE (dep_rtx) == SET)
4298 target = SET_DEST (dep_rtx);
4299 if (GET_CODE (target) == STRICT_LOW_PART)
4300 target = XEXP (target, 0);
4301 while (GET_CODE (target) == SUBREG)
4302 target = SUBREG_REG (target);
4304 if (GET_CODE (target) == REG)
4306 int regno = REGNO (target);
4308 if (s390_safe_attr_type (insn) == TYPE_LA)
4310 pat = PATTERN (insn);
4311 if (GET_CODE (pat) == PARALLEL)
4313 if (XVECLEN (pat, 0) != 2)
4315 pat = XVECEXP (pat, 0, 0);
4317 if (GET_CODE (pat) == SET)
4318 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
4322 else if (get_attr_atype (insn) == ATYPE_AGEN)
4323 return reg_used_in_mem_p (regno, PATTERN (insn));
4329 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
4332 s390_agen_dep_p (rtx dep_insn, rtx insn)
4334 rtx dep_rtx = PATTERN (dep_insn);
4337 if (GET_CODE (dep_rtx) == SET
4338 && addr_generation_dependency_p (dep_rtx, insn))
4340 else if (GET_CODE (dep_rtx) == PARALLEL)
4342 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4344 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4351 /* A C statement (sans semicolon) to update the integer scheduling priority
4352 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4353 reduce the priority to execute INSN later. Do not define this macro if
4354 you do not need to adjust the scheduling priorities of insns.
4356 A STD instruction should be scheduled earlier,
4357 in order to use the bypass. */
4360 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4362 if (! INSN_P (insn))
4365 if (s390_tune != PROCESSOR_2084_Z990)
4368 switch (s390_safe_attr_type (insn))
4372 priority = priority << 3;
4376 priority = priority << 1;
4384 /* The number of instructions that can be issued per cycle. */
4387 s390_issue_rate (void)
4389 if (s390_tune == PROCESSOR_2084_Z990)
4395 s390_first_cycle_multipass_dfa_lookahead (void)
4401 /* Split all branches that exceed the maximum distance.
4402 Returns true if this created a new literal pool entry. */
4405 s390_split_branches (void)
4407 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4408 int new_literal = 0;
4409 rtx insn, pat, tmp, target;
4412 /* We need correct insn addresses. */
4414 shorten_branches (get_insns ());
4416 /* Find all branches that exceed 64KB, and split them. */
4418 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4420 if (GET_CODE (insn) != JUMP_INSN)
4423 pat = PATTERN (insn);
4424 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4425 pat = XVECEXP (pat, 0, 0);
4426 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
4429 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4431 label = &SET_SRC (pat);
4433 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4435 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4436 label = &XEXP (SET_SRC (pat), 1);
4437 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4438 label = &XEXP (SET_SRC (pat), 2);
4445 if (get_attr_length (insn) <= 4)
4448 /* We are going to use the return register as scratch register,
4449 make sure it will be saved/restored by the prologue/epilogue. */
4450 cfun_frame_layout.save_return_addr_p = 1;
4455 tmp = force_const_mem (Pmode, *label);
4456 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4457 INSN_ADDRESSES_NEW (tmp, -1);
4458 annotate_constant_pool_refs (&PATTERN (tmp));
4465 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4466 UNSPEC_LTREL_OFFSET);
4467 target = gen_rtx_CONST (Pmode, target);
4468 target = force_const_mem (Pmode, target);
4469 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4470 INSN_ADDRESSES_NEW (tmp, -1);
4471 annotate_constant_pool_refs (&PATTERN (tmp));
4473 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4474 cfun->machine->base_reg),
4476 target = gen_rtx_PLUS (Pmode, temp_reg, target);
4479 if (!validate_change (insn, label, target, 0))
4486 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4487 Fix up MEMs as required. */
4490 annotate_constant_pool_refs (rtx *x)
4495 if (GET_CODE (*x) == SYMBOL_REF
4496 && CONSTANT_POOL_ADDRESS_P (*x))
4499 /* Literal pool references can only occur inside a MEM ... */
4500 if (GET_CODE (*x) == MEM)
4502 rtx memref = XEXP (*x, 0);
4504 if (GET_CODE (memref) == SYMBOL_REF
4505 && CONSTANT_POOL_ADDRESS_P (memref))
4507 rtx base = cfun->machine->base_reg;
4508 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4511 *x = replace_equiv_address (*x, addr);
4515 if (GET_CODE (memref) == CONST
4516 && GET_CODE (XEXP (memref, 0)) == PLUS
4517 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4518 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4519 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4521 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4522 rtx sym = XEXP (XEXP (memref, 0), 0);
4523 rtx base = cfun->machine->base_reg;
4524 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4527 *x = replace_equiv_address (*x, plus_constant (addr, off));
4532 /* ... or a load-address type pattern. */
4533 if (GET_CODE (*x) == SET)
4535 rtx addrref = SET_SRC (*x);
4537 if (GET_CODE (addrref) == SYMBOL_REF
4538 && CONSTANT_POOL_ADDRESS_P (addrref))
4540 rtx base = cfun->machine->base_reg;
4541 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4544 SET_SRC (*x) = addr;
4548 if (GET_CODE (addrref) == CONST
4549 && GET_CODE (XEXP (addrref, 0)) == PLUS
4550 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4551 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4552 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4554 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4555 rtx sym = XEXP (XEXP (addrref, 0), 0);
4556 rtx base = cfun->machine->base_reg;
4557 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4560 SET_SRC (*x) = plus_constant (addr, off);
4565 /* Annotate LTREL_BASE as well. */
4566 if (GET_CODE (*x) == UNSPEC
4567 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4569 rtx base = cfun->machine->base_reg;
4570 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
4575 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4576 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4580 annotate_constant_pool_refs (&XEXP (*x, i));
4582 else if (fmt[i] == 'E')
4584 for (j = 0; j < XVECLEN (*x, i); j++)
4585 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4591 /* Find an annotated literal pool symbol referenced in RTX X,
4592 and store it at REF. Will abort if X contains references to
4593 more than one such pool symbol; multiple references to the same
4594 symbol are allowed, however.
4596 The rtx pointed to by REF must be initialized to NULL_RTX
4597 by the caller before calling this routine. */
4600 find_constant_pool_ref (rtx x, rtx *ref)
4605 /* Ignore LTREL_BASE references. */
4606 if (GET_CODE (x) == UNSPEC
4607 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4609 /* Likewise POOL_ENTRY insns. */
4610 if (GET_CODE (x) == UNSPEC_VOLATILE
4611 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
4614 if (GET_CODE (x) == SYMBOL_REF
4615 && CONSTANT_POOL_ADDRESS_P (x))
4618 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4620 rtx sym = XVECEXP (x, 0, 0);
4621 if (GET_CODE (sym) != SYMBOL_REF
4622 || !CONSTANT_POOL_ADDRESS_P (sym))
4625 if (*ref == NULL_RTX)
4627 else if (*ref != sym)
4633 fmt = GET_RTX_FORMAT (GET_CODE (x));
4634 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4638 find_constant_pool_ref (XEXP (x, i), ref);
4640 else if (fmt[i] == 'E')
4642 for (j = 0; j < XVECLEN (x, i); j++)
4643 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4648 /* Replace every reference to the annotated literal pool
4649 symbol REF in X by its base plus OFFSET. */
4652 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
4660 if (GET_CODE (*x) == UNSPEC
4661 && XINT (*x, 1) == UNSPEC_LTREF
4662 && XVECEXP (*x, 0, 0) == ref)
4664 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
4668 if (GET_CODE (*x) == PLUS
4669 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4670 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4671 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4672 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4674 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4675 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
4679 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4680 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4684 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4686 else if (fmt[i] == 'E')
4688 for (j = 0; j < XVECLEN (*x, i); j++)
4689 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4694 /* Check whether X contains an UNSPEC_LTREL_BASE.
4695 Return its constant pool symbol if found, NULL_RTX otherwise. */
4698 find_ltrel_base (rtx x)
4703 if (GET_CODE (x) == UNSPEC
4704 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4705 return XVECEXP (x, 0, 0);
4707 fmt = GET_RTX_FORMAT (GET_CODE (x));
4708 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4712 rtx fnd = find_ltrel_base (XEXP (x, i));
4716 else if (fmt[i] == 'E')
4718 for (j = 0; j < XVECLEN (x, i); j++)
4720 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4730 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4733 replace_ltrel_base (rtx *x)
4738 if (GET_CODE (*x) == UNSPEC
4739 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4741 *x = XVECEXP (*x, 0, 1);
4745 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4746 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4750 replace_ltrel_base (&XEXP (*x, i));
4752 else if (fmt[i] == 'E')
4754 for (j = 0; j < XVECLEN (*x, i); j++)
4755 replace_ltrel_base (&XVECEXP (*x, i, j));
4761 /* We keep a list of constants which we have to add to internal
4762 constant tables in the middle of large functions. */
4764 #define NR_C_MODES 7
4765 enum machine_mode constant_modes[NR_C_MODES] =
4776 struct constant *next;
4781 struct constant_pool
4783 struct constant_pool *next;
4788 struct constant *constants[NR_C_MODES];
4789 struct constant *execute;
4794 static struct constant_pool * s390_mainpool_start (void);
4795 static void s390_mainpool_finish (struct constant_pool *);
4796 static void s390_mainpool_cancel (struct constant_pool *);
4798 static struct constant_pool * s390_chunkify_start (void);
4799 static void s390_chunkify_finish (struct constant_pool *);
4800 static void s390_chunkify_cancel (struct constant_pool *);
4802 static struct constant_pool *s390_start_pool (struct constant_pool **, rtx);
4803 static void s390_end_pool (struct constant_pool *, rtx);
4804 static void s390_add_pool_insn (struct constant_pool *, rtx);
4805 static struct constant_pool *s390_find_pool (struct constant_pool *, rtx);
4806 static void s390_add_constant (struct constant_pool *, rtx, enum machine_mode);
4807 static rtx s390_find_constant (struct constant_pool *, rtx, enum machine_mode);
4808 static void s390_add_execute (struct constant_pool *, rtx);
4809 static rtx s390_find_execute (struct constant_pool *, rtx);
4810 static rtx s390_execute_label (rtx);
4811 static rtx s390_execute_target (rtx);
4812 static void s390_dump_pool (struct constant_pool *, bool);
4813 static void s390_dump_execute (struct constant_pool *);
4814 static struct constant_pool *s390_alloc_pool (void);
4815 static void s390_free_pool (struct constant_pool *);
4817 /* Create new constant pool covering instructions starting at INSN
4818 and chain it to the end of POOL_LIST. */
4820 static struct constant_pool *
4821 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4823 struct constant_pool *pool, **prev;
4825 pool = s390_alloc_pool ();
4826 pool->first_insn = insn;
4828 for (prev = pool_list; *prev; prev = &(*prev)->next)
4835 /* End range of instructions covered by POOL at INSN and emit
4836 placeholder insn representing the pool. */
4839 s390_end_pool (struct constant_pool *pool, rtx insn)
4841 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
4844 insn = get_last_insn ();
4846 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4847 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4850 /* Add INSN to the list of insns covered by POOL. */
4853 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
4855 bitmap_set_bit (pool->insns, INSN_UID (insn));
4858 /* Return pool out of POOL_LIST that covers INSN. */
4860 static struct constant_pool *
4861 s390_find_pool (struct constant_pool *pool_list, rtx insn)
4863 struct constant_pool *pool;
4865 for (pool = pool_list; pool; pool = pool->next)
4866 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4872 /* Add constant VAL of mode MODE to the constant pool POOL. */
4875 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
4880 for (i = 0; i < NR_C_MODES; i++)
4881 if (constant_modes[i] == mode)
4883 if (i == NR_C_MODES)
4886 for (c = pool->constants[i]; c != NULL; c = c->next)
4887 if (rtx_equal_p (val, c->value))
4892 c = (struct constant *) xmalloc (sizeof *c);
4894 c->label = gen_label_rtx ();
4895 c->next = pool->constants[i];
4896 pool->constants[i] = c;
4897 pool->size += GET_MODE_SIZE (mode);
4901 /* Find constant VAL of mode MODE in the constant pool POOL.
4902 Return an RTX describing the distance from the start of
4903 the pool to the location of the new constant. */
4906 s390_find_constant (struct constant_pool *pool, rtx val,
4907 enum machine_mode mode)
4913 for (i = 0; i < NR_C_MODES; i++)
4914 if (constant_modes[i] == mode)
4916 if (i == NR_C_MODES)
4919 for (c = pool->constants[i]; c != NULL; c = c->next)
4920 if (rtx_equal_p (val, c->value))
4926 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4927 gen_rtx_LABEL_REF (Pmode, pool->label));
4928 offset = gen_rtx_CONST (Pmode, offset);
4932 /* Add execute target for INSN to the constant pool POOL. */
4935 s390_add_execute (struct constant_pool *pool, rtx insn)
4939 for (c = pool->execute; c != NULL; c = c->next)
4940 if (INSN_UID (insn) == INSN_UID (c->value))
4945 rtx label = s390_execute_label (insn);
4948 c = (struct constant *) xmalloc (sizeof *c);
4950 c->label = label == const0_rtx ? gen_label_rtx () : XEXP (label, 0);
4951 c->next = pool->execute;
4953 pool->size += label == const0_rtx ? 6 : 0;
4957 /* Find execute target for INSN in the constant pool POOL.
4958 Return an RTX describing the distance from the start of
4959 the pool to the location of the execute target. */
4962 s390_find_execute (struct constant_pool *pool, rtx insn)
4967 for (c = pool->execute; c != NULL; c = c->next)
4968 if (INSN_UID (insn) == INSN_UID (c->value))
4974 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4975 gen_rtx_LABEL_REF (Pmode, pool->label));
4976 offset = gen_rtx_CONST (Pmode, offset);
4980 /* Check whether INSN is an execute. Return the label_ref to its
4981 execute target template if so, NULL_RTX otherwise. */
4984 s390_execute_label (rtx insn)
4986 if (GET_CODE (insn) == INSN
4987 && GET_CODE (PATTERN (insn)) == PARALLEL
4988 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
4989 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
4990 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
4995 /* For an execute INSN, extract the execute target template. */
4998 s390_execute_target (rtx insn)
5000 rtx pattern = PATTERN (insn);
5001 gcc_assert (s390_execute_label (insn));
5003 if (XVECLEN (pattern, 0) == 2)
5005 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5009 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5012 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5013 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5015 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5021 /* Indicate that INSN cannot be duplicated. This is the case for
5022 execute insns that carry a unique label. */
5025 s390_cannot_copy_insn_p (rtx insn)
5027 rtx label = s390_execute_label (insn);
5028 return label && label != const0_rtx;
5031 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5032 do not emit the pool base label. */
5035 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5038 rtx insn = pool->pool_insn;
5041 /* Switch to rodata section. */
5042 if (TARGET_CPU_ZARCH)
5044 insn = emit_insn_after (gen_pool_section_start (), insn);
5045 INSN_ADDRESSES_NEW (insn, -1);
5048 /* Ensure minimum pool alignment. */
5049 if (TARGET_CPU_ZARCH)
5050 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5052 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5053 INSN_ADDRESSES_NEW (insn, -1);
5055 /* Emit pool base label. */
5058 insn = emit_label_after (pool->label, insn);
5059 INSN_ADDRESSES_NEW (insn, -1);
5062 /* Dump constants in descending alignment requirement order,
5063 ensuring proper alignment for every constant. */
5064 for (i = 0; i < NR_C_MODES; i++)
5065 for (c = pool->constants[i]; c; c = c->next)
5067 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5068 rtx value = c->value;
5069 if (GET_CODE (value) == CONST
5070 && GET_CODE (XEXP (value, 0)) == UNSPEC
5071 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5072 && XVECLEN (XEXP (value, 0), 0) == 1)
5074 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5075 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5076 value = gen_rtx_CONST (VOIDmode, value);
5079 insn = emit_label_after (c->label, insn);
5080 INSN_ADDRESSES_NEW (insn, -1);
5082 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5083 gen_rtvec (1, value),
5084 UNSPECV_POOL_ENTRY);
5085 insn = emit_insn_after (value, insn);
5086 INSN_ADDRESSES_NEW (insn, -1);
5089 /* Ensure minimum alignment for instructions. */
5090 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5091 INSN_ADDRESSES_NEW (insn, -1);
5093 /* Output in-pool execute template insns. */
5094 for (c = pool->execute; c; c = c->next)
5096 if (s390_execute_label (c->value) != const0_rtx)
5099 insn = emit_label_after (c->label, insn);
5100 INSN_ADDRESSES_NEW (insn, -1);
5102 insn = emit_insn_after (s390_execute_target (c->value), insn);
5103 INSN_ADDRESSES_NEW (insn, -1);
5106 /* Switch back to previous section. */
5107 if (TARGET_CPU_ZARCH)
5109 insn = emit_insn_after (gen_pool_section_end (), insn);
5110 INSN_ADDRESSES_NEW (insn, -1);
5113 insn = emit_barrier_after (insn);
5114 INSN_ADDRESSES_NEW (insn, -1);
5116 /* Remove placeholder insn. */
5117 remove_insn (pool->pool_insn);
5119 /* Output out-of-pool execute template isns. */
5120 s390_dump_execute (pool);
5123 /* Dump out the out-of-pool execute template insns in POOL
5124 at the end of the instruction stream. */
5127 s390_dump_execute (struct constant_pool *pool)
5132 for (c = pool->execute; c; c = c->next)
5134 if (s390_execute_label (c->value) == const0_rtx)
5137 insn = emit_label (c->label);
5138 INSN_ADDRESSES_NEW (insn, -1);
5140 insn = emit_insn (s390_execute_target (c->value));
5141 INSN_ADDRESSES_NEW (insn, -1);
5145 /* Allocate new constant_pool structure. */
5147 static struct constant_pool *
5148 s390_alloc_pool (void)
5150 struct constant_pool *pool;
5153 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5155 for (i = 0; i < NR_C_MODES; i++)
5156 pool->constants[i] = NULL;
5158 pool->execute = NULL;
5159 pool->label = gen_label_rtx ();
5160 pool->first_insn = NULL_RTX;
5161 pool->pool_insn = NULL_RTX;
5162 pool->insns = BITMAP_XMALLOC ();
5168 /* Free all memory used by POOL. */
5171 s390_free_pool (struct constant_pool *pool)
5173 struct constant *c, *next;
5176 for (i = 0; i < NR_C_MODES; i++)
5177 for (c = pool->constants[i]; c; c = next)
5183 for (c = pool->execute; c; c = next)
5189 BITMAP_XFREE (pool->insns);
5194 /* Collect main literal pool. Return NULL on overflow. */
5196 static struct constant_pool *
5197 s390_mainpool_start (void)
5199 struct constant_pool *pool;
5202 pool = s390_alloc_pool ();
5204 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5206 if (GET_CODE (insn) == INSN
5207 && GET_CODE (PATTERN (insn)) == SET
5208 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5209 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5211 if (pool->pool_insn)
5213 pool->pool_insn = insn;
5216 if (s390_execute_label (insn))
5218 s390_add_execute (pool, insn);
5220 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5222 rtx pool_ref = NULL_RTX;
5223 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5226 rtx constant = get_pool_constant (pool_ref);
5227 enum machine_mode mode = get_pool_mode (pool_ref);
5228 s390_add_constant (pool, constant, mode);
5233 if (!pool->pool_insn && pool->size > 0)
5236 if (pool->size >= 4096)
5238 /* We're going to chunkify the pool, so remove the main
5239 pool placeholder insn. */
5240 remove_insn (pool->pool_insn);
5242 s390_free_pool (pool);
5249 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5250 Modify the current function to output the pool constants as well as
5251 the pool register setup instruction. */
5254 s390_mainpool_finish (struct constant_pool *pool)
5256 rtx base_reg = cfun->machine->base_reg;
5259 /* If the pool is empty, we're done. */
5260 if (pool->size == 0)
5262 /* However, we may have out-of-pool execute templates. */
5263 s390_dump_execute (pool);
5265 /* We don't actually need a base register after all. */
5266 cfun->machine->base_reg = NULL_RTX;
5268 if (pool->pool_insn)
5269 remove_insn (pool->pool_insn);
5270 s390_free_pool (pool);
5274 /* We need correct insn addresses. */
5275 shorten_branches (get_insns ());
5277 /* On zSeries, we use a LARL to load the pool register. The pool is
5278 located in the .rodata section, so we emit it after the function. */
5279 if (TARGET_CPU_ZARCH)
5281 insn = gen_main_base_64 (base_reg, pool->label);
5282 insn = emit_insn_after (insn, pool->pool_insn);
5283 INSN_ADDRESSES_NEW (insn, -1);
5284 remove_insn (pool->pool_insn);
5286 insn = get_last_insn ();
5287 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5288 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5290 s390_dump_pool (pool, 0);
5293 /* On S/390, if the total size of the function's code plus literal pool
5294 does not exceed 4096 bytes, we use BASR to set up a function base
5295 pointer, and emit the literal pool at the end of the function. */
5296 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5297 + pool->size + 8 /* alignment slop */ < 4096)
5299 insn = gen_main_base_31_small (base_reg, pool->label);
5300 insn = emit_insn_after (insn, pool->pool_insn);
5301 INSN_ADDRESSES_NEW (insn, -1);
5302 remove_insn (pool->pool_insn);
5304 insn = emit_label_after (pool->label, insn);
5305 INSN_ADDRESSES_NEW (insn, -1);
5307 insn = get_last_insn ();
5308 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5309 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5311 s390_dump_pool (pool, 1);
5314 /* Otherwise, we emit an inline literal pool and use BASR to branch
5315 over it, setting up the pool register at the same time. */
5318 rtx pool_end = gen_label_rtx ();
5320 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5321 insn = emit_insn_after (insn, pool->pool_insn);
5322 INSN_ADDRESSES_NEW (insn, -1);
5323 remove_insn (pool->pool_insn);
5325 insn = emit_label_after (pool->label, insn);
5326 INSN_ADDRESSES_NEW (insn, -1);
5328 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5329 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5331 insn = emit_label_after (pool_end, pool->pool_insn);
5332 INSN_ADDRESSES_NEW (insn, -1);
5334 s390_dump_pool (pool, 1);
5338 /* Replace all literal pool references. */
5340 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5343 replace_ltrel_base (&PATTERN (insn));
5345 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5347 rtx addr, pool_ref = NULL_RTX;
5348 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5351 if (s390_execute_label (insn))
5352 addr = s390_find_execute (pool, insn);
5354 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5355 get_pool_mode (pool_ref));
5357 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5358 INSN_CODE (insn) = -1;
5364 /* Free the pool. */
5365 s390_free_pool (pool);
5368 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5369 We have decided we cannot use this pool, so revert all changes
5370 to the current function that were done by s390_mainpool_start. */
5372 s390_mainpool_cancel (struct constant_pool *pool)
5374 /* We didn't actually change the instruction stream, so simply
5375 free the pool memory. */
5376 s390_free_pool (pool);
5380 /* Chunkify the literal pool. */
5382 #define S390_POOL_CHUNK_MIN 0xc00
5383 #define S390_POOL_CHUNK_MAX 0xe00
5385 static struct constant_pool *
5386 s390_chunkify_start (void)
5388 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5391 rtx pending_ltrel = NULL_RTX;
5394 rtx (*gen_reload_base) (rtx, rtx) =
5395 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5398 /* We need correct insn addresses. */
5400 shorten_branches (get_insns ());
5402 /* Scan all insns and move literals to pool chunks. */
5404 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5406 /* Check for pending LTREL_BASE. */
5409 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5412 if (ltrel_base == pending_ltrel)
5413 pending_ltrel = NULL_RTX;
5419 if (s390_execute_label (insn))
5422 curr_pool = s390_start_pool (&pool_list, insn);
5424 s390_add_execute (curr_pool, insn);
5425 s390_add_pool_insn (curr_pool, insn);
5427 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5429 rtx pool_ref = NULL_RTX;
5430 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5433 rtx constant = get_pool_constant (pool_ref);
5434 enum machine_mode mode = get_pool_mode (pool_ref);
5437 curr_pool = s390_start_pool (&pool_list, insn);
5439 s390_add_constant (curr_pool, constant, mode);
5440 s390_add_pool_insn (curr_pool, insn);
5442 /* Don't split the pool chunk between a LTREL_OFFSET load
5443 and the corresponding LTREL_BASE. */
5444 if (GET_CODE (constant) == CONST
5445 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5446 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5450 pending_ltrel = pool_ref;
5455 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5458 s390_add_pool_insn (curr_pool, insn);
5459 /* An LTREL_BASE must follow within the same basic block. */
5465 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5466 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5469 if (TARGET_CPU_ZARCH)
5471 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5474 s390_end_pool (curr_pool, NULL_RTX);
5479 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5480 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5483 /* We will later have to insert base register reload insns.
5484 Those will have an effect on code size, which we need to
5485 consider here. This calculation makes rather pessimistic
5486 worst-case assumptions. */
5487 if (GET_CODE (insn) == CODE_LABEL)
5490 if (chunk_size < S390_POOL_CHUNK_MIN
5491 && curr_pool->size < S390_POOL_CHUNK_MIN)
5494 /* Pool chunks can only be inserted after BARRIERs ... */
5495 if (GET_CODE (insn) == BARRIER)
5497 s390_end_pool (curr_pool, insn);
5502 /* ... so if we don't find one in time, create one. */
5503 else if ((chunk_size > S390_POOL_CHUNK_MAX
5504 || curr_pool->size > S390_POOL_CHUNK_MAX))
5506 rtx label, jump, barrier;
5508 /* We can insert the barrier only after a 'real' insn. */
5509 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5511 if (get_attr_length (insn) == 0)
5514 /* Don't separate LTREL_BASE from the corresponding
5515 LTREL_OFFSET load. */
5519 label = gen_label_rtx ();
5520 jump = emit_jump_insn_after (gen_jump (label), insn);
5521 barrier = emit_barrier_after (jump);
5522 insn = emit_label_after (label, barrier);
5523 JUMP_LABEL (jump) = label;
5524 LABEL_NUSES (label) = 1;
5526 INSN_ADDRESSES_NEW (jump, -1);
5527 INSN_ADDRESSES_NEW (barrier, -1);
5528 INSN_ADDRESSES_NEW (insn, -1);
5530 s390_end_pool (curr_pool, barrier);
5538 s390_end_pool (curr_pool, NULL_RTX);
5543 /* Find all labels that are branched into
5544 from an insn belonging to a different chunk. */
5546 far_labels = BITMAP_XMALLOC ();
5548 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5550 /* Labels marked with LABEL_PRESERVE_P can be target
5551 of non-local jumps, so we have to mark them.
5552 The same holds for named labels.
5554 Don't do that, however, if it is the label before
5557 if (GET_CODE (insn) == CODE_LABEL
5558 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5560 rtx vec_insn = next_real_insn (insn);
5561 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5562 PATTERN (vec_insn) : NULL_RTX;
5564 || !(GET_CODE (vec_pat) == ADDR_VEC
5565 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5566 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5569 /* If we have a direct jump (conditional or unconditional)
5570 or a casesi jump, check all potential targets. */
5571 else if (GET_CODE (insn) == JUMP_INSN)
5573 rtx pat = PATTERN (insn);
5574 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5575 pat = XVECEXP (pat, 0, 0);
5577 if (GET_CODE (pat) == SET)
5579 rtx label = JUMP_LABEL (insn);
5582 if (s390_find_pool (pool_list, label)
5583 != s390_find_pool (pool_list, insn))
5584 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5587 else if (GET_CODE (pat) == PARALLEL
5588 && XVECLEN (pat, 0) == 2
5589 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5590 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5591 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5593 /* Find the jump table used by this casesi jump. */
5594 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5595 rtx vec_insn = next_real_insn (vec_label);
5596 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5597 PATTERN (vec_insn) : NULL_RTX;
5599 && (GET_CODE (vec_pat) == ADDR_VEC
5600 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5602 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
5604 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5606 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5608 if (s390_find_pool (pool_list, label)
5609 != s390_find_pool (pool_list, insn))
5610 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5617 /* Insert base register reload insns before every pool. */
5619 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5621 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5623 rtx insn = curr_pool->first_insn;
5624 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5627 /* Insert base register reload insns at every far label. */
5629 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5630 if (GET_CODE (insn) == CODE_LABEL
5631 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5633 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5636 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5638 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5643 BITMAP_XFREE (far_labels);
5646 /* Recompute insn addresses. */
5648 init_insn_lengths ();
5649 shorten_branches (get_insns ());
5654 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5655 After we have decided to use this list, finish implementing
5656 all changes to the current function as required. */
5659 s390_chunkify_finish (struct constant_pool *pool_list)
5661 struct constant_pool *curr_pool = NULL;
5665 /* Replace all literal pool references. */
5667 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5670 replace_ltrel_base (&PATTERN (insn));
5672 curr_pool = s390_find_pool (pool_list, insn);
5676 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5678 rtx addr, pool_ref = NULL_RTX;
5679 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5682 if (s390_execute_label (insn))
5683 addr = s390_find_execute (curr_pool, insn);
5685 addr = s390_find_constant (curr_pool,
5686 get_pool_constant (pool_ref),
5687 get_pool_mode (pool_ref));
5689 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5690 INSN_CODE (insn) = -1;
5695 /* Dump out all literal pools. */
5697 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5698 s390_dump_pool (curr_pool, 0);
5700 /* Free pool list. */
5704 struct constant_pool *next = pool_list->next;
5705 s390_free_pool (pool_list);
5710 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5711 We have decided we cannot use this list, so revert all changes
5712 to the current function that were done by s390_chunkify_start. */
5715 s390_chunkify_cancel (struct constant_pool *pool_list)
5717 struct constant_pool *curr_pool = NULL;
5720 /* Remove all pool placeholder insns. */
5722 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5724 /* Did we insert an extra barrier? Remove it. */
5725 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5726 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5727 rtx label = NEXT_INSN (curr_pool->pool_insn);
5729 if (jump && GET_CODE (jump) == JUMP_INSN
5730 && barrier && GET_CODE (barrier) == BARRIER
5731 && label && GET_CODE (label) == CODE_LABEL
5732 && GET_CODE (PATTERN (jump)) == SET
5733 && SET_DEST (PATTERN (jump)) == pc_rtx
5734 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5735 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5738 remove_insn (barrier);
5739 remove_insn (label);
5742 remove_insn (curr_pool->pool_insn);
5745 /* Remove all base register reload insns. */
5747 for (insn = get_insns (); insn; )
5749 rtx next_insn = NEXT_INSN (insn);
5751 if (GET_CODE (insn) == INSN
5752 && GET_CODE (PATTERN (insn)) == SET
5753 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5754 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5760 /* Free pool list. */
5764 struct constant_pool *next = pool_list->next;
5765 s390_free_pool (pool_list);
5771 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
5774 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
5778 switch (GET_MODE_CLASS (mode))
5781 if (GET_CODE (exp) != CONST_DOUBLE)
5784 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5785 assemble_real (r, mode, align);
5789 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5798 /* Rework the prologue/epilogue to avoid saving/restoring
5799 registers unnecessarily. */
5802 s390_optimize_prologue (void)
5804 rtx insn, new_insn, next_insn;
5806 /* Do a final recompute of the frame-related data. */
5808 s390_update_frame_layout ();
5810 /* If all special registers are in fact used, there's nothing we
5811 can do, so no point in walking the insn list. */
5813 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
5814 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
5815 && (TARGET_CPU_ZARCH
5816 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
5817 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
5820 /* Search for prologue/epilogue insns and replace them. */
5822 for (insn = get_insns (); insn; insn = next_insn)
5824 int first, last, off;
5825 rtx set, base, offset;
5827 next_insn = NEXT_INSN (insn);
5829 if (GET_CODE (insn) != INSN)
5832 if (GET_CODE (PATTERN (insn)) == PARALLEL
5833 && store_multiple_operation (PATTERN (insn), VOIDmode))
5835 set = XVECEXP (PATTERN (insn), 0, 0);
5836 first = REGNO (SET_SRC (set));
5837 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5838 offset = const0_rtx;
5839 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5840 off = INTVAL (offset);
5842 if (GET_CODE (base) != REG || off < 0)
5844 if (REGNO (base) != STACK_POINTER_REGNUM
5845 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5847 if (first > BASE_REGNUM || last < BASE_REGNUM)
5850 if (cfun_frame_layout.first_save_gpr != -1)
5852 new_insn = save_gprs (base,
5853 off + (cfun_frame_layout.first_save_gpr
5854 - first) * UNITS_PER_WORD,
5855 cfun_frame_layout.first_save_gpr,
5856 cfun_frame_layout.last_save_gpr);
5857 new_insn = emit_insn_before (new_insn, insn);
5858 INSN_ADDRESSES_NEW (new_insn, -1);
5865 if (GET_CODE (PATTERN (insn)) == SET
5866 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
5867 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
5868 || (!TARGET_CPU_ZARCH
5869 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
5870 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
5872 set = PATTERN (insn);
5873 first = REGNO (SET_SRC (set));
5874 offset = const0_rtx;
5875 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
5876 off = INTVAL (offset);
5878 if (GET_CODE (base) != REG || off < 0)
5880 if (REGNO (base) != STACK_POINTER_REGNUM
5881 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5883 if (cfun_frame_layout.first_save_gpr != -1)
5885 new_insn = save_gprs (base,
5886 off + (cfun_frame_layout.first_save_gpr
5887 - first) * UNITS_PER_WORD,
5888 cfun_frame_layout.first_save_gpr,
5889 cfun_frame_layout.last_save_gpr);
5890 new_insn = emit_insn_before (new_insn, insn);
5891 INSN_ADDRESSES_NEW (new_insn, -1);
5898 if (GET_CODE (PATTERN (insn)) == PARALLEL
5899 && load_multiple_operation (PATTERN (insn), VOIDmode))
5901 set = XVECEXP (PATTERN (insn), 0, 0);
5902 first = REGNO (SET_DEST (set));
5903 last = first + XVECLEN (PATTERN (insn), 0) - 1;
5904 offset = const0_rtx;
5905 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5906 off = INTVAL (offset);
5908 if (GET_CODE (base) != REG || off < 0)
5910 if (REGNO (base) != STACK_POINTER_REGNUM
5911 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5913 if (first > BASE_REGNUM || last < BASE_REGNUM)
5916 if (cfun_frame_layout.first_restore_gpr != -1)
5918 new_insn = restore_gprs (base,
5919 off + (cfun_frame_layout.first_restore_gpr
5920 - first) * UNITS_PER_WORD,
5921 cfun_frame_layout.first_restore_gpr,
5922 cfun_frame_layout.last_restore_gpr);
5923 new_insn = emit_insn_before (new_insn, insn);
5924 INSN_ADDRESSES_NEW (new_insn, -1);
5931 if (GET_CODE (PATTERN (insn)) == SET
5932 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
5933 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
5934 || (!TARGET_CPU_ZARCH
5935 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
5936 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
5938 set = PATTERN (insn);
5939 first = REGNO (SET_DEST (set));
5940 offset = const0_rtx;
5941 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
5942 off = INTVAL (offset);
5944 if (GET_CODE (base) != REG || off < 0)
5946 if (REGNO (base) != STACK_POINTER_REGNUM
5947 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
5949 if (cfun_frame_layout.first_restore_gpr != -1)
5951 new_insn = restore_gprs (base,
5952 off + (cfun_frame_layout.first_restore_gpr
5953 - first) * UNITS_PER_WORD,
5954 cfun_frame_layout.first_restore_gpr,
5955 cfun_frame_layout.last_restore_gpr);
5956 new_insn = emit_insn_before (new_insn, insn);
5957 INSN_ADDRESSES_NEW (new_insn, -1);
5966 /* Perform machine-dependent processing. */
5971 bool pool_overflow = false;
5973 /* Make sure all splits have been performed; splits after
5974 machine_dependent_reorg might confuse insn length counts. */
5975 split_all_insns_noflow ();
5978 /* Install the main literal pool and the associated base
5979 register load insns.
5981 In addition, there are two problematic situations we need
5984 - the literal pool might be > 4096 bytes in size, so that
5985 some of its elements cannot be directly accessed
5987 - a branch target might be > 64K away from the branch, so that
5988 it is not possible to use a PC-relative instruction.
5990 To fix those, we split the single literal pool into multiple
5991 pool chunks, reloading the pool base register at various
5992 points throughout the function to ensure it always points to
5993 the pool chunk the following code expects, and / or replace
5994 PC-relative branches by absolute branches.
5996 However, the two problems are interdependent: splitting the
5997 literal pool can move a branch further away from its target,
5998 causing the 64K limit to overflow, and on the other hand,
5999 replacing a PC-relative branch by an absolute branch means
6000 we need to put the branch target address into the literal
6001 pool, possibly causing it to overflow.
6003 So, we loop trying to fix up both problems until we manage
6004 to satisfy both conditions at the same time. Note that the
6005 loop is guaranteed to terminate as every pass of the loop
6006 strictly decreases the total number of PC-relative branches
6007 in the function. (This is not completely true as there
6008 might be branch-over-pool insns introduced by chunkify_start.
6009 Those never need to be split however.) */
6013 struct constant_pool *pool = NULL;
6015 /* Collect the literal pool. */
6018 pool = s390_mainpool_start ();
6020 pool_overflow = true;
6023 /* If literal pool overflowed, start to chunkify it. */
6025 pool = s390_chunkify_start ();
6027 /* Split out-of-range branches. If this has created new
6028 literal pool entries, cancel current chunk list and
6029 recompute it. zSeries machines have large branch
6030 instructions, so we never need to split a branch. */
6031 if (!TARGET_CPU_ZARCH && s390_split_branches ())
6034 s390_chunkify_cancel (pool);
6036 s390_mainpool_cancel (pool);
6041 /* If we made it up to here, both conditions are satisfied.
6042 Finish up literal pool related changes. */
6044 s390_chunkify_finish (pool);
6046 s390_mainpool_finish (pool);
6048 /* We're done splitting branches. */
6049 cfun->machine->split_branches_pending_p = false;
6053 s390_optimize_prologue ();
6057 /* Return an RTL expression representing the value of the return address
6058 for the frame COUNT steps up from the current frame. FRAME is the
6059 frame pointer of that frame. */
6062 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6067 /* Without backchain, we fail for all but the current frame. */
6069 if (!TARGET_BACKCHAIN && !TARGET_KERNEL_BACKCHAIN && count > 0)
6072 /* For the current frame, we need to make sure the initial
6073 value of RETURN_REGNUM is actually saved. */
6077 cfun_frame_layout.save_return_addr_p = true;
6078 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6081 if (TARGET_BACKCHAIN)
6082 offset = RETURN_REGNUM * UNITS_PER_WORD;
6084 offset = -2 * UNITS_PER_WORD;
6086 addr = plus_constant (frame, offset);
6087 addr = memory_address (Pmode, addr);
6088 return gen_rtx_MEM (Pmode, addr);
6091 /* Return an RTL expression representing the back chain stored in
6092 the current stack frame. */
6095 s390_back_chain_rtx (void)
6099 gcc_assert (TARGET_BACKCHAIN || TARGET_KERNEL_BACKCHAIN);
6101 if (TARGET_BACKCHAIN)
6102 chain = stack_pointer_rtx;
6104 chain = plus_constant (stack_pointer_rtx,
6105 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6107 chain = gen_rtx_MEM (Pmode, chain);
6111 /* Find first call clobbered register unused in a function.
6112 This could be used as base register in a leaf function
6113 or for holding the return address before epilogue. */
6116 find_unused_clobbered_reg (void)
6119 for (i = 0; i < 6; i++)
6120 if (!regs_ever_live[i])
6125 /* Determine the frame area which actually has to be accessed
6126 in the function epilogue. The values are stored at the
6127 given pointers AREA_BOTTOM (address of the lowest used stack
6128 address) and AREA_TOP (address of the first item which does
6129 not belong to the stack frame). */
6132 s390_frame_area (int *area_bottom, int *area_top)
6140 if (cfun_frame_layout.first_restore_gpr != -1)
6142 b = (cfun_frame_layout.gprs_offset
6143 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6144 t = b + (cfun_frame_layout.last_restore_gpr
6145 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6148 if (TARGET_64BIT && cfun_save_high_fprs_p)
6150 b = MIN (b, cfun_frame_layout.f8_offset);
6151 t = MAX (t, (cfun_frame_layout.f8_offset
6152 + cfun_frame_layout.high_fprs * 8));
6156 for (i = 2; i < 4; i++)
6157 if (cfun_fpr_bit_p (i))
6159 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6160 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6167 /* Fill cfun->machine with info about register usage of current function.
6168 Return in LIVE_REGS which GPRs are currently considered live. */
6171 s390_register_info (int live_regs[])
6175 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6176 cfun_frame_layout.fpr_bitmap = 0;
6177 cfun_frame_layout.high_fprs = 0;
6179 for (i = 24; i < 32; i++)
6180 if (regs_ever_live[i] && !global_regs[i])
6182 cfun_set_fpr_bit (i - 16);
6183 cfun_frame_layout.high_fprs++;
6186 /* Find first and last gpr to be saved. We trust regs_ever_live
6187 data, except that we don't save and restore global registers.
6189 Also, all registers with special meaning to the compiler need
6190 to be handled extra. */
6192 for (i = 0; i < 16; i++)
6193 live_regs[i] = regs_ever_live[i] && !global_regs[i];
6196 live_regs[PIC_OFFSET_TABLE_REGNUM]
6197 = regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
6199 live_regs[BASE_REGNUM]
6200 = cfun->machine->base_reg
6201 && REGNO (cfun->machine->base_reg) == BASE_REGNUM;
6203 live_regs[RETURN_REGNUM]
6204 = cfun->machine->split_branches_pending_p
6205 || cfun_frame_layout.save_return_addr_p;
6207 live_regs[STACK_POINTER_REGNUM]
6208 = !current_function_is_leaf
6209 || TARGET_TPF_PROFILING
6210 || cfun_save_high_fprs_p
6211 || get_frame_size () > 0
6212 || current_function_calls_alloca
6213 || current_function_stdarg;
6215 for (i = 6; i < 16; i++)
6218 for (j = 15; j > i; j--)
6224 /* Nothing to save/restore. */
6225 cfun_frame_layout.first_save_gpr = -1;
6226 cfun_frame_layout.first_restore_gpr = -1;
6227 cfun_frame_layout.last_save_gpr = -1;
6228 cfun_frame_layout.last_restore_gpr = -1;
6232 /* Save / Restore from gpr i to j. */
6233 cfun_frame_layout.first_save_gpr = i;
6234 cfun_frame_layout.first_restore_gpr = i;
6235 cfun_frame_layout.last_save_gpr = j;
6236 cfun_frame_layout.last_restore_gpr = j;
6239 if (current_function_stdarg)
6241 /* Varargs functions need to save gprs 2 to 6. */
6242 if (cfun_frame_layout.first_save_gpr == -1
6243 || cfun_frame_layout.first_save_gpr > 2)
6244 cfun_frame_layout.first_save_gpr = 2;
6246 if (cfun_frame_layout.last_save_gpr == -1
6247 || cfun_frame_layout.last_save_gpr < 6)
6248 cfun_frame_layout.last_save_gpr = 6;
6250 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6251 if (TARGET_HARD_FLOAT)
6252 for (i = 0; i < (TARGET_64BIT ? 4 : 2); i++)
6253 cfun_set_fpr_bit (i);
6257 for (i = 2; i < 4; i++)
6258 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6259 cfun_set_fpr_bit (i);
6262 /* Fill cfun->machine with info about frame of current function. */
6265 s390_frame_info (void)
6269 cfun_frame_layout.frame_size = get_frame_size ();
6270 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6271 fatal_error ("Total size of local variables exceeds architecture limit.");
6273 cfun_frame_layout.save_backchain_p = (TARGET_BACKCHAIN
6274 || TARGET_KERNEL_BACKCHAIN);
6276 if (TARGET_BACKCHAIN)
6278 cfun_frame_layout.backchain_offset = 0;
6279 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6280 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6281 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6282 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6285 else if (TARGET_KERNEL_BACKCHAIN)
6287 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6289 cfun_frame_layout.gprs_offset
6290 = (cfun_frame_layout.backchain_offset
6291 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6296 cfun_frame_layout.f4_offset
6297 = (cfun_frame_layout.gprs_offset
6298 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6300 cfun_frame_layout.f0_offset
6301 = (cfun_frame_layout.f4_offset
6302 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6306 /* On 31 bit we have to care about alignment of the
6307 floating point regs to provide fastest access. */
6308 cfun_frame_layout.f0_offset
6309 = ((cfun_frame_layout.gprs_offset
6310 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6311 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6313 cfun_frame_layout.f4_offset
6314 = (cfun_frame_layout.f0_offset
6315 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6318 else /* no backchain */
6320 cfun_frame_layout.f4_offset
6321 = (STACK_POINTER_OFFSET
6322 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6324 cfun_frame_layout.f0_offset
6325 = (cfun_frame_layout.f4_offset
6326 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6328 cfun_frame_layout.gprs_offset
6329 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
6332 if (current_function_is_leaf
6333 && !TARGET_TPF_PROFILING
6334 && cfun_frame_layout.frame_size == 0
6335 && !cfun_save_high_fprs_p
6336 && !current_function_calls_alloca
6337 && !current_function_stdarg)
6340 if (TARGET_BACKCHAIN)
6341 cfun_frame_layout.frame_size += (STARTING_FRAME_OFFSET
6342 + cfun_frame_layout.high_fprs * 8);
6345 cfun_frame_layout.frame_size += (cfun_frame_layout.save_backchain_p
6348 /* No alignment trouble here because f8-f15 are only saved under
6350 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6351 cfun_frame_layout.f4_offset),
6352 cfun_frame_layout.gprs_offset)
6353 - cfun_frame_layout.high_fprs * 8);
6355 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6357 for (i = 0; i < 8; i++)
6358 if (cfun_fpr_bit_p (i))
6359 cfun_frame_layout.frame_size += 8;
6361 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6363 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6364 the frame size to sustain 8 byte alignment of stack frames. */
6365 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6366 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6367 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6369 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6373 /* Generate frame layout. Fills in register and frame data for the current
6374 function in cfun->machine. This routine can be called multiple times;
6375 it will re-do the complete frame layout every time. */
6378 s390_init_frame_layout (void)
6380 HOST_WIDE_INT frame_size;
6384 /* If return address register is explicitly used, we need to save it. */
6385 if (regs_ever_live[RETURN_REGNUM]
6386 || !current_function_is_leaf
6387 || TARGET_TPF_PROFILING
6388 || current_function_stdarg
6389 || current_function_calls_eh_return)
6390 cfun_frame_layout.save_return_addr_p = true;
6392 /* On S/390 machines, we may need to perform branch splitting, which
6393 will require both base and return address register. We have no
6394 choice but to assume we're going to need them until right at the
6395 end of the machine dependent reorg phase. */
6396 if (!TARGET_CPU_ZARCH)
6397 cfun->machine->split_branches_pending_p = true;
6401 frame_size = cfun_frame_layout.frame_size;
6403 /* Try to predict whether we'll need the base register. */
6404 base_used = cfun->machine->split_branches_pending_p
6405 || current_function_uses_const_pool
6406 || (!DISP_IN_RANGE (-frame_size)
6407 && !CONST_OK_FOR_CONSTRAINT_P (-frame_size, 'K', "K"));
6409 /* Decide which register to use as literal pool base. In small
6410 leaf functions, try to use an unused call-clobbered register
6411 as base register to avoid save/restore overhead. */
6413 cfun->machine->base_reg = NULL_RTX;
6414 else if (current_function_is_leaf && !regs_ever_live[5])
6415 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6417 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6419 s390_register_info (live_regs);
6422 while (frame_size != cfun_frame_layout.frame_size);
6425 /* Update frame layout. Recompute actual register save data based on
6426 current info and update regs_ever_live for the special registers.
6427 May be called multiple times, but may never cause *more* registers
6428 to be saved than s390_init_frame_layout allocated room for. */
6431 s390_update_frame_layout (void)
6435 s390_register_info (live_regs);
6437 regs_ever_live[BASE_REGNUM] = live_regs[BASE_REGNUM];
6438 regs_ever_live[RETURN_REGNUM] = live_regs[RETURN_REGNUM];
6439 regs_ever_live[STACK_POINTER_REGNUM] = live_regs[STACK_POINTER_REGNUM];
6441 if (cfun->machine->base_reg)
6442 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6445 /* Return true if register FROM can be eliminated via register TO. */
6448 s390_can_eliminate (int from, int to)
6450 gcc_assert (to == STACK_POINTER_REGNUM
6451 || to == HARD_FRAME_POINTER_REGNUM);
6453 gcc_assert (from == FRAME_POINTER_REGNUM
6454 || from == ARG_POINTER_REGNUM
6455 || from == RETURN_ADDRESS_POINTER_REGNUM);
6457 /* Make sure we actually saved the return address. */
6458 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6459 if (!current_function_calls_eh_return
6460 && !current_function_stdarg
6461 && !cfun_frame_layout.save_return_addr_p)
6467 /* Return offset between register FROM and TO initially after prolog. */
6470 s390_initial_elimination_offset (int from, int to)
6472 HOST_WIDE_INT offset;
6475 /* ??? Why are we called for non-eliminable pairs? */
6476 if (!s390_can_eliminate (from, to))
6481 case FRAME_POINTER_REGNUM:
6485 case ARG_POINTER_REGNUM:
6486 s390_init_frame_layout ();
6487 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6490 case RETURN_ADDRESS_POINTER_REGNUM:
6491 s390_init_frame_layout ();
6492 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
6493 gcc_assert (index >= 0);
6494 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6495 offset += index * UNITS_PER_WORD;
6505 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6506 to register BASE. Return generated insn. */
6509 save_fpr (rtx base, int offset, int regnum)
6512 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6513 set_mem_alias_set (addr, s390_sr_alias_set);
6515 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6518 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6519 to register BASE. Return generated insn. */
6522 restore_fpr (rtx base, int offset, int regnum)
6525 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6526 set_mem_alias_set (addr, s390_sr_alias_set);
6528 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6531 /* Generate insn to save registers FIRST to LAST into
6532 the register save area located at offset OFFSET
6533 relative to register BASE. */
6536 save_gprs (rtx base, int offset, int first, int last)
6538 rtx addr, insn, note;
6541 addr = plus_constant (base, offset);
6542 addr = gen_rtx_MEM (Pmode, addr);
6543 set_mem_alias_set (addr, s390_sr_alias_set);
6545 /* Special-case single register. */
6549 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6551 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6553 RTX_FRAME_RELATED_P (insn) = 1;
6558 insn = gen_store_multiple (addr,
6559 gen_rtx_REG (Pmode, first),
6560 GEN_INT (last - first + 1));
6563 /* We need to set the FRAME_RELATED flag on all SETs
6564 inside the store-multiple pattern.
6566 However, we must not emit DWARF records for registers 2..5
6567 if they are stored for use by variable arguments ...
6569 ??? Unfortunately, it is not enough to simply not the the
6570 FRAME_RELATED flags for those SETs, because the first SET
6571 of the PARALLEL is always treated as if it had the flag
6572 set, even if it does not. Therefore we emit a new pattern
6573 without those registers as REG_FRAME_RELATED_EXPR note. */
6577 rtx pat = PATTERN (insn);
6579 for (i = 0; i < XVECLEN (pat, 0); i++)
6580 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
6581 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
6583 RTX_FRAME_RELATED_P (insn) = 1;
6587 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
6588 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
6589 gen_rtx_REG (Pmode, 6),
6590 GEN_INT (last - 6 + 1));
6591 note = PATTERN (note);
6594 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6595 note, REG_NOTES (insn));
6597 for (i = 0; i < XVECLEN (note, 0); i++)
6598 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
6599 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
6601 RTX_FRAME_RELATED_P (insn) = 1;
6607 /* Generate insn to restore registers FIRST to LAST from
6608 the register save area located at offset OFFSET
6609 relative to register BASE. */
6612 restore_gprs (rtx base, int offset, int first, int last)
6616 addr = plus_constant (base, offset);
6617 addr = gen_rtx_MEM (Pmode, addr);
6618 set_mem_alias_set (addr, s390_sr_alias_set);
6620 /* Special-case single register. */
6624 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6626 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
6631 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6633 GEN_INT (last - first + 1));
6637 /* Return insn sequence to load the GOT register. */
6639 static GTY(()) rtx got_symbol;
6641 s390_load_got (void)
6647 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6648 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
6653 if (TARGET_CPU_ZARCH)
6655 emit_move_insn (pic_offset_table_rtx, got_symbol);
6661 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
6662 UNSPEC_LTREL_OFFSET);
6663 offset = gen_rtx_CONST (Pmode, offset);
6664 offset = force_const_mem (Pmode, offset);
6666 emit_move_insn (pic_offset_table_rtx, offset);
6668 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
6670 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
6672 emit_move_insn (pic_offset_table_rtx, offset);
6675 insns = get_insns ();
6680 /* Expand the prologue into a bunch of separate insns. */
6683 s390_emit_prologue (void)
6691 /* Complete frame layout. */
6693 s390_update_frame_layout ();
6695 /* Annotate all constant pool references to let the scheduler know
6696 they implicitly use the base register. */
6698 push_topmost_sequence ();
6700 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6702 annotate_constant_pool_refs (&PATTERN (insn));
6704 pop_topmost_sequence ();
6706 /* Choose best register to use for temp use within prologue.
6707 See below for why TPF must use the register 1. */
6709 if (!current_function_is_leaf && !TARGET_TPF_PROFILING)
6710 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6712 temp_reg = gen_rtx_REG (Pmode, 1);
6714 /* Save call saved gprs. */
6715 if (cfun_frame_layout.first_save_gpr != -1)
6717 insn = save_gprs (stack_pointer_rtx,
6718 cfun_frame_layout.gprs_offset,
6719 cfun_frame_layout.first_save_gpr,
6720 cfun_frame_layout.last_save_gpr);
6724 /* Dummy insn to mark literal pool slot. */
6726 if (cfun->machine->base_reg)
6727 emit_insn (gen_main_pool (cfun->machine->base_reg));
6729 offset = cfun_frame_layout.f0_offset;
6731 /* Save f0 and f2. */
6732 for (i = 0; i < 2; i++)
6734 if (cfun_fpr_bit_p (i))
6736 save_fpr (stack_pointer_rtx, offset, i + 16);
6739 else if (TARGET_BACKCHAIN)
6743 /* Save f4 and f6. */
6744 offset = cfun_frame_layout.f4_offset;
6745 for (i = 2; i < 4; i++)
6747 if (cfun_fpr_bit_p (i))
6749 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6752 /* If f4 and f6 are call clobbered they are saved due to stdargs and
6753 therefore are not frame related. */
6754 if (!call_really_used_regs[i + 16])
6755 RTX_FRAME_RELATED_P (insn) = 1;
6757 else if (TARGET_BACKCHAIN)
6761 if (!TARGET_BACKCHAIN
6762 && cfun_save_high_fprs_p
6763 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
6765 offset = (cfun_frame_layout.f8_offset
6766 + (cfun_frame_layout.high_fprs - 1) * 8);
6768 for (i = 15; i > 7 && offset >= 0; i--)
6769 if (cfun_fpr_bit_p (i))
6771 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6773 RTX_FRAME_RELATED_P (insn) = 1;
6776 if (offset >= cfun_frame_layout.f8_offset)
6780 if (TARGET_BACKCHAIN)
6781 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
6783 /* Decrement stack pointer. */
6785 if (cfun_frame_layout.frame_size > 0)
6787 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
6789 if (s390_stack_size)
6791 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
6792 & ~(s390_stack_guard - 1));
6793 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
6794 GEN_INT (stack_check_mask));
6797 gen_cmpdi (t, const0_rtx);
6799 gen_cmpsi (t, const0_rtx);
6801 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
6802 gen_rtx_REG (CCmode,
6808 if (s390_warn_framesize > 0
6809 && cfun_frame_layout.frame_size >= s390_warn_framesize)
6810 warning ("frame size of `%s' is " HOST_WIDE_INT_PRINT_DEC " bytes",
6811 current_function_name (), cfun_frame_layout.frame_size);
6813 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
6814 warning ("`%s' uses dynamic stack allocation", current_function_name ());
6816 /* Save incoming stack pointer into temp reg. */
6817 if (cfun_frame_layout.save_backchain_p || next_fpr)
6818 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
6820 /* Subtract frame size from stack pointer. */
6822 if (DISP_IN_RANGE (INTVAL (frame_off)))
6824 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6825 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6827 insn = emit_insn (insn);
6831 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6832 frame_off = force_const_mem (Pmode, frame_off);
6834 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
6835 annotate_constant_pool_refs (&PATTERN (insn));
6838 RTX_FRAME_RELATED_P (insn) = 1;
6840 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6841 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6842 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6843 GEN_INT (-cfun_frame_layout.frame_size))),
6846 /* Set backchain. */
6848 if (cfun_frame_layout.save_backchain_p)
6850 if (cfun_frame_layout.backchain_offset)
6851 addr = gen_rtx_MEM (Pmode,
6852 plus_constant (stack_pointer_rtx,
6853 cfun_frame_layout.backchain_offset));
6855 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
6856 set_mem_alias_set (addr, s390_sr_alias_set);
6857 insn = emit_insn (gen_move_insn (addr, temp_reg));
6860 /* If we support asynchronous exceptions (e.g. for Java),
6861 we need to make sure the backchain pointer is set up
6862 before any possibly trapping memory access. */
6864 if (cfun_frame_layout.save_backchain_p && flag_non_call_exceptions)
6866 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
6867 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
6871 /* Save fprs 8 - 15 (64 bit ABI). */
6873 if (cfun_save_high_fprs_p && next_fpr)
6875 insn = emit_insn (gen_add2_insn (temp_reg,
6876 GEN_INT (cfun_frame_layout.f8_offset)));
6880 for (i = 24; i <= next_fpr; i++)
6881 if (cfun_fpr_bit_p (i - 16))
6883 rtx addr = plus_constant (stack_pointer_rtx,
6884 cfun_frame_layout.frame_size
6885 + cfun_frame_layout.f8_offset
6888 insn = save_fpr (temp_reg, offset, i);
6890 RTX_FRAME_RELATED_P (insn) = 1;
6892 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6893 gen_rtx_SET (VOIDmode,
6894 gen_rtx_MEM (DFmode, addr),
6895 gen_rtx_REG (DFmode, i)),
6900 /* Set frame pointer, if needed. */
6902 if (frame_pointer_needed)
6904 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6905 RTX_FRAME_RELATED_P (insn) = 1;
6908 /* Set up got pointer, if needed. */
6910 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6912 rtx insns = s390_load_got ();
6914 for (insn = insns; insn; insn = NEXT_INSN (insn))
6916 annotate_constant_pool_refs (&PATTERN (insn));
6918 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6925 if (TARGET_TPF_PROFILING)
6927 /* Generate a BAS instruction to serve as a function
6928 entry intercept to facilitate the use of tracing
6929 algorithms located at the branch target. */
6930 emit_insn (gen_prologue_tpf ());
6932 /* Emit a blockage here so that all code
6933 lies between the profiling mechanisms. */
6934 emit_insn (gen_blockage ());
6938 /* Expand the epilogue into a bunch of separate insns. */
6941 s390_emit_epilogue (bool sibcall)
6943 rtx frame_pointer, return_reg;
6944 int area_bottom, area_top, offset = 0;
6949 if (TARGET_TPF_PROFILING)
6952 /* Generate a BAS instruction to serve as a function
6953 entry intercept to facilitate the use of tracing
6954 algorithms located at the branch target. */
6956 /* Emit a blockage here so that all code
6957 lies between the profiling mechanisms. */
6958 emit_insn (gen_blockage ());
6960 emit_insn (gen_epilogue_tpf ());
6963 /* Check whether to use frame or stack pointer for restore. */
6965 frame_pointer = (frame_pointer_needed
6966 ? hard_frame_pointer_rtx : stack_pointer_rtx);
6968 s390_frame_area (&area_bottom, &area_top);
6970 /* Check whether we can access the register save area.
6971 If not, increment the frame pointer as required. */
6973 if (area_top <= area_bottom)
6975 /* Nothing to restore. */
6977 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
6978 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
6980 /* Area is in range. */
6981 offset = cfun_frame_layout.frame_size;
6985 rtx insn, frame_off;
6987 offset = area_bottom < 0 ? -area_bottom : 0;
6988 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
6990 if (DISP_IN_RANGE (INTVAL (frame_off)))
6992 insn = gen_rtx_SET (VOIDmode, frame_pointer,
6993 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
6994 insn = emit_insn (insn);
6998 if (!CONST_OK_FOR_CONSTRAINT_P (INTVAL (frame_off), 'K', "K"))
6999 frame_off = force_const_mem (Pmode, frame_off);
7001 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
7002 annotate_constant_pool_refs (&PATTERN (insn));
7006 /* Restore call saved fprs. */
7010 if (cfun_save_high_fprs_p)
7012 next_offset = cfun_frame_layout.f8_offset;
7013 for (i = 24; i < 32; i++)
7015 if (cfun_fpr_bit_p (i - 16))
7017 restore_fpr (frame_pointer,
7018 offset + next_offset, i);
7027 next_offset = cfun_frame_layout.f4_offset;
7028 for (i = 18; i < 20; i++)
7030 if (cfun_fpr_bit_p (i - 16))
7032 restore_fpr (frame_pointer,
7033 offset + next_offset, i);
7036 else if (TARGET_BACKCHAIN)
7042 /* Return register. */
7044 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7046 /* Restore call saved gprs. */
7048 if (cfun_frame_layout.first_restore_gpr != -1)
7053 /* Check for global register and save them
7054 to stack location from where they get restored. */
7056 for (i = cfun_frame_layout.first_restore_gpr;
7057 i <= cfun_frame_layout.last_restore_gpr;
7060 /* These registers are special and need to be
7061 restored in any case. */
7062 if (i == STACK_POINTER_REGNUM
7063 || i == RETURN_REGNUM
7065 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
7070 addr = plus_constant (frame_pointer,
7071 offset + cfun_frame_layout.gprs_offset
7072 + (i - cfun_frame_layout.first_save_gpr)
7074 addr = gen_rtx_MEM (Pmode, addr);
7075 set_mem_alias_set (addr, s390_sr_alias_set);
7076 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7082 /* Fetch return address from stack before load multiple,
7083 this will do good for scheduling. */
7085 if (cfun_frame_layout.save_return_addr_p
7086 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7087 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7089 int return_regnum = find_unused_clobbered_reg();
7092 return_reg = gen_rtx_REG (Pmode, return_regnum);
7094 addr = plus_constant (frame_pointer,
7095 offset + cfun_frame_layout.gprs_offset
7097 - cfun_frame_layout.first_save_gpr)
7099 addr = gen_rtx_MEM (Pmode, addr);
7100 set_mem_alias_set (addr, s390_sr_alias_set);
7101 emit_move_insn (return_reg, addr);
7105 insn = restore_gprs (frame_pointer,
7106 offset + cfun_frame_layout.gprs_offset
7107 + (cfun_frame_layout.first_restore_gpr
7108 - cfun_frame_layout.first_save_gpr)
7110 cfun_frame_layout.first_restore_gpr,
7111 cfun_frame_layout.last_restore_gpr);
7118 /* Return to caller. */
7120 p = rtvec_alloc (2);
7122 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7123 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7124 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7129 /* Return the size in bytes of a function argument of
7130 type TYPE and/or mode MODE. At least one of TYPE or
7131 MODE must be specified. */
7134 s390_function_arg_size (enum machine_mode mode, tree type)
7137 return int_size_in_bytes (type);
7139 /* No type info available for some library calls ... */
7140 if (mode != BLKmode)
7141 return GET_MODE_SIZE (mode);
7143 /* If we have neither type nor mode, abort */
7147 /* Return true if a function argument of type TYPE and mode MODE
7148 is to be passed in a floating-point register, if available. */
7151 s390_function_arg_float (enum machine_mode mode, tree type)
7153 int size = s390_function_arg_size (mode, type);
7157 /* Soft-float changes the ABI: no floating-point registers are used. */
7158 if (TARGET_SOFT_FLOAT)
7161 /* No type info available for some library calls ... */
7163 return mode == SFmode || mode == DFmode;
7165 /* The ABI says that record types with a single member are treated
7166 just like that member would be. */
7167 while (TREE_CODE (type) == RECORD_TYPE)
7169 tree field, single = NULL_TREE;
7171 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7173 if (TREE_CODE (field) != FIELD_DECL)
7176 if (single == NULL_TREE)
7177 single = TREE_TYPE (field);
7182 if (single == NULL_TREE)
7188 return TREE_CODE (type) == REAL_TYPE;
7191 /* Return true if a function argument of type TYPE and mode MODE
7192 is to be passed in an integer register, or a pair of integer
7193 registers, if available. */
7196 s390_function_arg_integer (enum machine_mode mode, tree type)
7198 int size = s390_function_arg_size (mode, type);
7202 /* No type info available for some library calls ... */
7204 return GET_MODE_CLASS (mode) == MODE_INT
7205 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7207 /* We accept small integral (and similar) types. */
7208 if (INTEGRAL_TYPE_P (type)
7209 || POINTER_TYPE_P (type)
7210 || TREE_CODE (type) == OFFSET_TYPE
7211 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7214 /* We also accept structs of size 1, 2, 4, 8 that are not
7215 passed in floating-point registers. */
7216 if (AGGREGATE_TYPE_P (type)
7217 && exact_log2 (size) >= 0
7218 && !s390_function_arg_float (mode, type))
7224 /* Return 1 if a function argument of type TYPE and mode MODE
7225 is to be passed by reference. The ABI specifies that only
7226 structures of size 1, 2, 4, or 8 bytes are passed by value,
7227 all other structures (and complex numbers) are passed by
7231 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7232 enum machine_mode mode, tree type,
7233 bool named ATTRIBUTE_UNUSED)
7235 int size = s390_function_arg_size (mode, type);
7241 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
7244 if (TREE_CODE (type) == COMPLEX_TYPE
7245 || TREE_CODE (type) == VECTOR_TYPE)
7252 /* Update the data in CUM to advance over an argument of mode MODE and
7253 data type TYPE. (TYPE is null for libcalls where that information
7254 may not be available.). The boolean NAMED specifies whether the
7255 argument is a named argument (as opposed to an unnamed argument
7256 matching an ellipsis). */
7259 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7260 tree type, int named ATTRIBUTE_UNUSED)
7262 if (s390_function_arg_float (mode, type))
7266 else if (s390_function_arg_integer (mode, type))
7268 int size = s390_function_arg_size (mode, type);
7269 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7275 /* Define where to put the arguments to a function.
7276 Value is zero to push the argument on the stack,
7277 or a hard register in which to store the argument.
7279 MODE is the argument's machine mode.
7280 TYPE is the data type of the argument (as a tree).
7281 This is null for libcalls where that information may
7283 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7284 the preceding args and about the function being called.
7285 NAMED is nonzero if this argument is a named parameter
7286 (otherwise it is an extra parameter matching an ellipsis).
7288 On S/390, we use general purpose registers 2 through 6 to
7289 pass integer, pointer, and certain structure arguments, and
7290 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7291 to pass floating point arguments. All remaining arguments
7292 are pushed to the stack. */
7295 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7296 int named ATTRIBUTE_UNUSED)
7298 if (s390_function_arg_float (mode, type))
7300 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
7303 return gen_rtx_REG (mode, cum->fprs + 16);
7305 else if (s390_function_arg_integer (mode, type))
7307 int size = s390_function_arg_size (mode, type);
7308 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
7310 if (cum->gprs + n_gprs > 5)
7313 return gen_rtx_REG (mode, cum->gprs + 2);
7316 /* After the real arguments, expand_call calls us once again
7317 with a void_type_node type. Whatever we return here is
7318 passed as operand 2 to the call expanders.
7320 We don't need this feature ... */
7321 else if (type == void_type_node)
7327 /* Return true if return values of type TYPE should be returned
7328 in a memory buffer whose address is passed by the caller as
7329 hidden first argument. */
7332 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7334 /* We accept small integral (and similar) types. */
7335 if (INTEGRAL_TYPE_P (type)
7336 || POINTER_TYPE_P (type)
7337 || TREE_CODE (type) == OFFSET_TYPE
7338 || TREE_CODE (type) == REAL_TYPE)
7339 return int_size_in_bytes (type) > 8;
7341 /* Aggregates and similar constructs are always returned
7343 if (AGGREGATE_TYPE_P (type)
7344 || TREE_CODE (type) == COMPLEX_TYPE
7345 || TREE_CODE (type) == VECTOR_TYPE)
7348 /* ??? We get called on all sorts of random stuff from
7349 aggregate_value_p. We can't abort, but it's not clear
7350 what's safe to return. Pretend it's a struct I guess. */
7354 /* Define where to return a (scalar) value of type TYPE.
7355 If TYPE is null, define where to return a (scalar)
7356 value of mode MODE from a libcall. */
7359 s390_function_value (tree type, enum machine_mode mode)
7363 int unsignedp = TYPE_UNSIGNED (type);
7364 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
7367 if (GET_MODE_CLASS (mode) != MODE_INT
7368 && GET_MODE_CLASS (mode) != MODE_FLOAT)
7370 if (GET_MODE_SIZE (mode) > 8)
7373 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7374 return gen_rtx_REG (mode, 16);
7376 return gen_rtx_REG (mode, 2);
7380 /* Create and return the va_list datatype.
7382 On S/390, va_list is an array type equivalent to
7384 typedef struct __va_list_tag
7388 void *__overflow_arg_area;
7389 void *__reg_save_area;
7392 where __gpr and __fpr hold the number of general purpose
7393 or floating point arguments used up to now, respectively,
7394 __overflow_arg_area points to the stack location of the
7395 next argument passed on the stack, and __reg_save_area
7396 always points to the start of the register area in the
7397 call frame of the current function. The function prologue
7398 saves all registers used for argument passing into this
7399 area if the function uses variable arguments. */
7402 s390_build_builtin_va_list (void)
7404 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7406 record = lang_hooks.types.make_type (RECORD_TYPE);
7409 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
7411 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7412 long_integer_type_node);
7413 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7414 long_integer_type_node);
7415 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7417 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
7420 DECL_FIELD_CONTEXT (f_gpr) = record;
7421 DECL_FIELD_CONTEXT (f_fpr) = record;
7422 DECL_FIELD_CONTEXT (f_ovf) = record;
7423 DECL_FIELD_CONTEXT (f_sav) = record;
7425 TREE_CHAIN (record) = type_decl;
7426 TYPE_NAME (record) = type_decl;
7427 TYPE_FIELDS (record) = f_gpr;
7428 TREE_CHAIN (f_gpr) = f_fpr;
7429 TREE_CHAIN (f_fpr) = f_ovf;
7430 TREE_CHAIN (f_ovf) = f_sav;
7432 layout_type (record);
7434 /* The correct type is an array type of one element. */
7435 return build_array_type (record, build_index_type (size_zero_node));
7438 /* Implement va_start by filling the va_list structure VALIST.
7439 STDARG_P is always true, and ignored.
7440 NEXTARG points to the first anonymous stack argument.
7442 The following global variables are used to initialize
7443 the va_list structure:
7445 current_function_args_info:
7446 holds number of gprs and fprs used for named arguments.
7447 current_function_arg_offset_rtx:
7448 holds the offset of the first anonymous stack argument
7449 (relative to the virtual arg pointer). */
7452 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7454 HOST_WIDE_INT n_gpr, n_fpr;
7456 tree f_gpr, f_fpr, f_ovf, f_sav;
7457 tree gpr, fpr, ovf, sav, t;
7459 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7460 f_fpr = TREE_CHAIN (f_gpr);
7461 f_ovf = TREE_CHAIN (f_fpr);
7462 f_sav = TREE_CHAIN (f_ovf);
7464 valist = build_va_arg_indirect_ref (valist);
7465 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7466 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7467 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7468 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7470 /* Count number of gp and fp argument registers used. */
7472 n_gpr = current_function_args_info.gprs;
7473 n_fpr = current_function_args_info.fprs;
7475 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
7476 build_int_cst (NULL_TREE, n_gpr));
7477 TREE_SIDE_EFFECTS (t) = 1;
7478 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7480 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
7481 build_int_cst (NULL_TREE, n_fpr));
7482 TREE_SIDE_EFFECTS (t) = 1;
7483 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7485 /* Find the overflow area. */
7486 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
7488 off = INTVAL (current_function_arg_offset_rtx);
7489 off = off < 0 ? 0 : off;
7490 if (TARGET_DEBUG_ARG)
7491 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7492 (int)n_gpr, (int)n_fpr, off);
7494 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7496 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
7497 TREE_SIDE_EFFECTS (t) = 1;
7498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7500 /* Find the register save area. */
7501 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7502 if (TARGET_KERNEL_BACKCHAIN)
7503 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7504 build_int_cst (NULL_TREE,
7505 -(RETURN_REGNUM - 2) * UNITS_PER_WORD
7506 - (TARGET_64BIT ? 4 : 2) * 8));
7508 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7509 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7511 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
7512 TREE_SIDE_EFFECTS (t) = 1;
7513 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7516 /* Implement va_arg by updating the va_list structure
7517 VALIST as required to retrieve an argument of type
7518 TYPE, and returning that argument.
7520 Generates code equivalent to:
7522 if (integral value) {
7523 if (size <= 4 && args.gpr < 5 ||
7524 size > 4 && args.gpr < 4 )
7525 ret = args.reg_save_area[args.gpr+8]
7527 ret = *args.overflow_arg_area++;
7528 } else if (float value) {
7530 ret = args.reg_save_area[args.fpr+64]
7532 ret = *args.overflow_arg_area++;
7533 } else if (aggregate value) {
7535 ret = *args.reg_save_area[args.gpr]
7537 ret = **args.overflow_arg_area++;
7541 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
7542 tree *post_p ATTRIBUTE_UNUSED)
7544 tree f_gpr, f_fpr, f_ovf, f_sav;
7545 tree gpr, fpr, ovf, sav, reg, t, u;
7546 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
7547 tree lab_false, lab_over, addr;
7549 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7550 f_fpr = TREE_CHAIN (f_gpr);
7551 f_ovf = TREE_CHAIN (f_fpr);
7552 f_sav = TREE_CHAIN (f_ovf);
7554 valist = build_va_arg_indirect_ref (valist);
7555 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7556 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7557 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7558 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7560 size = int_size_in_bytes (type);
7562 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7564 if (TARGET_DEBUG_ARG)
7566 fprintf (stderr, "va_arg: aggregate type");
7570 /* Aggregates are passed by reference. */
7575 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7576 will be added by s390_frame_info because for va_args always an even
7577 number of gprs has to be saved r15-r2 = 14 regs. */
7578 sav_ofs = (TARGET_KERNEL_BACKCHAIN
7579 ? (TARGET_64BIT ? 4 : 2) * 8 : 2 * UNITS_PER_WORD);
7580 sav_scale = UNITS_PER_WORD;
7581 size = UNITS_PER_WORD;
7584 else if (s390_function_arg_float (TYPE_MODE (type), type))
7586 if (TARGET_DEBUG_ARG)
7588 fprintf (stderr, "va_arg: float type");
7592 /* FP args go in FP registers, if present. */
7596 sav_ofs = TARGET_KERNEL_BACKCHAIN ? 0 : 16 * UNITS_PER_WORD;
7598 /* TARGET_64BIT has up to 4 parameter in fprs */
7599 max_reg = TARGET_64BIT ? 3 : 1;
7603 if (TARGET_DEBUG_ARG)
7605 fprintf (stderr, "va_arg: other type");
7609 /* Otherwise into GP registers. */
7612 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7614 /* TARGET_KERNEL_BACKCHAIN on 31 bit: It is assumed here that no padding
7615 will be added by s390_frame_info because for va_args always an even
7616 number of gprs has to be saved r15-r2 = 14 regs. */
7617 sav_ofs = TARGET_KERNEL_BACKCHAIN ?
7618 (TARGET_64BIT ? 4 : 2) * 8 : 2*UNITS_PER_WORD;
7620 if (size < UNITS_PER_WORD)
7621 sav_ofs += UNITS_PER_WORD - size;
7623 sav_scale = UNITS_PER_WORD;
7630 /* Pull the value out of the saved registers ... */
7632 lab_false = create_artificial_label ();
7633 lab_over = create_artificial_label ();
7634 addr = create_tmp_var (ptr_type_node, "addr");
7636 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
7637 t = build2 (GT_EXPR, boolean_type_node, reg, t);
7638 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7639 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7640 gimplify_and_add (t, pre_p);
7642 t = build2 (PLUS_EXPR, ptr_type_node, sav,
7643 fold_convert (ptr_type_node, size_int (sav_ofs)));
7644 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
7645 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
7646 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
7648 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
7649 gimplify_and_add (t, pre_p);
7651 t = build1 (GOTO_EXPR, void_type_node, lab_over);
7652 gimplify_and_add (t, pre_p);
7654 t = build1 (LABEL_EXPR, void_type_node, lab_false);
7655 append_to_statement_list (t, pre_p);
7658 /* ... Otherwise out of the overflow area. */
7661 if (size < UNITS_PER_WORD)
7662 t = build2 (PLUS_EXPR, ptr_type_node, t,
7663 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
7665 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7667 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
7668 gimplify_and_add (u, pre_p);
7670 t = build2 (PLUS_EXPR, ptr_type_node, t,
7671 fold_convert (ptr_type_node, size_int (size)));
7672 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
7673 gimplify_and_add (t, pre_p);
7675 t = build1 (LABEL_EXPR, void_type_node, lab_over);
7676 append_to_statement_list (t, pre_p);
7679 /* Increment register save count. */
7681 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
7682 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
7683 gimplify_and_add (u, pre_p);
7687 t = build_pointer_type (build_pointer_type (type));
7688 addr = fold_convert (t, addr);
7689 addr = build_va_arg_indirect_ref (addr);
7693 t = build_pointer_type (type);
7694 addr = fold_convert (t, addr);
7697 return build_va_arg_indirect_ref (addr);
7705 S390_BUILTIN_THREAD_POINTER,
7706 S390_BUILTIN_SET_THREAD_POINTER,
7711 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
7716 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
7722 s390_init_builtins (void)
7726 ftype = build_function_type (ptr_type_node, void_list_node);
7727 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7728 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7731 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
7732 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7733 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7737 /* Expand an expression EXP that calls a built-in function,
7738 with result going to TARGET if that's convenient
7739 (and in mode MODE if that's convenient).
7740 SUBTARGET may be used as the target for computing one of EXP's operands.
7741 IGNORE is nonzero if the value is to be ignored. */
7744 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7745 enum machine_mode mode ATTRIBUTE_UNUSED,
7746 int ignore ATTRIBUTE_UNUSED)
7750 unsigned int const *code_for_builtin =
7751 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7753 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7754 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7755 tree arglist = TREE_OPERAND (exp, 1);
7756 enum insn_code icode;
7757 rtx op[MAX_ARGS], pat;
7761 if (fcode >= S390_BUILTIN_max)
7762 internal_error ("bad builtin fcode");
7763 icode = code_for_builtin[fcode];
7765 internal_error ("bad builtin fcode");
7767 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
7769 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7771 arglist = TREE_CHAIN (arglist), arity++)
7773 const struct insn_operand_data *insn_op;
7775 tree arg = TREE_VALUE (arglist);
7776 if (arg == error_mark_node)
7778 if (arity > MAX_ARGS)
7781 insn_op = &insn_data[icode].operand[arity + nonvoid];
7783 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7785 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7786 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
7791 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7793 || GET_MODE (target) != tmode
7794 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7795 target = gen_reg_rtx (tmode);
7801 pat = GEN_FCN (icode) (target);
7805 pat = GEN_FCN (icode) (target, op[0]);
7807 pat = GEN_FCN (icode) (op[0]);
7810 pat = GEN_FCN (icode) (target, op[0], op[1]);
7826 /* Output assembly code for the trampoline template to
7829 On S/390, we use gpr 1 internally in the trampoline code;
7830 gpr 0 is used to hold the static chain. */
7833 s390_trampoline_template (FILE *file)
7836 op[0] = gen_rtx_REG (Pmode, 0);
7837 op[1] = gen_rtx_REG (Pmode, 1);
7841 output_asm_insn ("basr\t%1,0", op);
7842 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7843 output_asm_insn ("br\t%1", op);
7844 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
7848 output_asm_insn ("basr\t%1,0", op);
7849 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7850 output_asm_insn ("br\t%1", op);
7851 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
7855 /* Emit RTL insns to initialize the variable parts of a trampoline.
7856 FNADDR is an RTX for the address of the function's pure code.
7857 CXT is an RTX for the static chain value for the function. */
7860 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
7862 emit_move_insn (gen_rtx_MEM (Pmode,
7863 memory_address (Pmode,
7864 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
7865 emit_move_insn (gen_rtx_MEM (Pmode,
7866 memory_address (Pmode,
7867 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
7870 /* Return rtx for 64-bit constant formed from the 32-bit subwords
7871 LOW and HIGH, independent of the host word size. */
7874 s390_gen_rtx_const_DI (int high, int low)
7876 #if HOST_BITS_PER_WIDE_INT >= 64
7878 val = (HOST_WIDE_INT)high;
7880 val |= (HOST_WIDE_INT)low;
7882 return GEN_INT (val);
7884 #if HOST_BITS_PER_WIDE_INT >= 32
7885 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
7892 /* Output assembler code to FILE to increment profiler label # LABELNO
7893 for profiling a function entry. */
7896 s390_function_profiler (FILE *file, int labelno)
7901 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
7903 fprintf (file, "# function profiler \n");
7905 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
7906 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7907 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
7909 op[2] = gen_rtx_REG (Pmode, 1);
7910 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
7911 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
7913 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
7916 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
7917 op[4] = gen_rtx_CONST (Pmode, op[4]);
7922 output_asm_insn ("stg\t%0,%1", op);
7923 output_asm_insn ("larl\t%2,%3", op);
7924 output_asm_insn ("brasl\t%0,%4", op);
7925 output_asm_insn ("lg\t%0,%1", op);
7929 op[6] = gen_label_rtx ();
7931 output_asm_insn ("st\t%0,%1", op);
7932 output_asm_insn ("bras\t%2,%l6", op);
7933 output_asm_insn (".long\t%4", op);
7934 output_asm_insn (".long\t%3", op);
7935 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7936 output_asm_insn ("l\t%0,0(%2)", op);
7937 output_asm_insn ("l\t%2,4(%2)", op);
7938 output_asm_insn ("basr\t%0,%0", op);
7939 output_asm_insn ("l\t%0,%1", op);
7943 op[5] = gen_label_rtx ();
7944 op[6] = gen_label_rtx ();
7946 output_asm_insn ("st\t%0,%1", op);
7947 output_asm_insn ("bras\t%2,%l6", op);
7948 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
7949 output_asm_insn (".long\t%4-%l5", op);
7950 output_asm_insn (".long\t%3-%l5", op);
7951 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7952 output_asm_insn ("lr\t%0,%2", op);
7953 output_asm_insn ("a\t%0,0(%2)", op);
7954 output_asm_insn ("a\t%2,4(%2)", op);
7955 output_asm_insn ("basr\t%0,%0", op);
7956 output_asm_insn ("l\t%0,%1", op);
7960 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7961 into its SYMBOL_REF_FLAGS. */
7964 s390_encode_section_info (tree decl, rtx rtl, int first)
7966 default_encode_section_info (decl, rtl, first);
7968 /* If a variable has a forced alignment to < 2 bytes, mark it with
7969 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
7970 if (TREE_CODE (decl) == VAR_DECL
7971 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
7972 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
7975 /* Output thunk to FILE that implements a C++ virtual function call (with
7976 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7977 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7978 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7979 relative to the resulting this pointer. */
7982 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
7983 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
7989 /* Operand 0 is the target function. */
7990 op[0] = XEXP (DECL_RTL (function), 0);
7991 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
7994 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
7995 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
7996 op[0] = gen_rtx_CONST (Pmode, op[0]);
7999 /* Operand 1 is the 'this' pointer. */
8000 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
8001 op[1] = gen_rtx_REG (Pmode, 3);
8003 op[1] = gen_rtx_REG (Pmode, 2);
8005 /* Operand 2 is the delta. */
8006 op[2] = GEN_INT (delta);
8008 /* Operand 3 is the vcall_offset. */
8009 op[3] = GEN_INT (vcall_offset);
8011 /* Operand 4 is the temporary register. */
8012 op[4] = gen_rtx_REG (Pmode, 1);
8014 /* Operands 5 to 8 can be used as labels. */
8020 /* Operand 9 can be used for temporary register. */
8023 /* Generate code. */
8026 /* Setup literal pool pointer if required. */
8027 if ((!DISP_IN_RANGE (delta)
8028 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8029 || (!DISP_IN_RANGE (vcall_offset)
8030 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8032 op[5] = gen_label_rtx ();
8033 output_asm_insn ("larl\t%4,%5", op);
8036 /* Add DELTA to this pointer. */
8039 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8040 output_asm_insn ("la\t%1,%2(%1)", op);
8041 else if (DISP_IN_RANGE (delta))
8042 output_asm_insn ("lay\t%1,%2(%1)", op);
8043 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8044 output_asm_insn ("aghi\t%1,%2", op);
8047 op[6] = gen_label_rtx ();
8048 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8052 /* Perform vcall adjustment. */
8055 if (DISP_IN_RANGE (vcall_offset))
8057 output_asm_insn ("lg\t%4,0(%1)", op);
8058 output_asm_insn ("ag\t%1,%3(%4)", op);
8060 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8062 output_asm_insn ("lghi\t%4,%3", op);
8063 output_asm_insn ("ag\t%4,0(%1)", op);
8064 output_asm_insn ("ag\t%1,0(%4)", op);
8068 op[7] = gen_label_rtx ();
8069 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8070 output_asm_insn ("ag\t%4,0(%1)", op);
8071 output_asm_insn ("ag\t%1,0(%4)", op);
8075 /* Jump to target. */
8076 output_asm_insn ("jg\t%0", op);
8078 /* Output literal pool if required. */
8081 output_asm_insn (".align\t4", op);
8082 targetm.asm_out.internal_label (file, "L",
8083 CODE_LABEL_NUMBER (op[5]));
8087 targetm.asm_out.internal_label (file, "L",
8088 CODE_LABEL_NUMBER (op[6]));
8089 output_asm_insn (".long\t%2", op);
8093 targetm.asm_out.internal_label (file, "L",
8094 CODE_LABEL_NUMBER (op[7]));
8095 output_asm_insn (".long\t%3", op);
8100 /* Setup base pointer if required. */
8102 || (!DISP_IN_RANGE (delta)
8103 && !CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8104 || (!DISP_IN_RANGE (delta)
8105 && !CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K")))
8107 op[5] = gen_label_rtx ();
8108 output_asm_insn ("basr\t%4,0", op);
8109 targetm.asm_out.internal_label (file, "L",
8110 CODE_LABEL_NUMBER (op[5]));
8113 /* Add DELTA to this pointer. */
8116 if (CONST_OK_FOR_CONSTRAINT_P (delta, 'J', "J"))
8117 output_asm_insn ("la\t%1,%2(%1)", op);
8118 else if (DISP_IN_RANGE (delta))
8119 output_asm_insn ("lay\t%1,%2(%1)", op);
8120 else if (CONST_OK_FOR_CONSTRAINT_P (delta, 'K', "K"))
8121 output_asm_insn ("ahi\t%1,%2", op);
8124 op[6] = gen_label_rtx ();
8125 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8129 /* Perform vcall adjustment. */
8132 if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'J', "J"))
8134 output_asm_insn ("lg\t%4,0(%1)", op);
8135 output_asm_insn ("a\t%1,%3(%4)", op);
8137 else if (DISP_IN_RANGE (vcall_offset))
8139 output_asm_insn ("lg\t%4,0(%1)", op);
8140 output_asm_insn ("ay\t%1,%3(%4)", op);
8142 else if (CONST_OK_FOR_CONSTRAINT_P (vcall_offset, 'K', "K"))
8144 output_asm_insn ("lhi\t%4,%3", op);
8145 output_asm_insn ("a\t%4,0(%1)", op);
8146 output_asm_insn ("a\t%1,0(%4)", op);
8150 op[7] = gen_label_rtx ();
8151 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8152 output_asm_insn ("a\t%4,0(%1)", op);
8153 output_asm_insn ("a\t%1,0(%4)", op);
8156 /* We had to clobber the base pointer register.
8157 Re-setup the base pointer (with a different base). */
8158 op[5] = gen_label_rtx ();
8159 output_asm_insn ("basr\t%4,0", op);
8160 targetm.asm_out.internal_label (file, "L",
8161 CODE_LABEL_NUMBER (op[5]));
8164 /* Jump to target. */
8165 op[8] = gen_label_rtx ();
8168 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8170 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8171 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8172 else if (flag_pic == 1)
8174 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8175 output_asm_insn ("l\t%4,%0(%4)", op);
8177 else if (flag_pic == 2)
8179 op[9] = gen_rtx_REG (Pmode, 0);
8180 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8181 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8182 output_asm_insn ("ar\t%4,%9", op);
8183 output_asm_insn ("l\t%4,0(%4)", op);
8186 output_asm_insn ("br\t%4", op);
8188 /* Output literal pool. */
8189 output_asm_insn (".align\t4", op);
8191 if (nonlocal && flag_pic == 2)
8192 output_asm_insn (".long\t%0", op);
8195 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8196 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8199 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8201 output_asm_insn (".long\t%0", op);
8203 output_asm_insn (".long\t%0-%5", op);
8207 targetm.asm_out.internal_label (file, "L",
8208 CODE_LABEL_NUMBER (op[6]));
8209 output_asm_insn (".long\t%2", op);
8213 targetm.asm_out.internal_label (file, "L",
8214 CODE_LABEL_NUMBER (op[7]));
8215 output_asm_insn (".long\t%3", op);
8221 s390_valid_pointer_mode (enum machine_mode mode)
8223 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8226 /* How to allocate a 'struct machine_function'. */
8228 static struct machine_function *
8229 s390_init_machine_status (void)
8231 return ggc_alloc_cleared (sizeof (struct machine_function));
8234 /* Checks whether the given ARGUMENT_LIST would use a caller
8235 saved register. This is used to decide whether sibling call
8236 optimization could be performed on the respective function
8240 s390_call_saved_register_used (tree argument_list)
8242 CUMULATIVE_ARGS cum;
8244 enum machine_mode mode;
8249 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8251 while (argument_list)
8253 parameter = TREE_VALUE (argument_list);
8254 argument_list = TREE_CHAIN (argument_list);
8259 /* For an undeclared variable passed as parameter we will get
8260 an ERROR_MARK node here. */
8261 if (TREE_CODE (parameter) == ERROR_MARK)
8264 if (! (type = TREE_TYPE (parameter)))
8267 if (! (mode = TYPE_MODE (TREE_TYPE (parameter))))
8270 if (pass_by_reference (&cum, mode, type, true))
8273 type = build_pointer_type (type);
8276 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8278 s390_function_arg_advance (&cum, mode, type, 0);
8280 if (parm_rtx && REG_P (parm_rtx))
8283 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8285 if (! call_used_regs[reg + REGNO (parm_rtx)])
8292 /* Return true if the given call expression can be
8293 turned into a sibling call.
8294 DECL holds the declaration of the function to be called whereas
8295 EXP is the call expression itself. */
8298 s390_function_ok_for_sibcall (tree decl, tree exp)
8300 /* The TPF epilogue uses register 1. */
8301 if (TARGET_TPF_PROFILING)
8304 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8305 which would have to be restored before the sibcall. */
8306 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
8309 /* Register 6 on s390 is available as an argument register but unfortunately
8310 "caller saved". This makes functions needing this register for arguments
8311 not suitable for sibcalls. */
8312 if (TREE_OPERAND (exp, 1)
8313 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
8319 /* This function is used by the call expanders of the machine description.
8320 It emits the call insn itself together with the necessary operations
8321 to adjust the target address and returns the emitted insn.
8322 ADDR_LOCATION is the target address rtx
8323 TLS_CALL the location of the thread-local symbol
8324 RESULT_REG the register where the result of the call should be stored
8325 RETADDR_REG the register where the return address should be stored
8326 If this parameter is NULL_RTX the call is considered
8327 to be a sibling call. */
8330 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
8333 bool plt_call = false;
8339 /* Direct function calls need special treatment. */
8340 if (GET_CODE (addr_location) == SYMBOL_REF)
8342 /* When calling a global routine in PIC mode, we must
8343 replace the symbol itself with the PLT stub. */
8344 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8346 addr_location = gen_rtx_UNSPEC (Pmode,
8347 gen_rtvec (1, addr_location),
8349 addr_location = gen_rtx_CONST (Pmode, addr_location);
8353 /* Unless we can use the bras(l) insn, force the
8354 routine address into a register. */
8355 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8358 addr_location = legitimize_pic_address (addr_location, 0);
8360 addr_location = force_reg (Pmode, addr_location);
8364 /* If it is already an indirect call or the code above moved the
8365 SYMBOL_REF to somewhere else make sure the address can be found in
8367 if (retaddr_reg == NULL_RTX
8368 && GET_CODE (addr_location) != SYMBOL_REF
8371 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8372 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
8375 addr_location = gen_rtx_MEM (QImode, addr_location);
8376 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8378 if (result_reg != NULL_RTX)
8379 call = gen_rtx_SET (VOIDmode, result_reg, call);
8381 if (retaddr_reg != NULL_RTX)
8383 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8385 if (tls_call != NULL_RTX)
8386 vec = gen_rtvec (3, call, clobber,
8387 gen_rtx_USE (VOIDmode, tls_call));
8389 vec = gen_rtvec (2, call, clobber);
8391 call = gen_rtx_PARALLEL (VOIDmode, vec);
8394 insn = emit_call_insn (call);
8396 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8397 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8399 /* s390_function_ok_for_sibcall should
8400 have denied sibcalls in this case. */
8401 if (retaddr_reg == NULL_RTX)
8404 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8409 /* Implement CONDITIONAL_REGISTER_USAGE. */
8412 s390_conditional_register_usage (void)
8418 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8419 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8421 if (TARGET_CPU_ZARCH)
8423 fixed_regs[RETURN_REGNUM] = 0;
8424 call_used_regs[RETURN_REGNUM] = 0;
8428 for (i = 24; i < 32; i++)
8429 call_used_regs[i] = call_really_used_regs[i] = 0;
8433 for (i = 18; i < 20; i++)
8434 call_used_regs[i] = call_really_used_regs[i] = 0;
8437 if (TARGET_SOFT_FLOAT)
8439 for (i = 16; i < 32; i++)
8440 call_used_regs[i] = fixed_regs[i] = 1;
8444 /* Corresponding function to eh_return expander. */
8446 static GTY(()) rtx s390_tpf_eh_return_symbol;
8448 s390_emit_tpf_eh_return (rtx target)
8452 if (!s390_tpf_eh_return_symbol)
8453 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8455 reg = gen_rtx_REG (Pmode, 2);
8457 emit_move_insn (reg, target);
8458 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8459 gen_rtx_REG (Pmode, RETURN_REGNUM));
8460 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8462 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
8465 #include "gt-s390.h"