1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005
3 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com).
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
53 #include "tree-gimple.h"
56 /* Define the specific costs for a given cpu. */
58 struct processor_costs
61 const int m; /* cost of an M instruction. */
62 const int mghi; /* cost of an MGHI instruction. */
63 const int mh; /* cost of an MH instruction. */
64 const int mhi; /* cost of an MHI instruction. */
65 const int ml; /* cost of an ML instruction. */
66 const int mr; /* cost of an MR instruction. */
67 const int ms; /* cost of an MS instruction. */
68 const int msg; /* cost of an MSG instruction. */
69 const int msgf; /* cost of an MSGF instruction. */
70 const int msgfr; /* cost of an MSGFR instruction. */
71 const int msgr; /* cost of an MSGR instruction. */
72 const int msr; /* cost of an MSR instruction. */
73 const int mult_df; /* cost of multiplication in DFmode. */
/* square root */
75 const int sqdbr; /* cost of square root in DFmode. */
76 const int sqebr; /* cost of square root in SFmode. */
77 /* multiply and add */
78 const int madbr; /* cost of multiply and add in DFmode. */
79 const int maebr; /* cost of multiply and add in SFmode. */
/* NOTE(review): the division-cost fields (DDBR, DDR, DEBR, DER, DLGR, DLR,
   DR, DSGFR, DSGR) that the cost-table initializers below supply values for
   appear to be elided from this excerpt — confirm against the full file.  */
/* Cost table in effect for the processor we are tuning for;
   selected in override_options based on s390_tune.  */
92 const struct processor_costs *s390_cost;
/* Instruction costs for the z900 processor (field order defined by
   struct processor_costs above).  */
95 struct processor_costs z900_cost =
97 COSTS_N_INSNS (5), /* M */
98 COSTS_N_INSNS (10), /* MGHI */
99 COSTS_N_INSNS (5), /* MH */
100 COSTS_N_INSNS (4), /* MHI */
101 COSTS_N_INSNS (5), /* ML */
102 COSTS_N_INSNS (5), /* MR */
103 COSTS_N_INSNS (4), /* MS */
104 COSTS_N_INSNS (15), /* MSG */
105 COSTS_N_INSNS (7), /* MSGF */
106 COSTS_N_INSNS (7), /* MSGFR */
107 COSTS_N_INSNS (10), /* MSGR */
108 COSTS_N_INSNS (4), /* MSR */
109 COSTS_N_INSNS (7), /* multiplication in DFmode */
110 COSTS_N_INSNS (44), /* SQDBR */
111 COSTS_N_INSNS (35), /* SQEBR */
112 COSTS_N_INSNS (18), /* MADBR */
113 COSTS_N_INSNS (13), /* MAEBR */
114 COSTS_N_INSNS (30), /* DDBR */
115 COSTS_N_INSNS (30), /* DDR */
116 COSTS_N_INSNS (27), /* DEBR */
117 COSTS_N_INSNS (26), /* DER */
118 COSTS_N_INSNS (220), /* DLGR */
119 COSTS_N_INSNS (34), /* DLR */
120 COSTS_N_INSNS (34), /* DR */
121 COSTS_N_INSNS (32), /* DSGFR */
122 COSTS_N_INSNS (32), /* DSGR */
/* Instruction costs for the z990 processor (field order defined by
   struct processor_costs above).  */
126 struct processor_costs z990_cost =
128 COSTS_N_INSNS (4), /* M */
129 COSTS_N_INSNS (2), /* MGHI */
130 COSTS_N_INSNS (2), /* MH */
131 COSTS_N_INSNS (2), /* MHI */
132 COSTS_N_INSNS (4), /* ML */
133 COSTS_N_INSNS (4), /* MR */
134 COSTS_N_INSNS (5), /* MS */
135 COSTS_N_INSNS (6), /* MSG */
136 COSTS_N_INSNS (4), /* MSGF */
137 COSTS_N_INSNS (4), /* MSGFR */
138 COSTS_N_INSNS (4), /* MSGR */
139 COSTS_N_INSNS (4), /* MSR */
140 COSTS_N_INSNS (1), /* multiplication in DFmode */
141 COSTS_N_INSNS (66), /* SQDBR */
142 COSTS_N_INSNS (38), /* SQEBR */
143 COSTS_N_INSNS (1), /* MADBR */
144 COSTS_N_INSNS (1), /* MAEBR */
145 COSTS_N_INSNS (40), /* DDBR */
146 COSTS_N_INSNS (44), /* DDR */
147 COSTS_N_INSNS (26), /* DEBR */
148 COSTS_N_INSNS (28), /* DER */
149 COSTS_N_INSNS (176), /* DLGR */
150 COSTS_N_INSNS (31), /* DLR */
151 COSTS_N_INSNS (31), /* DR */
152 COSTS_N_INSNS (31), /* DSGFR */
153 COSTS_N_INSNS (31), /* DSGR */
/* Instruction costs for the z9-109 processor (field order defined by
   struct processor_costs above).  */
157 struct processor_costs z9_109_cost =
159 COSTS_N_INSNS (4), /* M */
160 COSTS_N_INSNS (2), /* MGHI */
161 COSTS_N_INSNS (2), /* MH */
162 COSTS_N_INSNS (2), /* MHI */
163 COSTS_N_INSNS (4), /* ML */
164 COSTS_N_INSNS (4), /* MR */
165 COSTS_N_INSNS (5), /* MS */
166 COSTS_N_INSNS (6), /* MSG */
167 COSTS_N_INSNS (4), /* MSGF */
168 COSTS_N_INSNS (4), /* MSGFR */
169 COSTS_N_INSNS (4), /* MSGR */
170 COSTS_N_INSNS (4), /* MSR */
171 COSTS_N_INSNS (1), /* multiplication in DFmode */
172 COSTS_N_INSNS (66), /* SQDBR */
173 COSTS_N_INSNS (38), /* SQEBR */
174 COSTS_N_INSNS (1), /* MADBR */
175 COSTS_N_INSNS (1), /* MAEBR */
176 COSTS_N_INSNS (40), /* DDBR */
177 COSTS_N_INSNS (37), /* DDR */
178 COSTS_N_INSNS (26), /* DEBR */
179 COSTS_N_INSNS (28), /* DER */
180 COSTS_N_INSNS (30), /* DLGR */
181 COSTS_N_INSNS (23), /* DLR */
182 COSTS_N_INSNS (23), /* DR */
183 COSTS_N_INSNS (24), /* DSGFR */
184 COSTS_N_INSNS (24), /* DSGR */
187 extern int reload_completed;
189 /* Save information from a "cmpxx" operation until the branch or scc is
191 rtx s390_compare_op0, s390_compare_op1;
193 /* Save the result of a compare_and_swap until the branch or scc is
195 rtx s390_compare_emitted = NULL_RTX;
197 /* Structure used to hold the components of a S/390 memory
198 address. A legitimate address on S/390 is of the general
200 base + index + displacement
201 where any of the components is optional.
203 base and index are registers of the class ADDR_REGS,
204 displacement is an unsigned 12-bit immediate constant. */
214 /* Which cpu are we tuning for. */
215 enum processor_type s390_tune = PROCESSOR_max;
216 enum processor_flags s390_tune_flags;
217 /* Which instruction set architecture to use. */
218 enum processor_type s390_arch;
219 enum processor_flags s390_arch_flags;
/* Values of the -mwarn-framesize, -mstack-size and -mstack-guard options;
   parsed in s390_handle_option and validated in override_options.  */
221 HOST_WIDE_INT s390_warn_framesize = 0;
222 HOST_WIDE_INT s390_stack_size = 0;
223 HOST_WIDE_INT s390_stack_guard = 0;
225 /* The following structure is embedded in the machine
226 specific part of struct function. */
228 struct s390_frame_layout GTY (())
230 /* Offset within stack frame. */
231 HOST_WIDE_INT gprs_offset;
232 HOST_WIDE_INT f0_offset;
233 HOST_WIDE_INT f4_offset;
234 HOST_WIDE_INT f8_offset;
235 HOST_WIDE_INT backchain_offset;
237 /* Number of first and last gpr to be saved, restored. */
/* NOTE(review): the first_save_gpr/last_save_gpr fields referenced by the
   cfun_gprs_save_area_size macro below appear to be elided from this
   excerpt — confirm against the full file.  */
239 int first_restore_gpr;
241 int last_restore_gpr;
243 /* Bits standing for floating point registers. Set, if the
244 respective register has to be saved. Starting with reg 16 (f0)
245 at the rightmost bit.
246 Bit 15 - 8 7 6 5 4 3 2 1 0
247 fpr 15 - 8 7 5 3 1 6 4 2 0
248 reg 31 - 24 23 22 21 20 19 18 17 16 */
249 unsigned int fpr_bitmap;
251 /* Number of floating point registers f8-f15 which must be saved. */
254 /* Set if return address needs to be saved.
255 This flag is set by s390_return_addr_rtx if it could not use
256 the initial value of r14 and therefore depends on r14 saved
258 bool save_return_addr_p;
260 /* Size of stack frame. */
261 HOST_WIDE_INT frame_size;
264 /* Define the structure for the machine field in struct function. */
266 struct machine_function GTY(())
268 struct s390_frame_layout frame_layout;
270 /* Literal pool base register. */
/* NOTE(review): the field declaration belonging to the comment above
   appears to be elided from this excerpt.  */
273 /* True if we may need to perform branch splitting. */
274 bool split_branches_pending_p;
276 /* Some local-dynamic TLS symbol name. */
277 const char *some_ld_name;
/* True if the current function contains a landing pad;
   set via s390_set_has_landing_pad_p below.  */
279 bool has_landing_pad_p;
282 /* Few accessor macros for struct cfun->machine->s390_frame_layout. */
284 #define cfun_frame_layout (cfun->machine->frame_layout)
285 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
286 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr - \
287 cfun_frame_layout.first_save_gpr + 1) * UNITS_PER_WORD)
/* NOTE(review): the continuation lines of the two fpr-bitmap macros below
   appear to be elided from this excerpt.  */
288 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
290 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
293 /* Number of GPRs and FPRs used for argument passing. */
294 #define GP_ARG_NUM_REG 5
295 #define FP_ARG_NUM_REG (TARGET_64BIT? 4 : 2)
297 /* A couple of shortcuts. */
298 #define CONST_OK_FOR_J(x) \
299 CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
300 #define CONST_OK_FOR_K(x) \
301 CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
302 #define CONST_OK_FOR_Os(x) \
303 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
304 #define CONST_OK_FOR_Op(x) \
305 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
306 #define CONST_OK_FOR_On(x) \
307 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
309 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
312 s390_set_has_landing_pad_p (bool value)
314 cfun->machine->has_landing_pad_p = value;
317 /* If two condition code modes are compatible, return a condition code
318 mode which is compatible with both. Otherwise, return
321 static enum machine_mode
322 s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
/* NOTE(review): most of this function's body (the handling of M1 and the
   return paths) appears to be elided from this excerpt.  */
330 if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
331 || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
352 /* Return true if SET either doesn't set the CC register, or else
353 the source and destination have matching CC modes and that
354 CC mode is at least as constrained as REQ_MODE. */
357 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
359 enum machine_mode set_mode;
361 gcc_assert (GET_CODE (set) == SET);
/* SETs that do not target the condition-code register are handled here
   (NOTE(review): the return on this path is elided from this excerpt).  */
363 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
366 set_mode = GET_MODE (SET_DEST (set));
/* Dispatch on the CC mode actually set; only some modes are acceptable
   substitutes for a given REQ_MODE.  */
380 if (req_mode != set_mode)
385 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
386 && req_mode != CCSRmode && req_mode != CCURmode)
392 if (req_mode != CCAmode)
/* Finally, source and destination modes must agree.  */
400 return (GET_MODE (SET_SRC (set)) == set_mode);
403 /* Return true if every SET in INSN that sets the CC register
404 has source and destination with matching CC modes and that
405 CC mode is at least as constrained as REQ_MODE.
406 If REQ_MODE is VOIDmode, always return false. */
409 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
413 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
414 if (req_mode == VOIDmode)
417 if (GET_CODE (PATTERN (insn)) == SET)
418 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* For PARALLELs, every member SET must individually match.  */
420 if (GET_CODE (PATTERN (insn)) == PARALLEL)
421 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
423 rtx set = XVECEXP (PATTERN (insn), 0, i);
424 if (GET_CODE (set) == SET)
425 if (!s390_match_ccmode_set (set, req_mode))
432 /* If a test-under-mask instruction can be used to implement
433 (compare (and ... OP1) OP2), return the CC mode required
434 to do that. Otherwise, return VOIDmode.
435 MIXED is true if the instruction can distinguish between
436 CC1 and CC2 for mixed selected bits (TMxx), it is false
437 if the instruction cannot (TM). */
440 s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
444 /* ??? Fixme: should work on CONST_DOUBLE as well. */
445 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
448 /* Selected bits all zero: CC0.
449 e.g.: int a; if ((a & (16 + 128)) == 0) */
450 if (INTVAL (op2) == 0)
453 /* Selected bits all one: CC3.
454 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
455 if (INTVAL (op2) == INTVAL (op1))
458 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
460 if ((a & (16 + 128)) == 16) -> CCT1
461 if ((a & (16 + 128)) == 128) -> CCT2 */
/* bit1 is the single bit set in OP2; bit0 the single bit selected by
   OP1 but clear in OP2.  Both must be unique for TMxx to work.  */
464 bit1 = exact_log2 (INTVAL (op2));
465 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
466 if (bit0 != -1 && bit1 != -1)
467 return bit0 > bit1 ? CCT1mode : CCT2mode;
473 /* Given a comparison code OP (EQ, NE, etc.) and the operands
474 OP0 and OP1 of a COMPARE, return the mode to be used for the
478 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
/* NOTE(review): the switch statement on CODE and the mode returns of the
   individual branches appear to be elided from this excerpt; only the
   condition tests remain visible below.  */
484 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
485 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
487 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
488 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
490 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
491 || GET_CODE (op1) == NEG)
492 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
495 if (GET_CODE (op0) == AND)
497 /* Check whether we can potentially do it via TM. */
498 enum machine_mode ccmode;
499 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
500 if (ccmode != VOIDmode)
502 /* Relax CCTmode to CCZmode to allow fall-back to AND
503 if that turns out to be beneficial. */
504 return ccmode == CCTmode ? CCZmode : ccmode;
/* HImode/QImode registers compared against all-ones constants.  */
508 if (register_operand (op0, HImode)
509 && GET_CODE (op1) == CONST_INT
510 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
512 if (register_operand (op0, QImode)
513 && GET_CODE (op1) == CONST_INT
514 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
523 /* The only overflow condition of NEG and ABS happens when
524 -INT_MAX is used as parameter, which stays negative. So
525 we have an overflow from a positive value to a negative.
526 Using CCAP mode the resulting cc can be used for comparisons. */
527 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
528 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
531 /* If constants are involved in an add instruction it is possible to use
532 the resulting cc for comparisons with zero. Knowing the sign of the
533 constant the overflow behavior gets predictable. e.g.:
534 int a, b; if ((b = a + c) > 0)
535 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
536 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
537 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
539 if (INTVAL (XEXP((op0), 1)) < 0)
/* Extended operands compared against non-constants need the
   sign-/zero-extended CC variants.  */
553 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
554 && GET_CODE (op1) != CONST_INT)
560 if (GET_CODE (op0) == PLUS
561 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
564 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
565 && GET_CODE (op1) != CONST_INT)
571 if (GET_CODE (op0) == MINUS
572 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
575 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
576 && GET_CODE (op1) != CONST_INT)
585 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
586 that we can implement more efficiently. */
589 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
591 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
592 if ((*code == EQ || *code == NE)
593 && *op1 == const0_rtx
594 && GET_CODE (*op0) == ZERO_EXTRACT
595 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
596 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
597 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
599 rtx inner = XEXP (*op0, 0);
600 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
601 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
602 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
/* Only rewrite when the extracted field fits entirely inside the
   mode and the mode fits in a host word.  */
604 if (len > 0 && len < modesize
605 && pos >= 0 && pos + len <= modesize
606 && modesize <= HOST_BITS_PER_WIDE_INT)
/* Build a mask covering the extracted bit field (big-endian bit
   numbering: shift the LEN-bit block up past POS).  */
608 unsigned HOST_WIDE_INT block;
609 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
610 block <<= modesize - pos - len;
612 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
613 gen_int_mode (block, GET_MODE (inner)));
617 /* Narrow AND of memory against immediate to enable TM. */
618 if ((*code == EQ || *code == NE)
619 && *op1 == const0_rtx
620 && GET_CODE (*op0) == AND
621 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
622 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
624 rtx inner = XEXP (*op0, 0);
625 rtx mask = XEXP (*op0, 1);
627 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
628 if (GET_CODE (inner) == SUBREG
629 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
630 && (GET_MODE_SIZE (GET_MODE (inner))
631 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
633 & GET_MODE_MASK (GET_MODE (inner))
634 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
636 inner = SUBREG_REG (inner);
638 /* Do not change volatile MEMs. */
639 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
/* Locate the single QImode part of the mask that is nonzero, and
   reduce the comparison to that byte of the MEM.  */
641 int part = s390_single_part (XEXP (*op0, 1),
642 GET_MODE (inner), QImode, 0);
645 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
646 inner = adjust_address_nv (inner, QImode, part);
647 *op0 = gen_rtx_AND (QImode, inner, mask);
652 /* Narrow comparisons against 0xffff to HImode if possible. */
653 if ((*code == EQ || *code == NE)
654 && GET_CODE (*op1) == CONST_INT
655 && INTVAL (*op1) == 0xffff
656 && SCALAR_INT_MODE_P (GET_MODE (*op0))
657 && (nonzero_bits (*op0, GET_MODE (*op0))
658 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
660 *op0 = gen_lowpart (HImode, *op0);
665 /* Remove redundant UNSPEC_CMPINT conversions if possible. */
666 if (GET_CODE (*op0) == UNSPEC
667 && XINT (*op0, 1) == UNSPEC_CMPINT
668 && XVECLEN (*op0, 0) == 1
669 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
670 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
671 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
672 && *op1 == const0_rtx)
/* Map the signed comparison on the CMPINT result to the unsigned
   comparison on the underlying CCU condition code.  */
674 enum rtx_code new_code = UNKNOWN;
677 case EQ: new_code = EQ; break;
678 case NE: new_code = NE; break;
679 case LT: new_code = GTU; break;
680 case GT: new_code = LTU; break;
681 case LE: new_code = GEU; break;
682 case GE: new_code = LEU; break;
686 if (new_code != UNKNOWN)
688 *op0 = XVECEXP (*op0, 0, 0);
693 /* Simplify cascaded EQ, NE with const0_rtx. */
694 if ((*code == NE || *code == EQ)
695 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
696 && GET_MODE (*op0) == SImode
697 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
698 && REG_P (XEXP (*op0, 0))
699 && XEXP (*op0, 1) == const0_rtx
700 && *op1 == const0_rtx)
702 if ((*code == EQ && GET_CODE (*op0) == NE)
703 || (*code == NE && GET_CODE (*op0) == EQ))
707 *op0 = XEXP (*op0, 0);
710 /* Prefer register over memory as first operand. */
711 if (MEM_P (*op0) && REG_P (*op1))
713 rtx tem = *op0; *op0 = *op1; *op1 = tem;
714 *code = swap_condition (*code);
718 /* Emit a compare instruction suitable to implement the comparison
719 OP0 CODE OP1. Return the correct condition RTL to be placed in
720 the IF_THEN_ELSE of the conditional branch testing the result. */
723 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
725 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
728 /* Do not output a redundant compare instruction if a compare_and_swap
729 pattern already computed the result and the machine modes are compatible. */
730 if (s390_compare_emitted
731 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode)
732 == GET_MODE (s390_compare_emitted)))
733 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx);
/* Otherwise, emit an explicit COMPARE setting the CC register.  */
736 rtx cc = gen_rtx_REG (mode, CC_REGNUM);
738 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
739 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
/* The saved compare_and_swap result is consumed at most once.  */
741 s390_compare_emitted = NULL_RTX;
745 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
746 unconditional jump, else a conditional jump under condition COND. */
749 s390_emit_jump (rtx target, rtx cond)
753 target = gen_rtx_LABEL_REF (VOIDmode, target);
/* Wrap the target in IF_THEN_ELSE when a condition was supplied.  */
755 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
757 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
758 emit_jump_insn (insn);
761 /* Return branch condition mask to implement a branch
762 specified by CODE. Return -1 for invalid comparisons. */
765 s390_branch_condition_mask (rtx code)
/* The four S/390 condition-code values as mask bits, CC0 being the
   most significant bit of the 4-bit branch mask.  */
767 const int CC0 = 1 << 3;
768 const int CC1 = 1 << 2;
769 const int CC2 = 1 << 1;
770 const int CC3 = 1 << 0;
772 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
773 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
774 gcc_assert (XEXP (code, 1) == const0_rtx);
/* Dispatch on the CC mode of the comparison, then on the rtx code.
   NOTE(review): several case labels (including the EQ cases of the first
   mode groups) and the mode labels of this switch appear to be elided
   from this excerpt.  */
776 switch (GET_MODE (XEXP (code, 0)))
780 switch (GET_CODE (code))
783 case NE: return CC1 | CC2 | CC3;
789 switch (GET_CODE (code))
792 case NE: return CC0 | CC2 | CC3;
798 switch (GET_CODE (code))
801 case NE: return CC0 | CC1 | CC3;
807 switch (GET_CODE (code))
810 case NE: return CC0 | CC1 | CC2;
816 switch (GET_CODE (code))
818 case EQ: return CC0 | CC2;
819 case NE: return CC1 | CC3;
825 switch (GET_CODE (code))
827 case LTU: return CC2 | CC3; /* carry */
828 case GEU: return CC0 | CC1; /* no carry */
834 switch (GET_CODE (code))
836 case GTU: return CC0 | CC1; /* borrow */
837 case LEU: return CC2 | CC3; /* no borrow */
843 switch (GET_CODE (code))
845 case EQ: return CC0 | CC2;
846 case NE: return CC1 | CC3;
847 case LTU: return CC1;
848 case GTU: return CC3;
849 case LEU: return CC1 | CC2;
850 case GEU: return CC2 | CC3;
855 switch (GET_CODE (code))
858 case NE: return CC1 | CC2 | CC3;
859 case LTU: return CC1;
860 case GTU: return CC2;
861 case LEU: return CC0 | CC1;
862 case GEU: return CC0 | CC2;
868 switch (GET_CODE (code))
871 case NE: return CC2 | CC1 | CC3;
872 case LTU: return CC2;
873 case GTU: return CC1;
874 case LEU: return CC0 | CC2;
875 case GEU: return CC0 | CC1;
881 switch (GET_CODE (code))
884 case NE: return CC1 | CC2 | CC3;
885 case LT: return CC1 | CC3;
887 case LE: return CC0 | CC1 | CC3;
888 case GE: return CC0 | CC2;
894 switch (GET_CODE (code))
897 case NE: return CC1 | CC2 | CC3;
899 case GT: return CC2 | CC3;
900 case LE: return CC0 | CC1;
901 case GE: return CC0 | CC2 | CC3;
907 switch (GET_CODE (code))
910 case NE: return CC1 | CC2 | CC3;
913 case LE: return CC0 | CC1;
914 case GE: return CC0 | CC2;
915 case UNORDERED: return CC3;
916 case ORDERED: return CC0 | CC1 | CC2;
917 case UNEQ: return CC0 | CC3;
918 case UNLT: return CC1 | CC3;
919 case UNGT: return CC2 | CC3;
920 case UNLE: return CC0 | CC1 | CC3;
921 case UNGE: return CC0 | CC2 | CC3;
922 case LTGT: return CC1 | CC2;
928 switch (GET_CODE (code))
931 case NE: return CC2 | CC1 | CC3;
934 case LE: return CC0 | CC2;
935 case GE: return CC0 | CC1;
936 case UNORDERED: return CC3;
937 case ORDERED: return CC0 | CC2 | CC1;
938 case UNEQ: return CC0 | CC3;
939 case UNLT: return CC2 | CC3;
940 case UNGT: return CC1 | CC3;
941 case UNLE: return CC0 | CC2 | CC3;
942 case UNGE: return CC0 | CC1 | CC3;
943 case LTGT: return CC2 | CC1;
953 /* If INV is false, return assembler mnemonic string to implement
954 a branch specified by CODE. If INV is true, return mnemonic
955 for the corresponding inverted branch. */
958 s390_branch_condition_mnemonic (rtx code, int inv)
/* Mnemonics indexed by the 4-bit branch mask; masks 0 (never) and 15
   (always) have no conditional mnemonic.  */
960 static const char *const mnemonic[16] =
962 NULL, "o", "h", "nle",
963 "l", "nhe", "lh", "ne",
964 "e", "nlh", "he", "nl",
965 "le", "nh", "no", NULL
968 int mask = s390_branch_condition_mask (code);
969 gcc_assert (mask >= 0);
/* NOTE(review): the mask inversion applied when INV is set appears to be
   elided from this excerpt.  */
974 gcc_assert (mask >= 1 && mask <= 14);
976 return mnemonic[mask];
979 /* Return the part of op which has a value different from def.
980 The size of the part is determined by mode.
981 Use this function only if you already know that op really
982 contains such a part. */
984 unsigned HOST_WIDE_INT
985 s390_extract_part (rtx op, enum machine_mode mode, int def)
987 unsigned HOST_WIDE_INT value = 0;
988 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
989 int part_bits = GET_MODE_BITSIZE (mode);
990 unsigned HOST_WIDE_INT part_mask
991 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
/* Walk the parts of OP and return the first one that differs from DEF.
   NOTE(review): the per-iteration shift of VALUE appears to be elided
   from this excerpt.  */
994 for (i = 0; i < max_parts; i++)
997 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1001 if ((value & part_mask) != (def & part_mask))
1002 return value & part_mask;
1008 /* If OP is an integer constant of mode MODE with exactly one
1009 part of mode PART_MODE unequal to DEF, return the number of that
1010 part. Otherwise, return -1. */
1013 s390_single_part (rtx op,
1014 enum machine_mode mode,
1015 enum machine_mode part_mode,
1018 unsigned HOST_WIDE_INT value = 0;
1019 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1020 unsigned HOST_WIDE_INT part_mask
1021 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
/* Only CONST_INT operands can be decomposed here.  */
1024 if (GET_CODE (op) != CONST_INT)
1027 for (i = 0; i < n_parts; i++)
1030 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1032 value >>= GET_MODE_BITSIZE (part_mode);
1034 if ((value & part_mask) != (def & part_mask))
/* Parts were scanned from least to most significant; convert to the
   big-endian part numbering callers expect.  */
1042 return part == -1 ? -1 : n_parts - 1 - part;
1045 /* Check whether we can (and want to) split a double-word
1046 move in mode MODE from SRC to DST into two single-word
1047 moves, moving the subword FIRST_SUBWORD first. */
1050 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1052 /* Floating point registers cannot be split. */
1053 if (FP_REG_P (src) || FP_REG_P (dst))
1056 /* We don't need to split if operands are directly accessible. */
1057 if (s_operand (src, mode) || s_operand (dst, mode))
1060 /* Non-offsettable memory references cannot be split. */
1061 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1062 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1065 /* Moving the first subword must not clobber a register
1066 needed to move the second subword. */
1067 if (register_operand (dst, mode))
1069 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1070 if (reg_overlap_mentioned_p (subreg, src))
1077 /* Return true if it can be proven that [MEM1, MEM1 + SIZE]
1078 and [MEM2, MEM2 + SIZE] do overlap and false
1082 s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1084 rtx addr1, addr2, addr_delta;
1085 HOST_WIDE_INT delta;
/* Non-MEM operands cannot be analysed here.  */
1087 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1093 addr1 = XEXP (mem1, 0);
1094 addr2 = XEXP (mem2, 0);
1096 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1098 /* This overlapping check is used by peepholes merging memory block operations.
1099 Overlapping operations would otherwise be recognized by the S/390 hardware
1100 and would fall back to a slower implementation. Allowing overlapping
1101 operations would lead to slow code but not to wrong code. Therefore we are
1102 somewhat optimistic if we cannot prove that the memory blocks are
1104 That's why we return false here although this may accept operations on
1105 overlapping memory areas. */
1106 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1109 delta = INTVAL (addr_delta);
/* Blocks overlap when the address distance is smaller than SIZE in
   either direction.  */
1112 || (delta > 0 && delta < size)
1113 || (delta < 0 && -delta < size))
1119 /* Check whether the address of memory reference MEM2 equals exactly
1120 the address of memory reference MEM1 plus DELTA. Return true if
1121 we can prove this to be the case, false otherwise. */
1124 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1126 rtx addr1, addr2, addr_delta;
1128 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1131 addr1 = XEXP (mem1, 0);
1132 addr2 = XEXP (mem2, 0);
/* The addresses match iff their symbolic difference simplifies to
   exactly DELTA.  */
1134 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1135 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1141 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1144 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1147 enum machine_mode wmode = mode;
1148 rtx dst = operands[0];
1149 rtx src1 = operands[1];
1150 rtx src2 = operands[2];
1153 /* If we cannot handle the operation directly, use a temp register. */
1154 if (!s390_logical_operator_ok_p (operands))
1155 dst = gen_reg_rtx (mode);
1157 /* QImode and HImode patterns make sense only if we have a destination
1158 in memory. Otherwise perform the operation in SImode. */
1159 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1162 /* Widen operands if required. */
/* Each operand is either reduced via simplify_subreg or wrapped in a
   SUBREG of the wider mode.  */
1165 if (GET_CODE (dst) == SUBREG
1166 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1168 else if (REG_P (dst))
1169 dst = gen_rtx_SUBREG (wmode, dst, 0)
1171 dst = gen_reg_rtx (wmode);
1173 if (GET_CODE (src1) == SUBREG
1174 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1176 else if (GET_MODE (src1) != VOIDmode)
1177 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1179 if (GET_CODE (src2) == SUBREG
1180 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1182 else if (GET_MODE (src2) != VOIDmode)
1183 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1186 /* Emit the instruction. */
1187 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1188 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1189 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1191 /* Fix up the destination if needed. */
1192 if (dst != operands[0])
1193 emit_move_insn (operands[0], gen_lowpart (mode, dst));
1196 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1199 s390_logical_operator_ok_p (rtx *operands)
1201 /* If the destination operand is in memory, it needs to coincide
1202 with one of the source operands. After reload, it has to be
1203 the first source operand. */
1204 if (GET_CODE (operands[0]) == MEM)
1205 return rtx_equal_p (operands[0], operands[1])
1206 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1211 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1212 operand IMMOP to switch from SS to SI type instructions. */
1215 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
/* For AND the "neutral" byte value is all-ones, for IOR/XOR it is zero.  */
1217 int def = code == AND ? -1 : 0;
1221 gcc_assert (GET_CODE (*memop) == MEM);
1222 gcc_assert (!MEM_VOLATILE_P (*memop));
/* There must be exactly one byte of the immediate differing from DEF;
   narrow the operation to that byte.  */
1224 mask = s390_extract_part (*immop, QImode, def);
1225 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1226 gcc_assert (part >= 0);
1228 *memop = adjust_address (*memop, QImode, part);
1229 *immop = gen_int_mode (mask, QImode);
1233 /* How to allocate a 'struct machine_function'. */
1235 static struct machine_function *
1236 s390_init_machine_status (void)
1238 return ggc_alloc_cleared (sizeof (struct machine_function));
1241 /* Change optimizations to be performed, depending on the
1244 LEVEL is the optimization level specified; 2 if `-O2' is
1245 specified, 1 if `-O' is specified, and 0 if neither is specified.
1247 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1250 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1252 /* ??? There are apparently still problems with -fcaller-saves. */
1253 flag_caller_saves = 0;
1255 /* By default, always emit DWARF-2 unwind info. This allows debugging
1256 without maintaining a stack frame back-chain. */
1257 flag_asynchronous_unwind_tables = 1;
1259 /* Use MVCLE instructions to decrease code size if requested. */
1261 target_flags |= MASK_MVCLE;
1264 /* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1265 to the associated processor_type and processor_flags if so. */
1268 s390_handle_arch_option (const char *arg,
1269 enum processor_type *type,
1270 enum processor_flags *flags)
/* Table mapping each accepted -march=/-mtune= argument to its
   processor type and capability flags.  */
1274 const char *const name; /* processor name or nickname. */
1275 const enum processor_type processor;
1276 const enum processor_flags flags;
1278 const processor_alias_table[] =
1280 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1281 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1282 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1283 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1284 | PF_LONG_DISPLACEMENT},
1285 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1286 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
1290 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1291 if (strcmp (arg, processor_alias_table[i].name) == 0)
1293 *type = processor_alias_table[i].processor;
1294 *flags = processor_alias_table[i].flags;
1300 /* Implement TARGET_HANDLE_OPTION. */
1303 s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
/* -march=: select instruction set architecture.  */
1308 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1310 case OPT_mstack_guard_:
1311 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1313 if (exact_log2 (s390_stack_guard) == -1)
1314 error ("stack guard value must be an exact power of 2");
1317 case OPT_mstack_size_:
1318 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1320 if (exact_log2 (s390_stack_size) == -1)
1321 error ("stack size must be an exact power of 2");
/* -mtune=: select processor to schedule for.  */
1325 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1327 case OPT_mwarn_framesize_:
1328 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
/* Validate and finalize all target options after command-line parsing.  */
1336 override_options (void)
1338 /* Set up function hooks. */
1339 init_machine_status = s390_init_machine_status;
1341 /* Architecture mode defaults according to ABI. */
1342 if (!(target_flags_explicit & MASK_ZARCH))
1345 target_flags |= MASK_ZARCH;
1347 target_flags &= ~MASK_ZARCH;
1350 /* Determine processor architectural level. */
1351 if (!s390_arch_string)
1353 s390_arch_string = TARGET_ZARCH? "z900" : "g5";
1354 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1357 /* Determine processor to tune for. */
1358 if (s390_tune == PROCESSOR_max)
1360 s390_tune = s390_arch;
1361 s390_tune_flags = s390_arch_flags;
1364 /* Sanity checks. */
1365 if (TARGET_ZARCH && !(s390_arch_flags & PF_ZARCH))
1366 error ("z/Architecture mode not supported on %s", s390_arch_string);
1367 if (TARGET_64BIT && !TARGET_ZARCH)
1368 error ("64-bit ABI not supported in ESA/390 mode");
1370 /* Set processor cost function. */
1371 if (s390_tune == PROCESSOR_2094_Z9_109)
1372 s390_cost = &z9_109_cost;
1373 else if (s390_tune == PROCESSOR_2084_Z990)
1374 s390_cost = &z990_cost;
1376 s390_cost = &z900_cost;
1378 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1379 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
/* -mstack-size and -mstack-guard must be given together and be
   consistent with each other.  */
1382 if (s390_stack_size)
1384 if (!s390_stack_guard)
1385 error ("-mstack-size implies use of -mstack-guard");
1386 else if (s390_stack_guard >= s390_stack_size)
1387 error ("stack size must be greater than the stack guard value");
1388 else if (s390_stack_size > 1 << 16)
1389 error ("stack size must not be greater than 64k");
1391 else if (s390_stack_guard)
1392 error ("-mstack-guard implies use of -mstack-size");
1395 /* Map for smallest class containing reg regno. */
/* Smallest register class containing each hard register, indexed by regno:
   GPR 0 plus address registers 1-15, FPRs 16-31, then special and access
   registers.  NOTE(review): the closing "};" is elided in this view.  */
1397 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1398 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1399 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1400 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1401 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1402 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1403 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1404 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1405 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1406 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1407 ACCESS_REGS, ACCESS_REGS
1410 /* Return attribute type of insn. */
/* Return the "type" attribute of INSN if it has a recognized insn code;
   the fallback return for unrecognized insns is elided from this view.  */
1412 static enum attr_type
1413 s390_safe_attr_type (rtx insn)
1415 if (recog_memoized (insn) >= 0)
1416 return get_attr_type (insn);
1421 /* Return true if DISP is a valid short displacement. */
/* Return true if DISP is valid as a short (12-bit unsigned) displacement:
   absent, an integer in [0, 4096), or a literal-pool reference.  GOT-based
   offsets are rejected because the GOT may exceed the short range.
   NOTE(review): return type, braces and some return statements are elided
   in this line-sampled view.  */
1424 s390_short_displacement (rtx disp)
1426 /* No displacement is OK. */
1430 /* Integer displacement in range. */
1431 if (GET_CODE (disp) == CONST_INT)
1432 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1434 /* GOT offset is not OK, the GOT can be large. */
1435 if (GET_CODE (disp) == CONST
1436 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1437 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1438 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF)
1441 /* All other symbolic constants are literal pool references,
1442 which are OK as the literal pool must be small. */
1443 if (GET_CODE (disp) == CONST)
1449 /* Decompose a RTL expression ADDR for a memory address into
1450 its components, returned in OUT.
1452 Returns false if ADDR is not a valid memory address, true
1453 otherwise. If OUT is NULL, don't return the components,
1454 but check for validity only.
1456 Note: Only addresses in canonical form are recognized.
1457 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1458 canonical form so that they will be recognized. */
/* Decompose memory address ADDR into base register, index register and
   displacement, storing the result in *OUT (unless OUT is NULL, in which
   case only validity is checked).  Returns false for invalid addresses.
   Only canonical-form addresses are recognized.
   NOTE(review): this line-sampled view elides braces, several else/return
   branches and some case labels, so control flow shown here is partial.  */
1461 s390_decompose_address (rtx addr, struct s390_address *out)
1463 HOST_WIDE_INT offset = 0;
1464 rtx base = NULL_RTX;
1465 rtx indx = NULL_RTX;
1466 rtx disp = NULL_RTX;
/* Track whether the address is provably a pointer, and which part made it so. */
1468 bool pointer = false;
1469 bool base_ptr = false;
1470 bool indx_ptr = false;
1472 /* Decompose address into base + index + displacement. */
1474 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1477 else if (GET_CODE (addr) == PLUS)
1479 rtx op0 = XEXP (addr, 0);
1480 rtx op1 = XEXP (addr, 1);
1481 enum rtx_code code0 = GET_CODE (op0);
1482 enum rtx_code code1 = GET_CODE (op1);
1484 if (code0 == REG || code0 == UNSPEC)
1486 if (code1 == REG || code1 == UNSPEC)
1488 indx = op0; /* index + base */
1494 base = op0; /* base + displacement */
1499 else if (code0 == PLUS)
1501 indx = XEXP (op0, 0); /* index + base + disp */
1502 base = XEXP (op0, 1);
1513 disp = addr; /* displacement */
1515 /* Extract integer part of displacement. */
1519 if (GET_CODE (disp) == CONST_INT)
1521 offset = INTVAL (disp);
/* (symbol + const_int) form: split off the integer part.  */
1524 else if (GET_CODE (disp) == CONST
1525 && GET_CODE (XEXP (disp, 0)) == PLUS
1526 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1528 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1529 disp = XEXP (XEXP (disp, 0), 0);
1533 /* Strip off CONST here to avoid special case tests later. */
1534 if (disp && GET_CODE (disp) == CONST)
1535 disp = XEXP (disp, 0);
1537 /* We can convert literal pool addresses to
1538 displacements by basing them off the base register. */
1539 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1541 /* Either base or index must be free to hold the base register. */
1543 base = gen_rtx_REG (Pmode, BASE_REGNUM);
1545 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
1549 /* Mark up the displacement. */
1550 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1551 UNSPEC_LTREL_OFFSET);
1554 /* Validate base register. */
/* A literal-pool-related UNSPEC in the base position is rewritten into
   an LTREL_OFFSET displacement off the literal pool base register.  */
1557 if (GET_CODE (base) == UNSPEC)
1558 switch (XINT (base, 1))
1562 disp = gen_rtx_UNSPEC (Pmode,
1563 gen_rtvec (1, XVECEXP (base, 0, 0)),
1564 UNSPEC_LTREL_OFFSET);
1568 base = gen_rtx_REG (Pmode, BASE_REGNUM);
1571 case UNSPEC_LTREL_BASE:
1572 base = gen_rtx_REG (Pmode, BASE_REGNUM);
1579 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* These well-known registers are always pointers.  */
1582 if (REGNO (base) == BASE_REGNUM
1583 || REGNO (base) == STACK_POINTER_REGNUM
1584 || REGNO (base) == FRAME_POINTER_REGNUM
1585 || ((reload_completed || reload_in_progress)
1586 && frame_pointer_needed
1587 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1588 || REGNO (base) == ARG_POINTER_REGNUM
1590 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1591 pointer = base_ptr = true;
1594 /* Validate index register. */
/* Same UNSPEC rewriting as for the base register above.  */
1597 if (GET_CODE (indx) == UNSPEC)
1598 switch (XINT (indx, 1))
1602 disp = gen_rtx_UNSPEC (Pmode,
1603 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1604 UNSPEC_LTREL_OFFSET);
1608 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
1611 case UNSPEC_LTREL_BASE:
1612 indx = gen_rtx_REG (Pmode, BASE_REGNUM);
1619 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1622 if (REGNO (indx) == BASE_REGNUM
1623 || REGNO (indx) == STACK_POINTER_REGNUM
1624 || REGNO (indx) == FRAME_POINTER_REGNUM
1625 || ((reload_completed || reload_in_progress)
1626 && frame_pointer_needed
1627 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1628 || REGNO (indx) == ARG_POINTER_REGNUM
1630 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM)
1631 pointer = indx_ptr = true;
1634 /* Prefer to use pointer as base, not index. */
1635 if (base && indx && !base_ptr
1636 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1643 /* Validate displacement. */
1646 /* If virtual registers are involved, the displacement will change later
1647 anyway as the virtual registers get eliminated. This could make a
1648 valid displacement invalid, but it is more likely to make an invalid
1649 displacement valid, because we sometimes access the register save area
1650 via negative offsets to one of those registers.
1651 Thus we don't check the displacement for validity here. If after
1652 elimination the displacement turns out to be invalid after all,
1653 this is fixed up by reload in any case. */
1654 if (base != arg_pointer_rtx
1655 && indx != arg_pointer_rtx
1656 && base != return_address_pointer_rtx
1657 && indx != return_address_pointer_rtx
1658 && base != frame_pointer_rtx
1659 && indx != frame_pointer_rtx
1660 && base != virtual_stack_vars_rtx
1661 && indx != virtual_stack_vars_rtx)
1662 if (!DISP_IN_RANGE (offset))
1667 /* All the special cases are pointers. */
1670 /* In the small-PIC case, the linker converts @GOT
1671 and @GOTNTPOFF offsets to possible displacements. */
1672 if (GET_CODE (disp) == UNSPEC
1673 && (XINT (disp, 1) == UNSPEC_GOT
1674 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
1681 /* Accept chunkified literal pool symbol references. */
1682 else if (GET_CODE (disp) == MINUS
1683 && GET_CODE (XEXP (disp, 0)) == LABEL_REF
1684 && GET_CODE (XEXP (disp, 1)) == LABEL_REF)
1689 /* Accept literal pool references. */
1690 else if (GET_CODE (disp) == UNSPEC
1691 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1693 orig_disp = gen_rtx_CONST (Pmode, disp);
1696 /* If we have an offset, make sure it does not
1697 exceed the size of the constant pool entry. */
1698 rtx sym = XVECEXP (disp, 0, 0);
1699 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
1702 orig_disp = plus_constant (orig_disp, offset);
/* Write the decomposed components back to the caller.  */
1717 out->disp = orig_disp;
1718 out->pointer = pointer;
1724 /* Decompose a RTL expression OP for a shift count into its components,
1725 and return the base register in BASE and the offset in OFFSET.
1727 If BITS is non-zero, the expression is used in a context where only
1728 that number to low-order bits is significant. We then allow OP to
1729 contain and outer AND that does not affect significant bits. If BITS
1730 is zero, we allow OP to contain any outer AND with a constant.
1732 Return true if OP is a valid shift count, false if not. */
/* Decompose shift-count expression OP into a base register (*BASE) and a
   constant offset (*OFFSET).  If BITS is non-zero, an outer AND whose mask
   covers the BITS low-order bits may be dropped; with BITS zero any outer
   AND with a constant is dropped.  Returns true for a valid shift count.
   NOTE(review): braces and the final output-store/return lines are elided
   in this line-sampled view.  */
1735 s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset, int bits)
1737 HOST_WIDE_INT off = 0;
1739 /* Drop outer ANDs. */
1740 if (GET_CODE (op) == AND && GET_CODE (XEXP (op, 1)) == CONST_INT)
1742 HOST_WIDE_INT mask = ((HOST_WIDE_INT)1 << bits) - 1;
/* The AND may only be dropped if it does not affect the significant bits. */
1743 if ((INTVAL (XEXP (op, 1)) & mask) != mask)
1749 /* We can have an integer constant, an address register,
1750 or a sum of the two. */
1751 if (GET_CODE (op) == CONST_INT)
1756 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
1758 off = INTVAL (XEXP (op, 1));
/* Look through any number of SUBREGs to the underlying register.  */
1761 while (op && GET_CODE (op) == SUBREG)
1762 op = SUBREG_REG (op);
1764 if (op && GET_CODE (op) != REG)
1776 /* Return true if CODE is a valid address without index. */
/* Return true if memory operand OP has a valid address that uses no index
   register; the index check after decomposition is elided from this view.  */
1779 s390_legitimate_address_without_index_p (rtx op)
1781 struct s390_address addr;
1783 if (!s390_decompose_address (XEXP (op, 0), &addr))
1791 /* Return 1 if OP is a valid operand for a C constraint, 0 else. */
/* Return 1 if OP satisfies the extra 'C'-style constraint starting at STR
   (first letter in C), 0 otherwise.  Handles memory constraints with
   offsettable, non-literal-pool, and short- vs. long-displacement variants,
   plus the shift-count constraint.
   NOTE(review): the switch over constraint letters and its case labels are
   elided in this line-sampled view, so which test belongs to which letter
   cannot be confirmed from here.  */
1794 s390_extra_constraint_str (rtx op, int c, const char * str)
1796 struct s390_address addr;
1798 gcc_assert (c == str[0]);
1800 /* Check for offsettable variants of memory constraints. */
1803 /* Only accept non-volatile MEMs. */
1804 if (!MEM_P (op) || MEM_VOLATILE_P (op))
/* Strict offsettability applies once reload is active.  */
1807 if ((reload_completed || reload_in_progress)
1808 ? !offsettable_memref_p (op)
1809 : !offsettable_nonstrict_memref_p (op))
1815 /* Check for non-literal-pool variants of memory constraints. */
1818 if (GET_CODE (op) != MEM)
1820 if (!s390_decompose_address (XEXP (op, 0), &addr))
1822 if (addr.base && REG_P (addr.base) && REGNO (addr.base) == BASE_REGNUM)
1824 if (addr.indx && REG_P (addr.indx) && REGNO (addr.indx) == BASE_REGNUM)
1833 if (GET_CODE (op) != MEM)
1835 if (!s390_decompose_address (XEXP (op, 0), &addr))
1840 if (TARGET_LONG_DISPLACEMENT)
1842 if (!s390_short_displacement (addr.disp))
1848 if (GET_CODE (op) != MEM)
1851 if (TARGET_LONG_DISPLACEMENT)
1853 if (!s390_decompose_address (XEXP (op, 0), &addr))
1855 if (!s390_short_displacement (addr.disp))
1861 if (!TARGET_LONG_DISPLACEMENT)
1863 if (GET_CODE (op) != MEM)
1865 if (!s390_decompose_address (XEXP (op, 0), &addr))
1869 if (s390_short_displacement (addr.disp))
1874 if (!TARGET_LONG_DISPLACEMENT)
1876 if (GET_CODE (op) != MEM)
1878 /* Any invalid address here will be fixed up by reload,
1879 so accept it for the most generic constraint. */
1880 if (s390_decompose_address (XEXP (op, 0), &addr)
1881 && s390_short_displacement (addr.disp))
1886 if (TARGET_LONG_DISPLACEMENT)
1888 if (!s390_decompose_address (op, &addr))
1890 if (!s390_short_displacement (addr.disp))
1896 if (!TARGET_LONG_DISPLACEMENT)
1898 /* Any invalid address here will be fixed up by reload,
1899 so accept it for the most generic constraint. */
1900 if (s390_decompose_address (op, &addr)
1901 && s390_short_displacement (addr.disp))
1906 /* Simply check for the basic form of a shift count. Reload will
1907 take care of making sure we have a proper base register. */
1908 if (!s390_decompose_shift_count (op, NULL, NULL, 0))
1919 /* Return true if VALUE matches the constraint STR. */
/* Return true if CONST_DOUBLE VALUE matches constraint STR; the only case
   visible here accepts the floating point zero constant.  */
1922 s390_const_double_ok_for_constraint_p (rtx value,
1926 gcc_assert (c == str[0]);
1931 /* The floating point zero constant. */
1932 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
1933 && value == CONST0_RTX (GET_MODE (value)));
1940 /* Return true if VALUE matches the constraint STR. */
/* Return true if integer VALUE matches the constraint starting at STR.
   Visible ranges: unsigned 8-bit, unsigned 12-bit, signed 16-bit, and a
   displacement range depending on TARGET_LONG_DISPLACEMENT; the 'N'
   constraint parses mode/part letters from STR and checks which part of
   a multi-part constant VALUE is.
   NOTE(review): the switch over constraint letters and its case labels
   are elided in this line-sampled view.  */
1943 s390_const_ok_for_constraint_p (HOST_WIDE_INT value,
1947 enum machine_mode mode, part_mode;
1949 int part, part_goal;
1951 gcc_assert (c == str[0]);
1956 return (unsigned int)value < 256;
1959 return (unsigned int)value < 4096;
1962 return value >= -32768 && value < 32768;
/* 20-bit signed displacement with long displacement facility,
   12-bit unsigned displacement otherwise.  */
1965 return (TARGET_LONG_DISPLACEMENT ?
1966 (value >= -524288 && value <= 524287)
1967 : (value >= 0 && value <= 4095));
1969 return value == 2147483647;
/* 'N': optional digit selects which part must match, then part mode,
   whole mode, and the default-fill digit ('0' or 'F').  */
1975 part_goal = str[1] - '0';
1979 case 'Q': part_mode = QImode; break;
1980 case 'H': part_mode = HImode; break;
1981 case 'S': part_mode = SImode; break;
1987 case 'H': mode = HImode; break;
1988 case 'S': mode = SImode; break;
1989 case 'D': mode = DImode; break;
1995 case '0': def = 0; break;
1996 case 'F': def = -1; break;
/* The part must be strictly smaller than the whole.  */
2000 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2003 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2006 if (part_goal != -1 && part_goal != part)
2018 return trunc_int_for_mode (value, SImode) == value;
2022 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2026 || s390_single_part (GEN_INT (value), DImode, SImode, -1) == 1;
2034 return legitimate_reload_constant_p (GEN_INT (value));
2043 /* Compute a (partial) cost for rtx X. Return true if the complete
2044 cost has been computed, and false if subexpressions should be
2045 scanned. In either case, *TOTAL contains the cost result.
2046 CODE contains GET_CODE (x), OUTER_CODE contains the code
2047 of the superexpression of x. */
/* Compute a (partial) cost for rtx X into *TOTAL, using the cost table
   selected for the tuned CPU (s390_cost).  Returns true when the complete
   cost has been computed, false when subexpressions should still be
   scanned.  CODE is GET_CODE (x); OUTER_CODE is the enclosing code.
   NOTE(review): the outer switch over CODE, several case labels, braces
   and return statements are elided in this line-sampled view.  */
2050 s390_rtx_costs (rtx x, int code, int outer_code, int *total)
2073 *total = COSTS_N_INSNS (1);
2078 /* Check for multiply and add. */
2079 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2080 && GET_CODE (XEXP (x, 0)) == MULT
2081 && TARGET_HARD_FLOAT && TARGET_IEEE_FLOAT && TARGET_FUSED_MADD)
2083 /* This is the multiply and add case. */
2084 if (GET_MODE (x) == DFmode)
2085 *total = s390_cost->madbr;
2087 *total = s390_cost->maebr;
/* Account for the multiply operands and the addend explicitly, then
   stop recursion for this subtree.  */
2088 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT)
2089 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT)
2090 + rtx_cost (XEXP (x, 1), code);
2091 return true; /* Do not do an additional recursive descent. */
2093 *total = COSTS_N_INSNS (1);
/* Multiplication: cost depends on the mode and operand shapes.  */
2097 switch (GET_MODE (x))
2101 rtx left = XEXP (x, 0);
2102 rtx right = XEXP (x, 1);
2103 if (GET_CODE (right) == CONST_INT
2104 && CONST_OK_FOR_K (INTVAL (right)))
2105 *total = s390_cost->mhi;
2106 else if (GET_CODE (left) == SIGN_EXTEND)
2107 *total = s390_cost->mh;
2109 *total = s390_cost->ms; /* msr, ms, msy */
2114 rtx left = XEXP (x, 0);
2115 rtx right = XEXP (x, 1);
2118 if (GET_CODE (right) == CONST_INT
2119 && CONST_OK_FOR_K (INTVAL (right)))
2120 *total = s390_cost->mghi;
2121 else if (GET_CODE (left) == SIGN_EXTEND)
2122 *total = s390_cost->msgf;
2124 *total = s390_cost->msg; /* msgr, msg */
2126 else /* TARGET_31BIT */
2128 if (GET_CODE (left) == SIGN_EXTEND
2129 && GET_CODE (right) == SIGN_EXTEND)
2130 /* mulsidi case: mr, m */
2131 *total = s390_cost->m;
2132 else if (GET_CODE (left) == ZERO_EXTEND
2133 && GET_CODE (right) == ZERO_EXTEND
2134 && TARGET_CPU_ZARCH)
2135 /* umulsidi case: ml, mlr */
2136 *total = s390_cost->ml;
2138 /* Complex calculation is required. */
2139 *total = COSTS_N_INSNS (40);
2145 *total = s390_cost->mult_df;
/* Unsigned division/modulo.  */
2154 if (GET_MODE (x) == TImode) /* 128 bit division */
2155 *total = s390_cost->dlgr;
2156 else if (GET_MODE (x) == DImode)
2158 rtx right = XEXP (x, 1);
2159 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2160 *total = s390_cost->dlr;
2161 else /* 64 by 64 bit division */
2162 *total = s390_cost->dlgr;
2164 else if (GET_MODE (x) == SImode) /* 32 bit division */
2165 *total = s390_cost->dlr;
/* Signed division/modulo, plus FP division.  */
2170 if (GET_MODE (x) == DImode)
2172 rtx right = XEXP (x, 1);
2173 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2175 *total = s390_cost->dsgfr;
2177 *total = s390_cost->dr;
2178 else /* 64 by 64 bit division */
2179 *total = s390_cost->dsgr;
2181 else if (GET_MODE (x) == SImode) /* 32 bit division */
2182 *total = s390_cost->dlr;
2183 else if (GET_MODE (x) == SFmode)
2185 if (TARGET_IEEE_FLOAT)
2186 *total = s390_cost->debr;
2187 else /* TARGET_IBM_FLOAT */
2188 *total = s390_cost->der;
2190 else if (GET_MODE (x) == DFmode)
2192 if (TARGET_IEEE_FLOAT)
2193 *total = s390_cost->ddbr;
2194 else /* TARGET_IBM_FLOAT */
2195 *total = s390_cost->ddr;
/* Square root.  */
2200 if (GET_MODE (x) == SFmode)
2201 *total = s390_cost->sqebr;
2203 *total = s390_cost->sqdbr;
/* Sign/zero extension is often free as part of the consuming insn.  */
2208 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2209 || outer_code == PLUS || outer_code == MINUS
2210 || outer_code == COMPARE)
/* Comparison: a TEST UNDER MASK (tm) pattern is recognized specially.  */
2215 *total = COSTS_N_INSNS (1);
2216 if (GET_CODE (XEXP (x, 0)) == AND
2217 && GET_CODE (XEXP (x, 1)) == CONST_INT
2218 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2220 rtx op0 = XEXP (XEXP (x, 0), 0);
2221 rtx op1 = XEXP (XEXP (x, 0), 1);
2222 rtx op2 = XEXP (x, 1);
2224 if (memory_operand (op0, GET_MODE (op0))
2225 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2227 if (register_operand (op0, GET_MODE (op0))
2228 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2238 /* Return the cost of an address rtx ADDR. */
/* Return the cost of address ADDR: indexed addresses cost one unit more
   than plain base(+disp) addresses.  The cost returned for addresses that
   fail to decompose is elided from this view.  */
2241 s390_address_cost (rtx addr)
2243 struct s390_address ad;
2244 if (!s390_decompose_address (addr, &ad))
2247 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2250 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2251 otherwise return 0. */
/* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS model;
   non-SYMBOL_REFs yield the early-return value elided from this view.  */
2254 tls_symbolic_operand (rtx op)
2256 if (GET_CODE (op) != SYMBOL_REF)
2258 return SYMBOL_REF_TLS_MODEL (op);
2261 /* Split DImode access register reference REG (on 64-bit) into its constituent
2262 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2263 gen_highpart cannot be used as they assume all registers are word-sized,
2264 while our access registers have only half that size. */
/* Split DImode access-register pair REG (64-bit only) into SImode low and
   high halves, stored to *LO and *HI.  gen_lowpart/gen_highpart cannot be
   used because access registers are half the word size; REG must be the
   even register of an aligned pair.  */
2267 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2269 gcc_assert (TARGET_64BIT);
2270 gcc_assert (ACCESS_REG_P (reg));
2271 gcc_assert (GET_MODE (reg) == DImode);
2272 gcc_assert (!(REGNO (reg) & 1));
/* Low part is the odd (second) register, high part the even one.  */
2274 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2275 *hi = gen_rtx_REG (SImode, REGNO (reg));
2278 /* Return true if OP contains a symbol reference */
/* Return true if OP contains a SYMBOL_REF or LABEL_REF anywhere in its
   RTL tree, walking all 'e' (expression) and vector operands recursively.
   NOTE(review): local declarations, the vector-format test and the final
   return are elided in this line-sampled view.  */
2281 symbolic_reference_mentioned_p (rtx op)
2286 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2289 fmt = GET_RTX_FORMAT (GET_CODE (op));
2290 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2296 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2297 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2301 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2308 /* Return true if OP contains a reference to a thread-local symbol. */
/* Return true if OP contains a reference to a thread-local symbol anywhere
   in its RTL tree; same recursive walk as symbolic_reference_mentioned_p
   but testing each SYMBOL_REF with tls_symbolic_operand.
   NOTE(review): local declarations and the final return are elided in
   this line-sampled view.  */
2311 tls_symbolic_reference_mentioned_p (rtx op)
2316 if (GET_CODE (op) == SYMBOL_REF)
2317 return tls_symbolic_operand (op);
2319 fmt = GET_RTX_FORMAT (GET_CODE (op));
2320 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2326 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2327 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2331 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2339 /* Return true if OP is a legitimate general operand when
2340 generating PIC code. It is given that flag_pic is on
2341 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* Return true if OP is a legitimate general operand under PIC: only
   non-symbolic constants qualify; symbolic constants must go through
   emit_symbolic_move.  The return statements are elided from this view.  */
2344 legitimate_pic_operand_p (rtx op)
2346 /* Accept all non-symbolic constants. */
2347 if (!SYMBOLIC_CONST (op))
2350 /* Reject everything else; must be handled
2351 via emit_symbolic_move. */
2355 /* Returns true if the constant value OP is a legitimate general operand.
2356 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* Return true if constant OP is a legitimate general operand: any
   non-symbolic constant, immediate LARL operands on z/Arch CPUs, and
   symbolic constants other than TLS symbols.
   NOTE(review): return statements and the flag_pic test are elided in
   this line-sampled view.  */
2359 legitimate_constant_p (rtx op)
2361 /* Accept all non-symbolic constants. */
2362 if (!SYMBOLIC_CONST (op))
2365 /* Accept immediate LARL operands. */
2366 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2369 /* Thread-local symbols are never legal constants. This is
2370 so that emit_call knows that computing such addresses
2371 might require a function call. */
2372 if (TLS_SYMBOLIC_CONST (op))
2375 /* In the PIC case, symbolic constants must *not* be
2376 forced into the literal pool. We accept them here,
2377 so that they will be handled by emit_symbolic_move. */
2381 /* All remaining non-PIC symbolic constants are
2382 forced into the literal pool. */
2386 /* Determine if it's legal to put X into the constant pool. This
2387 is not possible if X contains the address of a symbol that is
2388 not constant (TLS) or not known at final link time (PIC). */
/* Determine whether X may NOT be put into the constant pool: true for
   addresses of symbols that are not constant (TLS) or not known at final
   link time (PIC), recursing through CONST and PLUS.
   NOTE(review): several case labels, returns and the UNSPEC case list are
   elided in this line-sampled view.  */
2391 s390_cannot_force_const_mem (rtx x)
2393 switch (GET_CODE (x))
2397 /* Accept all non-symbolic constants. */
2401 /* Labels are OK iff we are non-PIC. */
2402 return flag_pic != 0;
2405 /* 'Naked' TLS symbol references are never OK,
2406 non-TLS symbols are OK iff we are non-PIC. */
2407 if (tls_symbolic_operand (x))
2410 return flag_pic != 0;
/* CONST wrapper: recurse into the operand.  */
2413 return s390_cannot_force_const_mem (XEXP (x, 0));
/* PLUS: unpoolable if either operand is.  */
2416 return s390_cannot_force_const_mem (XEXP (x, 0))
2417 || s390_cannot_force_const_mem (XEXP (x, 1));
2420 switch (XINT (x, 1))
2422 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2423 case UNSPEC_LTREL_OFFSET:
2431 case UNSPEC_GOTNTPOFF:
2432 case UNSPEC_INDNTPOFF:
2435 /* If the literal pool shares the code section, be put
2436 execute template placeholders into the pool as well. */
2438 return TARGET_CPU_ZARCH;
2450 /* Returns true if the constant value OP is a legitimate general
2451 operand during and after reload. The difference to
2452 legitimate_constant_p is that this function will not accept
2453 a constant that would need to be forced to the literal pool
2454 before it can be used as operand. */
/* Return true if constant OP can be used as an operand during and after
   reload without first being forced to the literal pool: la(y) immediates,
   l(g)hi/l(g)fi immediates, lliXX/llilf-style single-part constants, larl
   operands, lzXX FP zeros, and splittable double-word constants.
   NOTE(review): return statements and some guard conditions are elided in
   this line-sampled view.  */
2457 legitimate_reload_constant_p (rtx op)
2459 /* Accept la(y) operands. */
2460 if (GET_CODE (op) == CONST_INT
2461 && DISP_IN_RANGE (INTVAL (op)))
2464 /* Accept l(g)hi/l(g)fi operands. */
2465 if (GET_CODE (op) == CONST_INT
2466 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2469 /* Accept lliXX operands. */
2471 && GET_CODE (op) == CONST_INT
2472 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2473 && s390_single_part (op, word_mode, HImode, 0) >= 0)
/* Constants with a single non-zero 32-bit part (elided guard above).  */
2477 && GET_CODE (op) == CONST_INT
2478 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2479 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2482 /* Accept larl operands. */
2483 if (TARGET_CPU_ZARCH
2484 && larl_operand (op, VOIDmode))
2487 /* Accept lzXX operands. */
2488 if (GET_CODE (op) == CONST_DOUBLE
2489 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2492 /* Accept double-word operands that can be split. */
2493 if (GET_CODE (op) == CONST_INT
2494 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2496 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2497 rtx hi = operand_subword (op, 0, 0, dword_mode);
2498 rtx lo = operand_subword (op, 1, 0, dword_mode);
/* Both halves must themselves be reload-legitimate.  */
2499 return legitimate_reload_constant_p (hi)
2500 && legitimate_reload_constant_p (lo);
2503 /* Everything else cannot be handled without reload. */
2507 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
2508 return the class of reg to actually use. */
/* Given OP being reloaded into a register of class CLASS, return the class
   actually to use: reloadable constants keep CLASS, symbolic constants and
   PLUS expressions prefer ADDR_REGS (or are rejected if CLASS does not
   contain ADDR_REGS, e.g. FP_REGS).
   NOTE(review): case labels and return values are elided in this view.  */
2511 s390_preferred_reload_class (rtx op, enum reg_class class)
2513 switch (GET_CODE (op))
2515 /* Constants we cannot reload must be forced into the
2520 if (legitimate_reload_constant_p (op))
2525 /* If a symbolic constant or a PLUS is reloaded,
2526 it is most likely being used as an address, so
2527 prefer ADDR_REGS. If 'class' is not a superset
2528 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2533 if (reg_class_subset_p (ADDR_REGS, class))
2545 /* Return the register class of a scratch register needed to
2546 load IN into a register of class CLASS in MODE.
2548 We need a temporary when loading a PLUS expression which
2549 is not a legitimate operand of the LOAD ADDRESS instruction. */
/* Return the class of a scratch register needed to load IN into a register
   of class CLASS in MODE: a PLUS that is not a valid LOAD ADDRESS operand,
   or any load into the CC class, needs a GENERAL_REGS scratch.
   NOTE(review): the return for the PLUS case and the final NO_REGS return
   are elided in this view.  */
2552 s390_secondary_input_reload_class (enum reg_class class,
2553 enum machine_mode mode, rtx in)
2555 if (s390_plus_operand (in, mode))
2558 if (reg_classes_intersect_p (CC_REGS, class))
2559 return GENERAL_REGS;
2564 /* Return the register class of a scratch register needed to
2565 store a register of class CLASS in MODE into OUT:
2567 We need a temporary when storing a double-word to a
2568 non-offsettable memory address. */
/* Return the class of a scratch register needed to store a register of
   class CLASS in MODE into OUT: storing a double-word general register to
   a non-offsettable (double-indexed, out-of-range-displacement) memory
   address, or any store from the CC class, needs a GENERAL_REGS scratch.
   NOTE(review): the return for the memory case and the final NO_REGS
   return are elided in this view.  */
2571 s390_secondary_output_reload_class (enum reg_class class,
2572 enum machine_mode mode, rtx out)
/* Double-word store (TImode on 64-bit; DImode/DFmode on 31-bit) whose
   address has the form (plus (plus ...) const) with a displacement that
   would go out of range for the second word.  */
2574 if ((TARGET_64BIT ? mode == TImode
2575 : (mode == DImode || mode == DFmode))
2576 && reg_classes_intersect_p (GENERAL_REGS, class)
2577 && GET_CODE (out) == MEM
2578 && GET_CODE (XEXP (out, 0)) == PLUS
2579 && GET_CODE (XEXP (XEXP (out, 0), 0)) == PLUS
2580 && GET_CODE (XEXP (XEXP (out, 0), 1)) == CONST_INT
2581 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (out, 0), 1))
2582 + GET_MODE_SIZE (mode) - 1))
2585 if (reg_classes_intersect_p (CC_REGS, class))
2586 return GENERAL_REGS;
2591 /* Generate code to load SRC, which is PLUS that is not a
2592 legitimate operand for the LA instruction, into TARGET.
2593 SCRATCH may be used as scratch register. */
/* Generate code to load SRC -- a PLUS that is not a legitimate LA
   operand -- into TARGET, using SCRATCH as a temporary.  One of the PLUS
   operands that cannot serve as an address register is first moved into
   SCRATCH, then the resulting sum is loaded via s390_load_address.
   NOTE(review): some local declarations, braces and reassignments of
   sum1/sum2 to SCRATCH are elided in this line-sampled view.  */
2596 s390_expand_plus_operand (rtx target, rtx src,
2600 struct s390_address ad;
2602 /* src must be a PLUS; get its two operands. */
2603 gcc_assert (GET_CODE (src) == PLUS);
2604 gcc_assert (GET_MODE (src) == Pmode);
2606 /* Check if any of the two operands is already scheduled
2607 for replacement by reload. This can happen e.g. when
2608 float registers occur in an address. */
2609 sum1 = find_replacement (&XEXP (src, 0));
2610 sum2 = find_replacement (&XEXP (src, 1));
2611 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2613 /* If the address is already strictly valid, there's nothing to do. */
2614 if (!s390_decompose_address (src, &ad)
2615 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2616 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2618 /* Otherwise, one of the operands cannot be an address register;
2619 we reload its value into the scratch register. */
/* Hard regs 1-15 are the valid address registers; anything else is
   moved into SCRATCH.  */
2620 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
2622 emit_move_insn (scratch, sum1);
2625 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
2627 emit_move_insn (scratch, sum2);
2631 /* According to the way these invalid addresses are generated
2632 in reload.c, it should never happen (at least on s390) that
2633 *neither* of the PLUS components, after find_replacements
2634 was applied, is an address register. */
2635 if (sum1 == scratch && sum2 == scratch)
2641 src = gen_rtx_PLUS (Pmode, sum1, sum2);
2644 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
2645 is only ever performed on addresses, so we can mark the
2646 sum as legitimate for LA in any case. */
2647 s390_load_address (target, src);
2651 /* Return true if ADDR is a valid memory address.
2652 STRICT specifies whether strict register checking applies. */
/* Return true if ADDR is a valid memory address.  STRICT selects strict
   (hard-reg) versus non-strict register checking on the decomposed base
   and index registers.
   NOTE(review): the strict/non-strict branch structure and return
   statements are elided in this line-sampled view.  */
2655 legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2656 rtx addr, int strict)
2658 struct s390_address ad;
2659 if (!s390_decompose_address (addr, &ad))
/* Strict checks (reload-time register validity).  */
2664 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2666 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
/* Non-strict checks (pseudos still allowed).  */
2671 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2673 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2680 /* Return true if OP is a valid operand for the LA instruction.
2681 In 31-bit, we need to prove that the result is used as an
2682 address, as LA performs only a 31-bit addition. */
/* Return true if OP is a valid operand for LOAD ADDRESS.  In 31-bit mode
   the result must be provably used as an address (addr.pointer), since LA
   performs only a 31-bit addition.  */
2685 legitimate_la_operand_p (rtx op)
2687 struct s390_address addr;
2688 if (!s390_decompose_address (op, &addr))
2691 return (TARGET_64BIT || addr.pointer);
2694 /* Return true if it is valid *and* preferable to use LA to
2695 compute the sum of OP1 and OP2. */
/* Return true if it is both valid and preferable to use LA to compute the
   sum of OP1 and OP2: the sum must be a strictly valid pointer address and
   at least one component must be a known pointer register.
   NOTE(review): return statements are elided in this line-sampled view.  */
2698 preferred_la_operand_p (rtx op1, rtx op2)
2700 struct s390_address addr;
/* Fold the two operands into a single address expression.  */
2702 if (op2 != const0_rtx)
2703 op1 = gen_rtx_PLUS (Pmode, op1, op2);
2705 if (!s390_decompose_address (op1, &addr))
2707 if (addr.base && !REG_OK_FOR_BASE_STRICT_P (addr.base))
2709 if (addr.indx && !REG_OK_FOR_INDEX_STRICT_P (addr.indx))
2712 if (!TARGET_64BIT && !addr.pointer)
2718 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2719 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2725 /* Emit a forced load-address operation to load SRC into DST.
2726 This will use the LOAD ADDRESS instruction even in situations
2727 where legitimate_la_operand_p (SRC) returns false. */
/* Emit a forced load-address of SRC into DST, using the LA instruction
   even where legitimate_la_operand_p (SRC) is false; the condition
   selecting between the two emit paths is elided from this view.  */
2730 s390_load_address (rtx dst, rtx src)
2733 emit_move_insn (dst, src);
2735 emit_insn (gen_force_la_31 (dst, src));
2738 /* Return a legitimate reference for ORIG (an address) using the
2739 register REG. If REG is 0, a new pseudo is generated.
2741 There are two types of references that must be handled:
2743 1. Global data references must load the address from the GOT, via
2744 the PIC reg. An insn is emitted to do this load, and the reg is
2747 2. Static data references, constant pool addresses, and code labels
2748 compute the address as an offset from the GOT, whose base is in
2749 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
2750 differentiate them from global data objects. The returned
2751 address is the PIC reg + an unspec constant.
2753 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2754 reg also appears in the address. */
2757 legitimize_pic_address (rtx orig, rtx reg)
2763 if (GET_CODE (addr) == LABEL_REF
2764 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
2766 /* This is a local symbol. */
2767 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
2769 /* Access local symbols PC-relative via LARL.
2770 This is the same as in the non-PIC case, so it is
2771 handled automatically ... */
2775 /* Access local symbols relative to the GOT. */
2777 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2779 if (reload_in_progress || reload_completed)
2780 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2782 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
2783 addr = gen_rtx_CONST (Pmode, addr);
2784 addr = force_const_mem (Pmode, addr);
2785 emit_move_insn (temp, addr);
2787 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2790 s390_load_address (reg, new);
2795 else if (GET_CODE (addr) == SYMBOL_REF)
2798 reg = gen_reg_rtx (Pmode);
2802 /* Assume GOT offset < 4k. This is handled the same way
2803 in both 31- and 64-bit code (@GOT). */
2805 if (reload_in_progress || reload_completed)
2806 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2808 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2809 new = gen_rtx_CONST (Pmode, new);
2810 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2811 new = gen_const_mem (Pmode, new);
2812 emit_move_insn (reg, new);
2815 else if (TARGET_CPU_ZARCH)
2817 /* If the GOT offset might be >= 4k, we determine the position
2818 of the GOT entry via a PC-relative LARL (@GOTENT). */
2820 rtx temp = gen_reg_rtx (Pmode);
2822 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
2823 new = gen_rtx_CONST (Pmode, new);
2824 emit_move_insn (temp, new);
2826 new = gen_const_mem (Pmode, temp);
2827 emit_move_insn (reg, new);
2832 /* If the GOT offset might be >= 4k, we have to load it
2833 from the literal pool (@GOT). */
2835 rtx temp = gen_reg_rtx (Pmode);
2837 if (reload_in_progress || reload_completed)
2838 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2840 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
2841 addr = gen_rtx_CONST (Pmode, addr);
2842 addr = force_const_mem (Pmode, addr);
2843 emit_move_insn (temp, addr);
2845 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2846 new = gen_const_mem (Pmode, new);
2847 emit_move_insn (reg, new);
2853 if (GET_CODE (addr) == CONST)
2855 addr = XEXP (addr, 0);
2856 if (GET_CODE (addr) == UNSPEC)
2858 gcc_assert (XVECLEN (addr, 0) == 1);
2859 switch (XINT (addr, 1))
2861 /* If someone moved a GOT-relative UNSPEC
2862 out of the literal pool, force them back in. */
2865 new = force_const_mem (Pmode, orig);
2868 /* @GOT is OK as is if small. */
2871 new = force_const_mem (Pmode, orig);
2874 /* @GOTENT is OK as is. */
2878 /* @PLT is OK as is on 64-bit, must be converted to
2879 GOT-relative @PLTOFF on 31-bit. */
2881 if (!TARGET_CPU_ZARCH)
2883 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2885 if (reload_in_progress || reload_completed)
2886 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2888 addr = XVECEXP (addr, 0, 0);
2889 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
2891 addr = gen_rtx_CONST (Pmode, addr);
2892 addr = force_const_mem (Pmode, addr);
2893 emit_move_insn (temp, addr);
2895 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2898 s390_load_address (reg, new);
2904 /* Everything else cannot happen. */
2910 gcc_assert (GET_CODE (addr) == PLUS);
2912 if (GET_CODE (addr) == PLUS)
2914 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2915 /* Check first to see if this is a constant offset
2916 from a local symbol reference. */
2917 if ((GET_CODE (op0) == LABEL_REF
2918 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
2919 && GET_CODE (op1) == CONST_INT)
2921 if (TARGET_CPU_ZARCH && larl_operand (op0, VOIDmode))
2923 if (INTVAL (op1) & 1)
2925 /* LARL can't handle odd offsets, so emit a
2926 pair of LARL and LA. */
2927 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2929 if (!DISP_IN_RANGE (INTVAL (op1)))
2931 int even = INTVAL (op1) - 1;
2932 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2933 op0 = gen_rtx_CONST (Pmode, op0);
2937 emit_move_insn (temp, op0);
2938 new = gen_rtx_PLUS (Pmode, temp, op1);
2942 s390_load_address (reg, new);
2948 /* If the offset is even, we can just use LARL.
2949 This will happen automatically. */
2954 /* Access local symbols relative to the GOT. */
2956 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2958 if (reload_in_progress || reload_completed)
2959 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2961 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
2963 addr = gen_rtx_PLUS (Pmode, addr, op1);
2964 addr = gen_rtx_CONST (Pmode, addr);
2965 addr = force_const_mem (Pmode, addr);
2966 emit_move_insn (temp, addr);
2968 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2971 s390_load_address (reg, new);
2977 /* Now, check whether it is a GOT relative symbol plus offset
2978 that was pulled out of the literal pool. Force it back in. */
2980 else if (GET_CODE (op0) == UNSPEC
2981 && GET_CODE (op1) == CONST_INT
2982 && XINT (op0, 1) == UNSPEC_GOTOFF)
2984 gcc_assert (XVECLEN (op0, 0) == 1);
2986 new = force_const_mem (Pmode, orig);
2989 /* Otherwise, compute the sum. */
2992 base = legitimize_pic_address (XEXP (addr, 0), reg);
2993 new = legitimize_pic_address (XEXP (addr, 1),
2994 base == reg ? NULL_RTX : reg);
2995 if (GET_CODE (new) == CONST_INT)
2996 new = plus_constant (base, INTVAL (new));
2999 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
3001 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
3002 new = XEXP (new, 1);
3004 new = gen_rtx_PLUS (Pmode, base, new);
3007 if (GET_CODE (new) == CONST)
3008 new = XEXP (new, 0);
3009 new = force_operand (new, 0);
3016 /* Load the thread pointer into a register. */
/* NOTE(review): this excerpt elides lines (return type, braces and the
   final return of TP are not visible); only comments are added here.  */
3019 s390_get_thread_pointer (void)
3021 rtx tp = gen_reg_rtx (Pmode);
/* Copy the hardware thread pointer (TP_REGNUM) into a fresh pseudo.  */
3023 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
/* Tell the rest of the compiler TP is a pointer, word-aligned.  */
3024 mark_reg_pointer (tp, BITS_PER_WORD);
3029 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3030 in s390_tls_symbol which always refers to __tls_get_offset.
3031 The returned offset is written to RESULT_REG and an USE rtx is
3032 generated for TLS_CALL. */
/* Cached SYMBOL_REF for __tls_get_offset; GTY(()) so it survives GC.  */
3034 static GTY(()) rtx s390_tls_symbol;
3037 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
/* TLS calls through the GOT only make sense in PIC code.  */
3041 gcc_assert (flag_pic);
/* Lazily create the __tls_get_offset symbol on first use.  */
3043 if (!s390_tls_symbol)
3044 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3046 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3047 gen_rtx_REG (Pmode, RETURN_REGNUM));
/* Record that RESULT_REG is used as an argument of the call, and mark
   the call const/pure so identical TLS calls may be combined.  */
3049 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3050 CONST_OR_PURE_CALL_P (insn) = 1;
3053 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3054 this (thread-local) address. REG may be used as temporary. */
/* NOTE(review): excerpt elides lines (braces, returns, some case labels);
   only comments are added below.  Dispatches on the TLS access model
   selected for ADDR and returns an rtx computing its address.  */
3057 legitimize_tls_address (rtx addr, rtx reg)
3059 rtx new, tls_call, temp, base, r2, insn;
3061 if (GET_CODE (addr) == SYMBOL_REF)
3062 switch (tls_symbolic_operand (addr))
/* General dynamic: call __tls_get_offset with a @TLSGD argument in r2,
   then add the result to the thread pointer.  */
3064 case TLS_MODEL_GLOBAL_DYNAMIC:
3066 r2 = gen_rtx_REG (Pmode, 2);
3067 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3068 new = gen_rtx_CONST (Pmode, tls_call);
3069 new = force_const_mem (Pmode, new);
3070 emit_move_insn (r2, new);
3071 s390_emit_tls_call_insn (r2, tls_call);
3072 insn = get_insns ();
/* Attach an NTPOFF equivalence so the call sequence can be CSE'd.  */
3075 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3076 temp = gen_reg_rtx (Pmode);
3077 emit_libcall_block (insn, temp, r2, new);
3079 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3082 s390_load_address (reg, new);
/* Local dynamic: one __tls_get_offset call yields the module base;
   individual symbols are reached via @DTPOFF offsets from it.  */
3087 case TLS_MODEL_LOCAL_DYNAMIC:
3089 r2 = gen_rtx_REG (Pmode, 2);
3090 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3091 new = gen_rtx_CONST (Pmode, tls_call);
3092 new = force_const_mem (Pmode, new);
3093 emit_move_insn (r2, new);
3094 s390_emit_tls_call_insn (r2, tls_call);
3095 insn = get_insns ();
3098 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3099 temp = gen_reg_rtx (Pmode);
3100 emit_libcall_block (insn, temp, r2, new);
3102 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3103 base = gen_reg_rtx (Pmode);
3104 s390_load_address (base, new);
3106 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3107 new = gen_rtx_CONST (Pmode, new);
3108 new = force_const_mem (Pmode, new);
3109 temp = gen_reg_rtx (Pmode);
3110 emit_move_insn (temp, new);
3112 new = gen_rtx_PLUS (Pmode, base, temp);
3115 s390_load_address (reg, new);
/* Initial exec: the negated TP offset is loaded from the GOT; four
   sub-strategies depending on PIC, CPU and GOT-offset size.  */
3120 case TLS_MODEL_INITIAL_EXEC:
3123 /* Assume GOT offset < 4k. This is handled the same way
3124 in both 31- and 64-bit code. */
/* The GOT pointer is needed past reload; flag it live by hand.  */
3126 if (reload_in_progress || reload_completed)
3127 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3129 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3130 new = gen_rtx_CONST (Pmode, new);
3131 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
3132 new = gen_const_mem (Pmode, new);
3133 temp = gen_reg_rtx (Pmode);
3134 emit_move_insn (temp, new);
3136 else if (TARGET_CPU_ZARCH)
3138 /* If the GOT offset might be >= 4k, we determine the position
3139 of the GOT entry via a PC-relative LARL. */
3141 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3142 new = gen_rtx_CONST (Pmode, new);
3143 temp = gen_reg_rtx (Pmode);
3144 emit_move_insn (temp, new);
3146 new = gen_const_mem (Pmode, temp);
3147 temp = gen_reg_rtx (Pmode);
3148 emit_move_insn (temp, new);
3152 /* If the GOT offset might be >= 4k, we have to load it
3153 from the literal pool. */
3155 if (reload_in_progress || reload_completed)
3156 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
3158 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3159 new = gen_rtx_CONST (Pmode, new);
3160 new = force_const_mem (Pmode, new);
3161 temp = gen_reg_rtx (Pmode);
3162 emit_move_insn (temp, new);
3164 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3165 new = gen_const_mem (Pmode, new);
/* Wrap the GOT load in UNSPEC_TLS_LOAD so later passes keep the
   load tied to ADDR (it must not be rematerialized differently).  */
3167 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3168 temp = gen_reg_rtx (Pmode);
3169 emit_insn (gen_rtx_SET (Pmode, temp, new));
3173 /* In position-dependent code, load the absolute address of
3174 the GOT entry from the literal pool. */
3176 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3177 new = gen_rtx_CONST (Pmode, new);
3178 new = force_const_mem (Pmode, new);
3179 temp = gen_reg_rtx (Pmode);
3180 emit_move_insn (temp, new);
3183 new = gen_const_mem (Pmode, new);
3184 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
3185 temp = gen_reg_rtx (Pmode);
3186 emit_insn (gen_rtx_SET (Pmode, temp, new));
3189 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3192 s390_load_address (reg, new);
/* Local exec: the @NTPOFF offset is a link-time constant; load it from
   the literal pool and add the thread pointer.  */
3197 case TLS_MODEL_LOCAL_EXEC:
3198 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3199 new = gen_rtx_CONST (Pmode, new);
3200 new = force_const_mem (Pmode, new);
3201 temp = gen_reg_rtx (Pmode);
3202 emit_move_insn (temp, new);
3204 new = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3207 s390_load_address (reg, new);
/* Pre-wrapped TLS unspecs that are already legitimate.  */
3216 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3218 switch (XINT (XEXP (addr, 0), 1))
3220 case UNSPEC_INDNTPOFF:
3221 gcc_assert (TARGET_CPU_ZARCH);
/* TLS symbol plus constant offset: legitimize the symbol part and
   re-apply the offset afterwards.  */
3230 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3231 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3233 new = XEXP (XEXP (addr, 0), 0);
3234 if (GET_CODE (new) != SYMBOL_REF)
3235 new = gen_rtx_CONST (Pmode, new);
3237 new = legitimize_tls_address (new, reg);
3238 new = plus_constant (new, INTVAL (XEXP (XEXP (addr, 0), 1)));
3239 new = force_operand (new, 0);
3243 gcc_unreachable (); /* for now ... */
3248 /* Emit insns to move operands[1] into operands[0]. */
3251 emit_symbolic_move (rtx *operands)
/* After reload no new pseudos may be created, so reuse the destination
   as the scratch register in that case.  */
3253 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic constant cannot be stored to memory directly; force it
   into a register first.  */
3255 if (GET_CODE (operands[0]) == MEM)
3256 operands[1] = force_reg (Pmode, operands[1]);
/* Otherwise legitimize the source: TLS symbols and PIC symbols each
   need their own address-materialization sequence.  */
3257 else if (TLS_SYMBOLIC_CONST (operands[1]))
3258 operands[1] = legitimize_tls_address (operands[1], temp);
3260 operands[1] = legitimize_pic_address (operands[1], temp);
3263 /* Try machine-dependent ways of modifying an illegitimate address X
3264 to be legitimate. If we find one, return the new, valid address.
3266 OLDX is the address as it was before break_out_memory_refs was called.
3267 In some cases it is useful to look at this to decide what needs to be done.
3269 MODE is the mode of the operand pointed to by X.
3271 When -fpic is used, special handling is needed for symbolic references.
3272 See comments by legitimize_pic_address for details. */
/* NOTE(review): excerpt elides lines (braces, early returns); only
   comments are added below.  */
3275 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3276 enum machine_mode mode ATTRIBUTE_UNUSED)
3278 rtx constant_term = const0_rtx;
/* TLS and PIC symbolic addresses get their dedicated expanders first;
   if the result is already legitimate we are done.  */
3280 if (TLS_SYMBOLIC_CONST (x))
3282 x = legitimize_tls_address (x, 0);
3284 if (legitimate_address_p (mode, x, FALSE))
3289 if (SYMBOLIC_CONST (x)
3290 || (GET_CODE (x) == PLUS
3291 && (SYMBOLIC_CONST (XEXP (x, 0))
3292 || SYMBOLIC_CONST (XEXP (x, 1)))))
3293 x = legitimize_pic_address (x, 0);
3295 if (legitimate_address_p (mode, x, FALSE))
/* Strip off a trailing constant into CONSTANT_TERM for separate
   treatment below.  */
3299 x = eliminate_constant_term (x, &constant_term);
3301 /* Optimize loading of large displacements by splitting them
3302 into the multiple of 4K and the rest; this allows the
3303 former to be CSE'd if possible.
3305 Don't do this if the displacement is added to a register
3306 pointing into the stack frame, as the offsets will
3307 change later anyway. */
3309 if (GET_CODE (constant_term) == CONST_INT
3310 && !TARGET_LONG_DISPLACEMENT
3311 && !DISP_IN_RANGE (INTVAL (constant_term))
3312 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* LOWER is the 12-bit short displacement; UPPER the 4K-multiple rest
   (XOR with LOWER clears exactly those low twelve bits).  */
3314 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3315 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3317 rtx temp = gen_reg_rtx (Pmode);
3318 rtx val = force_operand (GEN_INT (upper), temp);
3320 emit_move_insn (temp, val);
3322 x = gen_rtx_PLUS (Pmode, x, temp);
3323 constant_term = GEN_INT (lower);
/* For reg+nonreg sums, force the non-register summand into a pseudo
   so the PLUS has base-register form.  */
3326 if (GET_CODE (x) == PLUS)
3328 if (GET_CODE (XEXP (x, 0)) == REG)
3330 rtx temp = gen_reg_rtx (Pmode);
3331 rtx val = force_operand (XEXP (x, 1), temp);
3333 emit_move_insn (temp, val);
3335 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3338 else if (GET_CODE (XEXP (x, 1)) == REG)
3340 rtx temp = gen_reg_rtx (Pmode);
3341 rtx val = force_operand (XEXP (x, 0), temp);
3343 emit_move_insn (temp, val);
3345 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
/* Re-attach any remaining constant displacement.  */
3349 if (constant_term != const0_rtx)
3350 x = gen_rtx_PLUS (Pmode, x, constant_term);
3355 /* Try a machine-dependent way of reloading an illegitimate address AD
3356 operand. If we find one, push the reload and return the new address.
3358 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3359 and TYPE is the reload type of the current reload. */
3362 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3363 int opnum, int type)
/* Only worthwhile when optimizing and the target lacks 20-bit
   long displacements.  */
3365 if (!optimize || TARGET_LONG_DISPLACEMENT)
/* Try constant-folding the sum first.  */
3368 if (GET_CODE (ad) == PLUS)
3370 rtx tem = simplify_binary_operation (PLUS, Pmode,
3371 XEXP (ad, 0), XEXP (ad, 1));
/* reg + const with an out-of-range displacement: split it into a
   4K-aligned part (reloaded into a base register) plus a short
   12-bit displacement.  */
3376 if (GET_CODE (ad) == PLUS
3377 && GET_CODE (XEXP (ad, 0)) == REG
3378 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3379 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
3381 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3382 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
/* If UPPER cannot be loaded as an immediate, take it from the
   literal pool instead.  */
3385 cst = GEN_INT (upper);
3386 if (!legitimate_reload_constant_p (cst))
3387 cst = force_const_mem (Pmode, cst);
3389 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3390 new = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
/* Ask reload to put the base+upper sum into a base register.  */
3392 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3393 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3394 opnum, (enum reload_type) type);
3401 /* Emit code to move LEN bytes from DST to SRC. */
/* NOTE(review): the comment above has source and destination reversed
   relative to the argument order (DST, SRC, LEN) -- data moves from
   SRC to DST.  Three strategies: a single MVC for constant LEN <= 256,
   MVCLE when available, otherwise a 256-byte-block loop.  */
3404 s390_expand_movmem (rtx dst, rtx src, rtx len)
3406 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
/* movmem_short encodes the length minus one (MVC length field).  */
3408 if (INTVAL (len) > 0)
3409 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3412 else if (TARGET_MVCLE)
3414 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3419 rtx dst_addr, src_addr, count, blocks, temp;
3420 rtx loop_start_label = gen_label_rtx ();
3421 rtx loop_end_label = gen_label_rtx ();
3422 rtx end_label = gen_label_rtx ();
3423 enum machine_mode mode;
3425 mode = GET_MODE (len);
3426 if (mode == VOIDmode)
3429 dst_addr = gen_reg_rtx (Pmode);
3430 src_addr = gen_reg_rtx (Pmode);
3431 count = gen_reg_rtx (mode);
3432 blocks = gen_reg_rtx (mode);
/* Nothing to do for a zero length.  */
3434 convert_move (count, len, 1);
3435 emit_cmp_and_jump_insns (count, const0_rtx,
3436 EQ, NULL_RTX, mode, 1, end_label);
3438 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3439 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3440 dst = change_address (dst, VOIDmode, dst_addr);
3441 src = change_address (src, VOIDmode, src_addr);
/* COUNT-1 >> 8 gives the number of full 256-byte blocks.  */
3443 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3445 emit_move_insn (count, temp);
3447 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3449 emit_move_insn (blocks, temp);
3451 emit_cmp_and_jump_insns (blocks, const0_rtx,
3452 EQ, NULL_RTX, mode, 1, loop_end_label);
3454 emit_label (loop_start_label);
/* Copy one 256-byte block per iteration, bumping both addresses.  */
3456 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3457 s390_load_address (dst_addr,
3458 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3459 s390_load_address (src_addr,
3460 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3462 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3464 emit_move_insn (blocks, temp);
3466 emit_cmp_and_jump_insns (blocks, const0_rtx,
3467 EQ, NULL_RTX, mode, 1, loop_end_label);
3469 emit_jump (loop_start_label);
3470 emit_label (loop_end_label);
/* Copy the remaining 1..256 bytes (COUNT holds length-1 mod 256).  */
3472 emit_insn (gen_movmem_short (dst, src,
3473 convert_to_mode (Pmode, count, 1)));
3474 emit_label (end_label);
3478 /* Emit code to set LEN bytes at DST to VAL.
3479 Make use of clrmem if VAL is zero. */
3482 s390_expand_setmem (rtx dst, rtx len, rtx val)
3484 gcc_assert (GET_CODE (len) != CONST_INT || INTVAL (len) > 0);
3485 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
/* Small constant lengths: XC-clear for zero, otherwise store the
   first byte and propagate it with an overlapping MVC.  */
3487 if (GET_CODE (len) == CONST_INT && INTVAL (len) <= 257)
3489 if (val == const0_rtx && INTVAL (len) <= 256)
3490 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
3493 /* Initialize memory by storing the first byte. */
3494 emit_move_insn (adjust_address (dst, QImode, 0), val);
3496 if (INTVAL (len) > 1)
3498 /* Initiate 1 byte overlap move.
3499 The first byte of DST is propagated through DSTP1.
3500 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
3501 DST is set to size 1 so the rest of the memory location
3502 does not count as source operand. */
3503 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
3504 set_mem_size (dst, const1_rtx);
3506 emit_insn (gen_movmem_short (dstp1, dst,
3507 GEN_INT (INTVAL (len) - 2)));
3512 else if (TARGET_MVCLE)
/* MVCLE pads with VAL, so any length can be handled in one insn.  */
3514 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
3515 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
/* General case: loop over 256-byte blocks, same structure as
   s390_expand_movmem.  */
3520 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
3521 rtx loop_start_label = gen_label_rtx ();
3522 rtx loop_end_label = gen_label_rtx ();
3523 rtx end_label = gen_label_rtx ();
3524 enum machine_mode mode;
3526 mode = GET_MODE (len);
3527 if (mode == VOIDmode)
3530 dst_addr = gen_reg_rtx (Pmode);
3531 src_addr = gen_reg_rtx (Pmode);
3532 count = gen_reg_rtx (mode);
3533 blocks = gen_reg_rtx (mode);
3535 convert_move (count, len, 1);
3536 emit_cmp_and_jump_insns (count, const0_rtx,
3537 EQ, NULL_RTX, mode, 1, end_label);
3539 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3540 dst = change_address (dst, VOIDmode, dst_addr);
/* Zero fill uses XC directly; non-zero fill seeds the first byte and
   propagates it via a one-byte-overlap MVC (hence COUNT -= 2 below:
   one for the seed byte, one for the length-minus-one encoding).  */
3542 if (val == const0_rtx)
3543 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3546 dstp1 = adjust_address (dst, VOIDmode, 1);
3547 set_mem_size (dst, const1_rtx);
3549 /* Initialize memory by storing the first byte. */
3550 emit_move_insn (adjust_address (dst, QImode, 0), val);
3552 /* If count is 1 we are done. */
3553 emit_cmp_and_jump_insns (count, const1_rtx,
3554 EQ, NULL_RTX, mode, 1, end_label);
3556 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0);
3559 emit_move_insn (count, temp);
3561 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3563 emit_move_insn (blocks, temp);
3565 emit_cmp_and_jump_insns (blocks, const0_rtx,
3566 EQ, NULL_RTX, mode, 1, loop_end_label);
3568 emit_label (loop_start_label);
3570 if (val == const0_rtx)
3571 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
3573 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
3574 s390_load_address (dst_addr,
3575 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3577 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3579 emit_move_insn (blocks, temp);
3581 emit_cmp_and_jump_insns (blocks, const0_rtx,
3582 EQ, NULL_RTX, mode, 1, loop_end_label);
3584 emit_jump (loop_start_label);
3585 emit_label (loop_end_label);
/* Handle the remaining tail bytes.  */
3587 if (val == const0_rtx)
3588 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
3590 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
3591 emit_label (end_label);
3595 /* Emit code to compare LEN bytes at OP0 with those at OP1,
3596 and return the result in TARGET. */
3599 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
/* CC_REGNUM in CCUmode carries the unsigned comparison outcome of the
   CLC/CLCLE instructions; CMPINT converts it to an integer result.  */
3601 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
3604 /* As the result of CMPINT is inverted compared to what we need,
3605 we have to swap the operands. */
3606 tmp = op0; op0 = op1; op1 = tmp;
3608 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
/* Single CLC for small constant lengths; a zero length compares
   equal by definition.  */
3610 if (INTVAL (len) > 0)
3612 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
3613 emit_insn (gen_cmpint (target, ccreg));
3616 emit_move_insn (target, const0_rtx);
3618 else if (TARGET_MVCLE)
3620 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
3621 emit_insn (gen_cmpint (target, ccreg));
/* General case: compare 256 bytes per iteration, exiting early as
   soon as a block differs.  */
3625 rtx addr0, addr1, count, blocks, temp;
3626 rtx loop_start_label = gen_label_rtx ();
3627 rtx loop_end_label = gen_label_rtx ();
3628 rtx end_label = gen_label_rtx ();
3629 enum machine_mode mode;
3631 mode = GET_MODE (len);
3632 if (mode == VOIDmode)
3635 addr0 = gen_reg_rtx (Pmode);
3636 addr1 = gen_reg_rtx (Pmode);
3637 count = gen_reg_rtx (mode);
3638 blocks = gen_reg_rtx (mode);
3640 convert_move (count, len, 1);
3641 emit_cmp_and_jump_insns (count, const0_rtx,
3642 EQ, NULL_RTX, mode, 1, end_label);
3644 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3645 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3646 op0 = change_address (op0, VOIDmode, addr0);
3647 op1 = change_address (op1, VOIDmode, addr1);
3649 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3651 emit_move_insn (count, temp);
3653 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3655 emit_move_insn (blocks, temp);
3657 emit_cmp_and_jump_insns (blocks, const0_rtx,
3658 EQ, NULL_RTX, mode, 1, loop_end_label);
3660 emit_label (loop_start_label);
3662 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump to END_LABEL on inequality: CC from the
   CLC above is still live, so emit_cmp_and_jump_insns cannot be used
   (it would emit a fresh compare and clobber CC).  */
3663 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
3664 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3665 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3666 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3667 emit_jump_insn (temp);
3669 s390_load_address (addr0,
3670 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3671 s390_load_address (addr1,
3672 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3674 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3676 emit_move_insn (blocks, temp);
3678 emit_cmp_and_jump_insns (blocks, const0_rtx,
3679 EQ, NULL_RTX, mode, 1, loop_end_label);
3681 emit_jump (loop_start_label);
3682 emit_label (loop_end_label);
3684 emit_insn (gen_cmpmem_short (op0, op1,
3685 convert_to_mode (Pmode, count, 1)));
3686 emit_label (end_label);
/* Convert the final CC into the -1/0/1 integer result.  */
3688 emit_insn (gen_cmpint (target, ccreg));
3693 /* Expand conditional increment or decrement using alc/slb instructions.
3694 Should generate code setting DST to either SRC or SRC + INCREMENT,
3695 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
3696 Returns true if successful, false otherwise.
3698 That makes it possible to implement some if-constructs without jumps e.g.:
3699 (borrow = CC0 | CC1 and carry = CC2 | CC3)
3700 unsigned int a, b, c;
3701 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
3702 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
3703 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
3704 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
3706 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
3707 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
3708 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
3709 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
3710 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
/* NOTE(review): excerpt elides lines (mode selections, returns); only
   comments are added below.  */
3713 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
3714 rtx dst, rtx src, rtx increment)
3716 enum machine_mode cmp_mode;
3717 enum machine_mode cc_mode;
/* Determine the comparison mode from the operand modes; VOIDmode
   operands (constants) adopt the other operand's mode.  */
3723 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
3724 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
3726 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
3727 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
3732 /* Try ADD LOGICAL WITH CARRY. */
3733 if (increment == const1_rtx)
3735 /* Determine CC mode to use. */
3736 if (cmp_code == EQ || cmp_code == NE)
/* Reduce EQ/NE against nonzero to a comparison with zero via XOR
   (see the function comment above).  */
3738 if (cmp_op1 != const0_rtx)
3740 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3741 NULL_RTX, 0, OPTAB_WIDEN);
3742 cmp_op1 = const0_rtx;
3745 cmp_code = cmp_code == EQ ? LEU : GTU;
3748 if (cmp_code == LTU || cmp_code == LEU)
/* Canonicalize by swapping operands and condition.  */
3753 cmp_code = swap_condition (cmp_code);
3770 /* Emit comparison instruction pattern. */
3771 if (!register_operand (cmp_op0, cmp_mode))
3772 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3774 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3775 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3776 /* We use insn_invalid_p here to add clobbers if required. */
3777 ret = insn_invalid_p (emit_insn (insn));
3780 /* Emit ALC instruction pattern. */
3781 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3782 gen_rtx_REG (cc_mode, CC_REGNUM),
/* SRC == 0 still needs PLUS form so the pattern matches; use a
   dummy "+ 0".  */
3785 if (src != const0_rtx)
3787 if (!register_operand (src, GET_MODE (dst)))
3788 src = force_reg (GET_MODE (dst), src);
3790 src = gen_rtx_PLUS (GET_MODE (dst), src, const0_rtx);
3791 op_res = gen_rtx_PLUS (GET_MODE (dst), src, op_res);
/* Wrap in a PARALLEL with a CC clobber, as ALC clobbers CC.  */
3794 p = rtvec_alloc (2);
3796 gen_rtx_SET (VOIDmode, dst, op_res);
3798 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3799 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3804 /* Try SUBTRACT LOGICAL WITH BORROW. */
3805 if (increment == constm1_rtx)
3807 /* Determine CC mode to use. */
3808 if (cmp_code == EQ || cmp_code == NE)
3810 if (cmp_op1 != const0_rtx)
3812 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
3813 NULL_RTX, 0, OPTAB_WIDEN);
3814 cmp_op1 = const0_rtx;
3817 cmp_code = cmp_code == EQ ? LEU : GTU;
3820 if (cmp_code == GTU || cmp_code == GEU)
3825 cmp_code = swap_condition (cmp_code);
3842 /* Emit comparison instruction pattern. */
3843 if (!register_operand (cmp_op0, cmp_mode))
3844 cmp_op0 = force_reg (cmp_mode, cmp_op0);
3846 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
3847 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
3848 /* We use insn_invalid_p here to add clobbers if required. */
3849 ret = insn_invalid_p (emit_insn (insn));
3852 /* Emit SLB instruction pattern. */
3853 if (!register_operand (src, GET_MODE (dst)))
3854 src = force_reg (GET_MODE (dst), src);
/* DST = (SRC - 0) - borrow, matching the SLB pattern shape.  */
3856 op_res = gen_rtx_MINUS (GET_MODE (dst),
3857 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
3858 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
3859 gen_rtx_REG (cc_mode, CC_REGNUM),
3861 p = rtvec_alloc (2);
3863 gen_rtx_SET (VOIDmode, dst, op_res);
3865 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
3866 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
3874 /* Expand code for the insv template. Return true if successful, false else. */
/* NOTE(review): excerpt elides lines (condition guards, returns); only
   comments are added below.  OP1 is the bit-field size, OP2 its
   position, SRC the value to insert into DEST.  */
3877 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
3879 int bitsize = INTVAL (op1);
3880 int bitpos = INTVAL (op2);
3882 /* We need byte alignment. */
3883 if (bitsize % BITS_PER_UNIT)
3887 && memory_operand (dest, VOIDmode)
3888 && (register_operand (src, word_mode)
3889 || const_int_operand (src, VOIDmode)))
3891 /* Emit standard pattern if possible. */
3892 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
3893 if (GET_MODE_BITSIZE (mode) == bitsize)
3894 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
3896 /* (set (ze (mem)) (const_int)). */
3897 else if (const_int_operand (src, VOIDmode))
/* Copy the low SIZE bytes of the pooled constant into DEST.  */
3899 int size = bitsize / BITS_PER_UNIT;
3900 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
3901 GET_MODE_SIZE (word_mode) - size);
3903 dest = adjust_address (dest, BLKmode, 0);
3904 set_mem_size (dest, GEN_INT (size));
3905 s390_expand_movmem (dest, src_mem, GEN_INT (size));
3908 /* (set (ze (mem)) (reg)). */
3909 else if (register_operand (src, word_mode))
3911 if (bitsize <= GET_MODE_BITSIZE (SImode))
3912 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
3916 /* Emit st,stcmh sequence. */
/* Low 32 bits go via a plain SImode store, the remaining high bits
   via STCMH (ZERO_EXTRACT on the shifted source).  */
3917 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
3918 int size = stcmh_width / BITS_PER_UNIT;
3920 emit_move_insn (adjust_address (dest, SImode, size),
3921 gen_lowpart (SImode, src));
3922 set_mem_size (dest, GEN_INT (size));
3923 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
3924 (stcmh_width), const0_rtx),
3925 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
3926 (GET_MODE_BITSIZE (SImode))));
3935 /* (set (ze (reg)) (const_int)). */
/* Register destination: fill the field 16 (or, with EXTIMM, 32) bits
   at a time using insert-immediate style moves.  */
3937 && register_operand (dest, word_mode)
3938 && (bitpos % 16) == 0
3939 && (bitsize % 16) == 0
3940 && const_int_operand (src, VOIDmode)
3942 HOST_WIDE_INT val = INTVAL (src);
3943 int regpos = bitpos + bitsize;
3945 while (regpos > bitpos)
3947 enum machine_mode putmode;
3950 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
3955 putsize = GET_MODE_BITSIZE (putmode);
3957 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
3960 gen_int_mode (val, putmode));
/* The loop must consume exactly the requested field.  */
3963 gcc_assert (regpos == bitpos);
3970 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3971 We need to emit DTP-relative relocations. */
3973 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
/* Emit X as a SIZE-byte @DTPOFF value: ".long" for 4 bytes, ".quad"
   for 8 (other sizes presumably rejected in an elided branch --
   TODO confirm against the full source).  */
3976 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
3981 fputs ("\t.long\t", file);
3984 fputs ("\t.quad\t", file);
3989 output_addr_const (file, x);
3990 fputs ("@DTPOFF", file);
3993 /* In the name of slightly smaller debug output, and to cater to
3994 general assembler lossage, recognize various UNSPEC sequences
3995 and turn them back into a direct symbol reference. */
3998 s390_delegitimize_address (rtx orig_x)
/* Only MEMs built by the PIC legitimizer are handled.  */
4002 if (GET_CODE (x) != MEM)
/* (mem (plus GOT-reg (const (unspec @GOT)))) -> the bare symbol.  */
4006 if (GET_CODE (x) == PLUS
4007 && GET_CODE (XEXP (x, 1)) == CONST
4008 && GET_CODE (XEXP (x, 0)) == REG
4009 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4011 y = XEXP (XEXP (x, 1), 0);
4012 if (GET_CODE (y) == UNSPEC
4013 && XINT (y, 1) == UNSPEC_GOT)
4014 return XVECEXP (y, 0, 0);
/* (mem (const (unspec @GOTENT))) -> the bare symbol.  */
4018 if (GET_CODE (x) == CONST)
4021 if (GET_CODE (y) == UNSPEC
4022 && XINT (y, 1) == UNSPEC_GOTENT)
4023 return XVECEXP (y, 0, 0);
4030 /* Output operand OP to stdio stream FILE.
4031 OP is an address (register + offset) which is not used to address data;
4032 instead the rightmost bits are interpreted as the value. */
4035 print_shift_count_operand (FILE *file, rtx op)
4037 HOST_WIDE_INT offset;
4040 /* Extract base register and offset. */
4041 if (!s390_decompose_shift_count (op, &base, &offset, 0))
/* Shift counts may only use a hard address register as base.  */
4047 gcc_assert (GET_CODE (base) == REG);
4048 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4049 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4052 /* Offsets are restricted to twelve bits. */
4053 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4055 fprintf (file, "(%s)", reg_names[REGNO (base)]);
4058 /* See 'get_some_local_dynamic_name'. */
/* for_each_rtx callback: records the first local-dynamic TLS symbol
   found (recursing through literal-pool references) into
   cfun->machine->some_ld_name.  */
4061 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
/* Follow constant-pool addresses into the pooled constant itself.  */
4065 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4067 x = get_pool_constant (x);
4068 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4071 if (GET_CODE (x) == SYMBOL_REF
4072 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4074 cfun->machine->some_ld_name = XSTR (x, 0);
4081 /* Locate some local-dynamic symbol still in use by this function
4082 so that we can print its name in local-dynamic base patterns. */
4085 get_some_local_dynamic_name (void)
/* Cached per-function: scan the insn stream only once.  */
4089 if (cfun->machine->some_ld_name)
4090 return cfun->machine->some_ld_name;
4092 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4094 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4095 return cfun->machine->some_ld_name;
4100 /* Output machine-dependent UNSPECs occurring in address constant X
4101 in assembler syntax to stdio stream FILE. Returns true if the
4102 constant X could be recognized, false otherwise. */
/* NOTE(review): excerpt elides the case labels; each arm prints the
   wrapped symbol followed by its assembler relocation modifier.  */
4105 s390_output_addr_const_extra (FILE *file, rtx x)
4107 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4108 switch (XINT (x, 1))
4111 output_addr_const (file, XVECEXP (x, 0, 0));
4112 fprintf (file, "@GOTENT");
4115 output_addr_const (file, XVECEXP (x, 0, 0));
4116 fprintf (file, "@GOT");
4119 output_addr_const (file, XVECEXP (x, 0, 0));
4120 fprintf (file, "@GOTOFF");
4123 output_addr_const (file, XVECEXP (x, 0, 0));
4124 fprintf (file, "@PLT");
4127 output_addr_const (file, XVECEXP (x, 0, 0));
4128 fprintf (file, "@PLTOFF");
4131 output_addr_const (file, XVECEXP (x, 0, 0));
4132 fprintf (file, "@TLSGD");
/* TLSLDM carries no symbol operand; print the module's recorded
   local-dynamic name instead.  */
4135 assemble_name (file, get_some_local_dynamic_name ());
4136 fprintf (file, "@TLSLDM");
4139 output_addr_const (file, XVECEXP (x, 0, 0));
4140 fprintf (file, "@DTPOFF");
4143 output_addr_const (file, XVECEXP (x, 0, 0));
4144 fprintf (file, "@NTPOFF");
4146 case UNSPEC_GOTNTPOFF:
4147 output_addr_const (file, XVECEXP (x, 0, 0));
4148 fprintf (file, "@GOTNTPOFF");
4150 case UNSPEC_INDNTPOFF:
4151 output_addr_const (file, XVECEXP (x, 0, 0));
4152 fprintf (file, "@INDNTPOFF");
4159 /* Output address operand ADDR in assembler syntax to
4160 stdio stream FILE. */
/* NOTE(review): sampled fragment — the branches between displacement and
   base/index emission are partially missing.  */
4163 print_operand_address (FILE *file, rtx addr)
4165 struct s390_address ad;
/* Address must decompose and use strictly-valid base/index registers,
   otherwise we cannot print it.  */
4167 if (!s390_decompose_address (addr, &ad)
4168 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
4169 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
4170 output_operand_lossage ("cannot decompose address");
4173 output_addr_const (file, ad.disp);
4175 fprintf (file, "0");
/* Emit "disp(index,base)" or "disp(base)" per S/390 assembler syntax.  */
4177 if (ad.base && ad.indx)
4178 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
4179 reg_names[REGNO (ad.base)]);
4181 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
4184 /* Output operand X in assembler syntax to stdio stream FILE.
4185 CODE specified the format flag. The following format flags
4188 'C': print opcode suffix for branch condition.
4189 'D': print opcode suffix for inverse branch condition.
4190 'J': print tls_load/tls_gdcall/tls_ldcall suffix
4191 'G': print the size of the operand in bytes.
4192 'O': print only the displacement of a memory reference.
4193 'R': print only the base register of a memory reference.
4194 'S': print S-type memory reference (base+displacement).
4195 'N': print the second word of a DImode operand.
4196 'M': print the second word of a TImode operand.
4197 'Y': print shift count operand.
4199 'b': print integer X as if it's an unsigned byte.
4200 'x': print integer X as if it's an unsigned halfword.
4201 'h': print integer X as if it's a signed halfword.
4202 'i': print the first nonzero HImode part of X.
4203 'j': print the first HImode part unequal to -1 of X.
4204 'k': print the first nonzero SImode part of X.
4205 'm': print the first SImode part unequal to -1 of X.
4206 'o': print integer X as if it's an unsigned 32bit word. */
/* NOTE(review): fragmentary — the switch/case skeleton between the sampled
   lines is missing; fragments below are grouped per format flag.  */
4209 print_operand (FILE *file, rtx x, int code)
/* 'C'/'D': condition-code mnemonic, normal vs. inverted.  */
4214 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
4218 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'J': TLS call/load decoration.  */
4222 if (GET_CODE (x) == SYMBOL_REF)
4224 fprintf (file, "%s", ":tls_load:");
4225 output_addr_const (file, x);
4227 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
4229 fprintf (file, "%s", ":tls_gdcall:");
4230 output_addr_const (file, XVECEXP (x, 0, 0));
4232 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
4234 fprintf (file, "%s", ":tls_ldcall:");
4235 assemble_name (file, get_some_local_dynamic_name ());
/* 'G': operand size in bytes.  */
4242 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
/* 'O': displacement only; address must have no index register.  */
4247 struct s390_address ad;
4250 gcc_assert (GET_CODE (x) == MEM);
4251 ret = s390_decompose_address (XEXP (x, 0), &ad);
4253 gcc_assert (!ad.base || REG_OK_FOR_BASE_STRICT_P (ad.base));
4254 gcc_assert (!ad.indx);
4257 output_addr_const (file, ad.disp);
4259 fprintf (file, "0");
/* 'R': base register only.  */
4265 struct s390_address ad;
4268 gcc_assert (GET_CODE (x) == MEM);
4269 ret = s390_decompose_address (XEXP (x, 0), &ad);
4271 gcc_assert (!ad.base || REG_OK_FOR_BASE_STRICT_P (ad.base));
4272 gcc_assert (!ad.indx);
4275 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
4277 fprintf (file, "0");
/* 'S': full disp(base) S-type reference.  */
4283 struct s390_address ad;
4286 gcc_assert (GET_CODE (x) == MEM);
4287 ret = s390_decompose_address (XEXP (x, 0), &ad);
4289 gcc_assert (!ad.base || REG_OK_FOR_BASE_STRICT_P (ad.base));
4290 gcc_assert (!ad.indx);
4293 output_addr_const (file, ad.disp);
4295 fprintf (file, "0");
4298 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
/* 'N': second word of a DImode operand (reg+1 or mem+4).  */
4303 if (GET_CODE (x) == REG)
4304 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4305 else if (GET_CODE (x) == MEM)
4306 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand (reg+1 or mem+8).  */
4312 if (GET_CODE (x) == REG)
4313 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
4314 else if (GET_CODE (x) == MEM)
4315 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
4321 print_shift_count_operand (file, x);
/* Fallthrough operand printing by rtx code.  */
4325 switch (GET_CODE (x))
4328 fprintf (file, "%s", reg_names[REGNO (x)]);
4332 output_address (XEXP (x, 0));
4339 output_addr_const (file, x);
/* CONST_INT: apply the integer format flags.  */
4344 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
4345 else if (code == 'x')
4346 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
4347 else if (code == 'h')
4348 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
4349 else if (code == 'i')
4350 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4351 s390_extract_part (x, HImode, 0));
4352 else if (code == 'j')
4353 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4354 s390_extract_part (x, HImode, -1));
4355 else if (code == 'k')
4356 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4357 s390_extract_part (x, SImode, 0));
4358 else if (code == 'm')
4359 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
4360 s390_extract_part (x, SImode, -1));
4361 else if (code == 'o')
4362 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
4364 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE used as wide integer (VOIDmode only).  */
4368 gcc_assert (GET_MODE (x) == VOIDmode);
4370 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
4371 else if (code == 'x')
4372 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
4373 else if (code == 'h')
4374 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
4380 fatal_insn ("UNKNOWN in print_operand !?", x);
4385 /* Target hook for assembling integer objects. We need to define it
4386 here to work a round a bug in some versions of GAS, which couldn't
4387 handle values smaller than INT_MIN when printed in decimal. */
/* NOTE(review): sampled fragment; the hex-printing argument line between
   4395 and 4399 is missing from this excerpt.  */
4390 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
/* Only 8-byte aligned CONST_INTs below INT_MIN need the workaround;
   everything else defers to the default hook.  */
4392 if (size == 8 && aligned_p
4393 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
4395 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
4399 return default_assemble_integer (x, size, aligned_p);
4402 /* Returns true if register REGNO is used for forming
4403 a memory address in expression X. */
/* NOTE(review): fragmentary — the MEM-code test and returns between sampled
   lines are missing; recursion over the rtx format string is visible.  */
4406 reg_used_in_mem_p (int regno, rtx x)
4408 enum rtx_code code = GET_CODE (x);
4414 if (refers_to_regno_p (regno, regno+1,
/* Jump targets (SET with PC destination) also count as address uses.  */
4418 else if (code == SET
4419 && GET_CODE (SET_DEST (x)) == PC)
4421 if (refers_to_regno_p (regno, regno+1,
/* Recurse into sub-rtxes per the format string ('e' operands and 'E'
   vectors).  */
4426 fmt = GET_RTX_FORMAT (code);
4427 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4430 && reg_used_in_mem_p (regno, XEXP (x, i)))
4433 else if (fmt[i] == 'E')
4434 for (j = 0; j < XVECLEN (x, i); j++)
4435 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
4441 /* Returns true if expression DEP_RTX sets an address register
4442 used by instruction INSN to address memory. */
4445 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
4449 if (GET_CODE (dep_rtx) == INSN)
4450 dep_rtx = PATTERN (dep_rtx);
4452 if (GET_CODE (dep_rtx) == SET)
/* Strip STRICT_LOW_PART / SUBREG wrappers to reach the target register.  */
4454 target = SET_DEST (dep_rtx);
4455 if (GET_CODE (target) == STRICT_LOW_PART)
4456 target = XEXP (target, 0);
4457 while (GET_CODE (target) == SUBREG)
4458 target = SUBREG_REG (target);
4460 if (GET_CODE (target) == REG)
4462 int regno = REGNO (target);
/* LA-type insns: the address is in SET_SRC; a PARALLEL wraps exactly
   two elements, the first being the SET of interest.  */
4464 if (s390_safe_attr_type (insn) == TYPE_LA)
4466 pat = PATTERN (insn);
4467 if (GET_CODE (pat) == PARALLEL)
4469 gcc_assert (XVECLEN (pat, 0) == 2);
4470 pat = XVECEXP (pat, 0, 0);
4472 gcc_assert (GET_CODE (pat) == SET);
4473 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* Other address-generation insns: scan whole pattern for memory uses.  */
4475 else if (get_attr_atype (insn) == ATYPE_AGEN)
4476 return reg_used_in_mem_p (regno, PATTERN (insn));
4482 /* Return 1, if dep_insn sets register used in insn in the agen unit. */
/* NOTE(review): sampled fragment — the early `return 1;` lines between the
   conditions are missing here.  */
4485 s390_agen_dep_p (rtx dep_insn, rtx insn)
4487 rtx dep_rtx = PATTERN (dep_insn);
4490 if (GET_CODE (dep_rtx) == SET
4491 && addr_generation_dependency_p (dep_rtx, insn))
/* PARALLEL patterns: check every element for an agen dependency.  */
4493 else if (GET_CODE (dep_rtx) == PARALLEL)
4495 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
4497 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
4504 /* A C statement (sans semicolon) to update the integer scheduling priority
4505 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier,
4506 reduce the priority to execute INSN later. Do not define this macro if
4507 you do not need to adjust the scheduling priorities of insns.
4509 A STD instruction should be scheduled earlier,
4510 in order to use the bypass. */
/* NOTE(review): case labels between the priority shifts are missing in this
   sampled excerpt.  */
4513 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
4515 if (! INSN_P (insn))
/* Priority tweak only applies when tuning for z990 or z9-109.  */
4518 if (s390_tune != PROCESSOR_2084_Z990
4519 && s390_tune != PROCESSOR_2094_Z9_109)
4522 switch (s390_safe_attr_type (insn))
4526 priority = priority << 3;
4530 priority = priority << 1;
4538 /* The number of instructions that can be issued per cycle. */
4541 s390_issue_rate (void)
4543 if (s390_tune == PROCESSOR_2084_Z990
4544 || s390_tune == PROCESSOR_2094_Z9_109)
/* Scheduler lookahead hook; body not visible in this excerpt.  */
4550 s390_first_cycle_multipass_dfa_lookahead (void)
4556 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
4557 Fix up MEMs as required. */
4560 annotate_constant_pool_refs (rtx *x)
/* Bare pool SYMBOL_REFs must already be wrapped (MEM / SET); assert it.  */
4565 gcc_assert (GET_CODE (*x) != SYMBOL_REF
4566 || !CONSTANT_POOL_ADDRESS_P (*x));
4568 /* Literal pool references can only occur inside a MEM ... */
4569 if (GET_CODE (*x) == MEM)
4571 rtx memref = XEXP (*x, 0);
/* Plain pool symbol: rewrite as UNSPEC_LTREF of (symbol, base reg).  */
4573 if (GET_CODE (memref) == SYMBOL_REF
4574 && CONSTANT_POOL_ADDRESS_P (memref))
4576 rtx base = cfun->machine->base_reg;
4577 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
4580 *x = replace_equiv_address (*x, addr);
/* Pool symbol plus constant offset: same rewrite, offset re-added.  */
4584 if (GET_CODE (memref) == CONST
4585 && GET_CODE (XEXP (memref, 0)) == PLUS
4586 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
4587 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
4588 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
4590 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
4591 rtx sym = XEXP (XEXP (memref, 0), 0);
4592 rtx base = cfun->machine->base_reg;
4593 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4596 *x = replace_equiv_address (*x, plus_constant (addr, off));
4601 /* ... or a load-address type pattern. */
4602 if (GET_CODE (*x) == SET)
4604 rtx addrref = SET_SRC (*x);
4606 if (GET_CODE (addrref) == SYMBOL_REF
4607 && CONSTANT_POOL_ADDRESS_P (addrref))
4609 rtx base = cfun->machine->base_reg;
4610 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
4613 SET_SRC (*x) = addr;
4617 if (GET_CODE (addrref) == CONST
4618 && GET_CODE (XEXP (addrref, 0)) == PLUS
4619 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
4620 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
4621 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
4623 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
4624 rtx sym = XEXP (XEXP (addrref, 0), 0);
4625 rtx base = cfun->machine->base_reg;
4626 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
4629 SET_SRC (*x) = plus_constant (addr, off);
4634 /* Annotate LTREL_BASE as well. */
4635 if (GET_CODE (*x) == UNSPEC
4636 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
4638 rtx base = cfun->machine->base_reg;
4639 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
/* Recurse into all sub-expressions.  */
4644 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4645 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4649 annotate_constant_pool_refs (&XEXP (*x, i));
4651 else if (fmt[i] == 'E')
4653 for (j = 0; j < XVECLEN (*x, i); j++)
4654 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
4659 /* Split all branches that exceed the maximum distance.
4660 Returns true if this created a new literal pool entry. */
/* NOTE(review): fragmentary — the TARGET_CPU_ZARCH split between the LARL
   path and the pool-literal path, and several `continue`s, are missing.  */
4663 s390_split_branches (void)
4665 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4666 int new_literal = 0, ret;
4667 rtx insn, pat, tmp, target;
4670 /* We need correct insn addresses. */
4672 shorten_branches (get_insns ());
4674 /* Find all branches that exceed 64KB, and split them. */
4676 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4678 if (GET_CODE (insn) != JUMP_INSN)
4681 pat = PATTERN (insn);
4682 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4683 pat = XVECEXP (pat, 0, 0);
4684 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the label operand of an (un)conditional jump.  */
4687 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
4689 label = &SET_SRC (pat);
4691 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
4693 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
4694 label = &XEXP (SET_SRC (pat), 1);
4695 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
4696 label = &XEXP (SET_SRC (pat), 2);
/* Short branches (<= 4 bytes) are already in range.  */
4703 if (get_attr_length (insn) <= 4)
4706 /* We are going to use the return register as scratch register,
4707 make sure it will be saved/restored by the prologue/epilogue. */
4708 cfun_frame_layout.save_return_addr_p = 1;
/* Load the target address from the literal pool into temp_reg.  */
4713 tmp = force_const_mem (Pmode, *label);
4714 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
4715 INSN_ADDRESSES_NEW (tmp, -1);
4716 annotate_constant_pool_refs (&PATTERN (tmp));
/* 31-bit path: pool-relative UNSPEC_LTREL_OFFSET, added to base.  */
4723 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
4724 UNSPEC_LTREL_OFFSET);
4725 target = gen_rtx_CONST (Pmode, target);
4726 target = force_const_mem (Pmode, target);
4727 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
4728 INSN_ADDRESSES_NEW (tmp, -1);
4729 annotate_constant_pool_refs (&PATTERN (tmp));
4731 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
4732 cfun->machine->base_reg),
4734 target = gen_rtx_PLUS (Pmode, temp_reg, target);
4737 ret = validate_change (insn, label, target, 0);
4745 /* Find an annotated literal pool symbol referenced in RTX X,
4746 and store it at REF. Will abort if X contains references to
4747 more than one such pool symbol; multiple references to the same
4748 symbol are allowed, however.
4750 The rtx pointed to by REF must be initialized to NULL_RTX
4751 by the caller before calling this routine. */
4754 find_constant_pool_ref (rtx x, rtx *ref)
4759 /* Ignore LTREL_BASE references. */
4760 if (GET_CODE (x) == UNSPEC
4761 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4763 /* Likewise POOL_ENTRY insns. */
4764 if (GET_CODE (x) == UNSPEC_VOLATILE
4765 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
/* Unannotated pool symbols must not appear at this stage.  */
4768 gcc_assert (GET_CODE (x) != SYMBOL_REF
4769 || !CONSTANT_POOL_ADDRESS_P (x));
4771 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
4773 rtx sym = XVECEXP (x, 0, 0);
4774 gcc_assert (GET_CODE (sym) == SYMBOL_REF
4775 && CONSTANT_POOL_ADDRESS_P (sym));
/* First reference stores the symbol; later ones must match it.  */
4777 if (*ref == NULL_RTX)
4780 gcc_assert (*ref == sym);
4785 fmt = GET_RTX_FORMAT (GET_CODE (x));
4786 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4790 find_constant_pool_ref (XEXP (x, i), ref);
4792 else if (fmt[i] == 'E')
4794 for (j = 0; j < XVECLEN (x, i); j++)
4795 find_constant_pool_ref (XVECEXP (x, i, j), ref);
4800 /* Replace every reference to the annotated literal pool
4801 symbol REF in X by its base plus OFFSET. */
4804 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
4809 gcc_assert (*x != ref);
/* (unspec [sym base] LTREF) -> (plus base offset).  */
4811 if (GET_CODE (*x) == UNSPEC
4812 && XINT (*x, 1) == UNSPEC_LTREF
4813 && XVECEXP (*x, 0, 0) == ref)
4815 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
/* (plus (unspec ... LTREF) const_int) -> base + offset + const_int.  */
4819 if (GET_CODE (*x) == PLUS
4820 && GET_CODE (XEXP (*x, 1)) == CONST_INT
4821 && GET_CODE (XEXP (*x, 0)) == UNSPEC
4822 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
4823 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
4825 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
4826 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
4830 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4831 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4835 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
4837 else if (fmt[i] == 'E')
4839 for (j = 0; j < XVECLEN (*x, i); j++)
4840 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
4845 /* Check whether X contains an UNSPEC_LTREL_BASE.
4846 Return its constant pool symbol if found, NULL_RTX otherwise. */
4849 find_ltrel_base (rtx x)
4854 if (GET_CODE (x) == UNSPEC
4855 && XINT (x, 1) == UNSPEC_LTREL_BASE)
4856 return XVECEXP (x, 0, 0);
/* Depth-first search of all sub-rtxes; first hit wins.  */
4858 fmt = GET_RTX_FORMAT (GET_CODE (x));
4859 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4863 rtx fnd = find_ltrel_base (XEXP (x, i));
4867 else if (fmt[i] == 'E')
4869 for (j = 0; j < XVECLEN (x, i); j++)
4871 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
4881 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
4884 replace_ltrel_base (rtx *x)
4889 if (GET_CODE (*x) == UNSPEC
4890 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
/* The annotated base register is element 1 of the unspec vector.  */
4892 *x = XVECEXP (*x, 0, 1);
4896 fmt = GET_RTX_FORMAT (GET_CODE (*x));
4897 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4901 replace_ltrel_base (&XEXP (*x, i));
4903 else if (fmt[i] == 'E')
4905 for (j = 0; j < XVECLEN (*x, i); j++)
4906 replace_ltrel_base (&XVECEXP (*x, i, j));
4912 /* We keep a list of constants which we have to add to internal
4913 constant tables in the middle of large functions. */
4915 #define NR_C_MODES 7
/* NOTE(review): the mode initializer list (7 machine modes) and several
   struct fields are missing from this sampled excerpt.  */
4916 enum machine_mode constant_modes[NR_C_MODES] =
4927 struct constant *next;
4932 struct constant_pool
4934 struct constant_pool *next;
4939 struct constant *constants[NR_C_MODES];
4940 struct constant *execute;
4945 /* Allocate new constant_pool structure. */
4947 static struct constant_pool *
4948 s390_alloc_pool (void)
4950 struct constant_pool *pool;
4953 pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* Initialize one constant list per supported mode, plus execute list,
   fresh pool label, and a bitmap of covered insn UIDs.  */
4955 for (i = 0; i < NR_C_MODES; i++)
4956 pool->constants[i] = NULL;
4958 pool->execute = NULL;
4959 pool->label = gen_label_rtx ();
4960 pool->first_insn = NULL_RTX;
4961 pool->pool_insn = NULL_RTX;
4962 pool->insns = BITMAP_ALLOC (NULL);
4968 /* Create new constant pool covering instructions starting at INSN
4969 and chain it to the end of POOL_LIST. */
4971 static struct constant_pool *
4972 s390_start_pool (struct constant_pool **pool_list, rtx insn)
4974 struct constant_pool *pool, **prev;
4976 pool = s390_alloc_pool ();
4977 pool->first_insn = insn;
/* Walk to the end of the list and append (tail insertion).  */
4979 for (prev = pool_list; *prev; prev = &(*prev)->next)
4986 /* End range of instructions covered by POOL at INSN and emit
4987 placeholder insn representing the pool. */
4990 s390_end_pool (struct constant_pool *pool, rtx insn)
4992 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
/* NULL insn means: close the pool after the very last insn.  */
4995 insn = get_last_insn ();
4997 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4998 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5001 /* Add INSN to the list of insns covered by POOL. */
5004 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5006 bitmap_set_bit (pool->insns, INSN_UID (insn));
5009 /* Return pool out of POOL_LIST that covers INSN. */
5011 static struct constant_pool *
5012 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5014 struct constant_pool *pool;
5016 for (pool = pool_list; pool; pool = pool->next)
5017 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5023 /* Add constant VAL of mode MODE to the constant pool POOL. */
5026 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
/* Map MODE to its slot index in constant_modes[]; must exist.  */
5031 for (i = 0; i < NR_C_MODES; i++)
5032 if (constant_modes[i] == mode)
5034 gcc_assert (i != NR_C_MODES);
/* Deduplicate: bail out if VAL is already pooled for this mode.  */
5036 for (c = pool->constants[i]; c != NULL; c = c->next)
5037 if (rtx_equal_p (val, c->value))
5042 c = (struct constant *) xmalloc (sizeof *c);
5044 c->label = gen_label_rtx ();
5045 c->next = pool->constants[i];
5046 pool->constants[i] = c;
5047 pool->size += GET_MODE_SIZE (mode);
5051 /* Find constant VAL of mode MODE in the constant pool POOL.
5052 Return an RTX describing the distance from the start of
5053 the pool to the location of the new constant. */
5056 s390_find_constant (struct constant_pool *pool, rtx val,
5057 enum machine_mode mode)
5063 for (i = 0; i < NR_C_MODES; i++)
5064 if (constant_modes[i] == mode)
5066 gcc_assert (i != NR_C_MODES);
5068 for (c = pool->constants[i]; c != NULL; c = c->next)
5069 if (rtx_equal_p (val, c->value))
/* Offset = (constant's label) - (pool base label), as a CONST.  */
5074 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5075 gen_rtx_LABEL_REF (Pmode, pool->label));
5076 offset = gen_rtx_CONST (Pmode, offset);
5080 /* Check whether INSN is an execute. Return the label_ref to its
5081 execute target template if so, NULL_RTX otherwise. */
5084 s390_execute_label (rtx insn)
5086 if (GET_CODE (insn) == INSN
5087 && GET_CODE (PATTERN (insn)) == PARALLEL
5088 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5089 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5090 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5095 /* Add execute target for INSN to the constant pool POOL. */
5098 s390_add_execute (struct constant_pool *pool, rtx insn)
/* Dedup by insn UID — one pool entry per execute insn.  */
5102 for (c = pool->execute; c != NULL; c = c->next)
5103 if (INSN_UID (insn) == INSN_UID (c->value))
5108 c = (struct constant *) xmalloc (sizeof *c);
5110 c->label = gen_label_rtx ();
5111 c->next = pool->execute;
5117 /* Find execute target for INSN in the constant pool POOL.
5118 Return an RTX describing the distance from the start of
5119 the pool to the location of the execute target. */
5122 s390_find_execute (struct constant_pool *pool, rtx insn)
5127 for (c = pool->execute; c != NULL; c = c->next)
5128 if (INSN_UID (insn) == INSN_UID (c->value))
5133 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
5134 gen_rtx_LABEL_REF (Pmode, pool->label));
5135 offset = gen_rtx_CONST (Pmode, offset);
5139 /* For an execute INSN, extract the execute target template. */
5142 s390_execute_target (rtx insn)
5144 rtx pattern = PATTERN (insn);
5145 gcc_assert (s390_execute_label (insn));
/* Two-element PARALLEL: the template is the second element; larger
   PARALLELs copy all elements past the UNSPEC marker.  */
5147 if (XVECLEN (pattern, 0) == 2)
5149 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
5153 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
5156 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
5157 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
5159 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
5165 /* Indicate that INSN cannot be duplicated. This is the case for
5166 execute insns that carry a unique label. */
5169 s390_cannot_copy_insn_p (rtx insn)
5171 rtx label = s390_execute_label (insn);
5172 return label && label != const0_rtx;
5175 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
5176 do not emit the pool base label. */
5179 s390_dump_pool (struct constant_pool *pool, bool remote_label)
5182 rtx insn = pool->pool_insn;
5185 /* Switch to rodata section. */
5186 if (TARGET_CPU_ZARCH)
5188 insn = emit_insn_after (gen_pool_section_start (), insn);
5189 INSN_ADDRESSES_NEW (insn, -1);
5192 /* Ensure minimum pool alignment. */
5193 if (TARGET_CPU_ZARCH)
5194 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
5196 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
5197 INSN_ADDRESSES_NEW (insn, -1);
5199 /* Emit pool base label. */
5202 insn = emit_label_after (pool->label, insn);
5203 INSN_ADDRESSES_NEW (insn, -1);
5206 /* Dump constants in descending alignment requirement order,
5207 ensuring proper alignment for every constant. */
5208 for (i = 0; i < NR_C_MODES; i++)
5209 for (c = pool->constants[i]; c; c = c->next)
5211 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
5212 rtx value = c->value;
5213 if (GET_CODE (value) == CONST
5214 && GET_CODE (XEXP (value, 0)) == UNSPEC
5215 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
5216 && XVECLEN (XEXP (value, 0), 0) == 1)
5218 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
5219 gen_rtx_LABEL_REF (VOIDmode, pool->label));
5220 value = gen_rtx_CONST (VOIDmode, value);
5223 insn = emit_label_after (c->label, insn);
5224 INSN_ADDRESSES_NEW (insn, -1);
/* Each constant is wrapped in UNSPECV_POOL_ENTRY so later passes
   recognize pool entries and leave them alone.  */
5226 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
5227 gen_rtvec (1, value),
5228 UNSPECV_POOL_ENTRY);
5229 insn = emit_insn_after (value, insn);
5230 INSN_ADDRESSES_NEW (insn, -1);
5233 /* Ensure minimum alignment for instructions. */
5234 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
5235 INSN_ADDRESSES_NEW (insn, -1);
5237 /* Output in-pool execute template insns. */
5238 for (c = pool->execute; c; c = c->next)
5240 insn = emit_label_after (c->label, insn);
5241 INSN_ADDRESSES_NEW (insn, -1);
5243 insn = emit_insn_after (s390_execute_target (c->value), insn);
5244 INSN_ADDRESSES_NEW (insn, -1);
5247 /* Switch back to previous section. */
5248 if (TARGET_CPU_ZARCH)
5250 insn = emit_insn_after (gen_pool_section_end (), insn);
5251 INSN_ADDRESSES_NEW (insn, -1);
5254 insn = emit_barrier_after (insn);
5255 INSN_ADDRESSES_NEW (insn, -1);
5257 /* Remove placeholder insn. */
5258 remove_insn (pool->pool_insn);
5261 /* Free all memory used by POOL. */
5264 s390_free_pool (struct constant_pool *pool)
5266 struct constant *c, *next;
/* Walk each per-mode constant list and the execute list, freeing nodes;
   finally release the insn-coverage bitmap.  */
5269 for (i = 0; i < NR_C_MODES; i++)
5270 for (c = pool->constants[i]; c; c = next)
5276 for (c = pool->execute; c; c = next)
5282 BITMAP_FREE (pool->insns);
5287 /* Collect main literal pool. Return NULL on overflow. */
5289 static struct constant_pool *
5290 s390_mainpool_start (void)
5292 struct constant_pool *pool;
5295 pool = s390_alloc_pool ();
5297 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Remember the UNSPECV_MAIN_POOL placeholder; there must be only one.  */
5299 if (GET_CODE (insn) == INSN
5300 && GET_CODE (PATTERN (insn)) == SET
5301 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
5302 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
5304 gcc_assert (!pool->pool_insn);
5305 pool->pool_insn = insn;
/* On 31-bit, execute insn targets also live in the pool.  */
5308 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5310 s390_add_execute (pool, insn);
5312 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5314 rtx pool_ref = NULL_RTX;
5315 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5318 rtx constant = get_pool_constant (pool_ref);
5319 enum machine_mode mode = get_pool_mode (pool_ref);
5320 s390_add_constant (pool, constant, mode);
5325 gcc_assert (pool->pool_insn || pool->size == 0);
/* A pool >= 4K cannot be addressed with 12-bit displacements;
   fall back to chunkified pools (return NULL).  */
5327 if (pool->size >= 4096)
5329 /* We're going to chunkify the pool, so remove the main
5330 pool placeholder insn. */
5331 remove_insn (pool->pool_insn);
5333 s390_free_pool (pool);
5340 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5341 Modify the current function to output the pool constants as well as
5342 the pool register setup instruction. */
5345 s390_mainpool_finish (struct constant_pool *pool)
5347 rtx base_reg = cfun->machine->base_reg;
5350 /* If the pool is empty, we're done. */
5351 if (pool->size == 0)
5353 /* We don't actually need a base register after all. */
5354 cfun->machine->base_reg = NULL_RTX;
5356 if (pool->pool_insn)
5357 remove_insn (pool->pool_insn);
5358 s390_free_pool (pool);
5362 /* We need correct insn addresses. */
5363 shorten_branches (get_insns ());
5365 /* On zSeries, we use a LARL to load the pool register. The pool is
5366 located in the .rodata section, so we emit it after the function. */
5367 if (TARGET_CPU_ZARCH)
5369 insn = gen_main_base_64 (base_reg, pool->label);
5370 insn = emit_insn_after (insn, pool->pool_insn);
5371 INSN_ADDRESSES_NEW (insn, -1);
5372 remove_insn (pool->pool_insn);
5374 insn = get_last_insn ();
5375 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5376 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5378 s390_dump_pool (pool, 0);
5381 /* On S/390, if the total size of the function's code plus literal pool
5382 does not exceed 4096 bytes, we use BASR to set up a function base
5383 pointer, and emit the literal pool at the end of the function. */
5384 else if (INSN_ADDRESSES (INSN_UID (get_last_insn ()))
5385 + pool->size + 8 /* alignment slop */ < 4096)
5387 insn = gen_main_base_31_small (base_reg, pool->label);
5388 insn = emit_insn_after (insn, pool->pool_insn);
5389 INSN_ADDRESSES_NEW (insn, -1);
5390 remove_insn (pool->pool_insn);
5392 insn = emit_label_after (pool->label, insn);
5393 INSN_ADDRESSES_NEW (insn, -1);
5395 insn = get_last_insn ();
5396 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5397 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5399 s390_dump_pool (pool, 1);
5402 /* Otherwise, we emit an inline literal pool and use BASR to branch
5403 over it, setting up the pool register at the same time. */
5406 rtx pool_end = gen_label_rtx ();
5408 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
5409 insn = emit_insn_after (insn, pool->pool_insn);
5410 INSN_ADDRESSES_NEW (insn, -1);
5411 remove_insn (pool->pool_insn);
5413 insn = emit_label_after (pool->label, insn);
5414 INSN_ADDRESSES_NEW (insn, -1);
5416 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
5417 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5419 insn = emit_label_after (pool_end, pool->pool_insn);
5420 INSN_ADDRESSES_NEW (insn, -1);
5422 s390_dump_pool (pool, 1);
5426 /* Replace all literal pool references. */
5428 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5431 replace_ltrel_base (&PATTERN (insn));
5433 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5435 rtx addr, pool_ref = NULL_RTX;
5436 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5439 if (s390_execute_label (insn))
5440 addr = s390_find_execute (pool, insn);
5442 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
5443 get_pool_mode (pool_ref));
/* Force re-recognition after the pattern was rewritten in place.  */
5445 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
5446 INSN_CODE (insn) = -1;
5452 /* Free the pool. */
5453 s390_free_pool (pool);
5456 /* POOL holds the main literal pool as collected by s390_mainpool_start.
5457 We have decided we cannot use this pool, so revert all changes
5458 to the current function that were done by s390_mainpool_start. */
5460 s390_mainpool_cancel (struct constant_pool *pool)
5462 /* We didn't actually change the instruction stream, so simply
5463 free the pool memory. */
5464 s390_free_pool (pool)
5468 /* Chunkify the literal pool. */
5470 #define S390_POOL_CHUNK_MIN 0xc00
5471 #define S390_POOL_CHUNK_MAX 0xe00
5473 static struct constant_pool *
5474 s390_chunkify_start (void)
5476 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
5479 rtx pending_ltrel = NULL_RTX;
5482 rtx (*gen_reload_base) (rtx, rtx) =
5483 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
5486 /* We need correct insn addresses. */
5488 shorten_branches (get_insns ());
5490 /* Scan all insns and move literals to pool chunks. */
5492 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5494 /* Check for pending LTREL_BASE. */
5497 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
5500 gcc_assert (ltrel_base == pending_ltrel);
5501 pending_ltrel = NULL_RTX;
5505 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
5508 curr_pool = s390_start_pool (&pool_list, insn);
5510 s390_add_execute (curr_pool, insn);
5511 s390_add_pool_insn (curr_pool, insn);
5513 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5515 rtx pool_ref = NULL_RTX;
5516 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5519 rtx constant = get_pool_constant (pool_ref);
5520 enum machine_mode mode = get_pool_mode (pool_ref);
5523 curr_pool = s390_start_pool (&pool_list, insn);
5525 s390_add_constant (curr_pool, constant, mode);
5526 s390_add_pool_insn (curr_pool, insn);
5528 /* Don't split the pool chunk between a LTREL_OFFSET load
5529 and the corresponding LTREL_BASE. */
5530 if (GET_CODE (constant) == CONST
5531 && GET_CODE (XEXP (constant, 0)) == UNSPEC
5532 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
5534 gcc_assert (!pending_ltrel);
5535 pending_ltrel = pool_ref;
5540 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
5543 s390_add_pool_insn (curr_pool, insn);
5544 /* An LTREL_BASE must follow within the same basic block. */
5545 gcc_assert (!pending_ltrel);
5549 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
5550 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
5553 if (TARGET_CPU_ZARCH)
5555 if (curr_pool->size < S390_POOL_CHUNK_MAX)
5558 s390_end_pool (curr_pool, NULL_RTX);
5563 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
5564 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
5567 /* We will later have to insert base register reload insns.
5568 Those will have an effect on code size, which we need to
5569 consider here. This calculation makes rather pessimistic
5570 worst-case assumptions. */
5571 if (GET_CODE (insn) == CODE_LABEL)
5574 if (chunk_size < S390_POOL_CHUNK_MIN
5575 && curr_pool->size < S390_POOL_CHUNK_MIN)
5578 /* Pool chunks can only be inserted after BARRIERs ... */
5579 if (GET_CODE (insn) == BARRIER)
5581 s390_end_pool (curr_pool, insn);
5586 /* ... so if we don't find one in time, create one. */
5587 else if ((chunk_size > S390_POOL_CHUNK_MAX
5588 || curr_pool->size > S390_POOL_CHUNK_MAX))
5590 rtx label, jump, barrier;
5592 /* We can insert the barrier only after a 'real' insn. */
5593 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
5595 if (get_attr_length (insn) == 0)
5598 /* Don't separate LTREL_BASE from the corresponding
5599 LTREL_OFFSET load. */
5603 label = gen_label_rtx ();
5604 jump = emit_jump_insn_after (gen_jump (label), insn);
5605 barrier = emit_barrier_after (jump);
5606 insn = emit_label_after (label, barrier);
5607 JUMP_LABEL (jump) = label;
5608 LABEL_NUSES (label) = 1;
5610 INSN_ADDRESSES_NEW (jump, -1);
5611 INSN_ADDRESSES_NEW (barrier, -1);
5612 INSN_ADDRESSES_NEW (insn, -1);
5614 s390_end_pool (curr_pool, barrier);
5622 s390_end_pool (curr_pool, NULL_RTX);
5623 gcc_assert (!pending_ltrel);
5625 /* Find all labels that are branched into
5626 from an insn belonging to a different chunk. */
5628 far_labels = BITMAP_ALLOC (NULL);
5630 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5632 /* Labels marked with LABEL_PRESERVE_P can be target
5633 of non-local jumps, so we have to mark them.
5634 The same holds for named labels.
5636 Don't do that, however, if it is the label before
5639 if (GET_CODE (insn) == CODE_LABEL
5640 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
5642 rtx vec_insn = next_real_insn (insn);
5643 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5644 PATTERN (vec_insn) : NULL_RTX;
5646 || !(GET_CODE (vec_pat) == ADDR_VEC
5647 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5648 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
5651 /* If we have a direct jump (conditional or unconditional)
5652 or a casesi jump, check all potential targets. */
5653 else if (GET_CODE (insn) == JUMP_INSN)
5655 rtx pat = PATTERN (insn);
5656 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5657 pat = XVECEXP (pat, 0, 0);
5659 if (GET_CODE (pat) == SET)
5661 rtx label = JUMP_LABEL (insn);
5664 if (s390_find_pool (pool_list, label)
5665 != s390_find_pool (pool_list, insn))
5666 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5669 else if (GET_CODE (pat) == PARALLEL
5670 && XVECLEN (pat, 0) == 2
5671 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5672 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
5673 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
5675 /* Find the jump table used by this casesi jump. */
5676 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
5677 rtx vec_insn = next_real_insn (vec_label);
5678 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
5679 PATTERN (vec_insn) : NULL_RTX;
5681 && (GET_CODE (vec_pat) == ADDR_VEC
5682 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
5684 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
5686 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
5688 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
5690 if (s390_find_pool (pool_list, label)
5691 != s390_find_pool (pool_list, insn))
5692 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
5699 /* Insert base register reload insns before every pool. */
5701 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5703 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5705 rtx insn = curr_pool->first_insn;
5706 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
5709 /* Insert base register reload insns at every far label. */
5711 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5712 if (GET_CODE (insn) == CODE_LABEL
5713 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
5715 struct constant_pool *pool = s390_find_pool (pool_list, insn);
5718 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
5720 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
5725 BITMAP_FREE (far_labels);
5728 /* Recompute insn addresses. */
5730 init_insn_lengths ();
5731 shorten_branches (get_insns ());
5736 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5737 After we have decided to use this list, finish implementing
5738 all changes to the current function as required. */
/* NOTE(review): several original source lines (return type, braces,
   some conditions) are elided in this extract; code is unchanged.  */
5741 s390_chunkify_finish (struct constant_pool *pool_list)
5743 struct constant_pool *curr_pool = NULL;
5747 /* Replace all literal pool references.  Walk every insn, find the
     pool chunk it belongs to, and rewrite pool references to point at
     the per-chunk constant/execute entry.  */
5749 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5752 replace_ltrel_base (&PATTERN (insn));
5754 curr_pool = s390_find_pool (pool_list, insn);
5758 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
5760 rtx addr, pool_ref = NULL_RTX;
5761 find_constant_pool_ref (PATTERN (insn), &pool_ref);
5764 if (s390_execute_label (insn))
5765 addr = s390_find_execute (curr_pool, insn);
5767 addr = s390_find_constant (curr_pool,
5768 get_pool_constant (pool_ref),
5769 get_pool_mode (pool_ref));
5771 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
/* Force re-recognition: the pattern was just modified in place.  */
5772 INSN_CODE (insn) = -1;
5777 /* Dump out all literal pools. */
5779 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5780 s390_dump_pool (curr_pool, 0);
5782 /* Free pool list. */
5786 struct constant_pool *next = pool_list->next;
5787 s390_free_pool (pool_list);
5792 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
5793 We have decided we cannot use this list, so revert all changes
5794 to the current function that were done by s390_chunkify_start. */
/* NOTE(review): interior lines (braces, loop bodies) are elided in
   this extract; code below is byte-identical to the original.  */
5797 s390_chunkify_cancel (struct constant_pool *pool_list)
5799 struct constant_pool *curr_pool = NULL;
5802 /* Remove all pool placeholder insns. */
5804 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
5806 /* Did we insert an extra barrier? Remove it. */
5807 rtx barrier = PREV_INSN (curr_pool->pool_insn);
5808 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
5809 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Only remove the jump/barrier/label triple if it is exactly the
   "jump around the pool" sequence emitted by chunkify_start: an
   unconditional jump to LABEL immediately followed by a barrier.  */
5811 if (jump && GET_CODE (jump) == JUMP_INSN
5812 && barrier && GET_CODE (barrier) == BARRIER
5813 && label && GET_CODE (label) == CODE_LABEL
5814 && GET_CODE (PATTERN (jump)) == SET
5815 && SET_DEST (PATTERN (jump)) == pc_rtx
5816 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
5817 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
5820 remove_insn (barrier);
5821 remove_insn (label);
5824 remove_insn (curr_pool->pool_insn);
5827 /* Remove all base register reload insns. */
5829 for (insn = get_insns (); insn; )
5831 rtx next_insn = NEXT_INSN (insn);
/* Reload insns are recognizable by their UNSPEC_RELOAD_BASE source.  */
5833 if (GET_CODE (insn) == INSN
5834 && GET_CODE (PATTERN (insn)) == SET
5835 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
5836 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
5842 /* Free pool list. */
5846 struct constant_pool *next = pool_list->next;
5847 s390_free_pool (pool_list);
5853 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
/* NOTE(review): the switch cases and default arm are partially elided
   in this extract; visible are the float and integer emission paths.  */
5856 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
5860 switch (GET_MODE_CLASS (mode))
/* Floating-point entries must be CONST_DOUBLEs; emit via assemble_real.  */
5863 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
5865 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
5866 assemble_real (r, mode, align);
/* Integer entries are emitted directly at the mode's byte size.  */
5870 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
5879 /* Return an RTL expression representing the value of the return address
5880 for the frame COUNT steps up from the current frame. FRAME is the
5881 frame pointer of that frame. */
5884 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
5889 /* Without backchain, we fail for all but the current frame. */
5891 if (!TARGET_BACKCHAIN && count > 0)
5894 /* For the current frame, we need to make sure the initial
5895 value of RETURN_REGNUM is actually saved. */
5899 /* On non-z architectures branch splitting could overwrite r14. */
5900 if (TARGET_CPU_ZARCH)
5901 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
/* Non-zarch: force the return address to be saved to the stack and
   read it back through the return-address pointer.  */
5904 cfun_frame_layout.save_return_addr_p = true;
5905 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
/* Backchain walk for outer frames: the save slot offset depends on
   the stack layout variant.  */
5909 if (TARGET_PACKED_STACK)
5910 offset = -2 * UNITS_PER_WORD;
5912 offset = RETURN_REGNUM * UNITS_PER_WORD;
5914 addr = plus_constant (frame, offset);
5915 addr = memory_address (Pmode, addr);
5916 return gen_rtx_MEM (Pmode, addr);
5919 /* Return an RTL expression representing the back chain stored in
5920 the current stack frame. */
5923 s390_back_chain_rtx (void)
/* Only meaningful when a backchain is maintained at all.  */
5927 gcc_assert (TARGET_BACKCHAIN);
/* Packed stack keeps the backchain at the top of the register save
   area rather than at offset 0.  */
5929 if (TARGET_PACKED_STACK)
5930 chain = plus_constant (stack_pointer_rtx,
5931 STACK_POINTER_OFFSET - UNITS_PER_WORD);
5933 chain = stack_pointer_rtx;
5935 chain = gen_rtx_MEM (Pmode, chain);
5939 /* Find first call clobbered register unused in a function.
5940 This could be used as base register in a leaf function
5941 or for holding the return address before epilogue. */
/* NOTE(review): the return statements are elided in this extract;
   presumably returns the first i in 0..5 with !regs_ever_live[i],
   and a sentinel otherwise — confirm against the full file.  */
5944 find_unused_clobbered_reg (void)
5947 for (i = 0; i < 6; i++)
5948 if (!regs_ever_live[i])
5954 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
5955 clobbered hard regs in SETREG. */
5958 s390_reg_clobbered_rtx (rtx setreg, rtx set_insn ATTRIBUTE_UNUSED, void *data)
5960 int *regs_ever_clobbered = (int *)data;
5961 unsigned int i, regno;
5962 enum machine_mode mode = GET_MODE (setreg);
/* A SUBREG of a general reg clobbers the underlying hard regs; use
   subreg_regno to get the first affected hard register.  */
5964 if (GET_CODE (setreg) == SUBREG)
5966 rtx inner = SUBREG_REG (setreg);
5967 if (!GENERAL_REG_P (inner))
5969 regno = subreg_regno (setreg);
5971 else if (GENERAL_REG_P (setreg))
5972 regno = REGNO (setreg);
/* Mark every hard reg covered by the store's mode, not just the
   first one (multi-word modes span several GPRs).  */
5977 i < regno + HARD_REGNO_NREGS (regno, mode);
5979 regs_ever_clobbered[i] = 1;
5982 /* Walks through all basic blocks of the current function looking
5983 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
5984 of the passed integer array REGS_EVER_CLOBBERED are set to one for
5985 each of those regs. */
5988 s390_regs_ever_clobbered (int *regs_ever_clobbered)
/* Start from a clean slate for all 16 GPRs.  */
5994 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
5996 /* For non-leaf functions we have to consider all call clobbered regs to be
5998 if (!current_function_is_leaf)
6000 for (i = 0; i < 16; i++)
6001 regs_ever_clobbered[i] = call_really_used_regs[i];
6004 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6005 this work is done by liveness analysis (mark_regs_live_at_end).
6006 Special care is needed for functions containing landing pads. Landing pads
6007 may use the eh registers, but the code which sets these registers is not
6008 contained in that function. Hence s390_regs_ever_clobbered is not able to
6009 deal with this automatically. */
6010 if (current_function_calls_eh_return || cfun->machine->has_landing_pad_p)
6011 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6012 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6014 /* For nonlocal gotos all call-saved registers have to be saved.
6015 This flag is also set for the unwinding code in libgcc.
6016 See expand_builtin_unwind_init. For regs_ever_live this is done by
6018 if (current_function_has_nonlocal_label)
6019 for (i = 0; i < 16; i++)
6020 if (!call_really_used_regs[i])
6021 regs_ever_clobbered[i] = 1;
/* Finally, scan every real insn and record the regs it stores to.  */
6023 FOR_EACH_BB (cur_bb)
6025 FOR_BB_INSNS (cur_bb, cur_insn)
6027 if (INSN_P (cur_insn))
6028 note_stores (PATTERN (cur_insn),
6029 s390_reg_clobbered_rtx,
6030 regs_ever_clobbered);
6035 /* Determine the frame area which actually has to be accessed
6036 in the function epilogue. The values are stored at the
6037 given pointers AREA_BOTTOM (address of the lowest used stack
6038 address) and AREA_TOP (address of the first item which does
6039 not belong to the stack frame). */
6042 s390_frame_area (int *area_bottom, int *area_top)
/* Widen [b, t) to cover the GPR restore range, if any.  */
6050 if (cfun_frame_layout.first_restore_gpr != -1)
6052 b = (cfun_frame_layout.gprs_offset
6053 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6054 t = b + (cfun_frame_layout.last_restore_gpr
6055 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
/* 64-bit ABI: include the high FPR (f8-f15) save area.  */
6058 if (TARGET_64BIT && cfun_save_high_fprs_p)
6060 b = MIN (b, cfun_frame_layout.f8_offset);
6061 t = MAX (t, (cfun_frame_layout.f8_offset
6062 + cfun_frame_layout.high_fprs * 8));
/* Include saved f4/f6 slots (fpr bits 2 and 3).  */
6066 for (i = 2; i < 4; i++)
6067 if (cfun_fpr_bit_p (i))
6069 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6070 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6077 /* Fill cfun->machine with info about register usage of current function.
6078 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6081 s390_register_info (int clobbered_regs[])
6085 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6086 cfun_frame_layout.fpr_bitmap = 0;
6087 cfun_frame_layout.high_fprs = 0;
/* Hard regs 24..31 are f8..f15; record which need saving.  */
6089 for (i = 24; i < 32; i++)
6090 if (regs_ever_live[i] && !global_regs[i])
6092 cfun_set_fpr_bit (i - 16);
6093 cfun_frame_layout.high_fprs++;
6096 /* Find first and last gpr to be saved. We trust regs_ever_live
6097 data, except that we don't save and restore global registers.
6099 Also, all registers with special meaning to the compiler need
6100 to be handled extra. */
6102 s390_regs_ever_clobbered (clobbered_regs);
/* Global and fixed registers are never save/restored by us.  */
6104 for (i = 0; i < 16; i++)
6105 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
6107 if (frame_pointer_needed)
6108 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
6111 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
6112 |= regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
/* The chosen literal-pool base register counts as clobbered.  */
6114 clobbered_regs[BASE_REGNUM]
6115 |= (cfun->machine->base_reg
6116 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
/* r14 (return address) must be saved in any of these situations.  */
6118 clobbered_regs[RETURN_REGNUM]
6119 |= (!current_function_is_leaf
6120 || TARGET_TPF_PROFILING
6121 || cfun->machine->split_branches_pending_p
6122 || cfun_frame_layout.save_return_addr_p
6123 || current_function_calls_eh_return
6124 || current_function_stdarg);
6126 clobbered_regs[STACK_POINTER_REGNUM]
6127 |= (!current_function_is_leaf
6128 || TARGET_TPF_PROFILING
6129 || cfun_save_high_fprs_p
6130 || get_frame_size () > 0
6131 || current_function_calls_alloca
6132 || current_function_stdarg);
/* Scan call-saved GPRs 6..15 for the first (i) and last (j) that
   need saving.  */
6134 for (i = 6; i < 16; i++)
6135 if (clobbered_regs[i])
6137 for (j = 15; j > i; j--)
6138 if (clobbered_regs[j])
6143 /* Nothing to save/restore. */
6144 cfun_frame_layout.first_save_gpr = -1;
6145 cfun_frame_layout.first_restore_gpr = -1;
6146 cfun_frame_layout.last_save_gpr = -1;
6147 cfun_frame_layout.last_restore_gpr = -1;
6151 /* Save / Restore from gpr i to j. */
6152 cfun_frame_layout.first_save_gpr = i;
6153 cfun_frame_layout.first_restore_gpr = i;
6154 cfun_frame_layout.last_save_gpr = j;
6155 cfun_frame_layout.last_restore_gpr = j;
6158 if (current_function_stdarg)
6160 /* Varargs functions need to save gprs 2 to 6. */
6161 if (cfun->va_list_gpr_size
6162 && current_function_args_info.gprs < GP_ARG_NUM_REG)
6164 int min_gpr = current_function_args_info.gprs;
6165 int max_gpr = min_gpr + cfun->va_list_gpr_size;
6166 if (max_gpr > GP_ARG_NUM_REG)
6167 max_gpr = GP_ARG_NUM_REG;
/* Extend the save range to cover the unnamed argument GPRs
   (argument registers start at hard reg 2).  */
6169 if (cfun_frame_layout.first_save_gpr == -1
6170 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
6171 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
6173 if (cfun_frame_layout.last_save_gpr == -1
6174 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
6175 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
6178 /* Mark f0, f2 for 31 bit and f0-f4 for 64 bit to be saved. */
6179 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
6180 && current_function_args_info.fprs < FP_ARG_NUM_REG)
6182 int min_fpr = current_function_args_info.fprs;
6183 int max_fpr = min_fpr + cfun->va_list_fpr_size;
6184 if (max_fpr > FP_ARG_NUM_REG)
6185 max_fpr = FP_ARG_NUM_REG;
6187 /* ??? This is currently required to ensure proper location
6188 of the fpr save slots within the va_list save area. */
6189 if (TARGET_PACKED_STACK)
6192 for (i = min_fpr; i < max_fpr; i++)
6193 cfun_set_fpr_bit (i);
/* Non-stdarg: f4/f6 (hard regs 18/19) are saved only if live.  */
6198 for (i = 2; i < 4; i++)
6199 if (regs_ever_live[i + 16] && !global_regs[i + 16])
6200 cfun_set_fpr_bit (i);
6203 /* Fill cfun->machine with info about frame of current function. */
/* NOTE(review): some lines (else arms, early return) are elided in
   this extract; code below is byte-identical to the original.  */
6206 s390_frame_info (void)
6210 cfun_frame_layout.frame_size = get_frame_size ();
6211 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
6212 fatal_error ("total size of local variables exceeds architecture limit");
/* Standard ABI stack layout: backchain at 0, FPR/GPR save slots at
   fixed offsets from the stack pointer.  */
6214 if (!TARGET_PACKED_STACK)
6216 cfun_frame_layout.backchain_offset = 0;
6217 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
6218 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
6219 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
6220 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr
6223 else if (TARGET_BACKCHAIN) /* kernel stack layout */
6225 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
6227 cfun_frame_layout.gprs_offset
6228 = (cfun_frame_layout.backchain_offset
6229 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr + 1)
6234 cfun_frame_layout.f4_offset
6235 = (cfun_frame_layout.gprs_offset
6236 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)))
/* (verbatim continuation)  FPR slots are packed directly below the
   GPR save area; each occupied slot is 8 bytes.  */;
6238 cfun_frame_layout.f0_offset
6239 = (cfun_frame_layout.f4_offset
6240 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6244 /* On 31 bit we have to care about alignment of the
6245 floating point regs to provide fastest access. */
6246 cfun_frame_layout.f0_offset
6247 = ((cfun_frame_layout.gprs_offset
6248 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
6249 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6251 cfun_frame_layout.f4_offset
6252 = (cfun_frame_layout.f0_offset
6253 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6256 else /* no backchain */
6258 cfun_frame_layout.f4_offset
6259 = (STACK_POINTER_OFFSET
6260 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
6262 cfun_frame_layout.f0_offset
6263 = (cfun_frame_layout.f4_offset
6264 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
6266 cfun_frame_layout.gprs_offset
6267 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
/* Leaf function needing no frame at all: nothing more to compute.  */
6270 if (current_function_is_leaf
6271 && !TARGET_TPF_PROFILING
6272 && cfun_frame_layout.frame_size == 0
6273 && !cfun_save_high_fprs_p
6274 && !current_function_calls_alloca
6275 && !current_function_stdarg)
6278 if (!TARGET_PACKED_STACK)
6279 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
6280 + current_function_outgoing_args_size
6281 + cfun_frame_layout.high_fprs * 8);
6284 if (TARGET_BACKCHAIN)
6285 cfun_frame_layout.frame_size += UNITS_PER_WORD;
6287 /* No alignment trouble here because f8-f15 are only saved under
6289 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
6290 cfun_frame_layout.f4_offset),
6291 cfun_frame_layout.gprs_offset)
6292 - cfun_frame_layout.high_fprs * 8);
6294 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
6296 for (i = 0; i < 8; i++)
6297 if (cfun_fpr_bit_p (i))
6298 cfun_frame_layout.frame_size += 8;
6300 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
6302 /* If under 31 bit an odd number of gprs has to be saved we have to adjust
6303 the frame size to sustain 8 byte alignment of stack frames. */
6304 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
6305 STACK_BOUNDARY / BITS_PER_UNIT - 1)
6306 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
6308 cfun_frame_layout.frame_size += current_function_outgoing_args_size;
6312 /* Generate frame layout. Fills in register and frame data for the current
6313 function in cfun->machine. This routine can be called multiple times;
6314 it will re-do the complete frame layout every time. */
/* NOTE(review): the do-loop header and the s390_frame_info call are
   elided in this extract; the while at the end iterates until the
   frame size reaches a fixed point.  */
6317 s390_init_frame_layout (void)
6319 HOST_WIDE_INT frame_size;
6321 int clobbered_regs[16];
6323 /* On S/390 machines, we may need to perform branch splitting, which
6324 will require both base and return address register. We have no
6325 choice but to assume we're going to need them until right at the
6326 end of the machine dependent reorg phase. */
6327 if (!TARGET_CPU_ZARCH)
6328 cfun->machine->split_branches_pending_p = true;
6332 frame_size = cfun_frame_layout.frame_size;
6334 /* Try to predict whether we'll need the base register. */
6335 base_used = cfun->machine->split_branches_pending_p
6336 || current_function_uses_const_pool
6337 || (!DISP_IN_RANGE (-frame_size)
6338 && !CONST_OK_FOR_K (-frame_size));
6340 /* Decide which register to use as literal pool base. In small
6341 leaf functions, try to use an unused call-clobbered register
6342 as base register to avoid save/restore overhead. */
6344 cfun->machine->base_reg = NULL_RTX;
6345 else if (current_function_is_leaf && !regs_ever_live[5])
6346 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
6348 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
6350 s390_register_info (clobbered_regs);
/* Iterate: register choice affects frame size and vice versa.  */
6353 while (frame_size != cfun_frame_layout.frame_size);
6356 /* Update frame layout. Recompute actual register save data based on
6357 current info and update regs_ever_live for the special registers.
6358 May be called multiple times, but may never cause *more* registers
6359 to be saved than s390_init_frame_layout allocated room for. */
6362 s390_update_frame_layout (void)
6364 int clobbered_regs[16];
6366 s390_register_info (clobbered_regs);
/* Propagate the recomputed clobber info into regs_ever_live for the
   three special registers.  */
6368 regs_ever_live[BASE_REGNUM] = clobbered_regs[BASE_REGNUM];
6369 regs_ever_live[RETURN_REGNUM] = clobbered_regs[RETURN_REGNUM];
6370 regs_ever_live[STACK_POINTER_REGNUM] = clobbered_regs[STACK_POINTER_REGNUM];
/* The chosen literal-pool base register is always considered live.  */
6372 if (cfun->machine->base_reg)
6373 regs_ever_live[REGNO (cfun->machine->base_reg)] = 1;
6376 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* NOTE(review): the return statements are elided in this extract.  */
6379 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
6381 /* Once we've decided upon a register to use as base register, it must
6382 no longer be used for any other purpose. */
6383 if (cfun->machine->base_reg)
6384 if (REGNO (cfun->machine->base_reg) == old_reg
6385 || REGNO (cfun->machine->base_reg) == new_reg)
6391 /* Return true if register FROM can be eliminated via register TO. */
/* NOTE(review): some return statements are elided in this extract.  */
6394 s390_can_eliminate (int from, int to)
6396 /* On zSeries machines, we have not marked the base register as fixed.
6397 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
6398 If a function requires the base register, we say here that this
6399 elimination cannot be performed. This will cause reload to free
6400 up the base register (as if it were fixed). On the other hand,
6401 if the current function does *not* require the base register, we
6402 say here the elimination succeeds, which in turn allows reload
6403 to allocate the base register for any other purpose. */
6404 if (from == BASE_REGNUM && to == BASE_REGNUM)
6406 if (TARGET_CPU_ZARCH)
6408 s390_init_frame_layout ();
6409 return cfun->machine->base_reg == NULL_RTX;
6415 /* Everything else must point into the stack frame. */
6416 gcc_assert (to == STACK_POINTER_REGNUM
6417 || to == HARD_FRAME_POINTER_REGNUM);
6419 gcc_assert (from == FRAME_POINTER_REGNUM
6420 || from == ARG_POINTER_REGNUM
6421 || from == RETURN_ADDRESS_POINTER_REGNUM);
6423 /* Make sure we actually saved the return address. */
6424 if (from == RETURN_ADDRESS_POINTER_REGNUM)
6425 if (!current_function_calls_eh_return
6426 && !current_function_stdarg
6427 && !cfun_frame_layout.save_return_addr_p)
6433 /* Return offset between register FROM and TO initially after prolog. */
6436 s390_initial_elimination_offset (int from, int to)
6438 HOST_WIDE_INT offset;
6441 /* ??? Why are we called for non-eliminable pairs? */
6442 if (!s390_can_eliminate (from, to))
/* Dispatch on the register being eliminated.  */
6447 case FRAME_POINTER_REGNUM:
6448 offset = (get_frame_size()
6449 + STACK_POINTER_OFFSET
6450 + current_function_outgoing_args_size);
6453 case ARG_POINTER_REGNUM:
6454 s390_init_frame_layout ();
6455 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
6458 case RETURN_ADDRESS_POINTER_REGNUM:
6459 s390_init_frame_layout ();
/* The return address lives in the GPR save area slot for r14.  */
6460 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr;
6461 gcc_assert (index >= 0);
6462 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
6463 offset += index * UNITS_PER_WORD;
6477 /* Emit insn to save fpr REGNUM at offset OFFSET relative
6478 to register BASE. Return generated insn. */
6481 save_fpr (rtx base, int offset, int regnum)
6484 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
/* Argument FPRs saved for stdarg belong to the varargs alias set so
   va_arg loads are not treated as frame accesses.  */
6486 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
6487 set_mem_alias_set (addr, get_varargs_alias_set ());
6489 set_mem_alias_set (addr, get_frame_alias_set ());
6491 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
6494 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
6495 to register BASE. Return generated insn. */
6498 restore_fpr (rtx base, int offset, int regnum)
6501 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
6502 set_mem_alias_set (addr, get_frame_alias_set ());
6504 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
6507 /* Generate insn to save registers FIRST to LAST into
6508 the register save area located at offset OFFSET
6509 relative to register BASE. */
/* NOTE(review): a few lines (conditions, else arms) are elided in
   this extract; code below is byte-identical.  */
6512 save_gprs (rtx base, int offset, int first, int last)
6514 rtx addr, insn, note;
6517 addr = plus_constant (base, offset);
6518 addr = gen_rtx_MEM (Pmode, addr);
6520 set_mem_alias_set (addr, get_frame_alias_set ());
6522 /* Special-case single register. */
6526 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
6528 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
6530 RTX_FRAME_RELATED_P (insn) = 1;
/* General case: one store-multiple covering FIRST..LAST.  */
6535 insn = gen_store_multiple (addr,
6536 gen_rtx_REG (Pmode, first),
6537 GEN_INT (last - first + 1));
/* For stdarg, stores of argument regs (<= 6) go to the varargs
   alias set.  */
6539 if (first <= 6 && current_function_stdarg)
6540 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6542 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
6545 set_mem_alias_set (mem, get_varargs_alias_set ());
6548 /* We need to set the FRAME_RELATED flag on all SETs
6549 inside the store-multiple pattern.
6551 However, we must not emit DWARF records for registers 2..5
6552 if they are stored for use by variable arguments ...
6554 ??? Unfortunately, it is not enough to simply not the
6555 FRAME_RELATED flags for those SETs, because the first SET
6556 of the PARALLEL is always treated as if it had the flag
6557 set, even if it does not. Therefore we emit a new pattern
6558 without those registers as REG_FRAME_RELATED_EXPR note. */
6562 rtx pat = PATTERN (insn);
6564 for (i = 0; i < XVECLEN (pat, 0); i++)
6565 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
6566 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
6568 RTX_FRAME_RELATED_P (insn) = 1;
/* Varargs case: the DWARF note describes only r6..LAST, skipping
   the argument registers 2..5.  */
6572 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD);
6573 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
6574 gen_rtx_REG (Pmode, 6),
6575 GEN_INT (last - 6 + 1));
6576 note = PATTERN (note);
6579 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6580 note, REG_NOTES (insn));
6582 for (i = 0; i < XVECLEN (note, 0); i++)
6583 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
6584 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
6586 RTX_FRAME_RELATED_P (insn) = 1;
6592 /* Generate insn to restore registers FIRST to LAST from
6593 the register save area located at offset OFFSET
6594 relative to register BASE. */
6597 restore_gprs (rtx base, int offset, int first, int last)
6601 addr = plus_constant (base, offset);
6602 addr = gen_rtx_MEM (Pmode, addr);
6603 set_mem_alias_set (addr, get_frame_alias_set ());
6605 /* Special-case single register. */
6609 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
6611 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
/* General case: one load-multiple covering FIRST..LAST.  */
6616 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
6618 GEN_INT (last - first + 1));
6622 /* Return insn sequence to load the GOT register. */
6624 static GTY(()) rtx got_symbol;
6626 s390_load_got (void)
/* Lazily create the _GLOBAL_OFFSET_TABLE_ symbol (GC-rooted above).  */
6632 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
6633 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
/* On z architectures a direct move suffices (larl-style addressing).  */
6638 if (TARGET_CPU_ZARCH)
6640 emit_move_insn (pic_offset_table_rtx, got_symbol);
/* Otherwise load the literal-pool-relative GOT offset, then add the
   pool base to form the final GOT pointer.  */
6646 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
6647 UNSPEC_LTREL_OFFSET);
6648 offset = gen_rtx_CONST (Pmode, offset);
6649 offset = force_const_mem (Pmode, offset);
6651 emit_move_insn (pic_offset_table_rtx, offset);
6653 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
6655 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
6657 emit_move_insn (pic_offset_table_rtx, offset);
6660 insns = get_insns ();
6665 /* Expand the prologue into a bunch of separate insns. */
6668 s390_emit_prologue (void)
6676 /* Complete frame layout. */
6678 s390_update_frame_layout ();
6680 /* Annotate all constant pool references to let the scheduler know
6681 they implicitly use the base register. */
6683 push_topmost_sequence ();
6685 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6687 annotate_constant_pool_refs (&PATTERN (insn));
6689 pop_topmost_sequence ();
6691 /* Choose best register to use for temp use within prologue.
6692 See below for why TPF must use the register 1. */
6694 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
6695 && !current_function_is_leaf
6696 && !TARGET_TPF_PROFILING)
6697 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
6699 temp_reg = gen_rtx_REG (Pmode, 1);
6701 /* Save call saved gprs. */
6702 if (cfun_frame_layout.first_save_gpr != -1)
6704 insn = save_gprs (stack_pointer_rtx,
6705 cfun_frame_layout.gprs_offset,
6706 cfun_frame_layout.first_save_gpr,
6707 cfun_frame_layout.last_save_gpr);
6711 /* Dummy insn to mark literal pool slot. */
6713 if (cfun->machine->base_reg)
6714 emit_insn (gen_main_pool (cfun->machine->base_reg));
6716 offset = cfun_frame_layout.f0_offset;
6718 /* Save f0 and f2. */
6719 for (i = 0; i < 2; i++)
6721 if (cfun_fpr_bit_p (i))
6723 save_fpr (stack_pointer_rtx, offset, i + 16);
6726 else if (!TARGET_PACKED_STACK)
6730 /* Save f4 and f6. */
6731 offset = cfun_frame_layout.f4_offset;
6732 for (i = 2; i < 4; i++)
6734 if (cfun_fpr_bit_p (i))
6736 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6739 /* If f4 and f6 are call clobbered they are saved due to stdargs and
6740 therefore are not frame related. */
6741 if (!call_really_used_regs[i + 16])
6742 RTX_FRAME_RELATED_P (insn) = 1;
6744 else if (!TARGET_PACKED_STACK)
6748 if (TARGET_PACKED_STACK
6749 && cfun_save_high_fprs_p
6750 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
6752 offset = (cfun_frame_layout.f8_offset
6753 + (cfun_frame_layout.high_fprs - 1) * 8);
6755 for (i = 15; i > 7 && offset >= 0; i--)
6756 if (cfun_fpr_bit_p (i))
6758 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
6760 RTX_FRAME_RELATED_P (insn) = 1;
6763 if (offset >= cfun_frame_layout.f8_offset)
6767 if (!TARGET_PACKED_STACK)
6768 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
6770 /* Decrement stack pointer. */
6772 if (cfun_frame_layout.frame_size > 0)
6774 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
6776 if (s390_stack_size)
6778 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
6779 & ~(s390_stack_guard - 1));
6780 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
6781 GEN_INT (stack_check_mask));
6784 gen_cmpdi (t, const0_rtx);
6786 gen_cmpsi (t, const0_rtx);
6788 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode,
6789 gen_rtx_REG (CCmode,
6795 if (s390_warn_framesize > 0
6796 && cfun_frame_layout.frame_size >= s390_warn_framesize)
6797 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
6798 current_function_name (), cfun_frame_layout.frame_size);
6800 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
6801 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
6803 /* Save incoming stack pointer into temp reg. */
6804 if (TARGET_BACKCHAIN || next_fpr)
6805 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
6807 /* Subtract frame size from stack pointer. */
6809 if (DISP_IN_RANGE (INTVAL (frame_off)))
6811 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6812 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6814 insn = emit_insn (insn);
6818 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
6819 frame_off = force_const_mem (Pmode, frame_off);
6821 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
6822 annotate_constant_pool_refs (&PATTERN (insn));
6825 RTX_FRAME_RELATED_P (insn) = 1;
6827 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6828 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
6829 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
6830 GEN_INT (-cfun_frame_layout.frame_size))),
6833 /* Set backchain. */
6835 if (TARGET_BACKCHAIN)
6837 if (cfun_frame_layout.backchain_offset)
6838 addr = gen_rtx_MEM (Pmode,
6839 plus_constant (stack_pointer_rtx,
6840 cfun_frame_layout.backchain_offset));
6842 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
6843 set_mem_alias_set (addr, get_frame_alias_set ());
6844 insn = emit_insn (gen_move_insn (addr, temp_reg));
6847 /* If we support asynchronous exceptions (e.g. for Java),
6848 we need to make sure the backchain pointer is set up
6849 before any possibly trapping memory access. */
6851 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
6853 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
6854 emit_insn (gen_rtx_CLOBBER (VOIDmode, addr));
6858 /* Save fprs 8 - 15 (64 bit ABI). */
6860 if (cfun_save_high_fprs_p && next_fpr)
6862 insn = emit_insn (gen_add2_insn (temp_reg,
6863 GEN_INT (cfun_frame_layout.f8_offset)));
6867 for (i = 24; i <= next_fpr; i++)
6868 if (cfun_fpr_bit_p (i - 16))
6870 rtx addr = plus_constant (stack_pointer_rtx,
6871 cfun_frame_layout.frame_size
6872 + cfun_frame_layout.f8_offset
6875 insn = save_fpr (temp_reg, offset, i);
6877 RTX_FRAME_RELATED_P (insn) = 1;
6879 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
6880 gen_rtx_SET (VOIDmode,
6881 gen_rtx_MEM (DFmode, addr),
6882 gen_rtx_REG (DFmode, i)),
6887 /* Set frame pointer, if needed. */
6889 if (frame_pointer_needed)
6891 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
6892 RTX_FRAME_RELATED_P (insn) = 1;
6895 /* Set up got pointer, if needed. */
6897 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
6899 rtx insns = s390_load_got ();
6901 for (insn = insns; insn; insn = NEXT_INSN (insn))
6903 annotate_constant_pool_refs (&PATTERN (insn));
6905 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
6912 if (TARGET_TPF_PROFILING)
6914 /* Generate a BAS instruction to serve as a function
6915 entry intercept to facilitate the use of tracing
6916 algorithms located at the branch target. */
6917 emit_insn (gen_prologue_tpf ());
6919 /* Emit a blockage here so that all code
6920 lies between the profiling mechanisms. */
6921 emit_insn (gen_blockage ());
6925 /* Expand the epilogue into a bunch of separate insns. */
/* SIBCALL is true when the epilogue precedes a sibling call, in which
   case no return insn sequence is emitted here.
   NOTE(review): this chunk is a lossy extraction — braces, some guards
   and else-arms are not visible; code lines are kept byte-identical.  */
6928 s390_emit_epilogue (bool sibcall)
6930 rtx frame_pointer, return_reg;
6931 int area_bottom, area_top, offset = 0;
/* On TPF, bracket the function body with the tracing intercept and a
   scheduling blockage so no body insn drifts past the intercept.  */
6936 if (TARGET_TPF_PROFILING)
6939 /* Generate a BAS instruction to serve as a function
6940 entry intercept to facilitate the use of tracing
6941 algorithms located at the branch target. */
6943 /* Emit a blockage here so that all code
6944 lies between the profiling mechanisms. */
6945 emit_insn (gen_blockage ());
6947 emit_insn (gen_epilogue_tpf ());
6950 /* Check whether to use frame or stack pointer for restore. */
6952 frame_pointer = (frame_pointer_needed
6953 ? hard_frame_pointer_rtx : stack_pointer_rtx);
6955 s390_frame_area (&area_bottom, &area_top);
6957 /* Check whether we can access the register save area.
6958 If not, increment the frame pointer as required. */
6960 if (area_top <= area_bottom)
6962 /* Nothing to restore. */
6964 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
6965 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
6967 /* Area is in range. */
6968 offset = cfun_frame_layout.frame_size;
/* Save area out of displacement range: advance the base register so
   the restores below use small displacements.  */
6972 rtx insn, frame_off;
6974 offset = area_bottom < 0 ? -area_bottom : 0;
6975 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
6977 if (DISP_IN_RANGE (INTVAL (frame_off)))
6979 insn = gen_rtx_SET (VOIDmode, frame_pointer,
6980 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
6981 insn = emit_insn (insn);
/* Otherwise the offset needs an add; very large constants go through
   the literal pool.  */
6985 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
6986 frame_off = force_const_mem (Pmode, frame_off);
6988 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
6989 annotate_constant_pool_refs (&PATTERN (insn));
6993 /* Restore call saved fprs. */
6997 if (cfun_save_high_fprs_p)
6999 next_offset = cfun_frame_layout.f8_offset;
7000 for (i = 24; i < 32; i++)
7002 if (cfun_fpr_bit_p (i - 16))
7004 restore_fpr (frame_pointer,
7005 offset + next_offset, i);
7014 next_offset = cfun_frame_layout.f4_offset;
7015 for (i = 18; i < 20; i++)
7017 if (cfun_fpr_bit_p (i - 16))
7019 restore_fpr (frame_pointer,
7020 offset + next_offset, i);
7023 else if (!TARGET_PACKED_STACK)
7029 /* Return register. */
7031 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7033 /* Restore call saved gprs. */
7035 if (cfun_frame_layout.first_restore_gpr != -1)
7040 /* Check for global register and save them
7041 to stack location from where they get restored. */
7043 for (i = cfun_frame_layout.first_restore_gpr;
7044 i <= cfun_frame_layout.last_restore_gpr;
7047 /* These registers are special and need to be
7048 restored in any case. */
7049 if (i == STACK_POINTER_REGNUM
7050 || i == RETURN_REGNUM
7052 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
/* Write the current (global) value back into its save slot so the
   load-multiple below reloads the same value.  */
7057 addr = plus_constant (frame_pointer,
7058 offset + cfun_frame_layout.gprs_offset
7059 + (i - cfun_frame_layout.first_save_gpr)
7061 addr = gen_rtx_MEM (Pmode, addr);
7062 set_mem_alias_set (addr, get_frame_alias_set ());
7063 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
7069 /* Fetch return address from stack before load multiple,
7070 this will do good for scheduling. */
7072 if (cfun_frame_layout.save_return_addr_p
7073 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
7074 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
7076 int return_regnum = find_unused_clobbered_reg();
7079 return_reg = gen_rtx_REG (Pmode, return_regnum);
7081 addr = plus_constant (frame_pointer,
7082 offset + cfun_frame_layout.gprs_offset
7084 - cfun_frame_layout.first_save_gpr)
7086 addr = gen_rtx_MEM (Pmode, addr);
7087 set_mem_alias_set (addr, get_frame_alias_set ());
7088 emit_move_insn (return_reg, addr);
7092 insn = restore_gprs (frame_pointer,
7093 offset + cfun_frame_layout.gprs_offset
7094 + (cfun_frame_layout.first_restore_gpr
7095 - cfun_frame_layout.first_save_gpr)
7097 cfun_frame_layout.first_restore_gpr,
7098 cfun_frame_layout.last_restore_gpr);
/* Emit the return jump as a PARALLEL of (return) and a USE of the
   return-address register, so DCE keeps the restore alive.  */
7105 /* Return to caller. */
7107 p = rtvec_alloc (2);
7109 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
7110 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
7111 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
7116 /* Return the size in bytes of a function argument of
7117 type TYPE and/or mode MODE. At least one of TYPE or
7118 MODE must be specified. */
7121 s390_function_arg_size (enum machine_mode mode, tree type)
/* NOTE(review): the guard selecting the TYPE branch is not visible in
   this extraction; the type-based size is preferred when available.  */
7124 return int_size_in_bytes (type);
7126 /* No type info available for some library calls ... */
7127 if (mode != BLKmode)
7128 return GET_MODE_SIZE (mode);
7130 /* If we have neither type nor mode, abort */
7134 /* Return true if a function argument of type TYPE and mode MODE
7135 is to be passed in a floating-point register, if available. */
7138 s390_function_arg_float (enum machine_mode mode, tree type)
7140 int size = s390_function_arg_size (mode, type);
/* NOTE(review): a size check between here and the soft-float test is
   not visible in this extraction.  */
7144 /* Soft-float changes the ABI: no floating-point registers are used. */
7145 if (TARGET_SOFT_FLOAT)
7148 /* No type info available for some library calls ... */
7150 return mode == SFmode || mode == DFmode;
7152 /* The ABI says that record types with a single member are treated
7153 just like that member would be. */
/* Peel single-field records repeatedly; records with zero or more than
   one FIELD_DECL fall out of the loop and are not float arguments.  */
7154 while (TREE_CODE (type) == RECORD_TYPE)
7156 tree field, single = NULL_TREE;
7158 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
7160 if (TREE_CODE (field) != FIELD_DECL)
7163 if (single == NULL_TREE)
7164 single = TREE_TYPE (field);
7169 if (single == NULL_TREE)
7175 return TREE_CODE (type) == REAL_TYPE;
7178 /* Return true if a function argument of type TYPE and mode MODE
7179 is to be passed in an integer register, or a pair of integer
7180 registers, if available. */
7183 s390_function_arg_integer (enum machine_mode mode, tree type)
7185 int size = s390_function_arg_size (mode, type);
/* NOTE(review): a size limit check and the no-type guard are not
   visible in this extraction.  */
7189 /* No type info available for some library calls ... */
7191 return GET_MODE_CLASS (mode) == MODE_INT
7192 || (TARGET_SOFT_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT);
7194 /* We accept small integral (and similar) types. */
7195 if (INTEGRAL_TYPE_P (type)
7196 || POINTER_TYPE_P (type)
7197 || TREE_CODE (type) == OFFSET_TYPE
7198 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
7201 /* We also accept structs of size 1, 2, 4, 8 that are not
7202 passed in floating-point registers. */
7203 if (AGGREGATE_TYPE_P (type)
7204 && exact_log2 (size) >= 0
7205 && !s390_function_arg_float (mode, type))
7211 /* Return 1 if a function argument of type TYPE and mode MODE
7212 is to be passed by reference. The ABI specifies that only
7213 structures of size 1, 2, 4, or 8 bytes are passed by value,
7214 all other structures (and complex numbers) are passed by
/* (continued) reference.  CA and NAMED are unused on this target.  */
7218 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
7219 enum machine_mode mode, tree type,
7220 bool named ATTRIBUTE_UNUSED)
7222 int size = s390_function_arg_size (mode, type);
/* Aggregates whose size is not a power of two go by reference.  */
7228 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
/* Complex and vector types always go by reference.  */
7231 if (TREE_CODE (type) == COMPLEX_TYPE
7232 || TREE_CODE (type) == VECTOR_TYPE)
7239 /* Update the data in CUM to advance over an argument of mode MODE and
7240 data type TYPE. (TYPE is null for libcalls where that information
7241 may not be available.). The boolean NAMED specifies whether the
7242 argument is a named argument (as opposed to an unnamed argument
7243 matching an ellipsis). */
7246 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7247 tree type, int named ATTRIBUTE_UNUSED)
/* Float args consume one FPR; integer args consume enough GPRs to hold
   SIZE bytes (rounded up to whole words).  */
7249 if (s390_function_arg_float (mode, type))
7253 else if (s390_function_arg_integer (mode, type))
7255 int size = s390_function_arg_size (mode, type);
7256 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
7262 /* Define where to put the arguments to a function.
7263 Value is zero to push the argument on the stack,
7264 or a hard register in which to store the argument.
7266 MODE is the argument's machine mode.
7267 TYPE is the data type of the argument (as a tree).
7268 This is null for libcalls where that information may
7270 CUM is a variable of type CUMULATIVE_ARGS which gives info about
7271 the preceding args and about the function being called.
7272 NAMED is nonzero if this argument is a named parameter
7273 (otherwise it is an extra parameter matching an ellipsis).
7275 On S/390, we use general purpose registers 2 through 6 to
7276 pass integer, pointer, and certain structure arguments, and
7277 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
7278 to pass floating point arguments. All remaining arguments
7279 are pushed to the stack. */
7282 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
7283 int named ATTRIBUTE_UNUSED)
7285 if (s390_function_arg_float (mode, type))
/* FPR argument: if the FPR quota is exhausted, the value goes on the
   stack (zero return); otherwise FPRs are hard regs 16, 18, ...  */
7287 if (cum->fprs + 1 > FP_ARG_NUM_REG)
7290 return gen_rtx_REG (mode, cum->fprs + 16);
7292 else if (s390_function_arg_integer (mode, type))
7294 int size = s390_function_arg_size (mode, type);
7295 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
/* GPR argument: needs N_GPRS consecutive registers starting at
   hard reg 2; overflow goes to the stack.  */
7297 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
7300 return gen_rtx_REG (mode, cum->gprs + 2);
7303 /* After the real arguments, expand_call calls us once again
7304 with a void_type_node type. Whatever we return here is
7305 passed as operand 2 to the call expanders.
7307 We don't need this feature ... */
7308 else if (type == void_type_node)
7314 /* Return true if return values of type TYPE should be returned
7315 in a memory buffer whose address is passed by the caller as
7316 hidden first argument. */
7319 s390_return_in_memory (tree type, tree fundecl ATTRIBUTE_UNUSED)
7321 /* We accept small integral (and similar) types. */
/* Scalars up to 8 bytes are returned in registers.  */
7322 if (INTEGRAL_TYPE_P (type)
7323 || POINTER_TYPE_P (type)
7324 || TREE_CODE (type) == OFFSET_TYPE
7325 || TREE_CODE (type) == REAL_TYPE)
7326 return int_size_in_bytes (type) > 8;
7328 /* Aggregates and similar constructs are always returned
7330 if (AGGREGATE_TYPE_P (type)
7331 || TREE_CODE (type) == COMPLEX_TYPE
7332 || TREE_CODE (type) == VECTOR_TYPE)
7335 /* ??? We get called on all sorts of random stuff from
7336 aggregate_value_p. We can't abort, but it's not clear
7337 what's safe to return. Pretend it's a struct I guess. */
7341 /* Define where to return a (scalar) value of type TYPE.
7342 If TYPE is null, define where to return a (scalar)
7343 value of mode MODE from a libcall. */
7346 s390_function_value (tree type, enum machine_mode mode)
/* With a type, apply the target's integer promotion to pick MODE.  */
7350 int unsignedp = TYPE_UNSIGNED (type);
7351 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
/* Only integer or float scalars of at most 8 bytes reach here.  */
7354 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
7355 || GET_MODE_CLASS (mode) == MODE_FLOAT);
7356 gcc_assert (GET_MODE_SIZE (mode) <= 8);
/* Floats come back in f0 (hard reg 16), everything else in r2.  */
7358 if (TARGET_HARD_FLOAT && GET_MODE_CLASS (mode) == MODE_FLOAT)
7359 return gen_rtx_REG (mode, 16);
7361 return gen_rtx_REG (mode, 2);
7365 /* Create and return the va_list datatype.
7367 On S/390, va_list is an array type equivalent to
7369 typedef struct __va_list_tag
7373 void *__overflow_arg_area;
7374 void *__reg_save_area;
7377 where __gpr and __fpr hold the number of general purpose
7378 or floating point arguments used up to now, respectively,
7379 __overflow_arg_area points to the stack location of the
7380 next argument passed on the stack, and __reg_save_area
7381 always points to the start of the register area in the
7382 call frame of the current function. The function prologue
7383 saves all registers used for argument passing into this
7384 area if the function uses variable arguments. */
7387 s390_build_builtin_va_list (void)
7389 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
7391 record = lang_hooks.types.make_type (RECORD_TYPE);
7394 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Build the four fields in declaration order: counters first, then
   the two area pointers.  */
7396 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
7397 long_integer_type_node);
7398 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
7399 long_integer_type_node);
7400 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
7402 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
/* Expose the counter fields so unused va_list setup can be pruned.  */
7405 va_list_gpr_counter_field = f_gpr;
7406 va_list_fpr_counter_field = f_fpr;
7408 DECL_FIELD_CONTEXT (f_gpr) = record;
7409 DECL_FIELD_CONTEXT (f_fpr) = record;
7410 DECL_FIELD_CONTEXT (f_ovf) = record;
7411 DECL_FIELD_CONTEXT (f_sav) = record;
7413 TREE_CHAIN (record) = type_decl;
7414 TYPE_NAME (record) = type_decl;
7415 TYPE_FIELDS (record) = f_gpr;
7416 TREE_CHAIN (f_gpr) = f_fpr;
7417 TREE_CHAIN (f_fpr) = f_ovf;
7418 TREE_CHAIN (f_ovf) = f_sav;
7420 layout_type (record);
7422 /* The correct type is an array type of one element. */
7423 return build_array_type (record, build_index_type (size_zero_node));
7426 /* Implement va_start by filling the va_list structure VALIST.
7427 STDARG_P is always true, and ignored.
7428 NEXTARG points to the first anonymous stack argument.
7430 The following global variables are used to initialize
7431 the va_list structure:
7433 current_function_args_info:
7434 holds number of gprs and fprs used for named arguments.
7435 current_function_arg_offset_rtx:
7436 holds the offset of the first anonymous stack argument
7437 (relative to the virtual arg pointer). */
7440 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
7442 HOST_WIDE_INT n_gpr, n_fpr;
7444 tree f_gpr, f_fpr, f_ovf, f_sav;
7445 tree gpr, fpr, ovf, sav, t;
/* Walk the field chain of __va_list_tag in declaration order.  */
7447 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7448 f_fpr = TREE_CHAIN (f_gpr);
7449 f_ovf = TREE_CHAIN (f_fpr);
7450 f_sav = TREE_CHAIN (f_ovf);
7452 valist = build_va_arg_indirect_ref (valist);
7453 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7454 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7455 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7456 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7458 /* Count number of gp and fp argument registers used. */
7460 n_gpr = current_function_args_info.gprs;
7461 n_fpr = current_function_args_info.fprs;
/* Only initialize the fields va_arg will actually read (cfun's
   va_list_*_size track which parts are live).  */
7463 if (cfun->va_list_gpr_size)
7465 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
7466 build_int_cst (NULL_TREE, n_gpr));
7467 TREE_SIDE_EFFECTS (t) = 1;
7468 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7471 if (cfun->va_list_fpr_size)
7473 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
7474 build_int_cst (NULL_TREE, n_fpr));
7475 TREE_SIDE_EFFECTS (t) = 1;
7476 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7479 /* Find the overflow area. */
7480 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
7481 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
7483 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Clamp the named-argument offset at zero before biasing OVF.  */
7485 off = INTVAL (current_function_arg_offset_rtx);
7486 off = off < 0 ? 0 : off;
7487 if (TARGET_DEBUG_ARG)
7488 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
7489 (int)n_gpr, (int)n_fpr, off);
7491 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
7493 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
7494 TREE_SIDE_EFFECTS (t) = 1;
7495 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7498 /* Find the register save area. */
7499 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
7500 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
/* The save area sits at a fixed negative offset from the return
   address pointer.  */
7502 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
7503 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
7504 build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
7506 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
7507 TREE_SIDE_EFFECTS (t) = 1;
7508 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7512 /* Implement va_arg by updating the va_list structure
7513 VALIST as required to retrieve an argument of type
7514 TYPE, and returning that argument.
7516 Generates code equivalent to:
7518 if (integral value) {
7519 if (size <= 4 && args.gpr < 5 ||
7520 size > 4 && args.gpr < 4 )
7521 ret = args.reg_save_area[args.gpr+8]
7523 ret = *args.overflow_arg_area++;
7524 } else if (float value) {
7526 ret = args.reg_save_area[args.fpr+64]
7528 ret = *args.overflow_arg_area++;
7529 } else if (aggregate value) {
7531 ret = *args.reg_save_area[args.gpr]
7533 ret = **args.overflow_arg_area++;
/* NOTE(review): lossy extraction — several assignments (n_reg,
   sav_scale, indirect_p, t = ovf) and closing braces are missing from
   view; code lines are kept byte-identical.  */
7537 s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
7538 tree *post_p ATTRIBUTE_UNUSED)
7540 tree f_gpr, f_fpr, f_ovf, f_sav;
7541 tree gpr, fpr, ovf, sav, reg, t, u;
7542 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
7543 tree lab_false, lab_over, addr;
7545 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
7546 f_fpr = TREE_CHAIN (f_gpr);
7547 f_ovf = TREE_CHAIN (f_fpr);
7548 f_sav = TREE_CHAIN (f_ovf);
7550 valist = build_va_arg_indirect_ref (valist);
7551 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
7552 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
7553 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
7554 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
7556 size = int_size_in_bytes (type);
/* Three cases: by-reference aggregates (a pointer is fetched), float
   values (FPR save slots), everything else (GPR save slots).  */
7558 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
7560 if (TARGET_DEBUG_ARG)
7562 fprintf (stderr, "va_arg: aggregate type");
7566 /* Aggregates are passed by reference. */
7571 /* kernel stack layout on 31 bit: It is assumed here that no padding
7572 will be added by s390_frame_info because for va_args always an even
7573 number of gprs has to be saved r15-r2 = 14 regs. */
7574 sav_ofs = 2 * UNITS_PER_WORD;
7575 sav_scale = UNITS_PER_WORD;
7576 size = UNITS_PER_WORD;
7577 max_reg = GP_ARG_NUM_REG - n_reg;
7579 else if (s390_function_arg_float (TYPE_MODE (type), type))
7581 if (TARGET_DEBUG_ARG)
7583 fprintf (stderr, "va_arg: float type");
7587 /* FP args go in FP registers, if present. */
7591 sav_ofs = 16 * UNITS_PER_WORD;
7593 max_reg = FP_ARG_NUM_REG - n_reg;
7597 if (TARGET_DEBUG_ARG)
7599 fprintf (stderr, "va_arg: other type");
7603 /* Otherwise into GP registers. */
7606 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
7608 /* kernel stack layout on 31 bit: It is assumed here that no padding
7609 will be added by s390_frame_info because for va_args always an even
7610 number of gprs has to be saved r15-r2 = 14 regs. */
7611 sav_ofs = 2 * UNITS_PER_WORD;
/* Right-justify sub-word values within their save slot.  */
7613 if (size < UNITS_PER_WORD)
7614 sav_ofs += UNITS_PER_WORD - size;
7616 sav_scale = UNITS_PER_WORD;
7617 max_reg = GP_ARG_NUM_REG - n_reg;
7620 /* Pull the value out of the saved registers ... */
7622 lab_false = create_artificial_label ();
7623 lab_over = create_artificial_label ();
7624 addr = create_tmp_var (ptr_type_node, "addr");
7625 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set ();
/* if (reg > max_reg) goto lab_false;  -- register quota exhausted.  */
7627 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
7628 t = build2 (GT_EXPR, boolean_type_node, reg, t);
7629 u = build1 (GOTO_EXPR, void_type_node, lab_false);
7630 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
7631 gimplify_and_add (t, pre_p);
/* addr = sav + sav_ofs + reg * sav_scale;  */
7633 t = build2 (PLUS_EXPR, ptr_type_node, sav,
7634 fold_convert (ptr_type_node, size_int (sav_ofs)));
7635 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
7636 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
7637 t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
7639 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
7640 gimplify_and_add (t, pre_p);
7642 t = build1 (GOTO_EXPR, void_type_node, lab_over);
7643 gimplify_and_add (t, pre_p);
7645 t = build1 (LABEL_EXPR, void_type_node, lab_false);
7646 append_to_statement_list (t, pre_p);
7649 /* ... Otherwise out of the overflow area. */
7652 if (size < UNITS_PER_WORD)
7653 t = build2 (PLUS_EXPR, ptr_type_node, t,
7654 fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
7656 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
7658 u = build2 (MODIFY_EXPR, void_type_node, addr, t);
7659 gimplify_and_add (u, pre_p);
/* Bump the overflow pointer past this argument.  */
7661 t = build2 (PLUS_EXPR, ptr_type_node, t,
7662 fold_convert (ptr_type_node, size_int (size)));
7663 t = build2 (MODIFY_EXPR, ptr_type_node, ovf, t);
7664 gimplify_and_add (t, pre_p);
7666 t = build1 (LABEL_EXPR, void_type_node, lab_over);
7667 append_to_statement_list (t, pre_p);
7670 /* Increment register save count. */
7672 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
7673 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
7674 gimplify_and_add (u, pre_p);
/* By-reference arguments need one extra dereference.  */
7678 t = build_pointer_type (build_pointer_type (type));
7679 addr = fold_convert (t, addr);
7680 addr = build_va_arg_indirect_ref (addr);
7684 t = build_pointer_type (type);
7685 addr = fold_convert (t, addr);
7688 return build_va_arg_indirect_ref (addr);
/* Builtin function codes for this target.  NOTE(review): the enum
   header and the table initializers are not visible in this chunk.  */
7696 S390_BUILTIN_THREAD_POINTER,
7697 S390_BUILTIN_SET_THREAD_POINTER,
/* Per-ABI insn-code tables, indexed by builtin function code.  */
7702 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
7707 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
/* Register the target-specific builtin functions:
   __builtin_thread_pointer and __builtin_set_thread_pointer.  */
7713 s390_init_builtins (void)
7717 ftype = build_function_type (ptr_type_node, void_list_node);
7718 lang_hooks.builtin_function ("__builtin_thread_pointer", ftype,
7719 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
7722 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
7723 lang_hooks.builtin_function ("__builtin_set_thread_pointer", ftype,
7724 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
7728 /* Expand an expression EXP that calls a built-in function,
7729 with result going to TARGET if that's convenient
7730 (and in mode MODE if that's convenient).
7731 SUBTARGET may be used as the target for computing one of EXP's operands.
7732 IGNORE is nonzero if the value is to be ignored. */
7735 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
7736 enum machine_mode mode ATTRIBUTE_UNUSED,
7737 int ignore ATTRIBUTE_UNUSED)
/* Pick the insn-code table matching the current ABI.  */
7741 unsigned int const *code_for_builtin =
7742 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
7744 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7745 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
7746 tree arglist = TREE_OPERAND (exp, 1);
7747 enum insn_code icode;
7748 rtx op[MAX_ARGS], pat;
/* Reject out-of-range or unsupported builtin codes.  */
7752 if (fcode >= S390_BUILTIN_max)
7753 internal_error ("bad builtin fcode");
7754 icode = code_for_builtin[fcode];
7756 internal_error ("bad builtin fcode");
7758 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
/* Expand each argument and coerce it to the mode/predicate the insn
   pattern expects.  Operand 0 of a non-void pattern is the result.  */
7760 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
7762 arglist = TREE_CHAIN (arglist), arity++)
7764 const struct insn_operand_data *insn_op;
7766 tree arg = TREE_VALUE (arglist);
7767 if (arg == error_mark_node)
7769 if (arity > MAX_ARGS)
7772 insn_op = &insn_data[icode].operand[arity + nonvoid];
7774 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
7776 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
7777 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
/* Reuse TARGET for the result when it already fits the pattern.  */
7782 enum machine_mode tmode = insn_data[icode].operand[0].mode;
7784 || GET_MODE (target) != tmode
7785 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
7786 target = gen_reg_rtx (tmode);
/* Dispatch on arity to build the insn pattern.  */
7792 pat = GEN_FCN (icode) (target);
7796 pat = GEN_FCN (icode) (target, op[0]);
7798 pat = GEN_FCN (icode) (op[0]);
7801 pat = GEN_FCN (icode) (target, op[0], op[1]);
7817 /* Output assembly code for the trampoline template to
7820 On S/390, we use gpr 1 internally in the trampoline code;
7821 gpr 0 is used to hold the static chain. */
7824 s390_trampoline_template (FILE *file)
7827 op[0] = gen_rtx_REG (Pmode, 0);
7828 op[1] = gen_rtx_REG (Pmode, 1);
/* 64-bit variant: basr establishes a base in %r1, lmg then loads the
   static chain (%r0) and target address (%r1) from the trampoline.  */
7832 output_asm_insn ("basr\t%1,0", op);
7833 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
7834 output_asm_insn ("br\t%1", op);
7835 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
/* 31-bit variant: same scheme with lm and smaller displacements.  */
7839 output_asm_insn ("basr\t%1,0", op);
7840 output_asm_insn ("lm\t%0,%1,6(%1)", op);
7841 output_asm_insn ("br\t%1", op);
7842 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
7846 /* Emit RTL insns to initialize the variable parts of a trampoline.
7847 FNADDR is an RTX for the address of the function's pure code.
7848 CXT is an RTX for the static chain value for the function. */
7851 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
/* Store the static chain and function address into the slots the
   trampoline template's lmg/lm loads from (offsets differ by ABI).  */
7853 emit_move_insn (gen_rtx_MEM (Pmode,
7854 memory_address (Pmode,
7855 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt);
7856 emit_move_insn (gen_rtx_MEM (Pmode,
7857 memory_address (Pmode,
7858 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr);
7861 /* Return rtx for 64-bit constant formed from the 32-bit subwords
7862 LOW and HIGH, independent of the host word size. */
7865 s390_gen_rtx_const_DI (int high, int low)
/* Wide-host path: assemble the value in a single HOST_WIDE_INT.
   NOTE(review): the shift/mask lines between these statements are not
   visible in this extraction.  */
7867 #if HOST_BITS_PER_WIDE_INT >= 64
7869 val = (HOST_WIDE_INT)high;
7871 val |= (HOST_WIDE_INT)low;
7873 return GEN_INT (val);
/* Narrow-host path: use a CONST_DOUBLE via immed_double_const.  */
7875 #if HOST_BITS_PER_WIDE_INT >= 32
7876 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
7883 /* Output assembler code to FILE to increment profiler label # LABELNO
7884 for profiling a function entry. */
7887 s390_function_profiler (FILE *file, int labelno)
7892 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
7894 fprintf (file, "# function profiler \n");
/* op[0] = return reg, op[1] = its save slot above the stack pointer,
   op[2] = scratch reg 1, op[3] = the LP label, op[4] = _mcount.  */
7896 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
7897 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
7898 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
7900 op[2] = gen_rtx_REG (Pmode, 1);
7901 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
7902 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
7904 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
/* PIC: route the _mcount call through the PLT.  */
7907 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
7908 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit: save the return register, call _mcount directly via brasl,
   and restore afterwards.  */
7913 output_asm_insn ("stg\t%0,%1", op);
7914 output_asm_insn ("larl\t%2,%3", op);
7915 output_asm_insn ("brasl\t%0,%4", op);
7916 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit non-PIC: materialize absolute addresses with a bras-over
   literal word pair.  */
7920 op[6] = gen_label_rtx ();
7922 output_asm_insn ("st\t%0,%1", op);
7923 output_asm_insn ("bras\t%2,%l6", op);
7924 output_asm_insn (".long\t%4", op);
7925 output_asm_insn (".long\t%3", op);
7926 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7927 output_asm_insn ("l\t%0,0(%2)", op);
7928 output_asm_insn ("l\t%2,4(%2)", op);
7929 output_asm_insn ("basr\t%0,%0", op);
7930 output_asm_insn ("l\t%0,%1", op);
/* 31-bit PIC: the literals hold base-relative offsets that are added
   back to the base register at runtime.  */
7934 op[5] = gen_label_rtx ();
7935 op[6] = gen_label_rtx ();
7937 output_asm_insn ("st\t%0,%1", op);
7938 output_asm_insn ("bras\t%2,%l6", op);
7939 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
7940 output_asm_insn (".long\t%4-%l5", op);
7941 output_asm_insn (".long\t%3-%l5", op);
7942 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
7943 output_asm_insn ("lr\t%0,%2", op);
7944 output_asm_insn ("a\t%0,0(%2)", op);
7945 output_asm_insn ("a\t%2,4(%2)", op);
7946 output_asm_insn ("basr\t%0,%0", op);
7947 output_asm_insn ("l\t%0,%1", op);
7951 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
7952 into its SYMBOL_REF_FLAGS. */
7955 s390_encode_section_info (tree decl, rtx rtl, int first)
/* Start from the generic encoding, then add the target flag below.  */
7957 default_encode_section_info (decl, rtl, first);
7959 /* If a variable has a forced alignment to < 2 bytes, mark it with
7960 SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL operand. */
7961 if (TREE_CODE (decl) == VAR_DECL
7962 && DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
7963 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
7966 /* Output thunk to FILE that implements a C++ virtual function call (with
7967 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
7968 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
7969 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
7970 relative to the resulting this pointer. */
7973 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
7974 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
7980 /* Operand 0 is the target function. */
7981 op[0] = XEXP (DECL_RTL (function), 0);
7982 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
7985 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
7986 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
7987 op[0] = gen_rtx_CONST (Pmode, op[0]);
7990 /* Operand 1 is the 'this' pointer. */
7991 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
7992 op[1] = gen_rtx_REG (Pmode, 3);
7994 op[1] = gen_rtx_REG (Pmode, 2);
7996 /* Operand 2 is the delta. */
7997 op[2] = GEN_INT (delta);
7999 /* Operand 3 is the vcall_offset. */
8000 op[3] = GEN_INT (vcall_offset);
8002 /* Operand 4 is the temporary register. */
8003 op[4] = gen_rtx_REG (Pmode, 1);
8005 /* Operands 5 to 8 can be used as labels. */
8011 /* Operand 9 can be used for temporary register. */
8014 /* Generate code. */
/* NOTE(review): this appears to be the z/Architecture (64-bit) code path
   of the thunk; its guard (presumably TARGET_64BIT) is elided from this
   listing, as are the braces of the nested blocks.  */
8017 /* Setup literal pool pointer if required. */
8018 if ((!DISP_IN_RANGE (delta)
8019 && !CONST_OK_FOR_K (delta)
8020 && !CONST_OK_FOR_Os (delta))
8021 || (!DISP_IN_RANGE (vcall_offset)
8022 && !CONST_OK_FOR_K (vcall_offset)
8023 && !CONST_OK_FOR_Os (vcall_offset)))
/* %5 labels the literal pool emitted at the end of the thunk; larl
   loads its address PC-relative into the temp register %4.  */
8025 op[5] = gen_label_rtx ();
8026 output_asm_insn ("larl\t%4,%5", op);
8029 /* Add DELTA to this pointer. */
8032 if (CONST_OK_FOR_J (delta))
8033 output_asm_insn ("la\t%1,%2(%1)", op);
8034 else if (DISP_IN_RANGE (delta))
8035 output_asm_insn ("lay\t%1,%2(%1)", op);
8036 else if (CONST_OK_FOR_K (delta))
8037 output_asm_insn ("aghi\t%1,%2", op);
8038 else if (CONST_OK_FOR_Os (delta))
8039 output_asm_insn ("agfi\t%1,%2", op);
/* Fallback: DELTA fits no immediate form — add it from the literal
   pool entry labelled %6 (emitted below).  */
8042 op[6] = gen_label_rtx ();
8043 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
8047 /* Perform vcall adjustment. */
8050 if (DISP_IN_RANGE (vcall_offset))
8052 output_asm_insn ("lg\t%4,0(%1)", op);
8053 output_asm_insn ("ag\t%1,%3(%4)", op);
8055 else if (CONST_OK_FOR_K (vcall_offset))
8057 output_asm_insn ("lghi\t%4,%3", op);
8058 output_asm_insn ("ag\t%4,0(%1)", op);
8059 output_asm_insn ("ag\t%1,0(%4)", op);
8061 else if (CONST_OK_FOR_Os (vcall_offset))
8063 output_asm_insn ("lgfi\t%4,%3", op);
8064 output_asm_insn ("ag\t%4,0(%1)", op);
8065 output_asm_insn ("ag\t%1,0(%4)", op);
/* Fallback: load VCALL_OFFSET from the pool entry labelled %7.  */
8069 op[7] = gen_label_rtx ();
8070 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
8071 output_asm_insn ("ag\t%4,0(%1)", op);
8072 output_asm_insn ("ag\t%1,0(%4)", op);
8076 /* Jump to target. */
8077 output_asm_insn ("jg\t%0", op);
8079 /* Output literal pool if required. */
8082 output_asm_insn (".align\t4", op);
8083 targetm.asm_out.internal_label (file, "L",
8084 CODE_LABEL_NUMBER (op[5]));
8088 targetm.asm_out.internal_label (file, "L",
8089 CODE_LABEL_NUMBER (op[6]));
8090 output_asm_insn (".long\t%2", op);
8094 targetm.asm_out.internal_label (file, "L",
8095 CODE_LABEL_NUMBER (op[7]));
8096 output_asm_insn (".long\t%3", op);
8101 /* Setup base pointer if required. */
8103 || (!DISP_IN_RANGE (delta)
8104 && !CONST_OK_FOR_K (delta)
8105 && !CONST_OK_FOR_Os (delta))
/* Fix: the second clause must range-check VCALL_OFFSET, not DELTA —
   its other two operands already test vcall_offset, and the parallel
   64-bit condition above tests DISP_IN_RANGE (vcall_offset).  With the
   old test, a large vcall_offset combined with a small delta skipped
   setting up the literal-pool base register that the out-of-range
   vcall fallback ("l\t%4,%7-%5(%4)") relies on.  */
8106 || (!DISP_IN_RANGE (vcall_offset)
8107 && !CONST_OK_FOR_K (vcall_offset)
8108 && !CONST_OK_FOR_Os (vcall_offset)))
/* NOTE(review): 31-bit ESA/390 code path of the thunk (its guard is
   elided here).  There is no larl on pre-z architectures, so a
   basr-relative base in %4 is established instead when pool entries
   are needed.  */
8110 op[5] = gen_label_rtx ();
8111 output_asm_insn ("basr\t%4,0", op);
8112 targetm.asm_out.internal_label (file, "L",
8113 CODE_LABEL_NUMBER (op[5]));
8116 /* Add DELTA to this pointer. */
8119 if (CONST_OK_FOR_J (delta))
8120 output_asm_insn ("la\t%1,%2(%1)", op);
8121 else if (DISP_IN_RANGE (delta))
8122 output_asm_insn ("lay\t%1,%2(%1)", op);
8123 else if (CONST_OK_FOR_K (delta))
8124 output_asm_insn ("ahi\t%1,%2", op);
8125 else if (CONST_OK_FOR_Os (delta))
8126 output_asm_insn ("afi\t%1,%2", op);
/* Fallback: add DELTA from the literal pool entry labelled %6.  */
8129 op[6] = gen_label_rtx ();
8130 output_asm_insn ("a\t%1,%6-%5(%4)", op);
8134 /* Perform vcall adjustment. */
8137 if (CONST_OK_FOR_J (vcall_offset))
8139 output_asm_insn ("l\t%4,0(%1)", op);
8140 output_asm_insn ("a\t%1,%3(%4)", op);
8142 else if (DISP_IN_RANGE (vcall_offset))
8144 output_asm_insn ("l\t%4,0(%1)", op);
8145 output_asm_insn ("ay\t%1,%3(%4)", op);
8147 else if (CONST_OK_FOR_K (vcall_offset))
8149 output_asm_insn ("lhi\t%4,%3", op);
8150 output_asm_insn ("a\t%4,0(%1)", op);
8151 output_asm_insn ("a\t%1,0(%4)", op);
8153 else if (CONST_OK_FOR_Os (vcall_offset))
8155 output_asm_insn ("iilf\t%4,%3", op);
8156 output_asm_insn ("a\t%4,0(%1)", op);
8157 output_asm_insn ("a\t%1,0(%4)", op);
/* Fallback: load VCALL_OFFSET from the pool entry labelled %7.  This
   clobbers %4, hence the base re-setup that follows.  */
8161 op[7] = gen_label_rtx ();
8162 output_asm_insn ("l\t%4,%7-%5(%4)", op);
8163 output_asm_insn ("a\t%4,0(%1)", op);
8164 output_asm_insn ("a\t%1,0(%4)", op);
8167 /* We had to clobber the base pointer register.
8168 Re-setup the base pointer (with a different base). */
8169 op[5] = gen_label_rtx ();
8170 output_asm_insn ("basr\t%4,0", op);
8171 targetm.asm_out.internal_label (file, "L",
8172 CODE_LABEL_NUMBER (op[5]));
8175 /* Jump to target. */
8176 op[8] = gen_label_rtx ();
/* NOTE(review): the conditions selecting among the three PIC variants
   below (non-PIC / flag_pic == 1 / flag_pic == 2) are partially elided
   from this listing.  */
8179 output_asm_insn ("l\t%4,%8-%5(%4)", op);
8181 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8182 /* We cannot call through .plt, since .plt requires %r12 loaded. */
8183 else if (flag_pic == 1)
8185 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8186 output_asm_insn ("l\t%4,%0(%4)", op);
8188 else if (flag_pic == 2)
8190 op[9] = gen_rtx_REG (Pmode, 0);
8191 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
8192 output_asm_insn ("a\t%4,%8-%5(%4)", op);
8193 output_asm_insn ("ar\t%4,%9", op);
8194 output_asm_insn ("l\t%4,0(%4)", op);
8197 output_asm_insn ("br\t%4", op);
8199 /* Output literal pool. */
8200 output_asm_insn (".align\t4", op);
8202 if (nonlocal && flag_pic == 2)
8203 output_asm_insn (".long\t%0", op);
/* For the flag_pic == 2 case the GOT base address itself is placed in
   the pool; SYMBOL_FLAG_LOCAL keeps it from being redirected again.  */
8206 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
8207 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
8210 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
8212 output_asm_insn (".long\t%0", op);
8214 output_asm_insn (".long\t%0-%5", op);
8218 targetm.asm_out.internal_label (file, "L",
8219 CODE_LABEL_NUMBER (op[6]));
8220 output_asm_insn (".long\t%2", op);
8224 targetm.asm_out.internal_label (file, "L",
8225 CODE_LABEL_NUMBER (op[7]));
8226 output_asm_insn (".long\t%3", op);
/* Implements TARGET_VALID_POINTER_MODE (registered further down in this
   file): SImode is always a valid pointer mode, DImode additionally when
   generating 64-bit code.  NOTE(review): the return-type line and the
   function braces are elided from this listing.  */
8232 s390_valid_pointer_mode (enum machine_mode mode)
8234 return (mode == SImode || (TARGET_64BIT && mode == DImode));
8237 /* Checks whether the given ARGUMENT_LIST would use a caller
8238 saved register. This is used to decide whether sibling call
8239 optimization could be performed on the respective function
8243 s390_call_saved_register_used (tree argument_list)
/* NOTE(review): several lines of this function are elided in this
   listing — the return-type line, local declarations for parameter,
   type, parm_rtx and reg, the braces, and the true/false return
   statements.  The visible logic walks the TREE_LIST of arguments,
   simulates argument passing via the CUMULATIVE_ARGS machinery, and
   (per the elided returns, presumably) reports whether any argument
   lands in a register that is not call-used.  */
8245 CUMULATIVE_ARGS cum;
8247 enum machine_mode mode;
8252 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
8254 while (argument_list)
8256 parameter = TREE_VALUE (argument_list);
8257 argument_list = TREE_CHAIN (argument_list);
8259 gcc_assert (parameter);
8261 /* For an undeclared variable passed as parameter we will get
8262 an ERROR_MARK node here. */
8263 if (TREE_CODE (parameter) == ERROR_MARK)
8266 type = TREE_TYPE (parameter);
8269 mode = TYPE_MODE (type);
/* Arguments passed by reference occupy a pointer slot, so classify
   the pointer type instead of the original type.  */
8272 if (pass_by_reference (&cum, mode, type, true))
8275 type = build_pointer_type (type);
8278 parm_rtx = s390_function_arg (&cum, mode, type, 0);
8280 s390_function_arg_advance (&cum, mode, type, 0);
8282 if (parm_rtx && REG_P (parm_rtx))
8285 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
8287 if (! call_used_regs[reg + REGNO (parm_rtx)])
8294 /* Return true if the given call expression can be
8295 turned into a sibling call.
8296 DECL holds the declaration of the function to be called whereas
8297 EXP is the call expression itself. */
8300 s390_function_ok_for_sibcall (tree decl, tree exp)
/* NOTE(review): the return-type line, braces and the "return false;" /
   "return true;" statements following each guard are elided from this
   listing.  Each visible condition names a reason a sibcall must be
   rejected.  */
8302 /* The TPF epilogue uses register 1. */
8303 if (TARGET_TPF_PROFILING)
8306 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
8307 which would have to be restored before the sibcall. */
8308 if (!TARGET_64BIT && flag_pic && decl && TREE_PUBLIC (decl))
8311 /* Register 6 on s390 is available as an argument register but unfortunately
8312 "caller saved". This makes functions needing this register for arguments
8313 not suitable for sibcalls. */
8314 if (TREE_OPERAND (exp, 1)
8315 && s390_call_saved_register_used (TREE_OPERAND (exp, 1)))
8321 /* Return the fixed registers used for condition codes. */
8324 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
/* NOTE(review): the assignment to *p1 (presumably CC_REGNUM) and the
   function's return statement are elided from this listing; only the
   second output, marked invalid, is visible.  Implements
   TARGET_FIXED_CONDITION_CODE_REGS (registered below).  */
8327 *p2 = INVALID_REGNUM;
8332 /* This function is used by the call expanders of the machine description.
8333 It emits the call insn itself together with the necessary operations
8334 to adjust the target address and returns the emitted insn.
8335 ADDR_LOCATION is the target address rtx
8336 TLS_CALL the location of the thread-local symbol
8337 RESULT_REG the register where the result of the call should be stored
8338 RETADDR_REG the register where the return address should be stored
8339 If this parameter is NULL_RTX the call is considered
8340 to be a sibling call. */
8343 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
/* NOTE(review): elided from this listing — the return-type line, the
   retaddr_reg parameter line, local declarations (insn, call, clobber,
   vec), the "plt_call = true;" marking, the UNSPEC_PLT constant on the
   gen_rtx_UNSPEC call, and the final "return insn;".  */
8346 bool plt_call = false;
8352 /* Direct function calls need special treatment. */
8353 if (GET_CODE (addr_location) == SYMBOL_REF)
8355 /* When calling a global routine in PIC mode, we must
8356 replace the symbol itself with the PLT stub. */
8357 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
8359 addr_location = gen_rtx_UNSPEC (Pmode,
8360 gen_rtvec (1, addr_location),
8362 addr_location = gen_rtx_CONST (Pmode, addr_location);
8366 /* Unless we can use the bras(l) insn, force the
8367 routine address into a register. */
8368 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
8371 addr_location = legitimize_pic_address (addr_location, 0);
8373 addr_location = force_reg (Pmode, addr_location);
8377 /* If it is already an indirect call or the code above moved the
8378 SYMBOL_REF to somewhere else make sure the address can be found in
8380 if (retaddr_reg == NULL_RTX
8381 && GET_CODE (addr_location) != SYMBOL_REF
/* Sibcalls must take their target from the fixed sibcall register so
   the epilogue code can find it.  */
8384 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
8385 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
/* Wrap the address in a MEM and build the CALL (optionally a SET to
   capture the result, plus a CLOBBER of the return-address register).  */
8388 addr_location = gen_rtx_MEM (QImode, addr_location);
8389 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
8391 if (result_reg != NULL_RTX)
8392 call = gen_rtx_SET (VOIDmode, result_reg, call);
8394 if (retaddr_reg != NULL_RTX)
8396 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
8398 if (tls_call != NULL_RTX)
8399 vec = gen_rtvec (3, call, clobber,
8400 gen_rtx_USE (VOIDmode, tls_call));
8402 vec = gen_rtvec (2, call, clobber);
8404 call = gen_rtx_PARALLEL (VOIDmode, vec);
8407 insn = emit_call_insn (call);
8409 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
8410 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
8412 /* s390_function_ok_for_sibcall should
8413 have denied sibcalls in this case. */
8414 gcc_assert (retaddr_reg != NULL_RTX);
8416 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
8421 /* Implement CONDITIONAL_REGISTER_USAGE. */
8424 s390_conditional_register_usage (void)
/* NOTE(review): the return-type line, the declaration of the loop index
   i, the braces, and the guards around several of the groups below
   (presumably flag_pic for the GOT register, TARGET_64BIT for the FPR
   groups) are elided from this listing.  */
8430 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8431 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
8433 if (TARGET_CPU_ZARCH)
/* On z/Architecture the literal-pool base and return registers need
   not be reserved.  */
8435 fixed_regs[BASE_REGNUM] = 0;
8436 call_used_regs[BASE_REGNUM] = 0;
8437 fixed_regs[RETURN_REGNUM] = 0;
8438 call_used_regs[RETURN_REGNUM] = 0;
8442 for (i = 24; i < 32; i++)
8443 call_used_regs[i] = call_really_used_regs[i] = 0;
8447 for (i = 18; i < 20; i++)
8448 call_used_regs[i] = call_really_used_regs[i] = 0;
8451 if (TARGET_SOFT_FLOAT)
/* With soft float the FP register file is unavailable entirely.  */
8453 for (i = 16; i < 32; i++)
8454 call_used_regs[i] = fixed_regs[i] = 1;
8458 /* Corresponding function to eh_return expander. */
8460 static GTY(()) rtx s390_tpf_eh_return_symbol;
8462 s390_emit_tpf_eh_return (rtx target)
/* NOTE(review): the return-type line, local declarations (insn, reg)
   and braces are elided from this listing.  The function moves TARGET
   into r2, calls the TPF runtime helper __tpf_eh_return, and stores the
   (returned) value into the EH return-handler slot.  The SYMBOL_REF is
   cached in a GTY root so it survives garbage collection.  */
8466 if (!s390_tpf_eh_return_symbol)
8467 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
8469 reg = gen_rtx_REG (Pmode, 2);
8471 emit_move_insn (reg, target);
8472 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
8473 gen_rtx_REG (Pmode, RETURN_REGNUM));
8474 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
8476 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
8479 /* Rework the prologue/epilogue to avoid saving/restoring
8480 registers unnecessarily. */
8483 s390_optimize_prologue (void)
/* NOTE(review): the return-type line, braces, and various "continue;" /
   "remove_insn (insn);" statements inside the scan loop are elided from
   this listing; the bodies below are therefore incomplete.  The visible
   structure handles four cases: shrink a store-multiple, drop a single
   base/return register store, shrink a load-multiple, and drop a single
   base/return register load.  */
8485 rtx insn, new_insn, next_insn;
8487 /* Do a final recompute of the frame-related data. */
8489 s390_update_frame_layout ();
8491 /* If all special registers are in fact used, there's nothing we
8492 can do, so no point in walking the insn list. */
8494 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
8495 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
8496 && (TARGET_CPU_ZARCH
8497 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
8498 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
8501 /* Search for prologue/epilogue insns and replace them. */
8503 for (insn = get_insns (); insn; insn = next_insn)
8505 int first, last, off;
8506 rtx set, base, offset;
8508 next_insn = NEXT_INSN (insn);
8510 if (GET_CODE (insn) != INSN)
/* Case 1: a store-multiple in the prologue — re-emit it covering only
   the registers the final frame layout actually needs saved.  */
8513 if (GET_CODE (PATTERN (insn)) == PARALLEL
8514 && store_multiple_operation (PATTERN (insn), VOIDmode))
8516 set = XVECEXP (PATTERN (insn), 0, 0);
8517 first = REGNO (SET_SRC (set));
8518 last = first + XVECLEN (PATTERN (insn), 0) - 1;
8519 offset = const0_rtx;
8520 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
8521 off = INTVAL (offset);
8523 if (GET_CODE (base) != REG || off < 0)
8525 if (cfun_frame_layout.first_save_gpr != -1
8526 && (cfun_frame_layout.first_save_gpr < first
8527 || cfun_frame_layout.last_save_gpr > last))
8529 if (REGNO (base) != STACK_POINTER_REGNUM
8530 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
8532 if (first > BASE_REGNUM || last < BASE_REGNUM)
8535 if (cfun_frame_layout.first_save_gpr != -1)
8537 new_insn = save_gprs (base,
8538 off + (cfun_frame_layout.first_save_gpr
8539 - first) * UNITS_PER_WORD,
8540 cfun_frame_layout.first_save_gpr,
8541 cfun_frame_layout.last_save_gpr);
8542 new_insn = emit_insn_before (new_insn, insn);
8543 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 2: a lone store of the base (or, pre-z, return) register that
   the layout says is no longer needed — candidate for removal
   (the removal itself is elided from this listing).  */
8550 if (cfun_frame_layout.first_save_gpr == -1
8551 && GET_CODE (PATTERN (insn)) == SET
8552 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
8553 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
8554 || (!TARGET_CPU_ZARCH
8555 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
8556 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
8558 set = PATTERN (insn);
8559 first = REGNO (SET_SRC (set));
8560 offset = const0_rtx;
8561 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
8562 off = INTVAL (offset);
8564 if (GET_CODE (base) != REG || off < 0)
8566 if (REGNO (base) != STACK_POINTER_REGNUM
8567 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
/* Case 3: mirror of case 1 for the epilogue load-multiple.  */
8574 if (GET_CODE (PATTERN (insn)) == PARALLEL
8575 && load_multiple_operation (PATTERN (insn), VOIDmode))
8577 set = XVECEXP (PATTERN (insn), 0, 0);
8578 first = REGNO (SET_DEST (set));
8579 last = first + XVECLEN (PATTERN (insn), 0) - 1;
8580 offset = const0_rtx;
8581 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
8582 off = INTVAL (offset);
8584 if (GET_CODE (base) != REG || off < 0)
8586 if (cfun_frame_layout.first_restore_gpr != -1
8587 && (cfun_frame_layout.first_restore_gpr < first
8588 || cfun_frame_layout.last_restore_gpr > last))
8590 if (REGNO (base) != STACK_POINTER_REGNUM
8591 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
8593 if (first > BASE_REGNUM || last < BASE_REGNUM)
8596 if (cfun_frame_layout.first_restore_gpr != -1)
8598 new_insn = restore_gprs (base,
8599 off + (cfun_frame_layout.first_restore_gpr
8600 - first) * UNITS_PER_WORD,
8601 cfun_frame_layout.first_restore_gpr,
8602 cfun_frame_layout.last_restore_gpr);
8603 new_insn = emit_insn_before (new_insn, insn);
8604 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 4: mirror of case 2 for the epilogue single-register load.  */
8611 if (cfun_frame_layout.first_restore_gpr == -1
8612 && GET_CODE (PATTERN (insn)) == SET
8613 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
8614 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
8615 || (!TARGET_CPU_ZARCH
8616 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
8617 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
8619 set = PATTERN (insn);
8620 first = REGNO (SET_DEST (set));
8621 offset = const0_rtx;
8622 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
8623 off = INTVAL (offset);
8625 if (GET_CODE (base) != REG || off < 0)
8627 if (REGNO (base) != STACK_POINTER_REGNUM
8628 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
8637 /* Perform machine-dependent processing. */
/* NOTE(review): the function header line ("static void s390_reorg
   (void)", presumably — it is registered below as
   TARGET_MACHINE_DEPENDENT_REORG) and the surrounding loop/brace lines
   are elided from this listing.  */
8642 bool pool_overflow = false;
8644 /* Make sure all splits have been performed; splits after
8645 machine_dependent_reorg might confuse insn length counts. */
8646 split_all_insns_noflow ();
8649 /* Install the main literal pool and the associated base
8650 register load insns.
8652 In addition, there are two problematic situations we need
8655 - the literal pool might be > 4096 bytes in size, so that
8656 some of its elements cannot be directly accessed
8658 - a branch target might be > 64K away from the branch, so that
8659 it is not possible to use a PC-relative instruction.
8661 To fix those, we split the single literal pool into multiple
8662 pool chunks, reloading the pool base register at various
8663 points throughout the function to ensure it always points to
8664 the pool chunk the following code expects, and / or replace
8665 PC-relative branches by absolute branches.
8667 However, the two problems are interdependent: splitting the
8668 literal pool can move a branch further away from its target,
8669 causing the 64K limit to overflow, and on the other hand,
8670 replacing a PC-relative branch by an absolute branch means
8671 we need to put the branch target address into the literal
8672 pool, possibly causing it to overflow.
8674 So, we loop trying to fix up both problems until we manage
8675 to satisfy both conditions at the same time. Note that the
8676 loop is guaranteed to terminate as every pass of the loop
8677 strictly decreases the total number of PC-relative branches
8678 in the function. (This is not completely true as there
8679 might be branch-over-pool insns introduced by chunkify_start.
8680 Those never need to be split however.) */
8684 struct constant_pool *pool = NULL;
8686 /* Collect the literal pool. */
8689 pool = s390_mainpool_start ();
8691 pool_overflow = true;
8694 /* If literal pool overflowed, start to chunkify it. */
8696 pool = s390_chunkify_start ();
8698 /* Split out-of-range branches. If this has created new
8699 literal pool entries, cancel current chunk list and
8700 recompute it. zSeries machines have large branch
8701 instructions, so we never need to split a branch. */
8702 if (!TARGET_CPU_ZARCH && s390_split_branches ())
8705 s390_chunkify_cancel (pool);
8707 s390_mainpool_cancel (pool);
8712 /* If we made it up to here, both conditions are satisfied.
8713 Finish up literal pool related changes. */
8715 s390_chunkify_finish (pool);
8717 s390_mainpool_finish (pool);
8719 /* We're done splitting branches. */
8720 cfun->machine->split_branches_pending_p = false;
8724 /* Generate out-of-pool execute target insns. */
8725 if (TARGET_CPU_ZARCH)
8727 rtx insn, label, target;
8729 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8731 label = s390_execute_label (insn);
8735 gcc_assert (label != const0_rtx);
/* Emit the execute target out of line: a label followed by the
   instruction the EXECUTE insn will operate on.  */
8737 target = emit_label (XEXP (label, 0));
8738 INSN_ADDRESSES_NEW (target, -1);
8740 target = emit_insn (s390_execute_target (insn));
8741 INSN_ADDRESSES_NEW (target, -1);
8745 /* Try to optimize prologue and epilogue further. */
8746 s390_optimize_prologue ();
8750 /* Initialize GCC target structure. */
/* Each #undef/#define pair below overrides one target hook default with
   the s390-specific implementation; TARGET_INITIALIZER then gathers all
   of them into the single `targetm' definition at the end.  */
8752 #undef TARGET_ASM_ALIGNED_HI_OP
8753 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
8754 #undef TARGET_ASM_ALIGNED_DI_OP
8755 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
8756 #undef TARGET_ASM_INTEGER
8757 #define TARGET_ASM_INTEGER s390_assemble_integer
8759 #undef TARGET_ASM_OPEN_PAREN
8760 #define TARGET_ASM_OPEN_PAREN ""
8762 #undef TARGET_ASM_CLOSE_PAREN
8763 #define TARGET_ASM_CLOSE_PAREN ""
8765 #undef TARGET_DEFAULT_TARGET_FLAGS
8766 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
8767 #undef TARGET_HANDLE_OPTION
8768 #define TARGET_HANDLE_OPTION s390_handle_option
8770 #undef TARGET_ENCODE_SECTION_INFO
8771 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
/* NOTE(review): the TLS block below is presumably guarded by
   #ifdef HAVE_AS_TLS in the full source; the guard lines are elided
   from this listing.  */
8774 #undef TARGET_HAVE_TLS
8775 #define TARGET_HAVE_TLS true
8777 #undef TARGET_CANNOT_FORCE_CONST_MEM
8778 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
8780 #undef TARGET_DELEGITIMIZE_ADDRESS
8781 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
8783 #undef TARGET_RETURN_IN_MEMORY
8784 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
8786 #undef TARGET_INIT_BUILTINS
8787 #define TARGET_INIT_BUILTINS s390_init_builtins
8788 #undef TARGET_EXPAND_BUILTIN
8789 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
8791 #undef TARGET_ASM_OUTPUT_MI_THUNK
8792 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
8793 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
8794 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
8796 #undef TARGET_SCHED_ADJUST_PRIORITY
8797 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
8798 #undef TARGET_SCHED_ISSUE_RATE
8799 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
8800 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
8801 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
8803 #undef TARGET_CANNOT_COPY_INSN_P
8804 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
8805 #undef TARGET_RTX_COSTS
8806 #define TARGET_RTX_COSTS s390_rtx_costs
8807 #undef TARGET_ADDRESS_COST
8808 #define TARGET_ADDRESS_COST s390_address_cost
8810 #undef TARGET_MACHINE_DEPENDENT_REORG
8811 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
8813 #undef TARGET_VALID_POINTER_MODE
8814 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
8816 #undef TARGET_BUILD_BUILTIN_VA_LIST
8817 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
8818 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
8819 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
8821 #undef TARGET_PROMOTE_FUNCTION_ARGS
8822 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
8823 #undef TARGET_PROMOTE_FUNCTION_RETURN
8824 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
8825 #undef TARGET_PASS_BY_REFERENCE
8826 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
8828 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
8829 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
8831 #undef TARGET_FIXED_CONDITION_CODE_REGS
8832 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
8834 #undef TARGET_CC_MODES_COMPATIBLE
8835 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
8837 #undef TARGET_INVALID_WITHIN_DOLOOP
8838 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_rtx_null
8841 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
8842 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
/* The one definition of the target hook vector for this backend.  */
8845 struct gcc_target targetm = TARGET_INITIALIZER;
8847 #include "gt-s390.h"