1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
5 Ulrich Weigand (uweigand@de.ibm.com) and
6 Andreas Krebbel (Andreas.Krebbel@de.ibm.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
32 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "conditions.h"
37 #include "insn-attr.h"
45 #include "basic-block.h"
46 #include "integrate.h"
49 #include "target-def.h"
51 #include "langhooks.h"
58 /* Define the specific costs for a given cpu. */
60 struct processor_costs
63 const int m; /* cost of an M instruction. */
64 const int mghi; /* cost of an MGHI instruction. */
65 const int mh; /* cost of an MH instruction. */
66 const int mhi; /* cost of an MHI instruction. */
67 const int ml; /* cost of an ML instruction. */
68 const int mr; /* cost of an MR instruction. */
69 const int ms; /* cost of an MS instruction. */
70 const int msg; /* cost of an MSG instruction. */
71 const int msgf; /* cost of an MSGF instruction. */
72 const int msgfr; /* cost of an MSGFR instruction. */
73 const int msgr; /* cost of an MSGR instruction. */
74 const int msr; /* cost of an MSR instruction. */
75 const int mult_df; /* cost of multiplication in DFmode. */
78 const int sqxbr; /* cost of square root in TFmode. */
79 const int sqdbr; /* cost of square root in DFmode. */
80 const int sqebr; /* cost of square root in SFmode. */
81 /* multiply and add */
82 const int madbr; /* cost of multiply and add in DFmode. */
83 const int maebr; /* cost of multiply and add in SFmode. */
95 const struct processor_costs *s390_cost;
98 struct processor_costs z900_cost =
100 COSTS_N_INSNS (5), /* M */
101 COSTS_N_INSNS (10), /* MGHI */
102 COSTS_N_INSNS (5), /* MH */
103 COSTS_N_INSNS (4), /* MHI */
104 COSTS_N_INSNS (5), /* ML */
105 COSTS_N_INSNS (5), /* MR */
106 COSTS_N_INSNS (4), /* MS */
107 COSTS_N_INSNS (15), /* MSG */
108 COSTS_N_INSNS (7), /* MSGF */
109 COSTS_N_INSNS (7), /* MSGFR */
110 COSTS_N_INSNS (10), /* MSGR */
111 COSTS_N_INSNS (4), /* MSR */
112 COSTS_N_INSNS (7), /* multiplication in DFmode */
113 COSTS_N_INSNS (13), /* MXBR */
114 COSTS_N_INSNS (136), /* SQXBR */
115 COSTS_N_INSNS (44), /* SQDBR */
116 COSTS_N_INSNS (35), /* SQEBR */
117 COSTS_N_INSNS (18), /* MADBR */
118 COSTS_N_INSNS (13), /* MAEBR */
119 COSTS_N_INSNS (134), /* DXBR */
120 COSTS_N_INSNS (30), /* DDBR */
121 COSTS_N_INSNS (27), /* DEBR */
122 COSTS_N_INSNS (220), /* DLGR */
123 COSTS_N_INSNS (34), /* DLR */
124 COSTS_N_INSNS (34), /* DR */
125 COSTS_N_INSNS (32), /* DSGFR */
126 COSTS_N_INSNS (32), /* DSGR */
130 struct processor_costs z990_cost =
132 COSTS_N_INSNS (4), /* M */
133 COSTS_N_INSNS (2), /* MGHI */
134 COSTS_N_INSNS (2), /* MH */
135 COSTS_N_INSNS (2), /* MHI */
136 COSTS_N_INSNS (4), /* ML */
137 COSTS_N_INSNS (4), /* MR */
138 COSTS_N_INSNS (5), /* MS */
139 COSTS_N_INSNS (6), /* MSG */
140 COSTS_N_INSNS (4), /* MSGF */
141 COSTS_N_INSNS (4), /* MSGFR */
142 COSTS_N_INSNS (4), /* MSGR */
143 COSTS_N_INSNS (4), /* MSR */
144 COSTS_N_INSNS (1), /* multiplication in DFmode */
145 COSTS_N_INSNS (28), /* MXBR */
146 COSTS_N_INSNS (130), /* SQXBR */
147 COSTS_N_INSNS (66), /* SQDBR */
148 COSTS_N_INSNS (38), /* SQEBR */
149 COSTS_N_INSNS (1), /* MADBR */
150 COSTS_N_INSNS (1), /* MAEBR */
151 COSTS_N_INSNS (60), /* DXBR */
152 COSTS_N_INSNS (40), /* DDBR */
153 COSTS_N_INSNS (26), /* DEBR */
154 COSTS_N_INSNS (176), /* DLGR */
155 COSTS_N_INSNS (31), /* DLR */
156 COSTS_N_INSNS (31), /* DR */
157 COSTS_N_INSNS (31), /* DSGFR */
158 COSTS_N_INSNS (31), /* DSGR */
162 struct processor_costs z9_109_cost =
164 COSTS_N_INSNS (4), /* M */
165 COSTS_N_INSNS (2), /* MGHI */
166 COSTS_N_INSNS (2), /* MH */
167 COSTS_N_INSNS (2), /* MHI */
168 COSTS_N_INSNS (4), /* ML */
169 COSTS_N_INSNS (4), /* MR */
170 COSTS_N_INSNS (5), /* MS */
171 COSTS_N_INSNS (6), /* MSG */
172 COSTS_N_INSNS (4), /* MSGF */
173 COSTS_N_INSNS (4), /* MSGFR */
174 COSTS_N_INSNS (4), /* MSGR */
175 COSTS_N_INSNS (4), /* MSR */
176 COSTS_N_INSNS (1), /* multiplication in DFmode */
177 COSTS_N_INSNS (28), /* MXBR */
178 COSTS_N_INSNS (130), /* SQXBR */
179 COSTS_N_INSNS (66), /* SQDBR */
180 COSTS_N_INSNS (38), /* SQEBR */
181 COSTS_N_INSNS (1), /* MADBR */
182 COSTS_N_INSNS (1), /* MAEBR */
183 COSTS_N_INSNS (60), /* DXBR */
184 COSTS_N_INSNS (40), /* DDBR */
185 COSTS_N_INSNS (26), /* DEBR */
186 COSTS_N_INSNS (30), /* DLGR */
187 COSTS_N_INSNS (23), /* DLR */
188 COSTS_N_INSNS (23), /* DR */
189 COSTS_N_INSNS (24), /* DSGFR */
190 COSTS_N_INSNS (24), /* DSGR */
194 struct processor_costs z10_cost =
196 COSTS_N_INSNS (10), /* M */
197 COSTS_N_INSNS (10), /* MGHI */
198 COSTS_N_INSNS (10), /* MH */
199 COSTS_N_INSNS (10), /* MHI */
200 COSTS_N_INSNS (10), /* ML */
201 COSTS_N_INSNS (10), /* MR */
202 COSTS_N_INSNS (10), /* MS */
203 COSTS_N_INSNS (10), /* MSG */
204 COSTS_N_INSNS (10), /* MSGF */
205 COSTS_N_INSNS (10), /* MSGFR */
206 COSTS_N_INSNS (10), /* MSGR */
207 COSTS_N_INSNS (10), /* MSR */
208 COSTS_N_INSNS (1) , /* multiplication in DFmode */
209 COSTS_N_INSNS (50), /* MXBR */
210 COSTS_N_INSNS (120), /* SQXBR */
211 COSTS_N_INSNS (52), /* SQDBR */
212 COSTS_N_INSNS (38), /* SQEBR */
213 COSTS_N_INSNS (1), /* MADBR */
214 COSTS_N_INSNS (1), /* MAEBR */
215 COSTS_N_INSNS (111), /* DXBR */
216 COSTS_N_INSNS (39), /* DDBR */
217 COSTS_N_INSNS (32), /* DEBR */
218 COSTS_N_INSNS (160), /* DLGR */
219 COSTS_N_INSNS (71), /* DLR */
220 COSTS_N_INSNS (71), /* DR */
221 COSTS_N_INSNS (71), /* DSGFR */
222 COSTS_N_INSNS (71), /* DSGR */
225 extern int reload_completed;
227 /* Kept up to date using the SCHED_VARIABLE_ISSUE hook. */
228 static rtx last_scheduled_insn;
230 /* Structure used to hold the components of a S/390 memory
231 address. A legitimate address on S/390 is of the general
232 form
233 base + index + displacement
234 where any of the components is optional.
236 base and index are registers of the class ADDR_REGS,
237 displacement is an unsigned 12-bit immediate constant. */
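/* A minimal usage sketch (hypothetical, compiled out): decompose a
   canonical base + displacement address. The field names base, indx
   and disp follow the struct s390_address definition; the rtx shapes
   are the usual GCC rtl forms. */
#if 0
static void
s390_decompose_address_example (void)
{
  struct s390_address ad;
  rtx addr = gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode, 2), GEN_INT (40));

  if (s390_decompose_address (addr, &ad))
    {
      /* ad.base is the %r2 register, ad.indx is NULL_RTX, and ad.disp
         represents the constant displacement 40. */
    }
}
#endif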
248 /* Which cpu we are tuning for. */
249 enum processor_type s390_tune = PROCESSOR_max;
251 /* Which instruction set architecture to use. */
252 enum processor_type s390_arch;
255 HOST_WIDE_INT s390_warn_framesize = 0;
256 HOST_WIDE_INT s390_stack_size = 0;
257 HOST_WIDE_INT s390_stack_guard = 0;
259 /* The following structure is embedded in the machine
260 specific part of struct function. */
262 struct GTY (()) s390_frame_layout
264 /* Offset within stack frame. */
265 HOST_WIDE_INT gprs_offset;
266 HOST_WIDE_INT f0_offset;
267 HOST_WIDE_INT f4_offset;
268 HOST_WIDE_INT f8_offset;
269 HOST_WIDE_INT backchain_offset;
271 /* Numbers of the first and last GPRs for which slots in the register
272 save area are reserved. */
273 int first_save_gpr_slot;
274 int last_save_gpr_slot;
276 /* Number of first and last gpr to be saved, restored. */
278 int first_restore_gpr;
280 int last_restore_gpr;
282 /* Bits standing for floating point registers. Set, if the
283 respective register has to be saved. Starting with reg 16 (f0)
284 at the rightmost bit.
285 Bit 15 - 8 7 6 5 4 3 2 1 0
286 fpr 15 - 8 7 5 3 1 6 4 2 0
287 reg 31 - 24 23 22 21 20 19 18 17 16 */
288 unsigned int fpr_bitmap;
290 /* Number of floating point registers f8-f15 which must be saved. */
291 int high_fprs;
293 /* Set if return address needs to be saved.
294 This flag is set by s390_return_addr_rtx if it could not use
295 the initial value of r14 and therefore depends on r14 saved
296 to the stack. */
297 bool save_return_addr_p;
299 /* Size of stack frame. */
300 HOST_WIDE_INT frame_size;
303 /* Define the structure for the machine field in struct function. */
305 struct GTY(()) machine_function
307 struct s390_frame_layout frame_layout;
309 /* Literal pool base register. */
310 rtx base_reg;
312 /* True if we may need to perform branch splitting. */
313 bool split_branches_pending_p;
315 /* Some local-dynamic TLS symbol name. */
316 const char *some_ld_name;
318 bool has_landing_pad_p;
321 /* A few accessor macros for struct cfun->machine->s390_frame_layout. */
323 #define cfun_frame_layout (cfun->machine->frame_layout)
324 #define cfun_save_high_fprs_p (!!cfun_frame_layout.high_fprs)
325 #define cfun_gprs_save_area_size ((cfun_frame_layout.last_save_gpr_slot - \
326 cfun_frame_layout.first_save_gpr_slot + 1) * UNITS_PER_WORD)
327 #define cfun_set_fpr_bit(BITNUM) (cfun->machine->frame_layout.fpr_bitmap |= \
328 (1 << (BITNUM)))
329 #define cfun_fpr_bit_p(BITNUM) (!!(cfun->machine->frame_layout.fpr_bitmap & \
330 (1 << (BITNUM))))
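/* Usage sketch (hypothetical, compiled out, assumes cfun is set up):
   per the fpr_bitmap layout above, bit 2 stands for f4 (hard reg 18),
   so marking and testing that register looks like this. */
#if 0
static void
s390_fpr_bitmap_example (void)
{
  cfun_set_fpr_bit (2);           /* f4 must be saved. */
  gcc_assert (cfun_fpr_bit_p (2));
}
#endif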
332 /* Number of GPRs and FPRs used for argument passing. */
333 #define GP_ARG_NUM_REG 5
334 #define FP_ARG_NUM_REG (TARGET_64BIT ? 4 : 2)
336 /* A couple of shortcuts. */
337 #define CONST_OK_FOR_J(x) \
338 CONST_OK_FOR_CONSTRAINT_P((x), 'J', "J")
339 #define CONST_OK_FOR_K(x) \
340 CONST_OK_FOR_CONSTRAINT_P((x), 'K', "K")
341 #define CONST_OK_FOR_Os(x) \
342 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Os")
343 #define CONST_OK_FOR_Op(x) \
344 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "Op")
345 #define CONST_OK_FOR_On(x) \
346 CONST_OK_FOR_CONSTRAINT_P((x), 'O', "On")
348 #define REGNO_PAIR_OK(REGNO, MODE) \
349 (HARD_REGNO_NREGS ((REGNO), (MODE)) == 1 || !((REGNO) & 1))
351 /* The read-ahead distance of the dynamic branch prediction unit,
352 in bytes, on a z10 CPU. */
353 #define Z10_PREDICT_DISTANCE 384
355 static enum machine_mode
356 s390_libgcc_cmp_return_mode (void)
358 return TARGET_64BIT ? DImode : SImode;
361 static enum machine_mode
362 s390_libgcc_shift_count_mode (void)
364 return TARGET_64BIT ? DImode : SImode;
367 /* Return true if the back end supports mode MODE. */
369 s390_scalar_mode_supported_p (enum machine_mode mode)
371 if (DECIMAL_FLOAT_MODE_P (mode))
372 return default_decimal_float_supported_p ();
374 return default_scalar_mode_supported_p (mode);
377 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */
380 s390_set_has_landing_pad_p (bool value)
382 cfun->machine->has_landing_pad_p = value;
385 /* If two condition code modes are compatible, return a condition code
386 mode which is compatible with both. Otherwise, return
387 VOIDmode. */
389 static enum machine_mode
390 s390_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
398 if (m2 == CCUmode || m2 == CCTmode || m2 == CCZ1mode
399 || m2 == CCSmode || m2 == CCSRmode || m2 == CCURmode)
420 /* Return true if SET either doesn't set the CC register, or else
421 the source and destination have matching CC modes and that
422 CC mode is at least as constrained as REQ_MODE. */
425 s390_match_ccmode_set (rtx set, enum machine_mode req_mode)
427 enum machine_mode set_mode;
429 gcc_assert (GET_CODE (set) == SET);
431 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
434 set_mode = GET_MODE (SET_DEST (set));
448 if (req_mode != set_mode)
453 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
454 && req_mode != CCSRmode && req_mode != CCURmode)
460 if (req_mode != CCAmode)
468 return (GET_MODE (SET_SRC (set)) == set_mode);
471 /* Return true if every SET in INSN that sets the CC register
472 has source and destination with matching CC modes and that
473 CC mode is at least as constrained as REQ_MODE.
474 If REQ_MODE is VOIDmode, always return false. */
477 s390_match_ccmode (rtx insn, enum machine_mode req_mode)
481 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
482 if (req_mode == VOIDmode)
485 if (GET_CODE (PATTERN (insn)) == SET)
486 return s390_match_ccmode_set (PATTERN (insn), req_mode);
488 if (GET_CODE (PATTERN (insn)) == PARALLEL)
489 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
491 rtx set = XVECEXP (PATTERN (insn), 0, i);
492 if (GET_CODE (set) == SET)
493 if (!s390_match_ccmode_set (set, req_mode))
500 /* If a test-under-mask instruction can be used to implement
501 (compare (and ... OP1) OP2), return the CC mode required
502 to do that. Otherwise, return VOIDmode.
503 MIXED is true if the instruction can distinguish between
504 CC1 and CC2 for mixed selected bits (TMxx), it is false
505 if the instruction cannot (TM). */
508 s390_tm_ccmode (rtx op1, rtx op2, bool mixed)
512 /* ??? Fixme: should work on CONST_DOUBLE as well. */
513 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
516 /* Selected bits all zero: CC0.
517 e.g.: int a; if ((a & (16 + 128)) == 0) */
518 if (INTVAL (op2) == 0)
521 /* Selected bits all one: CC3.
522 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */
523 if (INTVAL (op2) == INTVAL (op1))
526 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.:
527 int a;
528 if ((a & (16 + 128)) == 16) -> CCT1
529 if ((a & (16 + 128)) == 128) -> CCT2 */
532 bit1 = exact_log2 (INTVAL (op2));
533 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
534 if (bit0 != -1 && bit1 != -1)
535 return bit0 > bit1 ? CCT1mode : CCT2mode;
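/* Worked example (illustrative): for mask 16 + 128 == 144 compared
   against 16, bit1 == exact_log2 (16) == 4 and bit0 ==
   exact_log2 (144 ^ 16) == exact_log2 (128) == 7; since bit0 > bit1,
   CCT1mode is chosen, matching the "== 16 -> CCT1" case above. */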
541 /* Given a comparison code OP (EQ, NE, etc.) and the operands
542 OP0 and OP1 of a COMPARE, return the mode to be used for the
546 s390_select_ccmode (enum rtx_code code, rtx op0, rtx op1)
552 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
553 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
555 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
556 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
558 if ((GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
559 || GET_CODE (op1) == NEG)
560 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
563 if (GET_CODE (op0) == AND)
565 /* Check whether we can potentially do it via TM. */
566 enum machine_mode ccmode;
567 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
568 if (ccmode != VOIDmode)
570 /* Relax CCTmode to CCZmode to allow fall-back to AND
571 if that turns out to be beneficial. */
572 return ccmode == CCTmode ? CCZmode : ccmode;
576 if (register_operand (op0, HImode)
577 && GET_CODE (op1) == CONST_INT
578 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
580 if (register_operand (op0, QImode)
581 && GET_CODE (op1) == CONST_INT
582 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
591 /* The only overflow condition of NEG and ABS happens when
592 INT_MIN is used as the operand: the mathematically positive result
593 does not fit and wraps around to a negative value.
594 Using CCAP mode the resulting cc can be used for comparisons. */
595 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS)
596 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
599 /* If constants are involved in an add instruction it is possible to use
600 the resulting cc for comparisons with zero. Knowing the sign of the
601 constant the overflow behavior gets predictable. e.g.:
602 int a, b; if ((b = a + c) > 0)
603 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */
604 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
605 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1))))
607 if (INTVAL (XEXP (op0, 1)) < 0)
621 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
622 && GET_CODE (op1) != CONST_INT)
628 if (GET_CODE (op0) == PLUS
629 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
632 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
633 && GET_CODE (op1) != CONST_INT)
639 if (GET_CODE (op0) == MINUS
640 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
643 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
644 && GET_CODE (op1) != CONST_INT)
653 /* Replace the comparison OP0 CODE OP1 by a semantically equivalent one
654 that we can implement more efficiently. */
657 s390_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1)
659 /* Convert ZERO_EXTRACT back to AND to enable TM patterns. */
660 if ((*code == EQ || *code == NE)
661 && *op1 == const0_rtx
662 && GET_CODE (*op0) == ZERO_EXTRACT
663 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
664 && GET_CODE (XEXP (*op0, 2)) == CONST_INT
665 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
667 rtx inner = XEXP (*op0, 0);
668 HOST_WIDE_INT modesize = GET_MODE_BITSIZE (GET_MODE (inner));
669 HOST_WIDE_INT len = INTVAL (XEXP (*op0, 1));
670 HOST_WIDE_INT pos = INTVAL (XEXP (*op0, 2));
672 if (len > 0 && len < modesize
673 && pos >= 0 && pos + len <= modesize
674 && modesize <= HOST_BITS_PER_WIDE_INT)
676 unsigned HOST_WIDE_INT block;
677 block = ((unsigned HOST_WIDE_INT) 1 << len) - 1;
678 block <<= modesize - pos - len;
680 *op0 = gen_rtx_AND (GET_MODE (inner), inner,
681 gen_int_mode (block, GET_MODE (inner)));
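/* Worked example (illustrative): on this BITS_BIG_ENDIAN target,
   (zero_extract:SI x 2 3) selects 2 bits starting at position 3 from
   the most significant end, so block == 0x3 is shifted left by
   32 - 3 - 2 == 27, yielding the AND mask 0x18000000. */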
685 /* Narrow AND of memory against immediate to enable TM. */
686 if ((*code == EQ || *code == NE)
687 && *op1 == const0_rtx
688 && GET_CODE (*op0) == AND
689 && GET_CODE (XEXP (*op0, 1)) == CONST_INT
690 && SCALAR_INT_MODE_P (GET_MODE (XEXP (*op0, 0))))
692 rtx inner = XEXP (*op0, 0);
693 rtx mask = XEXP (*op0, 1);
695 /* Ignore paradoxical SUBREGs if all extra bits are masked out. */
696 if (GET_CODE (inner) == SUBREG
697 && SCALAR_INT_MODE_P (GET_MODE (SUBREG_REG (inner)))
698 && (GET_MODE_SIZE (GET_MODE (inner))
699 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner))))
701 & GET_MODE_MASK (GET_MODE (inner))
702 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (inner))))
704 inner = SUBREG_REG (inner);
706 /* Do not change volatile MEMs. */
707 if (MEM_P (inner) && !MEM_VOLATILE_P (inner))
709 int part = s390_single_part (XEXP (*op0, 1),
710 GET_MODE (inner), QImode, 0);
713 mask = gen_int_mode (s390_extract_part (mask, QImode, 0), QImode);
714 inner = adjust_address_nv (inner, QImode, part);
715 *op0 = gen_rtx_AND (QImode, inner, mask);
720 /* Narrow comparisons against 0xffff to HImode if possible. */
721 if ((*code == EQ || *code == NE)
722 && GET_CODE (*op1) == CONST_INT
723 && INTVAL (*op1) == 0xffff
724 && SCALAR_INT_MODE_P (GET_MODE (*op0))
725 && (nonzero_bits (*op0, GET_MODE (*op0))
726 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0)
728 *op0 = gen_lowpart (HImode, *op0);
732 /* Remove redundant UNSPEC_CCU_TO_INT conversions if possible. */
733 if (GET_CODE (*op0) == UNSPEC
734 && XINT (*op0, 1) == UNSPEC_CCU_TO_INT
735 && XVECLEN (*op0, 0) == 1
736 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCUmode
737 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
738 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
739 && *op1 == const0_rtx)
741 enum rtx_code new_code = UNKNOWN;
744 case EQ: new_code = EQ; break;
745 case NE: new_code = NE; break;
746 case LT: new_code = GTU; break;
747 case GT: new_code = LTU; break;
748 case LE: new_code = GEU; break;
749 case GE: new_code = LEU; break;
753 if (new_code != UNKNOWN)
755 *op0 = XVECEXP (*op0, 0, 0);
760 /* Remove redundant UNSPEC_CCZ_TO_INT conversions if possible. */
761 if (GET_CODE (*op0) == UNSPEC
762 && XINT (*op0, 1) == UNSPEC_CCZ_TO_INT
763 && XVECLEN (*op0, 0) == 1
764 && GET_MODE (XVECEXP (*op0, 0, 0)) == CCZmode
765 && GET_CODE (XVECEXP (*op0, 0, 0)) == REG
766 && REGNO (XVECEXP (*op0, 0, 0)) == CC_REGNUM
767 && *op1 == const0_rtx)
769 enum rtx_code new_code = UNKNOWN;
772 case EQ: new_code = EQ; break;
773 case NE: new_code = NE; break;
777 if (new_code != UNKNOWN)
779 *op0 = XVECEXP (*op0, 0, 0);
784 /* Simplify cascaded EQ, NE with const0_rtx. */
785 if ((*code == NE || *code == EQ)
786 && (GET_CODE (*op0) == EQ || GET_CODE (*op0) == NE)
787 && GET_MODE (*op0) == SImode
788 && GET_MODE (XEXP (*op0, 0)) == CCZ1mode
789 && REG_P (XEXP (*op0, 0))
790 && XEXP (*op0, 1) == const0_rtx
791 && *op1 == const0_rtx)
793 if ((*code == EQ && GET_CODE (*op0) == NE)
794 || (*code == NE && GET_CODE (*op0) == EQ))
798 *op0 = XEXP (*op0, 0);
801 /* Prefer register over memory as first operand. */
802 if (MEM_P (*op0) && REG_P (*op1))
804 rtx tem = *op0; *op0 = *op1; *op1 = tem;
805 *code = swap_condition (*code);
809 /* Emit a compare instruction suitable to implement the comparison
810 OP0 CODE OP1. Return the correct condition RTL to be placed in
811 the IF_THEN_ELSE of the conditional branch testing the result. */
814 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1)
816 enum machine_mode mode = s390_select_ccmode (code, op0, op1);
819 /* Do not output a redundant compare instruction if a compare_and_swap
820 pattern already computed the result and the machine modes are compatible. */
821 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
823 gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode)
829 cc = gen_rtx_REG (mode, CC_REGNUM);
830 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1)));
833 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx);
836 /* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD
838 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the
839 conditional branch testing the result. */
842 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new_rtx)
844 emit_insn (gen_sync_compare_and_swapsi (old, mem, cmp, new_rtx));
845 return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM), const0_rtx);
848 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an
849 unconditional jump, else a conditional jump under condition COND. */
852 s390_emit_jump (rtx target, rtx cond)
856 target = gen_rtx_LABEL_REF (VOIDmode, target);
858 target = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, target, pc_rtx);
860 insn = gen_rtx_SET (VOIDmode, pc_rtx, target);
861 emit_jump_insn (insn);
864 /* Return branch condition mask to implement a branch
865 specified by CODE. Return -1 for invalid comparisons. */
868 s390_branch_condition_mask (rtx code)
870 const int CC0 = 1 << 3;
871 const int CC1 = 1 << 2;
872 const int CC2 = 1 << 1;
873 const int CC3 = 1 << 0;
875 gcc_assert (GET_CODE (XEXP (code, 0)) == REG);
876 gcc_assert (REGNO (XEXP (code, 0)) == CC_REGNUM);
877 gcc_assert (XEXP (code, 1) == const0_rtx);
879 switch (GET_MODE (XEXP (code, 0)))
883 switch (GET_CODE (code))
886 case NE: return CC1 | CC2 | CC3;
892 switch (GET_CODE (code))
895 case NE: return CC0 | CC2 | CC3;
901 switch (GET_CODE (code))
904 case NE: return CC0 | CC1 | CC3;
910 switch (GET_CODE (code))
913 case NE: return CC0 | CC1 | CC2;
919 switch (GET_CODE (code))
921 case EQ: return CC0 | CC2;
922 case NE: return CC1 | CC3;
928 switch (GET_CODE (code))
930 case LTU: return CC2 | CC3; /* carry */
931 case GEU: return CC0 | CC1; /* no carry */
937 switch (GET_CODE (code))
939 case GTU: return CC0 | CC1; /* borrow */
940 case LEU: return CC2 | CC3; /* no borrow */
946 switch (GET_CODE (code))
948 case EQ: return CC0 | CC2;
949 case NE: return CC1 | CC3;
950 case LTU: return CC1;
951 case GTU: return CC3;
952 case LEU: return CC1 | CC2;
953 case GEU: return CC2 | CC3;
958 switch (GET_CODE (code))
961 case NE: return CC1 | CC2 | CC3;
962 case LTU: return CC1;
963 case GTU: return CC2;
964 case LEU: return CC0 | CC1;
965 case GEU: return CC0 | CC2;
971 switch (GET_CODE (code))
974 case NE: return CC2 | CC1 | CC3;
975 case LTU: return CC2;
976 case GTU: return CC1;
977 case LEU: return CC0 | CC2;
978 case GEU: return CC0 | CC1;
984 switch (GET_CODE (code))
987 case NE: return CC1 | CC2 | CC3;
988 case LT: return CC1 | CC3;
990 case LE: return CC0 | CC1 | CC3;
991 case GE: return CC0 | CC2;
997 switch (GET_CODE (code))
1000 case NE: return CC1 | CC2 | CC3;
1001 case LT: return CC1;
1002 case GT: return CC2 | CC3;
1003 case LE: return CC0 | CC1;
1004 case GE: return CC0 | CC2 | CC3;
1010 switch (GET_CODE (code))
1012 case EQ: return CC0;
1013 case NE: return CC1 | CC2 | CC3;
1014 case LT: return CC1;
1015 case GT: return CC2;
1016 case LE: return CC0 | CC1;
1017 case GE: return CC0 | CC2;
1018 case UNORDERED: return CC3;
1019 case ORDERED: return CC0 | CC1 | CC2;
1020 case UNEQ: return CC0 | CC3;
1021 case UNLT: return CC1 | CC3;
1022 case UNGT: return CC2 | CC3;
1023 case UNLE: return CC0 | CC1 | CC3;
1024 case UNGE: return CC0 | CC2 | CC3;
1025 case LTGT: return CC1 | CC2;
1031 switch (GET_CODE (code))
1033 case EQ: return CC0;
1034 case NE: return CC2 | CC1 | CC3;
1035 case LT: return CC2;
1036 case GT: return CC1;
1037 case LE: return CC0 | CC2;
1038 case GE: return CC0 | CC1;
1039 case UNORDERED: return CC3;
1040 case ORDERED: return CC0 | CC2 | CC1;
1041 case UNEQ: return CC0 | CC3;
1042 case UNLT: return CC2 | CC3;
1043 case UNGT: return CC1 | CC3;
1044 case UNLE: return CC0 | CC2 | CC3;
1045 case UNGE: return CC0 | CC1 | CC3;
1046 case LTGT: return CC2 | CC1;
1057 /* Return branch condition mask to implement a compare and branch
1058 specified by CODE. Return -1 for invalid comparisons. */
1061 s390_compare_and_branch_condition_mask (rtx code)
1063 const int CC0 = 1 << 3;
1064 const int CC1 = 1 << 2;
1065 const int CC2 = 1 << 1;
1067 switch (GET_CODE (code))
1091 /* If INV is false, return assembler mnemonic string to implement
1092 a branch specified by CODE. If INV is true, return mnemonic
1093 for the corresponding inverted branch. */
1096 s390_branch_condition_mnemonic (rtx code, int inv)
1100 static const char *const mnemonic[16] =
1102 NULL, "o", "h", "nle",
1103 "l", "nhe", "lh", "ne",
1104 "e", "nlh", "he", "nl",
1105 "le", "nh", "no", NULL
1108 if (GET_CODE (XEXP (code, 0)) == REG
1109 && REGNO (XEXP (code, 0)) == CC_REGNUM
1110 && XEXP (code, 1) == const0_rtx)
1111 mask = s390_branch_condition_mask (code);
1113 mask = s390_compare_and_branch_condition_mask (code);
1115 gcc_assert (mask >= 0);
1120 gcc_assert (mask >= 1 && mask <= 14);
1122 return mnemonic[mask];
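/* Worked example (illustrative): an EQ test of CCZmode yields mask
   CC0 == 1 << 3 == 8 and hence mnemonic[8] == "e"; the inverted
   variant uses the complemented mask (15 - 8 == 7), i.e. "ne". */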
1125 /* Return the part of op which has a value different from def.
1126 The size of the part is determined by mode.
1127 Use this function only if you already know that op really
1128 contains such a part. */
1130 unsigned HOST_WIDE_INT
1131 s390_extract_part (rtx op, enum machine_mode mode, int def)
1133 unsigned HOST_WIDE_INT value = 0;
1134 int max_parts = HOST_BITS_PER_WIDE_INT / GET_MODE_BITSIZE (mode);
1135 int part_bits = GET_MODE_BITSIZE (mode);
1136 unsigned HOST_WIDE_INT part_mask
1137 = ((unsigned HOST_WIDE_INT)1 << part_bits) - 1;
1140 for (i = 0; i < max_parts; i++)
1143 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1145 value >>= part_bits;
1147 if ((value & part_mask) != (def & part_mask))
1148 return value & part_mask;
1154 /* If OP is an integer constant of mode MODE with exactly one
1155 part of mode PART_MODE unequal to DEF, return the number of that
1156 part. Otherwise, return -1. */
1159 s390_single_part (rtx op,
1160 enum machine_mode mode,
1161 enum machine_mode part_mode,
1164 unsigned HOST_WIDE_INT value = 0;
1165 int n_parts = GET_MODE_SIZE (mode) / GET_MODE_SIZE (part_mode);
1166 unsigned HOST_WIDE_INT part_mask
1167 = ((unsigned HOST_WIDE_INT)1 << GET_MODE_BITSIZE (part_mode)) - 1;
1170 if (GET_CODE (op) != CONST_INT)
1173 for (i = 0; i < n_parts; i++)
1176 value = (unsigned HOST_WIDE_INT) INTVAL (op);
1178 value >>= GET_MODE_BITSIZE (part_mode);
1180 if ((value & part_mask) != (def & part_mask))
1188 return part == -1 ? -1 : n_parts - 1 - part;
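/* Example (hypothetical, compiled out): 0x12340000 in SImode has exactly
   one HImode part that differs from 0. Parts are numbered from the most
   significant end, so s390_single_part returns 0, and s390_extract_part
   hands back the value of that part. */
#if 0
static void
s390_part_example (void)
{
  gcc_assert (s390_single_part (GEN_INT (0x12340000), SImode, HImode, 0) == 0);
  gcc_assert (s390_extract_part (GEN_INT (0x12340000), HImode, 0) == 0x1234);
}
#endif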
1191 /* Return true if IN contains a contiguous bitfield in the lower SIZE
1192 bits and no other bits are set in IN. POS and LENGTH can be used
1193 to obtain the start position and the length of the bitfield.
1195 POS gives the position of the first bit of the bitfield counting
1196 from the lowest order bit starting with zero. In order to use this
1197 value for S/390 instructions this has to be converted to "bits big
1198 endian" style. */
1201 s390_contiguous_bitmask_p (unsigned HOST_WIDE_INT in, int size,
1202 int *pos, int *length)
1207 unsigned HOST_WIDE_INT mask = 1ULL;
1208 bool contiguous = false;
1210 for (i = 0; i < size; mask <<= 1, i++)
1234 /* Calculate a mask for all bits beyond the contiguous bits. */
1235 mask = (-1LL & ~(((1ULL << (tmp_length + tmp_pos - 1)) << 1) - 1));
1240 if (tmp_length + tmp_pos - 1 > size)
1244 *length = tmp_length;
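/* Behavior sketch (hypothetical, compiled out): 0xf0 is a contiguous run
   of four bits starting at bit 4 (counted from the lowest order bit),
   whereas 0x90 has a hole between its set bits. */
#if 0
static void
s390_contiguous_bitmask_example (void)
{
  int pos, len;

  gcc_assert (s390_contiguous_bitmask_p (0xf0, 32, &pos, &len)
              && pos == 4 && len == 4);
  gcc_assert (!s390_contiguous_bitmask_p (0x90, 32, &pos, &len));
}
#endif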
1252 /* Check whether we can (and want to) split a double-word
1253 move in mode MODE from SRC to DST into two single-word
1254 moves, moving the subword FIRST_SUBWORD first. */
1257 s390_split_ok_p (rtx dst, rtx src, enum machine_mode mode, int first_subword)
1259 /* Floating point registers cannot be split. */
1260 if (FP_REG_P (src) || FP_REG_P (dst))
1263 /* We don't need to split if operands are directly accessible. */
1264 if (s_operand (src, mode) || s_operand (dst, mode))
1267 /* Non-offsettable memory references cannot be split. */
1268 if ((GET_CODE (src) == MEM && !offsettable_memref_p (src))
1269 || (GET_CODE (dst) == MEM && !offsettable_memref_p (dst)))
1272 /* Moving the first subword must not clobber a register
1273 needed to move the second subword. */
1274 if (register_operand (dst, mode))
1276 rtx subreg = operand_subword (dst, first_subword, 0, mode);
1277 if (reg_overlap_mentioned_p (subreg, src))
1284 /* Return true if it can be proven that [MEM1, MEM1 + SIZE]
1285 and [MEM2, MEM2 + SIZE] do overlap and false
1286 otherwise. */
1289 s390_overlap_p (rtx mem1, rtx mem2, HOST_WIDE_INT size)
1291 rtx addr1, addr2, addr_delta;
1292 HOST_WIDE_INT delta;
1294 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1300 addr1 = XEXP (mem1, 0);
1301 addr2 = XEXP (mem2, 0);
1303 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1305 /* This overlapping check is used by peepholes merging memory block operations.
1306 Overlapping operations would otherwise be recognized by the S/390 hardware
1307 and would fall back to a slower implementation. Allowing overlapping
1308 operations would lead to slow code but not to wrong code. Therefore we are
1309 somewhat optimistic if we cannot prove that the memory blocks are
1310 distinct.
1311 That's why we return false here although this may accept operations on
1312 overlapping memory areas. */
1313 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT)
1316 delta = INTVAL (addr_delta);
1319 || (delta > 0 && delta < size)
1320 || (delta < 0 && -delta < size))
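/* Example (hypothetical, compiled out): two 4-byte blocks based at the
   same register overlap when their known distance is smaller than the
   block size. */
#if 0
static void
s390_overlap_example (void)
{
  rtx base = gen_rtx_REG (Pmode, 2);
  rtx mem1 = gen_rtx_MEM (BLKmode, base);
  rtx mem2 = gen_rtx_MEM (BLKmode, plus_constant (base, 2));
  rtx mem3 = gen_rtx_MEM (BLKmode, plus_constant (base, 4));

  gcc_assert (s390_overlap_p (mem1, mem2, 4));   /* delta 2 < 4: overlap. */
  gcc_assert (!s390_overlap_p (mem1, mem3, 4));  /* delta 4 >= 4: disjoint. */
}
#endif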
1326 /* Check whether the address of memory reference MEM2 equals exactly
1327 the address of memory reference MEM1 plus DELTA. Return true if
1328 we can prove this to be the case, false otherwise. */
1331 s390_offset_p (rtx mem1, rtx mem2, rtx delta)
1333 rtx addr1, addr2, addr_delta;
1335 if (GET_CODE (mem1) != MEM || GET_CODE (mem2) != MEM)
1338 addr1 = XEXP (mem1, 0);
1339 addr2 = XEXP (mem2, 0);
1341 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1);
1342 if (!addr_delta || !rtx_equal_p (addr_delta, delta))
1348 /* Expand logical operator CODE in mode MODE with operands OPERANDS. */
1351 s390_expand_logical_operator (enum rtx_code code, enum machine_mode mode,
1354 enum machine_mode wmode = mode;
1355 rtx dst = operands[0];
1356 rtx src1 = operands[1];
1357 rtx src2 = operands[2];
1360 /* If we cannot handle the operation directly, use a temp register. */
1361 if (!s390_logical_operator_ok_p (operands))
1362 dst = gen_reg_rtx (mode);
1364 /* QImode and HImode patterns make sense only if we have a destination
1365 in memory. Otherwise perform the operation in SImode. */
1366 if ((mode == QImode || mode == HImode) && GET_CODE (dst) != MEM)
1369 /* Widen operands if required. */
1372 if (GET_CODE (dst) == SUBREG
1373 && (tem = simplify_subreg (wmode, dst, mode, 0)) != 0)
1375 else if (REG_P (dst))
1376 dst = gen_rtx_SUBREG (wmode, dst, 0);
1378 dst = gen_reg_rtx (wmode);
1380 if (GET_CODE (src1) == SUBREG
1381 && (tem = simplify_subreg (wmode, src1, mode, 0)) != 0)
1383 else if (GET_MODE (src1) != VOIDmode)
1384 src1 = gen_rtx_SUBREG (wmode, force_reg (mode, src1), 0);
1386 if (GET_CODE (src2) == SUBREG
1387 && (tem = simplify_subreg (wmode, src2, mode, 0)) != 0)
1389 else if (GET_MODE (src2) != VOIDmode)
1390 src2 = gen_rtx_SUBREG (wmode, force_reg (mode, src2), 0);
1393 /* Emit the instruction. */
1394 op = gen_rtx_SET (VOIDmode, dst, gen_rtx_fmt_ee (code, wmode, src1, src2));
1395 clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
1396 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clob)));
1398 /* Fix up the destination if needed. */
1399 if (dst != operands[0])
1400 emit_move_insn (operands[0], gen_lowpart (mode, dst));
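/* Usage sketch (hypothetical, compiled out): a HImode AND between
   registers has no HImode pattern, so the expander above performs the
   operation in SImode and copies the low part back. */
#if 0
static void
s390_expand_logical_example (void)
{
  rtx operands[3];

  operands[0] = gen_reg_rtx (HImode);
  operands[1] = gen_reg_rtx (HImode);
  operands[2] = gen_reg_rtx (HImode);
  s390_expand_logical_operator (AND, HImode, operands);
}
#endif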
1403 /* Check whether OPERANDS are OK for a logical operation (AND, IOR, XOR). */
1406 s390_logical_operator_ok_p (rtx *operands)
1408 /* If the destination operand is in memory, it needs to coincide
1409 with one of the source operands. After reload, it has to be
1410 the first source operand. */
1411 if (GET_CODE (operands[0]) == MEM)
1412 return rtx_equal_p (operands[0], operands[1])
1413 || (!reload_completed && rtx_equal_p (operands[0], operands[2]));
1418 /* Narrow logical operation CODE of memory operand MEMOP with immediate
1419 operand IMMOP to switch from SS to SI type instructions. */
1422 s390_narrow_logical_operator (enum rtx_code code, rtx *memop, rtx *immop)
1424 int def = code == AND ? -1 : 0;
1428 gcc_assert (GET_CODE (*memop) == MEM);
1429 gcc_assert (!MEM_VOLATILE_P (*memop));
1431 mask = s390_extract_part (*immop, QImode, def);
1432 part = s390_single_part (*immop, GET_MODE (*memop), QImode, def);
1433 gcc_assert (part >= 0);
1435 *memop = adjust_address (*memop, QImode, part);
1436 *immop = gen_int_mode (mask, QImode);
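/* Worked example (illustrative): for an SImode memory AND with the
   immediate 0xffffff00, only the least significant byte differs from
   the AND default of -1. That is part 3 in big-endian part numbering,
   so the access is narrowed to a QImode AND of that byte with 0x00. */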
1440 /* How to allocate a 'struct machine_function'. */
1442 static struct machine_function *
1443 s390_init_machine_status (void)
1445 return GGC_CNEW (struct machine_function);
1448 /* Change optimizations to be performed, depending on the
1449 optimization level.
1451 LEVEL is the optimization level specified; 2 if `-O2' is
1452 specified, 1 if `-O' is specified, and 0 if neither is specified.
1454 SIZE is nonzero if `-Os' is specified and zero otherwise. */
1457 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1459 /* ??? There are apparently still problems with -fcaller-saves. */
1460 flag_caller_saves = 0;
1462 /* By default, always emit DWARF-2 unwind info. This allows debugging
1463 without maintaining a stack frame back-chain. */
1464 flag_asynchronous_unwind_tables = 1;
1466 /* Use MVCLE instructions to decrease code size if requested. */
1467 if (size != 0)
1468 target_flags |= MASK_MVCLE;
1471 /* Return true if ARG is the name of a processor. Set *TYPE and *FLAGS
1472 to the associated processor_type and processor_flags if so. */
1475 s390_handle_arch_option (const char *arg,
1476 enum processor_type *type,
1481 const char *const name; /* processor name or nickname. */
1482 const enum processor_type processor;
1483 const int flags; /* From enum processor_flags. */
1485 const processor_alias_table[] =
1487 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT},
1488 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT},
1489 {"z900", PROCESSOR_2064_Z900, PF_IEEE_FLOAT | PF_ZARCH},
1490 {"z990", PROCESSOR_2084_Z990, PF_IEEE_FLOAT | PF_ZARCH
1491 | PF_LONG_DISPLACEMENT},
1492 {"z9-109", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1493 | PF_LONG_DISPLACEMENT | PF_EXTIMM},
1494 {"z9-ec", PROCESSOR_2094_Z9_109, PF_IEEE_FLOAT | PF_ZARCH
1495 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP },
1496 {"z10", PROCESSOR_2097_Z10, PF_IEEE_FLOAT | PF_ZARCH
1497 | PF_LONG_DISPLACEMENT | PF_EXTIMM | PF_DFP | PF_Z10},
1501 for (i = 0; i < ARRAY_SIZE (processor_alias_table); i++)
1502 if (strcmp (arg, processor_alias_table[i].name) == 0)
1504 *type = processor_alias_table[i].processor;
1505 *flags = processor_alias_table[i].flags;
1511 /* Implement TARGET_HANDLE_OPTION. */
1514 s390_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
1519 return s390_handle_arch_option (arg, &s390_arch, &s390_arch_flags);
1521 case OPT_mstack_guard_:
1522 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_guard) != 1)
1524 if (exact_log2 (s390_stack_guard) == -1)
1525 error ("stack guard value must be an exact power of 2");
1528 case OPT_mstack_size_:
1529 if (sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_stack_size) != 1)
1531 if (exact_log2 (s390_stack_size) == -1)
1532 error ("stack size must be an exact power of 2");
1536 return s390_handle_arch_option (arg, &s390_tune, &s390_tune_flags);
1538 case OPT_mwarn_framesize_:
1539 return sscanf (arg, HOST_WIDE_INT_PRINT_DEC, &s390_warn_framesize) == 1;
1547 override_options (void)
1549 /* Set up function hooks. */
1550 init_machine_status = s390_init_machine_status;
1552 /* Architecture mode defaults according to ABI. */
1553 if (!(target_flags_explicit & MASK_ZARCH))
1556 target_flags |= MASK_ZARCH;
1558 target_flags &= ~MASK_ZARCH;
1561 /* Determine processor architectural level. */
1562 if (!s390_arch_string)
1564 s390_arch_string = TARGET_ZARCH ? "z900" : "g5";
1565 s390_handle_arch_option (s390_arch_string, &s390_arch, &s390_arch_flags);
1568 /* Determine processor to tune for. */
1569 if (s390_tune == PROCESSOR_max)
1571 s390_tune = s390_arch;
1572 s390_tune_flags = s390_arch_flags;
1575 /* Sanity checks. */
1576 if (TARGET_ZARCH && !TARGET_CPU_ZARCH)
1577 error ("z/Architecture mode not supported on %s", s390_arch_string);
1578 if (TARGET_64BIT && !TARGET_ZARCH)
1579 error ("64-bit ABI not supported in ESA/390 mode");
1581 if (TARGET_HARD_DFP && !TARGET_DFP)
1583 if (target_flags_explicit & MASK_HARD_DFP)
1585 if (!TARGET_CPU_DFP)
1586 error ("hardware decimal floating point instructions"
1587 " not available on %s", s390_arch_string);
1589 error ("hardware decimal floating point instructions"
1590 " not available in ESA/390 mode");
1593 target_flags &= ~MASK_HARD_DFP;
1596 if ((target_flags_explicit & MASK_SOFT_FLOAT) && TARGET_SOFT_FLOAT)
1598 if ((target_flags_explicit & MASK_HARD_DFP) && TARGET_HARD_DFP)
1599 error ("-mhard-dfp can't be used in conjunction with -msoft-float");
1601 target_flags &= ~MASK_HARD_DFP;
1604 /* Set processor cost function. */
1607 case PROCESSOR_2084_Z990:
1608 s390_cost = &z990_cost;
1610 case PROCESSOR_2094_Z9_109:
1611 s390_cost = &z9_109_cost;
1613 case PROCESSOR_2097_Z10:
1614 s390_cost = &z10_cost;
1617 s390_cost = &z900_cost;
1620 if (TARGET_BACKCHAIN && TARGET_PACKED_STACK && TARGET_HARD_FLOAT)
1621 error ("-mbackchain -mpacked-stack -mhard-float are not supported "
1624 if (s390_stack_size)
1626 if (s390_stack_guard >= s390_stack_size)
1627 error ("stack size must be greater than the stack guard value");
1628 else if (s390_stack_size > 1 << 16)
1629 error ("stack size must not be greater than 64k");
1631 else if (s390_stack_guard)
1632 error ("-mstack-guard implies use of -mstack-size");
1634 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
1635 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
1636 target_flags |= MASK_LONG_DOUBLE_128;
1639 if (s390_tune == PROCESSOR_2097_Z10
1640 && !PARAM_SET_P (PARAM_MAX_UNROLLED_INSNS))
1641 set_param_value ("max-unrolled-insns", 100);
1643 set_param_value ("max-pending-list-length", 256);
1647 /* Map for smallest class containing reg regno. */
1649 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
1650 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1651 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1652 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1653 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
1654 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1655 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1656 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1657 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
1658 ADDR_REGS, CC_REGS, ADDR_REGS, ADDR_REGS,
1659 ACCESS_REGS, ACCESS_REGS
1662 /* Return attribute type of insn. */
1664 static enum attr_type
1665 s390_safe_attr_type (rtx insn)
1667 if (recog_memoized (insn) >= 0)
1668 return get_attr_type (insn);
1673 /* Return true if DISP is a valid short displacement. */
1676 s390_short_displacement (rtx disp)
1678 /* No displacement is OK. */
1682 /* Without the long displacement facility we don't need to
1683 distinguish between long and short displacements.
1684 if (!TARGET_LONG_DISPLACEMENT)
1687 /* Integer displacement in range. */
1688 if (GET_CODE (disp) == CONST_INT)
1689 return INTVAL (disp) >= 0 && INTVAL (disp) < 4096;
1691 /* GOT offset is not OK, the GOT can be large. */
1692 if (GET_CODE (disp) == CONST
1693 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1694 && (XINT (XEXP (disp, 0), 1) == UNSPEC_GOT
1695 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
1698 /* All other symbolic constants are literal pool references,
1699 which are OK as the literal pool must be small. */
1700 if (GET_CODE (disp) == CONST)
1706 /* Decompose a RTL expression ADDR for a memory address into
1707 its components, returned in OUT.
1709 Returns false if ADDR is not a valid memory address, true
1710 otherwise. If OUT is NULL, don't return the components,
1711 but check for validity only.
1713 Note: Only addresses in canonical form are recognized.
1714 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1715 canonical form so that they will be recognized. */
1718 s390_decompose_address (rtx addr, struct s390_address *out)
1720 HOST_WIDE_INT offset = 0;
1721 rtx base = NULL_RTX;
1722 rtx indx = NULL_RTX;
1723 rtx disp = NULL_RTX;
1725 bool pointer = false;
1726 bool base_ptr = false;
1727 bool indx_ptr = false;
1728 bool literal_pool = false;
1730 /* We may need to substitute the literal pool base register into the address
1731 below. However, at this point we do not know which register is going to
1732 be used as base, so we substitute the arg pointer register. This is going
1733 to be treated as holding a pointer below -- it shouldn't be used for any
1734 other purpose. */
1735 rtx fake_pool_base = gen_rtx_REG (Pmode, ARG_POINTER_REGNUM);
1737 /* Decompose address into base + index + displacement. */
1739 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1742 else if (GET_CODE (addr) == PLUS)
1744 rtx op0 = XEXP (addr, 0);
1745 rtx op1 = XEXP (addr, 1);
1746 enum rtx_code code0 = GET_CODE (op0);
1747 enum rtx_code code1 = GET_CODE (op1);
1749 if (code0 == REG || code0 == UNSPEC)
1751 if (code1 == REG || code1 == UNSPEC)
1753 indx = op0; /* index + base */
1759 base = op0; /* base + displacement */
1764 else if (code0 == PLUS)
1766 indx = XEXP (op0, 0); /* index + base + disp */
1767 base = XEXP (op0, 1);
1778 disp = addr; /* displacement */
1780 /* Extract integer part of displacement. */
1784 if (GET_CODE (disp) == CONST_INT)
1786 offset = INTVAL (disp);
1789 else if (GET_CODE (disp) == CONST
1790 && GET_CODE (XEXP (disp, 0)) == PLUS
1791 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1793 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1794 disp = XEXP (XEXP (disp, 0), 0);
1798 /* Strip off CONST here to avoid special case tests later. */
1799 if (disp && GET_CODE (disp) == CONST)
1800 disp = XEXP (disp, 0);
1802 /* We can convert literal pool addresses to
1803 displacements by basing them off the base register. */
1804 if (disp && GET_CODE (disp) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (disp))
1806 /* Either base or index must be free to hold the base register. */
1808 base = fake_pool_base, literal_pool = true;
1810 indx = fake_pool_base, literal_pool = true;
1814 /* Mark up the displacement. */
1815 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp),
1816 UNSPEC_LTREL_OFFSET);
1819 /* Validate base register. */
1822 if (GET_CODE (base) == UNSPEC)
1823 switch (XINT (base, 1))
1827 disp = gen_rtx_UNSPEC (Pmode,
1828 gen_rtvec (1, XVECEXP (base, 0, 0)),
1829 UNSPEC_LTREL_OFFSET);
1833 base = XVECEXP (base, 0, 1);
1836 case UNSPEC_LTREL_BASE:
1837 if (XVECLEN (base, 0) == 1)
1838 base = fake_pool_base, literal_pool = true;
1840 base = XVECEXP (base, 0, 1);
1848 || (GET_MODE (base) != SImode
1849 && GET_MODE (base) != Pmode))
1852 if (REGNO (base) == STACK_POINTER_REGNUM
1853 || REGNO (base) == FRAME_POINTER_REGNUM
1854 || ((reload_completed || reload_in_progress)
1855 && frame_pointer_needed
1856 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1857 || REGNO (base) == ARG_POINTER_REGNUM
1859 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1860 pointer = base_ptr = true;
1862 if ((reload_completed || reload_in_progress)
1863 && base == cfun->machine->base_reg)
1864 pointer = base_ptr = literal_pool = true;
1867 /* Validate index register. */
1870 if (GET_CODE (indx) == UNSPEC)
1871 switch (XINT (indx, 1))
1875 disp = gen_rtx_UNSPEC (Pmode,
1876 gen_rtvec (1, XVECEXP (indx, 0, 0)),
1877 UNSPEC_LTREL_OFFSET);
1881 indx = XVECEXP (indx, 0, 1);
1884 case UNSPEC_LTREL_BASE:
1885 if (XVECLEN (indx, 0) == 1)
1886 indx = fake_pool_base, literal_pool = true;
1888 indx = XVECEXP (indx, 0, 1);
1896 || (GET_MODE (indx) != SImode
1897 && GET_MODE (indx) != Pmode))
1900 if (REGNO (indx) == STACK_POINTER_REGNUM
1901 || REGNO (indx) == FRAME_POINTER_REGNUM
1902 || ((reload_completed || reload_in_progress)
1903 && frame_pointer_needed
1904 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1905 || REGNO (indx) == ARG_POINTER_REGNUM
1907 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1908 pointer = indx_ptr = true;
1910 if ((reload_completed || reload_in_progress)
1911 && indx == cfun->machine->base_reg)
1912 pointer = indx_ptr = literal_pool = true;
1915 /* Prefer to use pointer as base, not index. */
1916 if (base && indx && !base_ptr
1917 && (indx_ptr || (!REG_POINTER (base) && REG_POINTER (indx))))
1924 /* Validate displacement. */
1927 /* If virtual registers are involved, the displacement will change later
1928 anyway as the virtual registers get eliminated. This could make a
1929 valid displacement invalid, but it is more likely to make an invalid
1930 displacement valid, because we sometimes access the register save area
1931 via negative offsets to one of those registers.
1932 Thus we don't check the displacement for validity here. If after
1933 elimination the displacement turns out to be invalid after all,
1934 this is fixed up by reload in any case. */
1935 if (base != arg_pointer_rtx
1936 && indx != arg_pointer_rtx
1937 && base != return_address_pointer_rtx
1938 && indx != return_address_pointer_rtx
1939 && base != frame_pointer_rtx
1940 && indx != frame_pointer_rtx
1941 && base != virtual_stack_vars_rtx
1942 && indx != virtual_stack_vars_rtx)
1943 if (!DISP_IN_RANGE (offset))
1948 /* All the special cases are pointers. */
1951 /* In the small-PIC case, the linker converts @GOT
1952 and @GOTNTPOFF offsets to possible displacements. */
1953 if (GET_CODE (disp) == UNSPEC
1954 && (XINT (disp, 1) == UNSPEC_GOT
1955 || XINT (disp, 1) == UNSPEC_GOTNTPOFF)
1961 /* Accept pool label offsets. */
1962 else if (GET_CODE (disp) == UNSPEC
1963 && XINT (disp, 1) == UNSPEC_POOL_OFFSET)
1966 /* Accept literal pool references. */
1967 else if (GET_CODE (disp) == UNSPEC
1968 && XINT (disp, 1) == UNSPEC_LTREL_OFFSET)
1970 orig_disp = gen_rtx_CONST (Pmode, disp);
1973 /* If we have an offset, make sure it does not
1974 exceed the size of the constant pool entry. */
1975 rtx sym = XVECEXP (disp, 0, 0);
1976 if (offset >= GET_MODE_SIZE (get_pool_mode (sym)))
1979 orig_disp = plus_constant (orig_disp, offset);
1994 out->disp = orig_disp;
1995 out->pointer = pointer;
1996 out->literal_pool = literal_pool;
2002 /* Decompose a RTL expression OP for a shift count into its components,
2003 and return the base register in BASE and the offset in OFFSET.
2005 Return true if OP is a valid shift count, false if not. */
2008 s390_decompose_shift_count (rtx op, rtx *base, HOST_WIDE_INT *offset)
2010 HOST_WIDE_INT off = 0;
2012 /* We can have an integer constant, an address register,
2013 or a sum of the two. */
2014 if (GET_CODE (op) == CONST_INT)
2019 if (op && GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
2021 off = INTVAL (XEXP (op, 1));
2024 while (op && GET_CODE (op) == SUBREG)
2025 op = SUBREG_REG (op);
2027 if (op && GET_CODE (op) != REG)
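/* Example (illustrative, per the comment above): the shift count
   (plus (reg %r1) (const_int 7)) decomposes into base %r1 with
   offset 7, while a plain (const_int 63) yields a null base and
   offset 63. */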
2039 /* Return true if CODE is a valid address without index. */
2042 s390_legitimate_address_without_index_p (rtx op)
2044 struct s390_address addr;
2046 if (!s390_decompose_address (XEXP (op, 0), &addr))
2055 /* Return true if ADDR is of kind symbol_ref or symbol_ref + const_int
2056 and return these parts in SYMREF and ADDEND. You can pass NULL in
2057 SYMREF and/or ADDEND if you are not interested in these values. */
2060 s390_symref_operand_p (rtx addr, rtx *symref, HOST_WIDE_INT *addend)
2062 HOST_WIDE_INT tmpaddend = 0;
2064 if (GET_CODE (addr) == CONST)
2065 addr = XEXP (addr, 0);
2067 if (GET_CODE (addr) == PLUS)
2069 if (GET_CODE (XEXP (addr, 0)) == SYMBOL_REF
2070 && CONST_INT_P (XEXP (addr, 1)))
2072 tmpaddend = INTVAL (XEXP (addr, 1));
2073 addr = XEXP (addr, 0);
2079 if (GET_CODE (addr) != SYMBOL_REF)
2085 *addend = tmpaddend;
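/* Example (illustrative): for (const (plus (symbol_ref "sym")
   (const_int 8))) this returns true with *symref set to the SYMBOL_REF
   and *addend set to 8; for a bare (symbol_ref "sym") the addend is 0. */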
2091 /* Return true if the address in OP is valid for constraint letter C
2092 if wrapped in a MEM rtx. Set LIT_POOL_OK to true if literal
2093 pool MEMs should be accepted. Only the Q, R, S, T constraint
2094 letters are allowed for C. */
2097 s390_check_qrst_address (char c, rtx op, bool lit_pool_ok)
2099 struct s390_address addr;
2100 bool decomposed = false;
2102 /* This check makes sure that no symbolic addresses (except literal
2103 pool references) are accepted by the R or T constraints. */
2104 if (s390_symref_operand_p (op, NULL, NULL))
2108 if (!s390_decompose_address (op, &addr))
2110 if (!addr.literal_pool)
2117 case 'Q': /* no index short displacement */
2118 if (!decomposed && !s390_decompose_address (op, &addr))
2122 if (!s390_short_displacement (addr.disp))
2126 case 'R': /* with index short displacement */
2127 if (TARGET_LONG_DISPLACEMENT)
2129 if (!decomposed && !s390_decompose_address (op, &addr))
2131 if (!s390_short_displacement (addr.disp))
2134 /* Any invalid address here will be fixed up by reload,
2135 so accept it for the most generic constraint. */
2138 case 'S': /* no index long displacement */
2139 if (!TARGET_LONG_DISPLACEMENT)
2141 if (!decomposed && !s390_decompose_address (op, &addr))
2145 if (s390_short_displacement (addr.disp))
2149 case 'T': /* with index long displacement */
2150 if (!TARGET_LONG_DISPLACEMENT)
2152 /* Any invalid address here will be fixed up by reload,
2153 so accept it for the most generic constraint. */
2154 if ((decomposed || s390_decompose_address (op, &addr))
2155 && s390_short_displacement (addr.disp))
2165 /* Evaluates constraint strings described by the regular expression
2166 ([A|B|Z](Q|R|S|T))|U|W|Y and returns 1 if OP is a valid operand for
2167 the constraint given in STR, and 0 otherwise. */
2170 s390_mem_constraint (const char *str, rtx op)
2177 /* Check for offsettable variants of memory constraints. */
2178 if (!MEM_P (op) || MEM_VOLATILE_P (op))
2180 if ((reload_completed || reload_in_progress)
2181 ? !offsettable_memref_p (op) : !offsettable_nonstrict_memref_p (op))
2183 return s390_check_qrst_address (str[1], XEXP (op, 0), true);
2185 /* Check for non-literal-pool variants of memory constraints. */
2188 return s390_check_qrst_address (str[1], XEXP (op, 0), false);
2193 if (GET_CODE (op) != MEM)
2195 return s390_check_qrst_address (c, XEXP (op, 0), true);
2197 return (s390_check_qrst_address ('Q', op, true)
2198 || s390_check_qrst_address ('R', op, true));
2200 return (s390_check_qrst_address ('S', op, true)
2201 || s390_check_qrst_address ('T', op, true));
2203 /* Simply check for the basic form of a shift count. Reload will
2204 take care of making sure we have a proper base register. */
2205 if (!s390_decompose_shift_count (op, NULL, NULL))
2209 return s390_check_qrst_address (str[1], op, true);
2217 /* Evaluates constraint strings starting with letter O. Input
2218 parameter C is the second letter following the "O" in the constraint
2219 string. Returns 1 if VALUE meets the respective constraint and 0
2220 otherwise. */
2223 s390_O_constraint_str (const char c, HOST_WIDE_INT value)
2231 return trunc_int_for_mode (value, SImode) == value;
2235 || s390_single_part (GEN_INT (value), DImode, SImode, 0) == 1;
2238 return s390_single_part (GEN_INT (value - 1), DImode, SImode, -1) == 1;
2246 /* Evaluates constraint strings starting with letter N. Parameter STR
2247 contains the letters following letter "N" in the constraint string.
2248 Returns true if VALUE matches the constraint. */
2251 s390_N_constraint_str (const char *str, HOST_WIDE_INT value)
2253 enum machine_mode mode, part_mode;
2255 int part, part_goal;
2261 part_goal = str[0] - '0';
2305 if (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (part_mode))
2308 part = s390_single_part (GEN_INT (value), mode, part_mode, def);
2311 if (part_goal != -1 && part_goal != part)
2318 /* Returns true if the input parameter VALUE is a float zero. */
2321 s390_float_const_zero_p (rtx value)
2323 return (GET_MODE_CLASS (GET_MODE (value)) == MODE_FLOAT
2324 && value == CONST0_RTX (GET_MODE (value)));
2328 /* Compute a (partial) cost for rtx X. Return true if the complete
2329 cost has been computed, and false if subexpressions should be
2330 scanned. In either case, *TOTAL contains the cost result.
2331 CODE contains GET_CODE (x), OUTER_CODE contains the code
2332 of the superexpression of x. */
2335 s390_rtx_costs (rtx x, int code, int outer_code, int *total,
2336 bool speed ATTRIBUTE_UNUSED)
2359 *total = COSTS_N_INSNS (1);
2364 /* Check for multiply and add. */
2365 if ((GET_MODE (x) == DFmode || GET_MODE (x) == SFmode)
2366 && GET_CODE (XEXP (x, 0)) == MULT
2367 && TARGET_HARD_FLOAT && TARGET_FUSED_MADD)
2369 /* This is the multiply and add case. */
2370 if (GET_MODE (x) == DFmode)
2371 *total = s390_cost->madbr;
2373 *total = s390_cost->maebr;
2374 *total += (rtx_cost (XEXP (XEXP (x, 0), 0), MULT, speed)
2375 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT, speed)
2376 + rtx_cost (XEXP (x, 1), (enum rtx_code) code, speed));
2377 return true; /* Do not do an additional recursive descent. */
2379 *total = COSTS_N_INSNS (1);
2383 switch (GET_MODE (x))
2387 rtx left = XEXP (x, 0);
2388 rtx right = XEXP (x, 1);
2389 if (GET_CODE (right) == CONST_INT
2390 && CONST_OK_FOR_K (INTVAL (right)))
2391 *total = s390_cost->mhi;
2392 else if (GET_CODE (left) == SIGN_EXTEND)
2393 *total = s390_cost->mh;
2395 *total = s390_cost->ms; /* msr, ms, msy */
2400 rtx left = XEXP (x, 0);
2401 rtx right = XEXP (x, 1);
2404 if (GET_CODE (right) == CONST_INT
2405 && CONST_OK_FOR_K (INTVAL (right)))
2406 *total = s390_cost->mghi;
2407 else if (GET_CODE (left) == SIGN_EXTEND)
2408 *total = s390_cost->msgf;
2410 *total = s390_cost->msg; /* msgr, msg */
2412 else /* TARGET_31BIT */
2414 if (GET_CODE (left) == SIGN_EXTEND
2415 && GET_CODE (right) == SIGN_EXTEND)
2416 /* mulsidi case: mr, m */
2417 *total = s390_cost->m;
2418 else if (GET_CODE (left) == ZERO_EXTEND
2419 && GET_CODE (right) == ZERO_EXTEND
2420 && TARGET_CPU_ZARCH)
2421 /* umulsidi case: ml, mlr */
2422 *total = s390_cost->ml;
2424 /* Complex calculation is required. */
2425 *total = COSTS_N_INSNS (40);
2431 *total = s390_cost->mult_df;
2434 *total = s390_cost->mxbr;
2443 if (GET_MODE (x) == TImode) /* 128 bit division */
2444 *total = s390_cost->dlgr;
2445 else if (GET_MODE (x) == DImode)
2447 rtx right = XEXP (x, 1);
2448 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2449 *total = s390_cost->dlr;
2450 else /* 64 by 64 bit division */
2451 *total = s390_cost->dlgr;
2453 else if (GET_MODE (x) == SImode) /* 32 bit division */
2454 *total = s390_cost->dlr;
2459 if (GET_MODE (x) == DImode)
2461 rtx right = XEXP (x, 1);
2462 if (GET_CODE (right) == ZERO_EXTEND) /* 64 by 32 bit division */
2464 *total = s390_cost->dsgfr;
2466 *total = s390_cost->dr;
2467 else /* 64 by 64 bit division */
2468 *total = s390_cost->dsgr;
2470 else if (GET_MODE (x) == SImode) /* 32 bit division */
2471 *total = s390_cost->dlr;
2472 else if (GET_MODE (x) == SFmode)
2474 *total = s390_cost->debr;
2476 else if (GET_MODE (x) == DFmode)
2478 *total = s390_cost->ddbr;
2480 else if (GET_MODE (x) == TFmode)
2482 *total = s390_cost->dxbr;
2487 if (GET_MODE (x) == SFmode)
2488 *total = s390_cost->sqebr;
2489 else if (GET_MODE (x) == DFmode)
2490 *total = s390_cost->sqdbr;
2492 *total = s390_cost->sqxbr;
2497 if (outer_code == MULT || outer_code == DIV || outer_code == MOD
2498 || outer_code == PLUS || outer_code == MINUS
2499 || outer_code == COMPARE)
2504 *total = COSTS_N_INSNS (1);
2505 if (GET_CODE (XEXP (x, 0)) == AND
2506 && GET_CODE (XEXP (x, 1)) == CONST_INT
2507 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2509 rtx op0 = XEXP (XEXP (x, 0), 0);
2510 rtx op1 = XEXP (XEXP (x, 0), 1);
2511 rtx op2 = XEXP (x, 1);
2513 if (memory_operand (op0, GET_MODE (op0))
2514 && s390_tm_ccmode (op1, op2, 0) != VOIDmode)
2516 if (register_operand (op0, GET_MODE (op0))
2517 && s390_tm_ccmode (op1, op2, 1) != VOIDmode)
2527 /* Return the cost of an address rtx ADDR. */
2530 s390_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
2532 struct s390_address ad;
2533 if (!s390_decompose_address (addr, &ad))
2536 return ad.indx ? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
2539 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
2540 otherwise return 0. */
2543 tls_symbolic_operand (rtx op)
2545 if (GET_CODE (op) != SYMBOL_REF)
2547 return SYMBOL_REF_TLS_MODEL (op);
2550 /* Split DImode access register reference REG (on 64-bit) into its constituent
2551 low and high parts, and store them into LO and HI. Note that gen_lowpart/
2552 gen_highpart cannot be used as they assume all registers are word-sized,
2553 while our access registers have only half that size. */
2556 s390_split_access_reg (rtx reg, rtx *lo, rtx *hi)
2558 gcc_assert (TARGET_64BIT);
2559 gcc_assert (ACCESS_REG_P (reg));
2560 gcc_assert (GET_MODE (reg) == DImode);
2561 gcc_assert (!(REGNO (reg) & 1));
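/* The pair is ordered most-significant-first: the even register
   holds the high part, the odd register the low part.  */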
2563 *lo = gen_rtx_REG (SImode, REGNO (reg) + 1);
2564 *hi = gen_rtx_REG (SImode, REGNO (reg));
2567 /* Return true if OP contains a symbol reference.  */
2570 symbolic_reference_mentioned_p (rtx op)
2575 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
2578 fmt = GET_RTX_FORMAT (GET_CODE (op));
2579 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2585 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2586 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2590 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
2597 /* Return true if OP contains a reference to a thread-local symbol. */
2600 tls_symbolic_reference_mentioned_p (rtx op)
2605 if (GET_CODE (op) == SYMBOL_REF)
2606 return tls_symbolic_operand (op);
2608 fmt = GET_RTX_FORMAT (GET_CODE (op));
2609 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
2615 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
2616 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
2620 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
2628 /* Return true if OP is a legitimate general operand when
2629 generating PIC code. It is given that flag_pic is on
2630 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2633 legitimate_pic_operand_p (rtx op)
2635 /* Accept all non-symbolic constants. */
2636 if (!SYMBOLIC_CONST (op))
2639 /* Reject everything else; must be handled
2640 via emit_symbolic_move. */
2644 /* Returns true if the constant value OP is a legitimate general operand.
2645 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
2648 legitimate_constant_p (rtx op)
2650 /* Accept all non-symbolic constants. */
2651 if (!SYMBOLIC_CONST (op))
2654 /* Accept immediate LARL operands. */
2655 if (TARGET_CPU_ZARCH && larl_operand (op, VOIDmode))
2658 /* Thread-local symbols are never legal constants. This is
2659 so that emit_call knows that computing such addresses
2660 might require a function call. */
2661 if (TLS_SYMBOLIC_CONST (op))
2664 /* In the PIC case, symbolic constants must *not* be
2665 forced into the literal pool. We accept them here,
2666 so that they will be handled by emit_symbolic_move. */
2670 /* All remaining non-PIC symbolic constants are
2671 forced into the literal pool. */
2675 /* Determine if it's legal to put X into the constant pool. This
2676 is not possible if X contains the address of a symbol that is
2677 not constant (TLS) or not known at final link time (PIC). */
2680 s390_cannot_force_const_mem (rtx x)
2682 switch (GET_CODE (x))
2686 /* Accept all non-symbolic constants. */
2690 /* Labels are OK iff we are non-PIC. */
2691 return flag_pic != 0;
2694 /* 'Naked' TLS symbol references are never OK,
2695 non-TLS symbols are OK iff we are non-PIC. */
2696 if (tls_symbolic_operand (x))
2699 return flag_pic != 0;
2702 return s390_cannot_force_const_mem (XEXP (x, 0));
2705 return s390_cannot_force_const_mem (XEXP (x, 0))
2706 || s390_cannot_force_const_mem (XEXP (x, 1));
2709 switch (XINT (x, 1))
2711 /* Only lt-relative or GOT-relative UNSPECs are OK. */
2712 case UNSPEC_LTREL_OFFSET:
2720 case UNSPEC_GOTNTPOFF:
2721 case UNSPEC_INDNTPOFF:
2724 /* If the literal pool shares the code section, execute template
2725 placeholders must be put into the pool as well. */
2727 return TARGET_CPU_ZARCH;
2739 /* Returns true if the constant value OP is a legitimate general
2740 operand during and after reload. The difference from
2741 legitimate_constant_p is that this function will not accept
2742 a constant that would need to be forced to the literal pool
2743 before it can be used as an operand. */
2746 legitimate_reload_constant_p (rtx op)
2748 /* Accept la(y) operands. */
2749 if (GET_CODE (op) == CONST_INT
2750 && DISP_IN_RANGE (INTVAL (op)))
2753 /* Accept l(g)hi/l(g)fi operands. */
2754 if (GET_CODE (op) == CONST_INT
2755 && (CONST_OK_FOR_K (INTVAL (op)) || CONST_OK_FOR_Os (INTVAL (op))))
2758 /* Accept lliXX operands. */
2760 && GET_CODE (op) == CONST_INT
2761 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2762 && s390_single_part (op, word_mode, HImode, 0) >= 0)
2766 && GET_CODE (op) == CONST_INT
2767 && trunc_int_for_mode (INTVAL (op), word_mode) == INTVAL (op)
2768 && s390_single_part (op, word_mode, SImode, 0) >= 0)
2771 /* Accept larl operands. */
2772 if (TARGET_CPU_ZARCH
2773 && larl_operand (op, VOIDmode))
2776 /* Accept lzXX operands. */
2777 if (GET_CODE (op) == CONST_DOUBLE
2778 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', "G"))
2781 /* Accept double-word operands that can be split. */
2782 if (GET_CODE (op) == CONST_INT
2783 && trunc_int_for_mode (INTVAL (op), word_mode) != INTVAL (op))
2785 enum machine_mode dword_mode = word_mode == SImode ? DImode : TImode;
2786 rtx hi = operand_subword (op, 0, 0, dword_mode);
2787 rtx lo = operand_subword (op, 1, 0, dword_mode);
2788 return legitimate_reload_constant_p (hi)
2789 && legitimate_reload_constant_p (lo);
2792 /* Everything else cannot be handled without reload. */
2796 /* Given an rtx OP being reloaded into a reg required to be in class RCLASS,
2797 return the class of reg to actually use. */
2800 s390_preferred_reload_class (rtx op, enum reg_class rclass)
2802 switch (GET_CODE (op))
2804 /* Constants we cannot reload must be forced into the
2809 if (legitimate_reload_constant_p (op))
2814 /* If a symbolic constant or a PLUS is reloaded,
2815 it is most likely being used as an address, so
2816 prefer ADDR_REGS. If RCLASS is not a superset
2817 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
2822 if (reg_class_subset_p (ADDR_REGS, rclass))
2834 /* Return true if ADDR is SYMBOL_REF + addend with addend being a
2835 multiple of ALIGNMENT and the SYMBOL_REF being naturally aligned. */
2839 s390_check_symref_alignment (rtx addr, HOST_WIDE_INT alignment)
2841 HOST_WIDE_INT addend;
2844 if (!s390_symref_operand_p (addr, &symref, &addend))
2847 return (!SYMBOL_REF_NOT_NATURALLY_ALIGNED_P (symref)
2848 && !(addend & (alignment - 1)));
2851 /* ADDR is moved into REG using larl. If ADDR isn't a valid larl
2852 operand, SCRATCH is used to reload the even part of the address, which is then incremented by one via la. */
2856 s390_reload_larl_operand (rtx reg, rtx addr, rtx scratch)
2858 HOST_WIDE_INT addend;
2861 if (!s390_symref_operand_p (addr, &symref, &addend))
2865 /* Easy case. The addend is even so larl will do fine. */
2866 emit_move_insn (reg, addr);
2869 /* We can leave the scratch register untouched if the target
2870 register is a valid base register. */
2871 if (REGNO (reg) < FIRST_PSEUDO_REGISTER
2872 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS)
2875 gcc_assert (REGNO (scratch) < FIRST_PSEUDO_REGISTER);
2876 gcc_assert (REGNO_REG_CLASS (REGNO (scratch)) == ADDR_REGS);
2879 emit_move_insn (scratch,
2880 gen_rtx_CONST (Pmode,
2881 gen_rtx_PLUS (Pmode, symref,
2882 GEN_INT (addend - 1))));
2884 emit_move_insn (scratch, symref);
2886 /* Increment the address using la in order to avoid clobbering cc. */
2887 emit_move_insn (reg, gen_rtx_PLUS (Pmode, scratch, const1_rtx));
2891 /* Generate what is necessary to move between REG and MEM using
2892 SCRATCH. The direction is given by TOMEM. */
2895 s390_reload_symref_address (rtx reg, rtx mem, rtx scratch, bool tomem)
2897 /* Reload might have pulled a constant out of the literal pool.
2898 Force it back in. */
2899 if (CONST_INT_P (mem) || GET_CODE (mem) == CONST_DOUBLE
2900 || GET_CODE (mem) == CONST)
2901 mem = force_const_mem (GET_MODE (reg), mem);
2903 gcc_assert (MEM_P (mem));
2905 /* For a load from memory we can leave the scratch register
2906 untouched if the target register is a valid base register. */
2908 && REGNO (reg) < FIRST_PSEUDO_REGISTER
2909 && REGNO_REG_CLASS (REGNO (reg)) == ADDR_REGS
2910 && GET_MODE (reg) == GET_MODE (scratch))
2913 /* Load address into scratch register. Since we can't have a
2914 secondary reload for a secondary reload we have to cover the case
2915 where larl would need a secondary reload here as well. */
2916 s390_reload_larl_operand (scratch, XEXP (mem, 0), scratch);
2918 /* Now we can use a standard load/store to do the move. */
2920 emit_move_insn (replace_equiv_address (mem, scratch), reg);
2922 emit_move_insn (reg, replace_equiv_address (mem, scratch));
2925 /* Inform reload about cases where moving X with a mode MODE to a register in
2926 RCLASS requires an extra scratch or immediate register. Return the class
2927 needed for the immediate register. */
2929 static enum reg_class
2930 s390_secondary_reload (bool in_p, rtx x, enum reg_class rclass,
2931 enum machine_mode mode, secondary_reload_info *sri)
2933 /* Intermediate register needed. */
2934 if (reg_classes_intersect_p (CC_REGS, rclass))
2935 return GENERAL_REGS;
2939 /* On z10 several optimizer steps may generate larl operands with an odd addend. */
2942 && s390_symref_operand_p (x, NULL, NULL)
2944 && !s390_check_symref_alignment (x, 2))
2945 sri->icode = ((mode == DImode) ? CODE_FOR_reloaddi_larl_odd_addend_z10
2946 : CODE_FOR_reloadsi_larl_odd_addend_z10);
2948 /* On z10 we need a scratch register when moving QI, TI or floating
2949 point mode values from or to a memory location with a SYMBOL_REF
2950 or if the symref addend of a SI or DI move is not aligned to the
2951 width of the access. */
2953 && s390_symref_operand_p (XEXP (x, 0), NULL, NULL)
2954 && (mode == QImode || mode == TImode || FLOAT_MODE_P (mode)
2955 || (!TARGET_64BIT && mode == DImode)
2956 || ((mode == HImode || mode == SImode || mode == DImode)
2957 && (!s390_check_symref_alignment (XEXP (x, 0),
2958 GET_MODE_SIZE (mode))))))
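/* For each machine mode, select the matching z10 reload pattern:
   the DImode or SImode address variant, and the to-register or
   to-memory direction.  */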
2960 #define __SECONDARY_RELOAD_CASE(M,m) \
2963 sri->icode = in_p ? CODE_FOR_reload##m##di_toreg_z10 : \
2964 CODE_FOR_reload##m##di_tomem_z10; \
2966 sri->icode = in_p ? CODE_FOR_reload##m##si_toreg_z10 : \
2967 CODE_FOR_reload##m##si_tomem_z10; \
2970 switch (GET_MODE (x))
2972 __SECONDARY_RELOAD_CASE (QI, qi);
2973 __SECONDARY_RELOAD_CASE (HI, hi);
2974 __SECONDARY_RELOAD_CASE (SI, si);
2975 __SECONDARY_RELOAD_CASE (DI, di);
2976 __SECONDARY_RELOAD_CASE (TI, ti);
2977 __SECONDARY_RELOAD_CASE (SF, sf);
2978 __SECONDARY_RELOAD_CASE (DF, df);
2979 __SECONDARY_RELOAD_CASE (TF, tf);
2980 __SECONDARY_RELOAD_CASE (SD, sd);
2981 __SECONDARY_RELOAD_CASE (DD, dd);
2982 __SECONDARY_RELOAD_CASE (TD, td);
2987 #undef __SECONDARY_RELOAD_CASE
2991 /* We need a scratch register when loading a PLUS expression which
2992 is not a legitimate operand of the LOAD ADDRESS instruction. */
2993 if (in_p && s390_plus_operand (x, mode))
2994 sri->icode = (TARGET_64BIT ?
2995 CODE_FOR_reloaddi_plus : CODE_FOR_reloadsi_plus);
2997 /* When performing a multiword move from or to memory, we have to make sure the
2998 second chunk in memory is addressable without causing a displacement
2999 overflow. If that would be the case we calculate the address in
3000 a scratch register. */
3002 && GET_CODE (XEXP (x, 0)) == PLUS
3003 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3004 && !DISP_IN_RANGE (INTVAL (XEXP (XEXP (x, 0), 1))
3005 + GET_MODE_SIZE (mode) - 1))
3007 /* For GENERAL_REGS a displacement overflow is no problem if occurring
3008 in an s_operand address since we may fall back to lm/stm. So we only
3009 have to care about overflows in the b+i+d case. */
3010 if ((reg_classes_intersect_p (GENERAL_REGS, rclass)
3011 && s390_class_max_nregs (GENERAL_REGS, mode) > 1
3012 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS)
3013 /* For FP_REGS no lm/stm is available so this check is triggered
3014 for displacement overflows in b+i+d and b+d like addresses. */
3015 || (reg_classes_intersect_p (FP_REGS, rclass)
3016 && s390_class_max_nregs (FP_REGS, mode) > 1))
3019 sri->icode = (TARGET_64BIT ?
3020 CODE_FOR_reloaddi_nonoffmem_in :
3021 CODE_FOR_reloadsi_nonoffmem_in);
3023 sri->icode = (TARGET_64BIT ?
3024 CODE_FOR_reloaddi_nonoffmem_out :
3025 CODE_FOR_reloadsi_nonoffmem_out);
3029 /* A scratch address register is needed when a symbolic constant is
3030 copied to r0 compiling with -fPIC. In other cases the target
3031 register might be used as temporary (see legitimize_pic_address). */
3032 if (in_p && SYMBOLIC_CONST (x) && flag_pic == 2 && rclass != ADDR_REGS)
3033 sri->icode = (TARGET_64BIT ?
3034 CODE_FOR_reloaddi_PIC_addr :
3035 CODE_FOR_reloadsi_PIC_addr);
3037 /* Either scratch or no register needed. */
3041 /* Generate code to load SRC, which is PLUS that is not a
3042 legitimate operand for the LA instruction, into TARGET.
3043 SCRATCH may be used as scratch register. */
3046 s390_expand_plus_operand (rtx target, rtx src,
3050 struct s390_address ad;
3052 /* src must be a PLUS; get its two operands. */
3053 gcc_assert (GET_CODE (src) == PLUS);
3054 gcc_assert (GET_MODE (src) == Pmode);
3056 /* Check if any of the two operands is already scheduled
3057 for replacement by reload. This can happen e.g. when
3058 float registers occur in an address. */
3059 sum1 = find_replacement (&XEXP (src, 0));
3060 sum2 = find_replacement (&XEXP (src, 1));
3061 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3063 /* If the address is already strictly valid, there's nothing to do. */
3064 if (!s390_decompose_address (src, &ad)
3065 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3066 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
3068 /* Otherwise, one of the operands cannot be an address register;
3069 we reload its value into the scratch register. */
3070 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
3072 emit_move_insn (scratch, sum1);
3075 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
3077 emit_move_insn (scratch, sum2);
3081 /* According to the way these invalid addresses are generated
3082 in reload.c, it should never happen (at least on s390) that
3083 *neither* of the PLUS components, after find_replacements
3084 was applied, is an address register. */
3085 if (sum1 == scratch && sum2 == scratch)
3091 src = gen_rtx_PLUS (Pmode, sum1, sum2);
3094 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
3095 is only ever performed on addresses, so we can mark the
3096 sum as legitimate for LA in any case. */
3097 s390_load_address (target, src);
3101 /* Return true if ADDR is a valid memory address.
3102 STRICT specifies whether strict register checking applies. */
3105 s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict)
3107 struct s390_address ad;
3110 && larl_operand (addr, VOIDmode)
3111 && (mode == VOIDmode
3112 || s390_check_symref_alignment (addr, GET_MODE_SIZE (mode))))
3115 if (!s390_decompose_address (addr, &ad))
3120 if (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
3123 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx)))
3129 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER
3130 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS))
3134 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER
3135 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS))
3141 /* Return true if OP is a valid operand for the LA instruction.
3142 In 31-bit, we need to prove that the result is used as an
3143 address, as LA performs only a 31-bit addition. */
3146 legitimate_la_operand_p (rtx op)
3148 struct s390_address addr;
3149 if (!s390_decompose_address (op, &addr))
3152 return (TARGET_64BIT || addr.pointer);
3155 /* Return true if it is valid *and* preferable to use LA to
3156 compute the sum of OP1 and OP2. */
3159 preferred_la_operand_p (rtx op1, rtx op2)
3161 struct s390_address addr;
3163 if (op2 != const0_rtx)
3164 op1 = gen_rtx_PLUS (Pmode, op1, op2);
3166 if (!s390_decompose_address (op1, &addr))
3168 if (addr.base && !REGNO_OK_FOR_BASE_P (REGNO (addr.base)))
3170 if (addr.indx && !REGNO_OK_FOR_INDEX_P (REGNO (addr.indx)))
3173 if (!TARGET_64BIT && !addr.pointer)
3179 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
3180 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
3186 /* Emit a forced load-address operation to load SRC into DST.
3187 This will use the LOAD ADDRESS instruction even in situations
3188 where legitimate_la_operand_p (SRC) returns false. */
3191 s390_load_address (rtx dst, rtx src)
3194 emit_move_insn (dst, src);
3196 emit_insn (gen_force_la_31 (dst, src));
3199 /* Return a legitimate reference for ORIG (an address) using the
3200 register REG. If REG is 0, a new pseudo is generated.
3202 There are two types of references that must be handled:
3204 1. Global data references must load the address from the GOT, via
3205 the PIC reg. An insn is emitted to do this load, and the reg is
3208 2. Static data references, constant pool addresses, and code labels
3209 compute the address as an offset from the GOT, whose base is in
3210 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to
3211 differentiate them from global data objects. The returned
3212 address is the PIC reg + an unspec constant.
3214 TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC
3215 reg also appears in the address. */
3218 legitimize_pic_address (rtx orig, rtx reg)
3224 gcc_assert (!TLS_SYMBOLIC_CONST (addr));
3226 if (GET_CODE (addr) == LABEL_REF
3227 || (GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (addr)))
3229 /* This is a local symbol. */
3230 if (TARGET_CPU_ZARCH && larl_operand (addr, VOIDmode))
3232 /* Access local symbols PC-relative via LARL.
3233 This is the same as in the non-PIC case, so it is
3234 handled automatically ... */
3238 /* Access local symbols relative to the GOT. */
3240 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3242 if (reload_in_progress || reload_completed)
3243 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3245 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTOFF);
3246 addr = gen_rtx_CONST (Pmode, addr);
3247 addr = force_const_mem (Pmode, addr);
3248 emit_move_insn (temp, addr);
3250 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3253 s390_load_address (reg, new_rtx);
3258 else if (GET_CODE (addr) == SYMBOL_REF)
3261 reg = gen_reg_rtx (Pmode);
3265 /* Assume GOT offset < 4k. This is handled the same way
3266 in both 31- and 64-bit code (@GOT). */
3268 if (reload_in_progress || reload_completed)
3269 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3271 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3272 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3273 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
3274 new_rtx = gen_const_mem (Pmode, new_rtx);
3275 emit_move_insn (reg, new_rtx);
3278 else if (TARGET_CPU_ZARCH)
3280 /* If the GOT offset might be >= 4k, we determine the position
3281 of the GOT entry via a PC-relative LARL (@GOTENT). */
3283 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3285 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3286 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3288 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTENT);
3289 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3290 emit_move_insn (temp, new_rtx);
3292 new_rtx = gen_const_mem (Pmode, temp);
3293 emit_move_insn (reg, new_rtx);
3298 /* If the GOT offset might be >= 4k, we have to load it
3299 from the literal pool (@GOT). */
3301 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3303 gcc_assert (REGNO (temp) >= FIRST_PSEUDO_REGISTER
3304 || REGNO_REG_CLASS (REGNO (temp)) == ADDR_REGS);
3306 if (reload_in_progress || reload_completed)
3307 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3309 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOT);
3310 addr = gen_rtx_CONST (Pmode, addr);
3311 addr = force_const_mem (Pmode, addr);
3312 emit_move_insn (temp, addr);
3314 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3315 new_rtx = gen_const_mem (Pmode, new_rtx);
3316 emit_move_insn (reg, new_rtx);
3322 if (GET_CODE (addr) == CONST)
3324 addr = XEXP (addr, 0);
3325 if (GET_CODE (addr) == UNSPEC)
3327 gcc_assert (XVECLEN (addr, 0) == 1);
3328 switch (XINT (addr, 1))
3330 /* If someone moved a GOT-relative UNSPEC
3331 out of the literal pool, force them back in. */
3334 new_rtx = force_const_mem (Pmode, orig);
3337 /* @GOT is OK as is if small. */
3340 new_rtx = force_const_mem (Pmode, orig);
3343 /* @GOTENT is OK as is. */
3347 /* @PLT is OK as is on 64-bit, must be converted to
3348 GOT-relative @PLTOFF on 31-bit. */
3350 if (!TARGET_CPU_ZARCH)
3352 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3354 if (reload_in_progress || reload_completed)
3355 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3357 addr = XVECEXP (addr, 0, 0);
3358 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr),
3360 addr = gen_rtx_CONST (Pmode, addr);
3361 addr = force_const_mem (Pmode, addr);
3362 emit_move_insn (temp, addr);
3364 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3367 s390_load_address (reg, new_rtx);
3373 /* Everything else cannot happen. */
3379 gcc_assert (GET_CODE (addr) == PLUS);
3381 if (GET_CODE (addr) == PLUS)
3383 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
3385 gcc_assert (!TLS_SYMBOLIC_CONST (op0));
3386 gcc_assert (!TLS_SYMBOLIC_CONST (op1));
3388 /* Check first to see if this is a constant offset
3389 from a local symbol reference. */
3390 if ((GET_CODE (op0) == LABEL_REF
3391 || (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (op0)))
3392 && GET_CODE (op1) == CONST_INT)
3394 if (TARGET_CPU_ZARCH
3395 && larl_operand (op0, VOIDmode)
3396 && INTVAL (op1) < (HOST_WIDE_INT)1 << 31
3397 && INTVAL (op1) >= -((HOST_WIDE_INT)1 << 31))
3399 if (INTVAL (op1) & 1)
3401 /* LARL can't handle odd offsets, so emit a
3402 pair of LARL and LA. */
3403 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3405 if (!DISP_IN_RANGE (INTVAL (op1)))
3407 HOST_WIDE_INT even = INTVAL (op1) - 1;
3408 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
3409 op0 = gen_rtx_CONST (Pmode, op0);
3413 emit_move_insn (temp, op0);
3414 new_rtx = gen_rtx_PLUS (Pmode, temp, op1);
3418 s390_load_address (reg, new_rtx);
3424 /* If the offset is even, we can just use LARL.
3425 This will happen automatically. */
3430 /* Access local symbols relative to the GOT. */
3432 rtx temp = reg ? reg : gen_reg_rtx (Pmode);
3434 if (reload_in_progress || reload_completed)
3435 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3437 addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op0),
3439 addr = gen_rtx_PLUS (Pmode, addr, op1);
3440 addr = gen_rtx_CONST (Pmode, addr);
3441 addr = force_const_mem (Pmode, addr);
3442 emit_move_insn (temp, addr);
3444 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3447 s390_load_address (reg, new_rtx);
3453 /* Now, check whether it is a GOT relative symbol plus offset
3454 that was pulled out of the literal pool. Force it back in. */
3456 else if (GET_CODE (op0) == UNSPEC
3457 && GET_CODE (op1) == CONST_INT
3458 && XINT (op0, 1) == UNSPEC_GOTOFF)
3460 gcc_assert (XVECLEN (op0, 0) == 1);
3462 new_rtx = force_const_mem (Pmode, orig);
3465 /* Otherwise, compute the sum. */
3468 base = legitimize_pic_address (XEXP (addr, 0), reg);
3469 new_rtx = legitimize_pic_address (XEXP (addr, 1),
3470 base == reg ? NULL_RTX : reg);
3471 if (GET_CODE (new_rtx) == CONST_INT)
3472 new_rtx = plus_constant (base, INTVAL (new_rtx));
3475 if (GET_CODE (new_rtx) == PLUS && CONSTANT_P (XEXP (new_rtx, 1)))
3477 base = gen_rtx_PLUS (Pmode, base, XEXP (new_rtx, 0));
3478 new_rtx = XEXP (new_rtx, 1);
3480 new_rtx = gen_rtx_PLUS (Pmode, base, new_rtx);
3483 if (GET_CODE (new_rtx) == CONST)
3484 new_rtx = XEXP (new_rtx, 0);
3485 new_rtx = force_operand (new_rtx, 0);
3492 /* Load the thread pointer into a register. */
3495 s390_get_thread_pointer (void)
3497 rtx tp = gen_reg_rtx (Pmode);
3499 emit_move_insn (tp, gen_rtx_REG (Pmode, TP_REGNUM));
3500 mark_reg_pointer (tp, BITS_PER_WORD);
3505 /* Emit a tls call insn. The call target is the SYMBOL_REF stored
3506 in s390_tls_symbol which always refers to __tls_get_offset.
3507 The returned offset is written to RESULT_REG and a USE rtx is
3508 generated for TLS_CALL. */
3510 static GTY(()) rtx s390_tls_symbol;
3513 s390_emit_tls_call_insn (rtx result_reg, rtx tls_call)
3517 gcc_assert (flag_pic);
3519 if (!s390_tls_symbol)
3520 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
3522 insn = s390_emit_call (s390_tls_symbol, tls_call, result_reg,
3523 gen_rtx_REG (Pmode, RETURN_REGNUM));
3525 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), result_reg);
3526 RTL_CONST_CALL_P (insn) = 1;
3529 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3530 this (thread-local) address. REG may be used as temporary. */
3533 legitimize_tls_address (rtx addr, rtx reg)
3535 rtx new_rtx, tls_call, temp, base, r2, insn;
3537 if (GET_CODE (addr) == SYMBOL_REF)
3538 switch (tls_symbolic_operand (addr))
3540 case TLS_MODEL_GLOBAL_DYNAMIC:
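/* Global dynamic: load the @TLSGD argument into r2, call
   __tls_get_offset, and add the returned offset to the thread
   pointer.  */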
3542 r2 = gen_rtx_REG (Pmode, 2);
3543 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
3544 new_rtx = gen_rtx_CONST (Pmode, tls_call);
3545 new_rtx = force_const_mem (Pmode, new_rtx);
3546 emit_move_insn (r2, new_rtx);
3547 s390_emit_tls_call_insn (r2, tls_call);
3548 insn = get_insns ();
3551 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3552 temp = gen_reg_rtx (Pmode);
3553 emit_libcall_block (insn, temp, r2, new_rtx);
3555 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3558 s390_load_address (reg, new_rtx);
3563 case TLS_MODEL_LOCAL_DYNAMIC:
3565 r2 = gen_rtx_REG (Pmode, 2);
3566 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
3567 new_rtx = gen_rtx_CONST (Pmode, tls_call);
3568 new_rtx = force_const_mem (Pmode, new_rtx);
3569 emit_move_insn (r2, new_rtx);
3570 s390_emit_tls_call_insn (r2, tls_call);
3571 insn = get_insns ();
3574 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
3575 temp = gen_reg_rtx (Pmode);
3576 emit_libcall_block (insn, temp, r2, new_rtx);
3578 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3579 base = gen_reg_rtx (Pmode);
3580 s390_load_address (base, new_rtx);
3582 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
3583 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3584 new_rtx = force_const_mem (Pmode, new_rtx);
3585 temp = gen_reg_rtx (Pmode);
3586 emit_move_insn (temp, new_rtx);
3588 new_rtx = gen_rtx_PLUS (Pmode, base, temp);
3591 s390_load_address (reg, new_rtx);
3596 case TLS_MODEL_INITIAL_EXEC:
3599 /* Assume GOT offset < 4k. This is handled the same way
3600 in both 31- and 64-bit code. */
3602 if (reload_in_progress || reload_completed)
3603 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3605 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3606 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3607 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new_rtx);
3608 new_rtx = gen_const_mem (Pmode, new_rtx);
3609 temp = gen_reg_rtx (Pmode);
3610 emit_move_insn (temp, new_rtx);
3612 else if (TARGET_CPU_ZARCH)
3614 /* If the GOT offset might be >= 4k, we determine the position
3615 of the GOT entry via a PC-relative LARL. */
3617 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3618 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3619 temp = gen_reg_rtx (Pmode);
3620 emit_move_insn (temp, new_rtx);
3622 new_rtx = gen_const_mem (Pmode, temp);
3623 temp = gen_reg_rtx (Pmode);
3624 emit_move_insn (temp, new_rtx);
3628 /* If the GOT offset might be >= 4k, we have to load it
3629 from the literal pool. */
3631 if (reload_in_progress || reload_completed)
3632 df_set_regs_ever_live (PIC_OFFSET_TABLE_REGNUM, true);
3634 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
3635 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3636 new_rtx = force_const_mem (Pmode, new_rtx);
3637 temp = gen_reg_rtx (Pmode);
3638 emit_move_insn (temp, new_rtx);
3640 new_rtx = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
3641 new_rtx = gen_const_mem (Pmode, new_rtx);
3643 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
3644 temp = gen_reg_rtx (Pmode);
3645 emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
3649 /* In position-dependent code, load the absolute address of
3650 the GOT entry from the literal pool. */
3652 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
3653 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3654 new_rtx = force_const_mem (Pmode, new_rtx);
3655 temp = gen_reg_rtx (Pmode);
3656 emit_move_insn (temp, new_rtx);
3659 new_rtx = gen_const_mem (Pmode, new_rtx);
3660 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new_rtx, addr), UNSPEC_TLS_LOAD);
3661 temp = gen_reg_rtx (Pmode);
3662 emit_insn (gen_rtx_SET (Pmode, temp, new_rtx));
3665 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3668 s390_load_address (reg, new_rtx);
3673 case TLS_MODEL_LOCAL_EXEC:
3674 new_rtx = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
3675 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3676 new_rtx = force_const_mem (Pmode, new_rtx);
3677 temp = gen_reg_rtx (Pmode);
3678 emit_move_insn (temp, new_rtx);
3680 new_rtx = gen_rtx_PLUS (Pmode, s390_get_thread_pointer (), temp);
3683 s390_load_address (reg, new_rtx);
3692 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
3694 switch (XINT (XEXP (addr, 0), 1))
3696 case UNSPEC_INDNTPOFF:
3697 gcc_assert (TARGET_CPU_ZARCH);
3706 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3707 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3709 new_rtx = XEXP (XEXP (addr, 0), 0);
3710 if (GET_CODE (new_rtx) != SYMBOL_REF)
3711 new_rtx = gen_rtx_CONST (Pmode, new_rtx);
3713 new_rtx = legitimize_tls_address (new_rtx, reg);
3714 new_rtx = plus_constant (new_rtx, INTVAL (XEXP (XEXP (addr, 0), 1)));
3715 new_rtx = force_operand (new_rtx, 0);
3719 gcc_unreachable (); /* for now ... */
3724 /* Emit insns making the address in operands[1] valid for a standard
3725 move to operands[0]. operands[1] is replaced by an address which
3726 should be used instead of the former RTX to emit the move pattern. */
3730 emit_symbolic_move (rtx *operands)
3732 rtx temp = !can_create_pseudo_p () ? operands[0] : gen_reg_rtx (Pmode);
3734 if (GET_CODE (operands[0]) == MEM)
3735 operands[1] = force_reg (Pmode, operands[1]);
3736 else if (TLS_SYMBOLIC_CONST (operands[1]))
3737 operands[1] = legitimize_tls_address (operands[1], temp);
3739 operands[1] = legitimize_pic_address (operands[1], temp);
3742 /* Try machine-dependent ways of modifying an illegitimate address X
3743 to be legitimate. If we find one, return the new, valid address.
3745 OLDX is the address as it was before break_out_memory_refs was called.
3746 In some cases it is useful to look at this to decide what needs to be done.
3748 MODE is the mode of the operand pointed to by X.
3750 When -fpic is used, special handling is needed for symbolic references.
3751 See comments by legitimize_pic_address for details. */
3754 s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
3755 enum machine_mode mode ATTRIBUTE_UNUSED)
3757 rtx constant_term = const0_rtx;
3759 if (TLS_SYMBOLIC_CONST (x))
3761 x = legitimize_tls_address (x, 0);
3763 if (s390_legitimate_address_p (mode, x, FALSE))
3766 else if (GET_CODE (x) == PLUS
3767 && (TLS_SYMBOLIC_CONST (XEXP (x, 0))
3768 || TLS_SYMBOLIC_CONST (XEXP (x, 1))))
3774 if (SYMBOLIC_CONST (x)
3775 || (GET_CODE (x) == PLUS
3776 && (SYMBOLIC_CONST (XEXP (x, 0))
3777 || SYMBOLIC_CONST (XEXP (x, 1)))))
3778 x = legitimize_pic_address (x, 0);
3780 if (s390_legitimate_address_p (mode, x, FALSE))
3784 x = eliminate_constant_term (x, &constant_term);
3786 /* Optimize loading of large displacements by splitting them
3787 into the multiple of 4K and the rest; this allows the
3788 former to be CSE'd if possible.
3790 Don't do this if the displacement is added to a register
3791 pointing into the stack frame, as the offsets will
3792 change later anyway. */
3794 if (GET_CODE (constant_term) == CONST_INT
3795 && !TARGET_LONG_DISPLACEMENT
3796 && !DISP_IN_RANGE (INTVAL (constant_term))
3797 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
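/* LOWER keeps the low 12 bits that fit into the displacement
   field; the XOR clears exactly those bits from UPPER, so
   LOWER + UPPER reconstructs the original displacement.  */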
3799 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
3800 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
3802 rtx temp = gen_reg_rtx (Pmode);
3803 rtx val = force_operand (GEN_INT (upper), temp);
3805 emit_move_insn (temp, val);
3807 x = gen_rtx_PLUS (Pmode, x, temp);
3808 constant_term = GEN_INT (lower);
3811 if (GET_CODE (x) == PLUS)
3813 if (GET_CODE (XEXP (x, 0)) == REG)
3815 rtx temp = gen_reg_rtx (Pmode);
3816 rtx val = force_operand (XEXP (x, 1), temp);
3818 emit_move_insn (temp, val);
3820 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
3823 else if (GET_CODE (XEXP (x, 1)) == REG)
3825 rtx temp = gen_reg_rtx (Pmode);
3826 rtx val = force_operand (XEXP (x, 0), temp);
3828 emit_move_insn (temp, val);
3830 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
3834 if (constant_term != const0_rtx)
3835 x = gen_rtx_PLUS (Pmode, x, constant_term);
3840 /* Try a machine-dependent way of reloading an illegitimate address AD
3841 operand. If we find one, push the reload and return the new address.
3843 MODE is the mode of the enclosing MEM. OPNUM is the operand number
3844 and TYPE is the reload type of the current reload. */
3847 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED,
3848 int opnum, int type)
3850 if (!optimize || TARGET_LONG_DISPLACEMENT)
3853 if (GET_CODE (ad) == PLUS)
3855 rtx tem = simplify_binary_operation (PLUS, Pmode,
3856 XEXP (ad, 0), XEXP (ad, 1));
3861 if (GET_CODE (ad) == PLUS
3862 && GET_CODE (XEXP (ad, 0)) == REG
3863 && GET_CODE (XEXP (ad, 1)) == CONST_INT
3864 && !DISP_IN_RANGE (INTVAL (XEXP (ad, 1))))
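/* Split the displacement into a 12-bit part that fits into the
   displacement field and a 4K-aligned remainder that gets
   reloaded into the base register.  */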
3866 HOST_WIDE_INT lower = INTVAL (XEXP (ad, 1)) & 0xfff;
3867 HOST_WIDE_INT upper = INTVAL (XEXP (ad, 1)) ^ lower;
3868 rtx cst, tem, new_rtx;
3870 cst = GEN_INT (upper);
3871 if (!legitimate_reload_constant_p (cst))
3872 cst = force_const_mem (Pmode, cst);
3874 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst);
3875 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower));
3877 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0,
3878 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3879 opnum, (enum reload_type) type);
3886 /* Emit code to move LEN bytes from SRC to DST. */
3889 s390_expand_movmem (rtx dst, rtx src, rtx len)
3891 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
3893 if (INTVAL (len) > 0)
3894 emit_insn (gen_movmem_short (dst, src, GEN_INT (INTVAL (len) - 1)));
3897 else if (TARGET_MVCLE)
3899 emit_insn (gen_movmem_long (dst, src, convert_to_mode (Pmode, len, 1)));
3904 rtx dst_addr, src_addr, count, blocks, temp;
3905 rtx loop_start_label = gen_label_rtx ();
3906 rtx loop_end_label = gen_label_rtx ();
3907 rtx end_label = gen_label_rtx ();
3908 enum machine_mode mode;
3910 mode = GET_MODE (len);
3911 if (mode == VOIDmode)
3914 dst_addr = gen_reg_rtx (Pmode);
3915 src_addr = gen_reg_rtx (Pmode);
3916 count = gen_reg_rtx (mode);
3917 blocks = gen_reg_rtx (mode);
3919 convert_move (count, len, 1);
3920 emit_cmp_and_jump_insns (count, const0_rtx,
3921 EQ, NULL_RTX, mode, 1, end_label);
3923 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
3924 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
3925 dst = change_address (dst, VOIDmode, dst_addr);
3926 src = change_address (src, VOIDmode, src_addr);
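/* The loop below moves 256 bytes per iteration.  movmem_short
   takes the operand length minus one, as MVC encodes it, and
   BLOCKS counts the number of full 256-byte chunks.  */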
3928 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
3931 emit_move_insn (count, temp);
3933 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
3936 emit_move_insn (blocks, temp);
3938 emit_cmp_and_jump_insns (blocks, const0_rtx,
3939 EQ, NULL_RTX, mode, 1, loop_end_label);
3941 emit_label (loop_start_label);
3944 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 768))
3948 /* Issue a read prefetch for the +3 cache line. */
3949 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, src_addr, GEN_INT (768)),
3950 const0_rtx, const0_rtx);
3951 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
3952 emit_insn (prefetch);
3954 /* Issue a write prefetch for the +3 cache line. */
3955 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (768)),
3956 const1_rtx, const0_rtx);
3957 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
3958 emit_insn (prefetch);
3961 emit_insn (gen_movmem_short (dst, src, GEN_INT (255)));
3962 s390_load_address (dst_addr,
3963 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
3964 s390_load_address (src_addr,
3965 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
3967 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
3970 emit_move_insn (blocks, temp);
3972 emit_cmp_and_jump_insns (blocks, const0_rtx,
3973 EQ, NULL_RTX, mode, 1, loop_end_label);
3975 emit_jump (loop_start_label);
3976 emit_label (loop_end_label);
3978 emit_insn (gen_movmem_short (dst, src,
3979 convert_to_mode (Pmode, count, 1)));
3980 emit_label (end_label);
3984 /* Emit code to set LEN bytes at DST to VAL.
3985 Make use of clrmem if VAL is zero. */
3988 s390_expand_setmem (rtx dst, rtx len, rtx val)
3990 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0)
3993 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode);
3995 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257)
3997 if (val == const0_rtx && INTVAL (len) <= 256)
3998 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1)));
4001 /* Initialize memory by storing the first byte. */
4002 emit_move_insn (adjust_address (dst, QImode, 0), val);
4004 if (INTVAL (len) > 1)
4006 /* Initiate a 1-byte overlapping move.
4007 The first byte of DST is propagated through DSTP1.
4008 Prepare a movmem for: DST+1 = DST (length = LEN - 1).
4009 DST is set to size 1 so the rest of the memory location
4010 does not count as source operand. */
4011 rtx dstp1 = adjust_address (dst, VOIDmode, 1);
4012 set_mem_size (dst, const1_rtx);
4014 emit_insn (gen_movmem_short (dstp1, dst,
4015 GEN_INT (INTVAL (len) - 2)));
4020 else if (TARGET_MVCLE)
4022 val = force_not_mem (convert_modes (Pmode, QImode, val, 1));
4023 emit_insn (gen_setmem_long (dst, convert_to_mode (Pmode, len, 1), val));
4028 rtx dst_addr, src_addr, count, blocks, temp, dstp1 = NULL_RTX;
4029 rtx loop_start_label = gen_label_rtx ();
4030 rtx loop_end_label = gen_label_rtx ();
4031 rtx end_label = gen_label_rtx ();
4032 enum machine_mode mode;
4034 mode = GET_MODE (len);
4035 if (mode == VOIDmode)
4038 dst_addr = gen_reg_rtx (Pmode);
4039 src_addr = gen_reg_rtx (Pmode);
4040 count = gen_reg_rtx (mode);
4041 blocks = gen_reg_rtx (mode);
4043 convert_move (count, len, 1);
4044 emit_cmp_and_jump_insns (count, const0_rtx,
4045 EQ, NULL_RTX, mode, 1, end_label);
4047 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
4048 dst = change_address (dst, VOIDmode, dst_addr);
4050 if (val == const0_rtx)
4051 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
4055 dstp1 = adjust_address (dst, VOIDmode, 1);
4056 set_mem_size (dst, const1_rtx);
4058 /* Initialize memory by storing the first byte. */
4059 emit_move_insn (adjust_address (dst, QImode, 0), val);
4061 /* If count is 1 we are done. */
4062 emit_cmp_and_jump_insns (count, const1_rtx,
4063 EQ, NULL_RTX, mode, 1, end_label);
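/* One byte has already been stored, so the overlapping move has
   to copy COUNT - 1 bytes, which MVC encodes as COUNT - 2.  */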
4065 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1,
4069 emit_move_insn (count, temp);
4071 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
4074 emit_move_insn (blocks, temp);
4076 emit_cmp_and_jump_insns (blocks, const0_rtx,
4077 EQ, NULL_RTX, mode, 1, loop_end_label);
4079 emit_label (loop_start_label);
4082 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 1024))
4084 /* Issue a write prefetch for the +4 cache line. */
4085 rtx prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, dst_addr,
4087 const1_rtx, const0_rtx);
4088 emit_insn (prefetch);
4089 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
4092 if (val == const0_rtx)
4093 emit_insn (gen_clrmem_short (dst, GEN_INT (255)));
4095 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255)));
4096 s390_load_address (dst_addr,
4097 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
4099 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
4102 emit_move_insn (blocks, temp);
4104 emit_cmp_and_jump_insns (blocks, const0_rtx,
4105 EQ, NULL_RTX, mode, 1, loop_end_label);
4107 emit_jump (loop_start_label);
4108 emit_label (loop_end_label);
4110 if (val == const0_rtx)
4111 emit_insn (gen_clrmem_short (dst, convert_to_mode (Pmode, count, 1)));
4113 emit_insn (gen_movmem_short (dstp1, dst, convert_to_mode (Pmode, count, 1)));
4114 emit_label (end_label);
4118 /* Emit code to compare LEN bytes at OP0 with those at OP1,
4119 and return the result in TARGET. */
4122 s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
4124 rtx ccreg = gen_rtx_REG (CCUmode, CC_REGNUM);
4127 /* As the result of CMPINT is inverted compared to what we need,
4128 we have to swap the operands. */
4129 tmp = op0; op0 = op1; op1 = tmp;
4131 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
4133 if (INTVAL (len) > 0)
4135 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (INTVAL (len) - 1)));
4136 emit_insn (gen_cmpint (target, ccreg));
4139 emit_move_insn (target, const0_rtx);
4141 else if (TARGET_MVCLE)
4143 emit_insn (gen_cmpmem_long (op0, op1, convert_to_mode (Pmode, len, 1)));
4144 emit_insn (gen_cmpint (target, ccreg));
4148 rtx addr0, addr1, count, blocks, temp;
4149 rtx loop_start_label = gen_label_rtx ();
4150 rtx loop_end_label = gen_label_rtx ();
4151 rtx end_label = gen_label_rtx ();
4152 enum machine_mode mode;
4154 mode = GET_MODE (len);
4155 if (mode == VOIDmode)
4158 addr0 = gen_reg_rtx (Pmode);
4159 addr1 = gen_reg_rtx (Pmode);
4160 count = gen_reg_rtx (mode);
4161 blocks = gen_reg_rtx (mode);
4163 convert_move (count, len, 1);
4164 emit_cmp_and_jump_insns (count, const0_rtx,
4165 EQ, NULL_RTX, mode, 1, end_label);
4167 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
4168 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
4169 op0 = change_address (op0, VOIDmode, addr0);
4170 op1 = change_address (op1, VOIDmode, addr1);
4172 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1,
4175 emit_move_insn (count, temp);
4177 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1,
4180 emit_move_insn (blocks, temp);
4182 emit_cmp_and_jump_insns (blocks, const0_rtx,
4183 EQ, NULL_RTX, mode, 1, loop_end_label);
4185 emit_label (loop_start_label);
4188 && (GET_CODE (len) != CONST_INT || INTVAL (len) > 512))
4192 /* Issue a read prefetch for the +2 cache line of operand 1. */
4193 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr0, GEN_INT (512)),
4194 const0_rtx, const0_rtx);
4195 emit_insn (prefetch);
4196 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
4198 /* Issue a read prefetch for the +2 cache line of operand 2. */
4199 prefetch = gen_prefetch (gen_rtx_PLUS (Pmode, addr1, GEN_INT (512)),
4200 const0_rtx, const0_rtx);
4201 emit_insn (prefetch);
4202 PREFETCH_SCHEDULE_BARRIER_P (prefetch) = true;
4205 emit_insn (gen_cmpmem_short (op0, op1, GEN_INT (255)));
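/* If CLC found a difference, the CC is nonzero; leave the loop
   and let CMPINT turn the CC into the result value.  */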
4206 temp = gen_rtx_NE (VOIDmode, ccreg, const0_rtx);
4207 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
4208 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
4209 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
4210 emit_jump_insn (temp);
4212 s390_load_address (addr0,
4213 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
4214 s390_load_address (addr1,
4215 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
4217 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1,
4220 emit_move_insn (blocks, temp);
4222 emit_cmp_and_jump_insns (blocks, const0_rtx,
4223 EQ, NULL_RTX, mode, 1, loop_end_label);
4225 emit_jump (loop_start_label);
4226 emit_label (loop_end_label);
4228 emit_insn (gen_cmpmem_short (op0, op1,
4229 convert_to_mode (Pmode, count, 1)));
4230 emit_label (end_label);
4232 emit_insn (gen_cmpint (target, ccreg));
4237 /* Expand conditional increment or decrement using alc/slb instructions.
4238 Should generate code setting DST to either SRC or SRC + INCREMENT,
4239 depending on the result of the comparison CMP_OP0 CMP_CODE CMP_OP1.
4240 Returns true if successful, false otherwise.
4242 That makes it possible to implement some if-constructs without jumps e.g.:
4243 (borrow = CC0 | CC1 and carry = CC2 | CC3)
4244 unsigned int a, b, c;
4245 if (a < b) c++; -> CCU b > a -> CC2; c += carry;
4246 if (a < b) c--; -> CCL3 a - b -> borrow; c -= borrow;
4247 if (a <= b) c++; -> CCL3 b - a -> borrow; c += carry;
4248 if (a <= b) c--; -> CCU a <= b -> borrow; c -= borrow;
4250 Checks for EQ and NE with a nonzero value need an additional xor e.g.:
4251 if (a == b) c++; -> CCL3 a ^= b; 0 - a -> borrow; c += carry;
4252 if (a == b) c--; -> CCU a ^= b; a <= 0 -> CC0 | CC1; c -= borrow;
4253 if (a != b) c++; -> CCU a ^= b; a > 0 -> CC2; c += carry;
4254 if (a != b) c--; -> CCL3 a ^= b; 0 - a -> borrow; c -= borrow; */
4257 s390_expand_addcc (enum rtx_code cmp_code, rtx cmp_op0, rtx cmp_op1,
4258 rtx dst, rtx src, rtx increment)
4260 enum machine_mode cmp_mode;
4261 enum machine_mode cc_mode;
4267 if ((GET_MODE (cmp_op0) == SImode || GET_MODE (cmp_op0) == VOIDmode)
4268 && (GET_MODE (cmp_op1) == SImode || GET_MODE (cmp_op1) == VOIDmode))
4270 else if ((GET_MODE (cmp_op0) == DImode || GET_MODE (cmp_op0) == VOIDmode)
4271 && (GET_MODE (cmp_op1) == DImode || GET_MODE (cmp_op1) == VOIDmode))
4276 /* Try ADD LOGICAL WITH CARRY. */
4277 if (increment == const1_rtx)
4279 /* Determine CC mode to use. */
4280 if (cmp_code == EQ || cmp_code == NE)
4282 if (cmp_op1 != const0_rtx)
4284 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4285 NULL_RTX, 0, OPTAB_WIDEN);
4286 cmp_op1 = const0_rtx;
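/* With CMP_OP1 now zero, unsigned LEU is equivalent to EQ and
   GTU to NE, and these comparisons produce the carry needed
   for ALC.  */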
4289 cmp_code = cmp_code == EQ ? LEU : GTU;
4292 if (cmp_code == LTU || cmp_code == LEU)
4297 cmp_code = swap_condition (cmp_code);
4314 /* Emit comparison instruction pattern. */
4315 if (!register_operand (cmp_op0, cmp_mode))
4316 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4318 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4319 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4320 /* We use insn_invalid_p here to add clobbers if required. */
4321 ret = insn_invalid_p (emit_insn (insn));
4324 /* Emit ALC instruction pattern. */
4325 op_res = gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4326 gen_rtx_REG (cc_mode, CC_REGNUM),
4329 if (src != const0_rtx)
4331 if (!register_operand (src, GET_MODE (dst)))
4332 src = force_reg (GET_MODE (dst), src);
4334 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src);
4335 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx);
4338 p = rtvec_alloc (2);
4340 gen_rtx_SET (VOIDmode, dst, op_res);
4342 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4343 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4348 /* Try SUBTRACT LOGICAL WITH BORROW. */
4349 if (increment == constm1_rtx)
4351 /* Determine CC mode to use. */
4352 if (cmp_code == EQ || cmp_code == NE)
4354 if (cmp_op1 != const0_rtx)
4356 cmp_op0 = expand_simple_binop (cmp_mode, XOR, cmp_op0, cmp_op1,
4357 NULL_RTX, 0, OPTAB_WIDEN);
4358 cmp_op1 = const0_rtx;
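/* Same EQ/NE-to-unsigned conversion as in the ALC case above.  */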
4361 cmp_code = cmp_code == EQ ? LEU : GTU;
4364 if (cmp_code == GTU || cmp_code == GEU)
4369 cmp_code = swap_condition (cmp_code);
4386 /* Emit comparison instruction pattern. */
4387 if (!register_operand (cmp_op0, cmp_mode))
4388 cmp_op0 = force_reg (cmp_mode, cmp_op0);
4390 insn = gen_rtx_SET (VOIDmode, gen_rtx_REG (cc_mode, CC_REGNUM),
4391 gen_rtx_COMPARE (cc_mode, cmp_op0, cmp_op1));
4392 /* We use insn_invalid_p here to add clobbers if required. */
4393 ret = insn_invalid_p (emit_insn (insn));
4396 /* Emit SLB instruction pattern. */
4397 if (!register_operand (src, GET_MODE (dst)))
4398 src = force_reg (GET_MODE (dst), src);
4400 op_res = gen_rtx_MINUS (GET_MODE (dst),
4401 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx),
4402 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst),
4403 gen_rtx_REG (cc_mode, CC_REGNUM),
4405 p = rtvec_alloc (2);
4407 gen_rtx_SET (VOIDmode, dst, op_res);
4409 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4410 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
4418 /* Expand code for the insv template. Return true if successful. */
4421 s390_expand_insv (rtx dest, rtx op1, rtx op2, rtx src)
4423 int bitsize = INTVAL (op1);
4424 int bitpos = INTVAL (op2);
4426 /* On z10 we can use the risbg instruction to implement insv. */
4428 && ((GET_MODE (dest) == DImode && GET_MODE (src) == DImode)
4429 || (GET_MODE (dest) == SImode && GET_MODE (src) == SImode)))
4434 op = gen_rtx_SET (GET_MODE(src),
4435 gen_rtx_ZERO_EXTRACT (GET_MODE (dest), dest, op1, op2),
4437 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM));
4438 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, op, clobber)));
4443 /* We need byte alignment. */
4444 if (bitsize % BITS_PER_UNIT)
4448 && memory_operand (dest, VOIDmode)
4449 && (register_operand (src, word_mode)
4450 || const_int_operand (src, VOIDmode)))
4452 /* Emit standard pattern if possible. */
4453 enum machine_mode mode = smallest_mode_for_size (bitsize, MODE_INT);
4454 if (GET_MODE_BITSIZE (mode) == bitsize)
4455 emit_move_insn (adjust_address (dest, mode, 0), gen_lowpart (mode, src));
4457 /* (set (ze (mem)) (const_int)). */
4458 else if (const_int_operand (src, VOIDmode))
4460 int size = bitsize / BITS_PER_UNIT;
4461 rtx src_mem = adjust_address (force_const_mem (word_mode, src), BLKmode,
4462 GET_MODE_SIZE (word_mode) - size);
4464 dest = adjust_address (dest, BLKmode, 0);
4465 set_mem_size (dest, GEN_INT (size));
4466 s390_expand_movmem (dest, src_mem, GEN_INT (size));
4469 /* (set (ze (mem)) (reg)). */
4470 else if (register_operand (src, word_mode))
4472 if (bitsize <= GET_MODE_BITSIZE (SImode))
4473 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1,
4477 /* Emit st,stcmh sequence. */
4478 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode);
4479 int size = stcmh_width / BITS_PER_UNIT;
4481 emit_move_insn (adjust_address (dest, SImode, size),
4482 gen_lowpart (SImode, src));
4483 set_mem_size (dest, GEN_INT (size));
4484 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT
4485 (stcmh_width), const0_rtx),
4486 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT
4487 (GET_MODE_BITSIZE (SImode))));
4496 /* (set (ze (reg)) (const_int)). */
4498 && register_operand (dest, word_mode)
4499 && (bitpos % 16) == 0
4500 && (bitsize % 16) == 0
4501 && const_int_operand (src, VOIDmode))
4503 HOST_WIDE_INT val = INTVAL (src);
4504 int regpos = bitpos + bitsize;
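/* Insert the constant in 16-bit chunks, or in 32-bit chunks where
   the extended-immediate facility is available and alignment
   allows, starting from the most significant end of the field.  */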
4506 while (regpos > bitpos)
4508 enum machine_mode putmode;
4511 if (TARGET_EXTIMM && (regpos % 32 == 0) && (regpos >= bitpos + 32))
4516 putsize = GET_MODE_BITSIZE (putmode);
4518 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest,
4521 gen_int_mode (val, putmode));
4524 gcc_assert (regpos == bitpos);
4531 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic which returns a
4532 register that holds VAL of mode MODE shifted by COUNT bits. */
4535 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count)
4537 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
4538 NULL_RTX, 1, OPTAB_DIRECT);
4539 return expand_simple_binop (SImode, ASHIFT, val, count,
4540 NULL_RTX, 1, OPTAB_DIRECT);
4543 /* Structure to hold the initial parameters for a compare_and_swap operation
4544 in HImode and QImode. */
4546 struct alignment_context
4548 rtx memsi; /* SI aligned memory location. */
4549 rtx shift; /* Bit offset with regard to lsb. */
4550 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */
4551 rtx modemaski; /* ~modemask */
4552 bool aligned; /* True if memory is aligned, false otherwise. */
4555 /* A subroutine of s390_expand_cs_hqi and s390_expand_atomic to initialize
4556 structure AC for transparent simplification, if the memory alignment is known
4557 to be at least 32 bits. MEM is the memory location for the actual operation
4558 and MODE its mode. */
4561 init_alignment_context (struct alignment_context *ac, rtx mem,
4562 enum machine_mode mode)
4564 ac->shift = GEN_INT (GET_MODE_SIZE (SImode) - GET_MODE_SIZE (mode));
4565 ac->aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
4568 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
4571 /* Alignment is unknown. */
4572 rtx byteoffset, addr, align;
4574 /* Force the address into a register. */
4575 addr = force_reg (Pmode, XEXP (mem, 0));
4577 /* Align it to SImode. */
4578 align = expand_simple_binop (Pmode, AND, addr,
4579 GEN_INT (-GET_MODE_SIZE (SImode)),
4580 NULL_RTX, 1, OPTAB_DIRECT);
4582 ac->memsi = gen_rtx_MEM (SImode, align);
4583 MEM_VOLATILE_P (ac->memsi) = MEM_VOLATILE_P (mem);
4584 set_mem_alias_set (ac->memsi, ALIAS_SET_MEMORY_BARRIER);
4585 set_mem_align (ac->memsi, GET_MODE_BITSIZE (SImode));
4587 /* Calculate shiftcount. */
4588 byteoffset = expand_simple_binop (Pmode, AND, addr,
4589 GEN_INT (GET_MODE_SIZE (SImode) - 1),
4590 NULL_RTX, 1, OPTAB_DIRECT);
4591 /* As we already have some offset, evaluate the remaining distance. */
4592 ac->shift = expand_simple_binop (SImode, MINUS, ac->shift, byteoffset,
4593 NULL_RTX, 1, OPTAB_DIRECT);
4596 /* Shift is the byte count, but we need the bit count. */
4597 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT),
4598 NULL_RTX, 1, OPTAB_DIRECT);
4599 /* Calculate masks. */
4600 ac->modemask = expand_simple_binop (SImode, ASHIFT,
4601 GEN_INT (GET_MODE_MASK (mode)), ac->shift,
4602 NULL_RTX, 1, OPTAB_DIRECT);
4603 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1);
4606 /* Expand an atomic compare and swap operation for HImode and QImode. MEM is
4607 the memory location, CMP the old value to compare MEM with and NEW_RTX the value
4608 to set if CMP == MEM.
4609 CMP is never in memory for compare_and_swap_cc because
4610 expand_bool_compare_and_swap puts it into a register for later compare. */
4613 s390_expand_cs_hqi (enum machine_mode mode, rtx target, rtx mem, rtx cmp, rtx new_rtx)
4615 struct alignment_context ac;
4616 rtx cmpv, newv, val, resv, cc;
4617 rtx res = gen_reg_rtx (SImode);
4618 rtx csloop = gen_label_rtx ();
4619 rtx csend = gen_label_rtx ();
4621 gcc_assert (register_operand (target, VOIDmode));
4622 gcc_assert (MEM_P (mem));
4624 init_alignment_context (&ac, mem, mode);
4626 /* Shift the values to the correct bit positions. */
4627 if (!(ac.aligned && MEM_P (cmp)))
4628 cmp = s390_expand_mask_and_shift (cmp, mode, ac.shift);
4629 if (!(ac.aligned && MEM_P (new_rtx)))
4630 new_rtx = s390_expand_mask_and_shift (new_rtx, mode, ac.shift);
4632 /* Load full word. Subsequent loads are performed by CS. */
4633 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski,
4634 NULL_RTX, 1, OPTAB_DIRECT);
4636 /* Start CS loop. */
4637 emit_label (csloop);
4638 /* val = "<mem>00..0<mem>"
4639 * cmp = "00..0<cmp>00..0"
4640 * new = "00..0<new>00..0"
4643 /* Patch cmp and new with val at correct position. */
4644 if (ac.aligned && MEM_P (cmp))
4646 cmpv = force_reg (SImode, val);
4647 store_bit_field (cmpv, GET_MODE_BITSIZE (mode), 0, SImode, cmp);
4650 cmpv = force_reg (SImode, expand_simple_binop (SImode, IOR, cmp, val,
4651 NULL_RTX, 1, OPTAB_DIRECT));
4652 if (ac.aligned && MEM_P (new_rtx))
4654 newv = force_reg (SImode, val);
4655 store_bit_field (newv, GET_MODE_BITSIZE (mode), 0, SImode, new_rtx);
4658 newv = force_reg (SImode, expand_simple_binop (SImode, IOR, new_rtx, val,
4659 NULL_RTX, 1, OPTAB_DIRECT));
4661 /* Jump to end if we're done (likely?). */
4662 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi,
4665 /* Check for changes outside mode. */
4666 resv = expand_simple_binop (SImode, AND, res, ac.modemaski,
4667 NULL_RTX, 1, OPTAB_DIRECT);
4668 cc = s390_emit_compare (NE, resv, val);
4669 emit_move_insn (val, resv);
4670 /* If so, loop again. */
4671 s390_emit_jump (csloop, cc);
4675 /* Return the correct part of the bitfield. */
4676 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift,
4677 NULL_RTX, 1, OPTAB_DIRECT), 1);
4680 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location
4681 and VAL the value to play with. If AFTER is true then store the value
4682 MEM holds after the operation, if AFTER is false then store the value MEM
4683 holds before the operation. If TARGET is zero then discard that value, else
4684 store it to TARGET. */
4687 s390_expand_atomic (enum machine_mode mode, enum rtx_code code,
4688 rtx target, rtx mem, rtx val, bool after)
4690 struct alignment_context ac;
4692 rtx new_rtx = gen_reg_rtx (SImode);
4693 rtx orig = gen_reg_rtx (SImode);
4694 rtx csloop = gen_label_rtx ();
4696 gcc_assert (!target || register_operand (target, VOIDmode));
4697 gcc_assert (MEM_P (mem));
4699 init_alignment_context (&ac, mem, mode);
4701 /* Shift val to the correct bit positions.
4702 Preserve "icm", but prevent "ex icm". */
4703 if (!(ac.aligned && code == SET && MEM_P (val)))
4704 val = s390_expand_mask_and_shift (val, mode, ac.shift);
4706 /* Further preparation insns. */
4707 if (code == PLUS || code == MINUS)
4708 emit_move_insn (orig, val);
4709 else if (code == MULT || code == AND) /* val = "11..1<val>11..1" */
4710 val = expand_simple_binop (SImode, XOR, val, ac.modemaski,
4711 NULL_RTX, 1, OPTAB_DIRECT);
4713 /* Load full word. Subsequent loads are performed by CS. */
4714 cmp = force_reg (SImode, ac.memsi);
4716 /* Start CS loop. */
4717 emit_label (csloop);
4718 emit_move_insn (new_rtx, cmp);
4720 /* Patch new with val at correct position. */
4725 val = expand_simple_binop (SImode, code, new_rtx, orig,
4726 NULL_RTX, 1, OPTAB_DIRECT);
4727 val = expand_simple_binop (SImode, AND, val, ac.modemask,
4728 NULL_RTX, 1, OPTAB_DIRECT);
4731 if (ac.aligned && MEM_P (val))
4732 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0, SImode, val);
4735 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski,
4736 NULL_RTX, 1, OPTAB_DIRECT);
4737 new_rtx = expand_simple_binop (SImode, IOR, new_rtx, val,
4738 NULL_RTX, 1, OPTAB_DIRECT);
4744 new_rtx = expand_simple_binop (SImode, code, new_rtx, val,
4745 NULL_RTX, 1, OPTAB_DIRECT);
4747 case MULT: /* NAND */
4748 new_rtx = expand_simple_binop (SImode, AND, new_rtx, val,
4749 NULL_RTX, 1, OPTAB_DIRECT);
4750 new_rtx = expand_simple_binop (SImode, XOR, new_rtx, ac.modemask,
4751 NULL_RTX, 1, OPTAB_DIRECT);
4757 s390_emit_jump (csloop, s390_emit_compare_and_swap (NE, cmp,
4758 ac.memsi, cmp, new_rtx));
4760 /* Return the correct part of the bitfield. */
4762 convert_move (target, expand_simple_binop (SImode, LSHIFTRT,
4763 after ? new_rtx : cmp, ac.shift,
4764 NULL_RTX, 1, OPTAB_DIRECT), 1);
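/* Example of the AND/NAND preparation above (illustrative): for a HImode
   field with modemask = 0xffff0000, a field value of 0x1234 arrives
   shifted as val = 0x12340000; XORing with modemaski = 0x0000ffff yields
   "11..1<val>11..1" = 0x1234ffff, so the AND performed inside the loop
   leaves all bits outside the field untouched.  */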
4767 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
4768 We need to emit DTP-relative relocations. */
4770 static void s390_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
4773 s390_output_dwarf_dtprel (FILE *file, int size, rtx x)
4778 fputs ("\t.long\t", file);
4781 fputs ("\t.quad\t", file);
4786 output_addr_const (file, x);
4787 fputs ("@DTPOFF", file);
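/* For example, a size-8 entry for symbol "foo" comes out as
     .quad   foo@DTPOFF
   (illustrative only; "foo" is a made-up symbol name).  */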
4790 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
4791 /* Implement TARGET_MANGLE_TYPE. */
4794 s390_mangle_type (const_tree type)
4796 if (TYPE_MAIN_VARIANT (type) == long_double_type_node
4797 && TARGET_LONG_DOUBLE_128)
4800 /* For all other types, use normal C++ mangling. */
4805 /* In the name of slightly smaller debug output, and to cater to
4806 general assembler lossage, recognize various UNSPEC sequences
4807 and turn them back into a direct symbol reference. */
4810 s390_delegitimize_address (rtx orig_x)
4814 orig_x = delegitimize_mem_from_attrs (orig_x);
4816 if (GET_CODE (x) != MEM)
4820 if (GET_CODE (x) == PLUS
4821 && GET_CODE (XEXP (x, 1)) == CONST
4822 && GET_CODE (XEXP (x, 0)) == REG
4823 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
4825 y = XEXP (XEXP (x, 1), 0);
4826 if (GET_CODE (y) == UNSPEC
4827 && XINT (y, 1) == UNSPEC_GOT)
4828 return XVECEXP (y, 0, 0);
4832 if (GET_CODE (x) == CONST)
4835 if (GET_CODE (y) == UNSPEC
4836 && XINT (y, 1) == UNSPEC_GOTENT)
4837 return XVECEXP (y, 0, 0);
4844 /* Output operand OP to stdio stream FILE.
4845 OP is an address (register + offset) which is not used to address data;
4846 instead the rightmost bits are interpreted as the value. */
4849 print_shift_count_operand (FILE *file, rtx op)
4851 HOST_WIDE_INT offset;
4854 /* Extract base register and offset. */
4855 if (!s390_decompose_shift_count (op, &base, &offset))
4861 gcc_assert (GET_CODE (base) == REG);
4862 gcc_assert (REGNO (base) < FIRST_PSEUDO_REGISTER);
4863 gcc_assert (REGNO_REG_CLASS (REGNO (base)) == ADDR_REGS);
4866 /* Offsets are restricted to twelve bits. */
4867 fprintf (file, HOST_WIDE_INT_PRINT_DEC, offset & ((1 << 12) - 1));
4869 fprintf (file, "(%s)", reg_names[REGNO (base)]);
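/* Example output (illustrative): for (plus (reg 3) (const_int 46)) this
   prints "46(%r3)"; a bare (const_int 4097) prints "1", since only the
   low twelve bits of the offset are emitted.  */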
4872 /* See 'get_some_local_dynamic_name'. */
4875 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
4879 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
4881 x = get_pool_constant (x);
4882 return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
4885 if (GET_CODE (x) == SYMBOL_REF
4886 && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
4888 cfun->machine->some_ld_name = XSTR (x, 0);
4895 /* Locate some local-dynamic symbol still in use by this function
4896 so that we can print its name in local-dynamic base patterns. */
4899 get_some_local_dynamic_name (void)
4903 if (cfun->machine->some_ld_name)
4904 return cfun->machine->some_ld_name;
4906 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
4908 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
4909 return cfun->machine->some_ld_name;
4914 /* Output machine-dependent UNSPECs occurring in address constant X
4915 in assembler syntax to stdio stream FILE. Returns true if the
4916 constant X could be recognized, false otherwise. */
4919 s390_output_addr_const_extra (FILE *file, rtx x)
4921 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 1)
4922 switch (XINT (x, 1))
4925 output_addr_const (file, XVECEXP (x, 0, 0));
4926 fprintf (file, "@GOTENT");
4929 output_addr_const (file, XVECEXP (x, 0, 0));
4930 fprintf (file, "@GOT");
4933 output_addr_const (file, XVECEXP (x, 0, 0));
4934 fprintf (file, "@GOTOFF");
4937 output_addr_const (file, XVECEXP (x, 0, 0));
4938 fprintf (file, "@PLT");
4941 output_addr_const (file, XVECEXP (x, 0, 0));
4942 fprintf (file, "@PLTOFF");
4945 output_addr_const (file, XVECEXP (x, 0, 0));
4946 fprintf (file, "@TLSGD");
4949 assemble_name (file, get_some_local_dynamic_name ());
4950 fprintf (file, "@TLSLDM");
4953 output_addr_const (file, XVECEXP (x, 0, 0));
4954 fprintf (file, "@DTPOFF");
4957 output_addr_const (file, XVECEXP (x, 0, 0));
4958 fprintf (file, "@NTPOFF");
4960 case UNSPEC_GOTNTPOFF:
4961 output_addr_const (file, XVECEXP (x, 0, 0));
4962 fprintf (file, "@GOTNTPOFF");
4964 case UNSPEC_INDNTPOFF:
4965 output_addr_const (file, XVECEXP (x, 0, 0));
4966 fprintf (file, "@INDNTPOFF");
4970 if (GET_CODE (x) == UNSPEC && XVECLEN (x, 0) == 2)
4971 switch (XINT (x, 1))
4973 case UNSPEC_POOL_OFFSET:
4974 x = gen_rtx_MINUS (GET_MODE (x), XVECEXP (x, 0, 0), XVECEXP (x, 0, 1));
4975 output_addr_const (file, x);
4981 /* Output address operand ADDR in assembler syntax to
4982 stdio stream FILE. */
4985 print_operand_address (FILE *file, rtx addr)
4987 struct s390_address ad;
4989 if (s390_symref_operand_p (addr, NULL, NULL))
4991 gcc_assert (TARGET_Z10);
4992 output_addr_const (file, addr);
4996 if (!s390_decompose_address (addr, &ad)
4997 || (ad.base && !REGNO_OK_FOR_BASE_P (REGNO (ad.base)))
4998 || (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))))
4999 output_operand_lossage ("cannot decompose address");
5002 output_addr_const (file, ad.disp);
5004 fprintf (file, "0");
5006 if (ad.base && ad.indx)
5007 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
5008 reg_names[REGNO (ad.base)]);
5010 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
5013 /* Output operand X in assembler syntax to stdio stream FILE.
5014 CODE specifies the format flag. The following format flags
5017 'C': print opcode suffix for branch condition.
5018 'D': print opcode suffix for inverse branch condition.
5019 'E': print opcode suffix for branch on index instruction.
5020 'J': print tls_load/tls_gdcall/tls_ldcall suffix
5021 'G': print the size of the operand in bytes.
5022 'O': print only the displacement of a memory reference.
5023 'R': print only the base register of a memory reference.
5024 'S': print S-type memory reference (base+displacement).
5025 'N': print the second word of a DImode operand.
5026 'M': print the second word of a TImode operand.
5027 'Y': print shift count operand.
5029 'b': print integer X as if it's an unsigned byte.
5030 'c': print integer X as if it's a signed byte.
5031 'x': print integer X as if it's an unsigned halfword.
5032 'h': print integer X as if it's a signed halfword.
5033 'i': print the first nonzero HImode part of X.
5034 'j': print the first HImode part unequal to -1 of X.
5035 'k': print the first nonzero SImode part of X.
5036 'm': print the first SImode part unequal to -1 of X.
5037 'o': print integer X as if it's an unsigned 32-bit word. */
5040 print_operand (FILE *file, rtx x, int code)
5045 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
5049 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
5053 if (GET_CODE (x) == LE)
5054 fprintf (file, "l");
5055 else if (GET_CODE (x) == GT)
5056 fprintf (file, "h");
5062 if (GET_CODE (x) == SYMBOL_REF)
5064 fprintf (file, "%s", ":tls_load:");
5065 output_addr_const (file, x);
5067 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
5069 fprintf (file, "%s", ":tls_gdcall:");
5070 output_addr_const (file, XVECEXP (x, 0, 0));
5072 else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
5074 fprintf (file, "%s", ":tls_ldcall:");
5075 assemble_name (file, get_some_local_dynamic_name ());
5082 fprintf (file, "%u", GET_MODE_SIZE (GET_MODE (x)));
5087 struct s390_address ad;
5090 gcc_assert (GET_CODE (x) == MEM);
5091 ret = s390_decompose_address (XEXP (x, 0), &ad);
5093 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5094 gcc_assert (!ad.indx);
5097 output_addr_const (file, ad.disp);
5099 fprintf (file, "0");
5105 struct s390_address ad;
5108 gcc_assert (GET_CODE (x) == MEM);
5109 ret = s390_decompose_address (XEXP (x, 0), &ad);
5111 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5112 gcc_assert (!ad.indx);
5115 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
5117 fprintf (file, "0");
5123 struct s390_address ad;
5126 gcc_assert (GET_CODE (x) == MEM);
5127 ret = s390_decompose_address (XEXP (x, 0), &ad);
5129 gcc_assert (!ad.base || REGNO_OK_FOR_BASE_P (REGNO (ad.base)));
5130 gcc_assert (!ad.indx);
5133 output_addr_const (file, ad.disp);
5135 fprintf (file, "0");
5138 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
5143 if (GET_CODE (x) == REG)
5144 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5145 else if (GET_CODE (x) == MEM)
5146 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
5152 if (GET_CODE (x) == REG)
5153 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
5154 else if (GET_CODE (x) == MEM)
5155 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
5161 print_shift_count_operand (file, x);
5165 switch (GET_CODE (x))
5168 fprintf (file, "%s", reg_names[REGNO (x)]);
5172 output_address (XEXP (x, 0));
5179 output_addr_const (file, x);
5184 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
5185 else if (code == 'c')
5186 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xff) ^ 0x80) - 0x80);
5187 else if (code == 'x')
5188 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
5189 else if (code == 'h')
5190 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
5191 else if (code == 'i')
5192 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5193 s390_extract_part (x, HImode, 0));
5194 else if (code == 'j')
5195 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5196 s390_extract_part (x, HImode, -1));
5197 else if (code == 'k')
5198 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5199 s390_extract_part (x, SImode, 0));
5200 else if (code == 'm')
5201 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
5202 s390_extract_part (x, SImode, -1));
5203 else if (code == 'o')
5204 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffffffff);
5206 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
5210 gcc_assert (GET_MODE (x) == VOIDmode);
5212 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
5213 else if (code == 'x')
5214 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
5215 else if (code == 'h')
5216 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
5222 fatal_insn ("UNKNOWN in print_operand !?", x);
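/* Examples for the integer format flags above (illustrative only): for
   x = (const_int 0xabcd), 'x' prints 43981 (the unsigned halfword) while
   'h' prints -21555 (the same bits read as a signed halfword), and 'b'
   applied to (const_int 0x1ff) prints 255.  */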
5227 /* Target hook for assembling integer objects. We need to define it
5228 here to work around a bug in some versions of GAS, which couldn't
5229 handle values smaller than INT_MIN when printed in decimal. */
5232 s390_assemble_integer (rtx x, unsigned int size, int aligned_p)
5234 if (size == 8 && aligned_p
5235 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
5237 fprintf (asm_out_file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n",
5241 return default_assemble_integer (x, size, aligned_p);
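/* E.g. (const_int -2147483649) is emitted as
     .quad   0xffffffff7fffffff
   rather than in the decimal form the buggy assemblers mishandled
   (illustrative example).  */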
5244 /* Returns true if register REGNO is used for forming
5245 a memory address in expression X. */
5248 reg_used_in_mem_p (int regno, rtx x)
5250 enum rtx_code code = GET_CODE (x);
5256 if (refers_to_regno_p (regno, regno+1,
5260 else if (code == SET
5261 && GET_CODE (SET_DEST (x)) == PC)
5263 if (refers_to_regno_p (regno, regno+1,
5268 fmt = GET_RTX_FORMAT (code);
5269 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5272 && reg_used_in_mem_p (regno, XEXP (x, i)))
5275 else if (fmt[i] == 'E')
5276 for (j = 0; j < XVECLEN (x, i); j++)
5277 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
5283 /* Returns true if expression DEP_RTX sets an address register
5284 used by instruction INSN to address memory. */
5287 addr_generation_dependency_p (rtx dep_rtx, rtx insn)
5291 if (GET_CODE (dep_rtx) == INSN)
5292 dep_rtx = PATTERN (dep_rtx);
5294 if (GET_CODE (dep_rtx) == SET)
5296 target = SET_DEST (dep_rtx);
5297 if (GET_CODE (target) == STRICT_LOW_PART)
5298 target = XEXP (target, 0);
5299 while (GET_CODE (target) == SUBREG)
5300 target = SUBREG_REG (target);
5302 if (GET_CODE (target) == REG)
5304 int regno = REGNO (target);
5306 if (s390_safe_attr_type (insn) == TYPE_LA)
5308 pat = PATTERN (insn);
5309 if (GET_CODE (pat) == PARALLEL)
5311 gcc_assert (XVECLEN (pat, 0) == 2);
5312 pat = XVECEXP (pat, 0, 0);
5314 gcc_assert (GET_CODE (pat) == SET);
5315 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
5317 else if (get_attr_atype (insn) == ATYPE_AGEN)
5318 return reg_used_in_mem_p (regno, PATTERN (insn));
5324 /* Return 1 if DEP_INSN sets a register used by INSN in the agen unit. */
5327 s390_agen_dep_p (rtx dep_insn, rtx insn)
5329 rtx dep_rtx = PATTERN (dep_insn);
5332 if (GET_CODE (dep_rtx) == SET
5333 && addr_generation_dependency_p (dep_rtx, insn))
5335 else if (GET_CODE (dep_rtx) == PARALLEL)
5337 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
5339 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
5347 /* A C statement (sans semicolon) to update the integer scheduling priority
5348 INSN_PRIORITY (INSN). Increase the priority to execute the INSN earlier;
5349 reduce the priority to execute INSN later. Do not define this macro if
5350 you do not need to adjust the scheduling priorities of insns.
5352 A STD instruction should be scheduled earlier,
5353 in order to use the bypass. */
5357 s390_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
5359 if (! INSN_P (insn))
5362 if (s390_tune != PROCESSOR_2084_Z990
5363 && s390_tune != PROCESSOR_2094_Z9_109
5364 && s390_tune != PROCESSOR_2097_Z10)
5367 switch (s390_safe_attr_type (insn))
5371 priority = priority << 3;
5375 priority = priority << 1;
5384 /* The number of instructions that can be issued per cycle. */
5387 s390_issue_rate (void)
5391 case PROCESSOR_2084_Z990:
5392 case PROCESSOR_2094_Z9_109:
5394 case PROCESSOR_2097_Z10:
5402 s390_first_cycle_multipass_dfa_lookahead (void)
5408 /* Annotate every literal pool reference in X by an UNSPEC_LTREF expression.
5409 Fix up MEMs as required. */
5412 annotate_constant_pool_refs (rtx *x)
5417 gcc_assert (GET_CODE (*x) != SYMBOL_REF
5418 || !CONSTANT_POOL_ADDRESS_P (*x));
5420 /* Literal pool references can only occur inside a MEM ... */
5421 if (GET_CODE (*x) == MEM)
5423 rtx memref = XEXP (*x, 0);
5425 if (GET_CODE (memref) == SYMBOL_REF
5426 && CONSTANT_POOL_ADDRESS_P (memref))
5428 rtx base = cfun->machine->base_reg;
5429 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, memref, base),
5432 *x = replace_equiv_address (*x, addr);
5436 if (GET_CODE (memref) == CONST
5437 && GET_CODE (XEXP (memref, 0)) == PLUS
5438 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
5439 && GET_CODE (XEXP (XEXP (memref, 0), 0)) == SYMBOL_REF
5440 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (memref, 0), 0)))
5442 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
5443 rtx sym = XEXP (XEXP (memref, 0), 0);
5444 rtx base = cfun->machine->base_reg;
5445 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5448 *x = replace_equiv_address (*x, plus_constant (addr, off));
5453 /* ... or a load-address type pattern. */
5454 if (GET_CODE (*x) == SET)
5456 rtx addrref = SET_SRC (*x);
5458 if (GET_CODE (addrref) == SYMBOL_REF
5459 && CONSTANT_POOL_ADDRESS_P (addrref))
5461 rtx base = cfun->machine->base_reg;
5462 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, addrref, base),
5465 SET_SRC (*x) = addr;
5469 if (GET_CODE (addrref) == CONST
5470 && GET_CODE (XEXP (addrref, 0)) == PLUS
5471 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
5472 && GET_CODE (XEXP (XEXP (addrref, 0), 0)) == SYMBOL_REF
5473 && CONSTANT_POOL_ADDRESS_P (XEXP (XEXP (addrref, 0), 0)))
5475 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
5476 rtx sym = XEXP (XEXP (addrref, 0), 0);
5477 rtx base = cfun->machine->base_reg;
5478 rtx addr = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, sym, base),
5481 SET_SRC (*x) = plus_constant (addr, off);
5486 /* Annotate LTREL_BASE as well. */
5487 if (GET_CODE (*x) == UNSPEC
5488 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5490 rtx base = cfun->machine->base_reg;
5491 *x = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XVECEXP (*x, 0, 0), base),
5496 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5497 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5501 annotate_constant_pool_refs (&XEXP (*x, i));
5503 else if (fmt[i] == 'E')
5505 for (j = 0; j < XVECLEN (*x, i); j++)
5506 annotate_constant_pool_refs (&XVECEXP (*x, i, j));
5511 /* Split all branches that exceed the maximum distance.
5512 Returns true if this created a new literal pool entry. */
5515 s390_split_branches (void)
5517 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5518 int new_literal = 0, ret;
5519 rtx insn, pat, tmp, target;
5522 /* We need correct insn addresses. */
5524 shorten_branches (get_insns ());
5526 /* Find all branches that exceed 64KB, and split them. */
5528 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5530 if (GET_CODE (insn) != JUMP_INSN)
5533 pat = PATTERN (insn);
5534 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
5535 pat = XVECEXP (pat, 0, 0);
5536 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
5539 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
5541 label = &SET_SRC (pat);
5543 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
5545 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
5546 label = &XEXP (SET_SRC (pat), 1);
5547 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
5548 label = &XEXP (SET_SRC (pat), 2);
5555 if (get_attr_length (insn) <= 4)
5558 /* We are going to use the return register as a scratch register;
5559 make sure it will be saved/restored by the prologue/epilogue. */
5560 cfun_frame_layout.save_return_addr_p = 1;
5565 tmp = force_const_mem (Pmode, *label);
5566 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
5567 INSN_ADDRESSES_NEW (tmp, -1);
5568 annotate_constant_pool_refs (&PATTERN (tmp));
5575 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, *label),
5576 UNSPEC_LTREL_OFFSET);
5577 target = gen_rtx_CONST (Pmode, target);
5578 target = force_const_mem (Pmode, target);
5579 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, target), insn);
5580 INSN_ADDRESSES_NEW (tmp, -1);
5581 annotate_constant_pool_refs (&PATTERN (tmp));
5583 target = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, XEXP (target, 0),
5584 cfun->machine->base_reg),
5586 target = gen_rtx_PLUS (Pmode, temp_reg, target);
5589 ret = validate_change (insn, label, target, 0);
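/* Sketch of the resulting code on 31-bit targets (illustrative, actual
   patterns vary): an out-of-range "brc <cond>,<label>" effectively
   becomes

       l    %r14,<literal pool slot holding the address of label>
       bcr  <cond>,%r14

   with the label address (or its pool-relative offset) materialized in
   the literal pool, which is why r14 must be saved above.  */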
5597 /* Find an annotated literal pool symbol referenced in RTX X,
5598 and store it at REF. Will abort if X contains references to
5599 more than one such pool symbol; multiple references to the same
5600 symbol are allowed, however.
5602 The rtx pointed to by REF must be initialized to NULL_RTX
5603 by the caller before calling this routine. */
5606 find_constant_pool_ref (rtx x, rtx *ref)
5611 /* Ignore LTREL_BASE references. */
5612 if (GET_CODE (x) == UNSPEC
5613 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5615 /* Likewise POOL_ENTRY insns. */
5616 if (GET_CODE (x) == UNSPEC_VOLATILE
5617 && XINT (x, 1) == UNSPECV_POOL_ENTRY)
5620 gcc_assert (GET_CODE (x) != SYMBOL_REF
5621 || !CONSTANT_POOL_ADDRESS_P (x));
5623 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_LTREF)
5625 rtx sym = XVECEXP (x, 0, 0);
5626 gcc_assert (GET_CODE (sym) == SYMBOL_REF
5627 && CONSTANT_POOL_ADDRESS_P (sym));
5629 if (*ref == NULL_RTX)
5632 gcc_assert (*ref == sym);
5637 fmt = GET_RTX_FORMAT (GET_CODE (x));
5638 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5642 find_constant_pool_ref (XEXP (x, i), ref);
5644 else if (fmt[i] == 'E')
5646 for (j = 0; j < XVECLEN (x, i); j++)
5647 find_constant_pool_ref (XVECEXP (x, i, j), ref);
5652 /* Replace every reference to the annotated literal pool
5653 symbol REF in X by its base plus OFFSET. */
5656 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset)
5661 gcc_assert (*x != ref);
5663 if (GET_CODE (*x) == UNSPEC
5664 && XINT (*x, 1) == UNSPEC_LTREF
5665 && XVECEXP (*x, 0, 0) == ref)
5667 *x = gen_rtx_PLUS (Pmode, XVECEXP (*x, 0, 1), offset);
5671 if (GET_CODE (*x) == PLUS
5672 && GET_CODE (XEXP (*x, 1)) == CONST_INT
5673 && GET_CODE (XEXP (*x, 0)) == UNSPEC
5674 && XINT (XEXP (*x, 0), 1) == UNSPEC_LTREF
5675 && XVECEXP (XEXP (*x, 0), 0, 0) == ref)
5677 rtx addr = gen_rtx_PLUS (Pmode, XVECEXP (XEXP (*x, 0), 0, 1), offset);
5678 *x = plus_constant (addr, INTVAL (XEXP (*x, 1)));
5682 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5683 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5687 replace_constant_pool_ref (&XEXP (*x, i), ref, offset);
5689 else if (fmt[i] == 'E')
5691 for (j = 0; j < XVECLEN (*x, i); j++)
5692 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, offset);
5697 /* Check whether X contains an UNSPEC_LTREL_BASE.
5698 Return its constant pool symbol if found, NULL_RTX otherwise. */
5701 find_ltrel_base (rtx x)
5706 if (GET_CODE (x) == UNSPEC
5707 && XINT (x, 1) == UNSPEC_LTREL_BASE)
5708 return XVECEXP (x, 0, 0);
5710 fmt = GET_RTX_FORMAT (GET_CODE (x));
5711 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5715 rtx fnd = find_ltrel_base (XEXP (x, i));
5719 else if (fmt[i] == 'E')
5721 for (j = 0; j < XVECLEN (x, i); j++)
5723 rtx fnd = find_ltrel_base (XVECEXP (x, i, j));
5733 /* Replace any occurrence of UNSPEC_LTREL_BASE in X with its base. */
5736 replace_ltrel_base (rtx *x)
5741 if (GET_CODE (*x) == UNSPEC
5742 && XINT (*x, 1) == UNSPEC_LTREL_BASE)
5744 *x = XVECEXP (*x, 0, 1);
5748 fmt = GET_RTX_FORMAT (GET_CODE (*x));
5749 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
5753 replace_ltrel_base (&XEXP (*x, i));
5755 else if (fmt[i] == 'E')
5757 for (j = 0; j < XVECLEN (*x, i); j++)
5758 replace_ltrel_base (&XVECEXP (*x, i, j));
5764 /* We keep a list of constants which we have to add to internal
5765 constant tables in the middle of large functions. */
5767 #define NR_C_MODES 11
5768 enum machine_mode constant_modes[NR_C_MODES] =
5770 TFmode, TImode, TDmode,
5771 DFmode, DImode, DDmode,
5772 SFmode, SImode, SDmode,
5779 struct constant *next;
5784 struct constant_pool
5786 struct constant_pool *next;
5790 rtx emit_pool_after;
5792 struct constant *constants[NR_C_MODES];
5793 struct constant *execute;
5798 /* Allocate new constant_pool structure. */
5800 static struct constant_pool *
5801 s390_alloc_pool (void)
5803 struct constant_pool *pool;
5806 pool = (struct constant_pool *) xmalloc (sizeof *pool);
5808 for (i = 0; i < NR_C_MODES; i++)
5809 pool->constants[i] = NULL;
5811 pool->execute = NULL;
5812 pool->label = gen_label_rtx ();
5813 pool->first_insn = NULL_RTX;
5814 pool->pool_insn = NULL_RTX;
5815 pool->insns = BITMAP_ALLOC (NULL);
5817 pool->emit_pool_after = NULL_RTX;
5822 /* Create new constant pool covering instructions starting at INSN
5823 and chain it to the end of POOL_LIST. */
5825 static struct constant_pool *
5826 s390_start_pool (struct constant_pool **pool_list, rtx insn)
5828 struct constant_pool *pool, **prev;
5830 pool = s390_alloc_pool ();
5831 pool->first_insn = insn;
5833 for (prev = pool_list; *prev; prev = &(*prev)->next)
5840 /* End the range of instructions covered by POOL at INSN and emit
5841 a placeholder insn representing the pool. */
5844 s390_end_pool (struct constant_pool *pool, rtx insn)
5846 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
5849 insn = get_last_insn ();
5851 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
5852 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
5855 /* Add INSN to the list of insns covered by POOL. */
5858 s390_add_pool_insn (struct constant_pool *pool, rtx insn)
5860 bitmap_set_bit (pool->insns, INSN_UID (insn));
5863 /* Return pool out of POOL_LIST that covers INSN. */
5865 static struct constant_pool *
5866 s390_find_pool (struct constant_pool *pool_list, rtx insn)
5868 struct constant_pool *pool;
5870 for (pool = pool_list; pool; pool = pool->next)
5871 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
5877 /* Add constant VAL of mode MODE to the constant pool POOL. */
5880 s390_add_constant (struct constant_pool *pool, rtx val, enum machine_mode mode)
5885 for (i = 0; i < NR_C_MODES; i++)
5886 if (constant_modes[i] == mode)
5888 gcc_assert (i != NR_C_MODES);
5890 for (c = pool->constants[i]; c != NULL; c = c->next)
5891 if (rtx_equal_p (val, c->value))
5896 c = (struct constant *) xmalloc (sizeof *c);
5898 c->label = gen_label_rtx ();
5899 c->next = pool->constants[i];
5900 pool->constants[i] = c;
5901 pool->size += GET_MODE_SIZE (mode);
5905 /* Return an rtx that represents the offset of X from the start of pool POOL. */
5909 s390_pool_offset (struct constant_pool *pool, rtx x)
5913 label = gen_rtx_LABEL_REF (GET_MODE (x), pool->label);
5914 x = gen_rtx_UNSPEC (GET_MODE (x), gen_rtvec (2, x, label),
5915 UNSPEC_POOL_OFFSET);
5916 return gen_rtx_CONST (GET_MODE (x), x);
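/* The resulting expression has the shape (illustrative)
     (const (unspec [<x> (label_ref <pool base>)] UNSPEC_POOL_OFFSET))
   which s390_output_addr_const_extra later prints as the difference
   "<x> - <pool base>".  */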
5919 /* Find constant VAL of mode MODE in the constant pool POOL.
5920 Return an RTX describing the distance from the start of
5921 the pool to the location of the new constant. */
5924 s390_find_constant (struct constant_pool *pool, rtx val,
5925 enum machine_mode mode)
5930 for (i = 0; i < NR_C_MODES; i++)
5931 if (constant_modes[i] == mode)
5933 gcc_assert (i != NR_C_MODES);
5935 for (c = pool->constants[i]; c != NULL; c = c->next)
5936 if (rtx_equal_p (val, c->value))
5941 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
5944 /* Check whether INSN is an execute. Return the label_ref to its
5945 execute target template if so, NULL_RTX otherwise. */
5948 s390_execute_label (rtx insn)
5950 if (GET_CODE (insn) == INSN
5951 && GET_CODE (PATTERN (insn)) == PARALLEL
5952 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
5953 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
5954 return XVECEXP (XVECEXP (PATTERN (insn), 0, 0), 0, 2);
5959 /* Add execute target for INSN to the constant pool POOL. */
5962 s390_add_execute (struct constant_pool *pool, rtx insn)
5966 for (c = pool->execute; c != NULL; c = c->next)
5967 if (INSN_UID (insn) == INSN_UID (c->value))
5972 c = (struct constant *) xmalloc (sizeof *c);
5974 c->label = gen_label_rtx ();
5975 c->next = pool->execute;
5981 /* Find execute target for INSN in the constant pool POOL.
5982 Return an RTX describing the distance from the start of
5983 the pool to the location of the execute target. */
5986 s390_find_execute (struct constant_pool *pool, rtx insn)
5990 for (c = pool->execute; c != NULL; c = c->next)
5991 if (INSN_UID (insn) == INSN_UID (c->value))
5996 return s390_pool_offset (pool, gen_rtx_LABEL_REF (Pmode, c->label));
5999 /* For an execute INSN, extract the execute target template. */
6002 s390_execute_target (rtx insn)
6004 rtx pattern = PATTERN (insn);
6005 gcc_assert (s390_execute_label (insn));
6007 if (XVECLEN (pattern, 0) == 2)
6009 pattern = copy_rtx (XVECEXP (pattern, 0, 1));
6013 rtvec vec = rtvec_alloc (XVECLEN (pattern, 0) - 1);
6016 for (i = 0; i < XVECLEN (pattern, 0) - 1; i++)
6017 RTVEC_ELT (vec, i) = copy_rtx (XVECEXP (pattern, 0, i + 1));
6019 pattern = gen_rtx_PARALLEL (VOIDmode, vec);
6025 /* Indicate that INSN cannot be duplicated. This is the case for
6026 execute insns that carry a unique label. */
6029 s390_cannot_copy_insn_p (rtx insn)
6031 rtx label = s390_execute_label (insn);
6032 return label && label != const0_rtx;
6035 /* Dump out the constants in POOL. If REMOTE_LABEL is true,
6036 do not emit the pool base label. */
6039 s390_dump_pool (struct constant_pool *pool, bool remote_label)
6042 rtx insn = pool->pool_insn;
6045 /* Switch to rodata section. */
6046 if (TARGET_CPU_ZARCH)
6048 insn = emit_insn_after (gen_pool_section_start (), insn);
6049 INSN_ADDRESSES_NEW (insn, -1);
6052 /* Ensure minimum pool alignment. */
6053 if (TARGET_CPU_ZARCH)
6054 insn = emit_insn_after (gen_pool_align (GEN_INT (8)), insn);
6056 insn = emit_insn_after (gen_pool_align (GEN_INT (4)), insn);
6057 INSN_ADDRESSES_NEW (insn, -1);
6059 /* Emit pool base label. */
6062 insn = emit_label_after (pool->label, insn);
6063 INSN_ADDRESSES_NEW (insn, -1);
6066 /* Dump constants in descending alignment requirement order,
6067 ensuring proper alignment for every constant. */
6068 for (i = 0; i < NR_C_MODES; i++)
6069 for (c = pool->constants[i]; c; c = c->next)
6071 /* Convert UNSPEC_LTREL_OFFSET unspecs to pool-relative references. */
6072 rtx value = copy_rtx (c->value);
6073 if (GET_CODE (value) == CONST
6074 && GET_CODE (XEXP (value, 0)) == UNSPEC
6075 && XINT (XEXP (value, 0), 1) == UNSPEC_LTREL_OFFSET
6076 && XVECLEN (XEXP (value, 0), 0) == 1)
6077 value = s390_pool_offset (pool, XVECEXP (XEXP (value, 0), 0, 0));
6079 insn = emit_label_after (c->label, insn);
6080 INSN_ADDRESSES_NEW (insn, -1);
6082 value = gen_rtx_UNSPEC_VOLATILE (constant_modes[i],
6083 gen_rtvec (1, value),
6084 UNSPECV_POOL_ENTRY);
6085 insn = emit_insn_after (value, insn);
6086 INSN_ADDRESSES_NEW (insn, -1);
6089 /* Ensure minimum alignment for instructions. */
6090 insn = emit_insn_after (gen_pool_align (GEN_INT (2)), insn);
6091 INSN_ADDRESSES_NEW (insn, -1);
6093 /* Output in-pool execute template insns. */
6094 for (c = pool->execute; c; c = c->next)
6096 insn = emit_label_after (c->label, insn);
6097 INSN_ADDRESSES_NEW (insn, -1);
6099 insn = emit_insn_after (s390_execute_target (c->value), insn);
6100 INSN_ADDRESSES_NEW (insn, -1);
6103 /* Switch back to previous section. */
6104 if (TARGET_CPU_ZARCH)
6106 insn = emit_insn_after (gen_pool_section_end (), insn);
6107 INSN_ADDRESSES_NEW (insn, -1);
6110 insn = emit_barrier_after (insn);
6111 INSN_ADDRESSES_NEW (insn, -1);
6113 /* Remove placeholder insn. */
6114 remove_insn (pool->pool_insn);
6117 /* Free all memory used by POOL. */
6120 s390_free_pool (struct constant_pool *pool)
6122 struct constant *c, *next;
6125 for (i = 0; i < NR_C_MODES; i++)
6126 for (c = pool->constants[i]; c; c = next)
6132 for (c = pool->execute; c; c = next)
6138 BITMAP_FREE (pool->insns);
6143 /* Collect main literal pool. Return NULL on overflow. */
6145 static struct constant_pool *
6146 s390_mainpool_start (void)
6148 struct constant_pool *pool;
6151 pool = s390_alloc_pool ();
6153 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6155 if (GET_CODE (insn) == INSN
6156 && GET_CODE (PATTERN (insn)) == SET
6157 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
6158 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
6160 gcc_assert (!pool->pool_insn);
6161 pool->pool_insn = insn;
6164 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6166 s390_add_execute (pool, insn);
6168 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6170 rtx pool_ref = NULL_RTX;
6171 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6174 rtx constant = get_pool_constant (pool_ref);
6175 enum machine_mode mode = get_pool_mode (pool_ref);
6176 s390_add_constant (pool, constant, mode);
6180 /* If hot/cold partitioning is enabled we have to make sure that
6181 the literal pool is emitted in the same section where the
6182 initialization of the literal pool base pointer takes place.
6183 emit_pool_after is only used in the non-overflow case on
6184 non-Z CPUs where we can emit the literal pool at the end of the
6185 function body within the text section. */
6187 && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS
6188 && !pool->emit_pool_after)
6189 pool->emit_pool_after = PREV_INSN (insn);
6192 gcc_assert (pool->pool_insn || pool->size == 0);
6194 if (pool->size >= 4096)
6196 /* We're going to chunkify the pool, so remove the main
6197 pool placeholder insn. */
6198 remove_insn (pool->pool_insn);
6200 s390_free_pool (pool);
6204 /* If the function ends with the section where the literal pool
6205 should be emitted, set the marker to its end. */
6206 if (pool && !pool->emit_pool_after)
6207 pool->emit_pool_after = get_last_insn ();
6212 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6213 Modify the current function to output the pool constants as well as
6214 the pool register setup instruction. */
6217 s390_mainpool_finish (struct constant_pool *pool)
6219 rtx base_reg = cfun->machine->base_reg;
6222 /* If the pool is empty, we're done. */
6223 if (pool->size == 0)
6225 /* We don't actually need a base register after all. */
6226 cfun->machine->base_reg = NULL_RTX;
6228 if (pool->pool_insn)
6229 remove_insn (pool->pool_insn);
6230 s390_free_pool (pool);
6234 /* We need correct insn addresses. */
6235 shorten_branches (get_insns ());
6237 /* On zSeries, we use a LARL to load the pool register. The pool is
6238 located in the .rodata section, so we emit it after the function. */
6239 if (TARGET_CPU_ZARCH)
6241 insn = gen_main_base_64 (base_reg, pool->label);
6242 insn = emit_insn_after (insn, pool->pool_insn);
6243 INSN_ADDRESSES_NEW (insn, -1);
6244 remove_insn (pool->pool_insn);
6246 insn = get_last_insn ();
6247 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6248 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6250 s390_dump_pool (pool, 0);
6253 /* On S/390, if the total size of the function's code plus literal pool
6254 does not exceed 4096 bytes, we use BASR to set up a function base
6255 pointer, and emit the literal pool at the end of the function. */
6256 else if (INSN_ADDRESSES (INSN_UID (pool->emit_pool_after))
6257 + pool->size + 8 /* alignment slop */ < 4096)
6259 insn = gen_main_base_31_small (base_reg, pool->label);
6260 insn = emit_insn_after (insn, pool->pool_insn);
6261 INSN_ADDRESSES_NEW (insn, -1);
6262 remove_insn (pool->pool_insn);
6264 insn = emit_label_after (pool->label, insn);
6265 INSN_ADDRESSES_NEW (insn, -1);
6267 /* emit_pool_after will be set by s390_mainpool_start to the
6268 last insn of the section where the literal pool should be emitted. */
6270 insn = pool->emit_pool_after;
6272 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6273 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6275 s390_dump_pool (pool, 1);
6278 /* Otherwise, we emit an inline literal pool and use BASR to branch
6279 over it, setting up the pool register at the same time. */
6282 rtx pool_end = gen_label_rtx ();
6284 insn = gen_main_base_31_large (base_reg, pool->label, pool_end);
6285 insn = emit_insn_after (insn, pool->pool_insn);
6286 INSN_ADDRESSES_NEW (insn, -1);
6287 remove_insn (pool->pool_insn);
6289 insn = emit_label_after (pool->label, insn);
6290 INSN_ADDRESSES_NEW (insn, -1);
6292 pool->pool_insn = emit_insn_after (gen_pool (const0_rtx), insn);
6293 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
6295 insn = emit_label_after (pool_end, pool->pool_insn);
6296 INSN_ADDRESSES_NEW (insn, -1);
6298 s390_dump_pool (pool, 1);
6302 /* Replace all literal pool references. */
6304 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6307 replace_ltrel_base (&PATTERN (insn));
6309 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6311 rtx addr, pool_ref = NULL_RTX;
6312 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6315 if (s390_execute_label (insn))
6316 addr = s390_find_execute (pool, insn);
6318 addr = s390_find_constant (pool, get_pool_constant (pool_ref),
6319 get_pool_mode (pool_ref));
6321 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6322 INSN_CODE (insn) = -1;
6328 /* Free the pool. */
6329 s390_free_pool (pool);
6332 /* POOL holds the main literal pool as collected by s390_mainpool_start.
6333 We have decided we cannot use this pool, so revert all changes
6334 to the current function that were done by s390_mainpool_start. */
6336 s390_mainpool_cancel (struct constant_pool *pool)
6338 /* We didn't actually change the instruction stream, so simply
6339 free the pool memory. */
6340 s390_free_pool (pool);
6344 /* Chunkify the literal pool. */
6346 #define S390_POOL_CHUNK_MIN 0xc00
6347 #define S390_POOL_CHUNK_MAX 0xe00
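/* The chunk limits 0xc00 (3072) and 0xe00 (3584) stay safely below the
   4096-byte reach of a twelve-bit displacement, leaving headroom for
   alignment padding and for the base register reload insns inserted
   below (explanatory note, not from the original sources).  */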
6349 static struct constant_pool *
6350 s390_chunkify_start (void)
6352 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
6355 rtx pending_ltrel = NULL_RTX;
6358 rtx (*gen_reload_base) (rtx, rtx) =
6359 TARGET_CPU_ZARCH? gen_reload_base_64 : gen_reload_base_31;
6362 /* We need correct insn addresses. */
6364 shorten_branches (get_insns ());
6366 /* Scan all insns and move literals to pool chunks. */
6368 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6370 bool section_switch_p = false;
6372 /* Check for pending LTREL_BASE. */
6375 rtx ltrel_base = find_ltrel_base (PATTERN (insn));
6378 gcc_assert (ltrel_base == pending_ltrel);
6379 pending_ltrel = NULL_RTX;
6383 if (!TARGET_CPU_ZARCH && s390_execute_label (insn))
6386 curr_pool = s390_start_pool (&pool_list, insn);
6388 s390_add_execute (curr_pool, insn);
6389 s390_add_pool_insn (curr_pool, insn);
6391 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6393 rtx pool_ref = NULL_RTX;
6394 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6397 rtx constant = get_pool_constant (pool_ref);
6398 enum machine_mode mode = get_pool_mode (pool_ref);
6401 curr_pool = s390_start_pool (&pool_list, insn);
6403 s390_add_constant (curr_pool, constant, mode);
6404 s390_add_pool_insn (curr_pool, insn);
6406 /* Don't split the pool chunk between a LTREL_OFFSET load
6407 and the corresponding LTREL_BASE. */
6408 if (GET_CODE (constant) == CONST
6409 && GET_CODE (XEXP (constant, 0)) == UNSPEC
6410 && XINT (XEXP (constant, 0), 1) == UNSPEC_LTREL_OFFSET)
6412 gcc_assert (!pending_ltrel);
6413 pending_ltrel = pool_ref;
6418 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
6421 s390_add_pool_insn (curr_pool, insn);
6422 /* An LTREL_BASE must follow within the same basic block. */
6423 gcc_assert (!pending_ltrel);
6426 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
6427 section_switch_p = true;
6430 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
6431 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
6434 if (TARGET_CPU_ZARCH)
6436 if (curr_pool->size < S390_POOL_CHUNK_MAX)
6439 s390_end_pool (curr_pool, NULL_RTX);
6444 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
6445 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
6448 /* We will later have to insert base register reload insns.
6449 Those will have an effect on code size, which we need to
6450 consider here. This calculation makes rather pessimistic
6451 worst-case assumptions. */
6452 if (GET_CODE (insn) == CODE_LABEL)
6455 if (chunk_size < S390_POOL_CHUNK_MIN
6456 && curr_pool->size < S390_POOL_CHUNK_MIN
6457 && !section_switch_p)
6460 /* Pool chunks can only be inserted after BARRIERs ... */
6461 if (GET_CODE (insn) == BARRIER)
6463 s390_end_pool (curr_pool, insn);
6468 /* ... so if we don't find one in time, create one. */
6469 else if (chunk_size > S390_POOL_CHUNK_MAX
6470 || curr_pool->size > S390_POOL_CHUNK_MAX
6471 || section_switch_p)
6473 rtx label, jump, barrier;
6475 if (!section_switch_p)
6477 /* We can insert the barrier only after a 'real' insn. */
6478 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
6480 if (get_attr_length (insn) == 0)
6482 /* Don't separate LTREL_BASE from the corresponding
6483 LTREL_OFFSET load. */
6489 gcc_assert (!pending_ltrel);
6491 /* The old pool has to end before the section switch
6492 note in order to make it part of the current section. */
6494 insn = PREV_INSN (insn);
6497 label = gen_label_rtx ();
6498 jump = emit_jump_insn_after (gen_jump (label), insn);
6499 barrier = emit_barrier_after (jump);
6500 insn = emit_label_after (label, barrier);
6501 JUMP_LABEL (jump) = label;
6502 LABEL_NUSES (label) = 1;
6504 INSN_ADDRESSES_NEW (jump, -1);
6505 INSN_ADDRESSES_NEW (barrier, -1);
6506 INSN_ADDRESSES_NEW (insn, -1);
6508 s390_end_pool (curr_pool, barrier);
6516 s390_end_pool (curr_pool, NULL_RTX);
6517 gcc_assert (!pending_ltrel);
6519 /* Find all labels that are branched into
6520 from an insn belonging to a different chunk. */
6522 far_labels = BITMAP_ALLOC (NULL);
6524 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6526 /* Labels marked with LABEL_PRESERVE_P can be target
6527 of non-local jumps, so we have to mark them.
6528 The same holds for named labels.
6530 Don't do that, however, if it is the label before a jump table. */
6533 if (GET_CODE (insn) == CODE_LABEL
6534 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
6536 rtx vec_insn = next_real_insn (insn);
6537 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6538 PATTERN (vec_insn) : NULL_RTX;
6540 || !(GET_CODE (vec_pat) == ADDR_VEC
6541 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6542 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
6545 /* If we have a direct jump (conditional or unconditional)
6546 or a casesi jump, check all potential targets. */
6547 else if (GET_CODE (insn) == JUMP_INSN)
6549 rtx pat = PATTERN (insn);
6550 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
6551 pat = XVECEXP (pat, 0, 0);
6553 if (GET_CODE (pat) == SET)
6555 rtx label = JUMP_LABEL (insn);
6558 if (s390_find_pool (pool_list, label)
6559 != s390_find_pool (pool_list, insn))
6560 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6563 else if (GET_CODE (pat) == PARALLEL
6564 && XVECLEN (pat, 0) == 2
6565 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
6566 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
6567 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
6569 /* Find the jump table used by this casesi jump. */
6570 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
6571 rtx vec_insn = next_real_insn (vec_label);
6572 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
6573 PATTERN (vec_insn) : NULL_RTX;
6575 && (GET_CODE (vec_pat) == ADDR_VEC
6576 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
6578 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
6580 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
6582 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
6584 if (s390_find_pool (pool_list, label)
6585 != s390_find_pool (pool_list, insn))
6586 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
6593 /* Insert base register reload insns before every pool. */
6595 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6597 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6599 rtx insn = curr_pool->first_insn;
6600 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
6603 /* Insert base register reload insns at every far label. */
6605 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6606 if (GET_CODE (insn) == CODE_LABEL
6607 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
6609 struct constant_pool *pool = s390_find_pool (pool_list, insn);
6612 rtx new_insn = gen_reload_base (cfun->machine->base_reg,
6614 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
6619 BITMAP_FREE (far_labels);
6622 /* Recompute insn addresses. */
6624 init_insn_lengths ();
6625 shorten_branches (get_insns ());
6630 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6631 After we have decided to use this list, finish implementing
6632 all changes to the current function as required. */
6635 s390_chunkify_finish (struct constant_pool *pool_list)
6637 struct constant_pool *curr_pool = NULL;
6641 /* Replace all literal pool references. */
6643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6646 replace_ltrel_base (&PATTERN (insn));
6648 curr_pool = s390_find_pool (pool_list, insn);
6652 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
6654 rtx addr, pool_ref = NULL_RTX;
6655 find_constant_pool_ref (PATTERN (insn), &pool_ref);
6658 if (s390_execute_label (insn))
6659 addr = s390_find_execute (curr_pool, insn);
6661 addr = s390_find_constant (curr_pool,
6662 get_pool_constant (pool_ref),
6663 get_pool_mode (pool_ref));
6665 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
6666 INSN_CODE (insn) = -1;
6671 /* Dump out all literal pools. */
6673 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6674 s390_dump_pool (curr_pool, 0);
6676 /* Free pool list. */
6680 struct constant_pool *next = pool_list->next;
6681 s390_free_pool (pool_list);
6686 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
6687 We have decided we cannot use this list, so revert all changes
6688 to the current function that were done by s390_chunkify_start. */
6691 s390_chunkify_cancel (struct constant_pool *pool_list)
6693 struct constant_pool *curr_pool = NULL;
6696 /* Remove all pool placeholder insns. */
6698 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
6700 /* Did we insert an extra barrier? Remove it. */
6701 rtx barrier = PREV_INSN (curr_pool->pool_insn);
6702 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
6703 rtx label = NEXT_INSN (curr_pool->pool_insn);
6705 if (jump && GET_CODE (jump) == JUMP_INSN
6706 && barrier && GET_CODE (barrier) == BARRIER
6707 && label && GET_CODE (label) == CODE_LABEL
6708 && GET_CODE (PATTERN (jump)) == SET
6709 && SET_DEST (PATTERN (jump)) == pc_rtx
6710 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
6711 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
6714 remove_insn (barrier);
6715 remove_insn (label);
6718 remove_insn (curr_pool->pool_insn);
6721 /* Remove all base register reload insns. */
6723 for (insn = get_insns (); insn; )
6725 rtx next_insn = NEXT_INSN (insn);
6727 if (GET_CODE (insn) == INSN
6728 && GET_CODE (PATTERN (insn)) == SET
6729 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
6730 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
6736 /* Free pool list. */
6740 struct constant_pool *next = pool_list->next;
6741 s390_free_pool (pool_list);
6746 /* Output the constant pool entry EXP in mode MODE with alignment ALIGN. */
6749 s390_output_pool_entry (rtx exp, enum machine_mode mode, unsigned int align)
6753 switch (GET_MODE_CLASS (mode))
6756 case MODE_DECIMAL_FLOAT:
6757 gcc_assert (GET_CODE (exp) == CONST_DOUBLE);
6759 REAL_VALUE_FROM_CONST_DOUBLE (r, exp);
6760 assemble_real (r, mode, align);
6764 assemble_integer (exp, GET_MODE_SIZE (mode), align, 1);
6765 mark_symbol_refs_as_used (exp);
6774 /* Return an RTL expression representing the value of the return address
6775 for the frame COUNT steps up from the current frame. FRAME is the
6776 frame pointer of that frame. */
6779 s390_return_addr_rtx (int count, rtx frame ATTRIBUTE_UNUSED)
6784 /* Without backchain, we fail for all but the current frame. */
6786 if (!TARGET_BACKCHAIN && count > 0)
6789 /* For the current frame, we need to make sure the initial
6790 value of RETURN_REGNUM is actually saved. */
6794 /* On non-z architectures branch splitting could overwrite r14. */
6795 if (TARGET_CPU_ZARCH)
6796 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
6799 cfun_frame_layout.save_return_addr_p = true;
6800 return gen_rtx_MEM (Pmode, return_address_pointer_rtx);
6804 if (TARGET_PACKED_STACK)
6805 offset = -2 * UNITS_PER_WORD;
6807 offset = RETURN_REGNUM * UNITS_PER_WORD;
6809 addr = plus_constant (frame, offset);
6810 addr = memory_address (Pmode, addr);
6811 return gen_rtx_MEM (Pmode, addr);
6814 /* Return an RTL expression representing the back chain stored in
6815 the current stack frame. */
6818 s390_back_chain_rtx (void)
6822 gcc_assert (TARGET_BACKCHAIN);
6824 if (TARGET_PACKED_STACK)
6825 chain = plus_constant (stack_pointer_rtx,
6826 STACK_POINTER_OFFSET - UNITS_PER_WORD);
6828 chain = stack_pointer_rtx;
6830 chain = gen_rtx_MEM (Pmode, chain);
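/* Location of the back chain word (illustrative summary of the code
   above): with TARGET_PACKED_STACK it lives in the word just below
   sp + STACK_POINTER_OFFSET; otherwise it sits at offset 0 from the
   stack pointer.  */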
6834 /* Find the first call-clobbered register unused in a function.
6835 This could be used as base register in a leaf function
6836 or for holding the return address before the epilogue. */
6839 find_unused_clobbered_reg (void)
6842 for (i = 0; i < 6; i++)
6843 if (!df_regs_ever_live_p (i))
6849 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all
6850 clobbered hard regs in SETREG. */
6853 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data)
6855 int *regs_ever_clobbered = (int *)data;
6856 unsigned int i, regno;
6857 enum machine_mode mode = GET_MODE (setreg);
6859 if (GET_CODE (setreg) == SUBREG)
6861 rtx inner = SUBREG_REG (setreg);
6862 if (!GENERAL_REG_P (inner))
6864 regno = subreg_regno (setreg);
6866 else if (GENERAL_REG_P (setreg))
6867 regno = REGNO (setreg);
6872 i < regno + HARD_REGNO_NREGS (regno, mode);
6874 regs_ever_clobbered[i] = 1;
6877 /* Walks through all basic blocks of the current function looking
6878 for clobbered hard regs using s390_reg_clobbered_rtx. The fields
6879 of the passed integer array REGS_EVER_CLOBBERED are set to one for
6880 each of those regs. */
6883 s390_regs_ever_clobbered (int *regs_ever_clobbered)
6889 memset (regs_ever_clobbered, 0, 16 * sizeof (int));
6891 /* For non-leaf functions we have to consider all call clobbered regs to be clobbered. */
6893 if (!current_function_is_leaf)
6895 for (i = 0; i < 16; i++)
6896 regs_ever_clobbered[i] = call_really_used_regs[i];
6899 /* Make the "magic" eh_return registers live if necessary. For regs_ever_live
6900 this work is done by liveness analysis (mark_regs_live_at_end).
6901 Special care is needed for functions containing landing pads. Landing pads
6902 may use the eh registers, but the code which sets these registers is not
6903 contained in that function. Hence s390_regs_ever_clobbered is not able to
6904 deal with this automatically. */
6905 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p)
6906 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++)
6907 if (crtl->calls_eh_return
6908 || (cfun->machine->has_landing_pad_p
6909 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i))))
6910 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1;
6912 /* For nonlocal gotos all call-saved registers have to be saved.
6913 This flag is also set for the unwinding code in libgcc.
6914 See expand_builtin_unwind_init. For regs_ever_live this is done by reload. */
6916 if (cfun->has_nonlocal_label)
6917 for (i = 0; i < 16; i++)
6918 if (!call_really_used_regs[i])
6919 regs_ever_clobbered[i] = 1;
6921 FOR_EACH_BB (cur_bb)
6923 FOR_BB_INSNS (cur_bb, cur_insn)
6925 if (INSN_P (cur_insn))
6926 note_stores (PATTERN (cur_insn),
6927 s390_reg_clobbered_rtx,
6928 regs_ever_clobbered);
6933 /* Determine the frame area which actually has to be accessed
6934 in the function epilogue. The values are stored at the
6935 given pointers AREA_BOTTOM (address of the lowest used stack
6936 address) and AREA_TOP (address of the first item which does
6937 not belong to the stack frame). */
6940 s390_frame_area (int *area_bottom, int *area_top)
6948 if (cfun_frame_layout.first_restore_gpr != -1)
6950 b = (cfun_frame_layout.gprs_offset
6951 + cfun_frame_layout.first_restore_gpr * UNITS_PER_WORD);
6952 t = b + (cfun_frame_layout.last_restore_gpr
6953 - cfun_frame_layout.first_restore_gpr + 1) * UNITS_PER_WORD;
6956 if (TARGET_64BIT && cfun_save_high_fprs_p)
6958 b = MIN (b, cfun_frame_layout.f8_offset);
6959 t = MAX (t, (cfun_frame_layout.f8_offset
6960 + cfun_frame_layout.high_fprs * 8));
6964 for (i = 2; i < 4; i++)
6965 if (cfun_fpr_bit_p (i))
6967 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8);
6968 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8);
6975 /* Fill cfun->machine with info about register usage of current function.
6976 Return in CLOBBERED_REGS which GPRs are currently considered set. */
6979 s390_register_info (int clobbered_regs[])
6983 /* fprs 8 - 15 are call saved for 64 Bit ABI. */
6984 cfun_frame_layout.fpr_bitmap = 0;
6985 cfun_frame_layout.high_fprs = 0;
6987 for (i = 24; i < 32; i++)
6988 if (df_regs_ever_live_p (i) && !global_regs[i])
6990 cfun_set_fpr_bit (i - 16);
6991 cfun_frame_layout.high_fprs++;
6994 /* Find first and last gpr to be saved. We trust regs_ever_live
6995 data, except that we don't save and restore global registers.
6997 Also, all registers with special meaning to the compiler need
6998 extra handling. */
7000 s390_regs_ever_clobbered (clobbered_regs);
7002 for (i = 0; i < 16; i++)
7003 clobbered_regs[i] = clobbered_regs[i] && !global_regs[i] && !fixed_regs[i];
7005 if (frame_pointer_needed)
7006 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1;
7009 clobbered_regs[PIC_OFFSET_TABLE_REGNUM]
7010 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM);
7012 clobbered_regs[BASE_REGNUM]
7013 |= (cfun->machine->base_reg
7014 && REGNO (cfun->machine->base_reg) == BASE_REGNUM);
7016 clobbered_regs[RETURN_REGNUM]
7017 |= (!current_function_is_leaf
7018 || TARGET_TPF_PROFILING
7019 || cfun->machine->split_branches_pending_p
7020 || cfun_frame_layout.save_return_addr_p
7021 || crtl->calls_eh_return
7024 clobbered_regs[STACK_POINTER_REGNUM]
7025 |= (!current_function_is_leaf
7026 || TARGET_TPF_PROFILING
7027 || cfun_save_high_fprs_p
7028 || get_frame_size () > 0
7029 || cfun->calls_alloca
7032 for (i = 6; i < 16; i++)
7033 if (df_regs_ever_live_p (i) || clobbered_regs[i])
7035 for (j = 15; j > i; j--)
7036 if (df_regs_ever_live_p (j) || clobbered_regs[j])
7041 /* Nothing to save/restore. */
7042 cfun_frame_layout.first_save_gpr_slot = -1;
7043 cfun_frame_layout.last_save_gpr_slot = -1;
7044 cfun_frame_layout.first_save_gpr = -1;
7045 cfun_frame_layout.first_restore_gpr = -1;
7046 cfun_frame_layout.last_save_gpr = -1;
7047 cfun_frame_layout.last_restore_gpr = -1;
7051 /* Save slots for gprs from i to j. */
7052 cfun_frame_layout.first_save_gpr_slot = i;
7053 cfun_frame_layout.last_save_gpr_slot = j;
7055 for (i = cfun_frame_layout.first_save_gpr_slot;
7056 i < cfun_frame_layout.last_save_gpr_slot + 1;
7058 if (clobbered_regs[i])
7061 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--)
7062 if (clobbered_regs[j])
7065 if (i == cfun_frame_layout.last_save_gpr_slot + 1)
7067 /* Nothing to save/restore. */
7068 cfun_frame_layout.first_save_gpr = -1;
7069 cfun_frame_layout.first_restore_gpr = -1;
7070 cfun_frame_layout.last_save_gpr = -1;
7071 cfun_frame_layout.last_restore_gpr = -1;
7075 /* Save / Restore from gpr i to j. */
7076 cfun_frame_layout.first_save_gpr = i;
7077 cfun_frame_layout.first_restore_gpr = i;
7078 cfun_frame_layout.last_save_gpr = j;
7079 cfun_frame_layout.last_restore_gpr = j;
7085 /* Varargs functions need to save gprs 2 to 6. */
7086 if (cfun->va_list_gpr_size
7087 && crtl->args.info.gprs < GP_ARG_NUM_REG)
7089 int min_gpr = crtl->args.info.gprs;
7090 int max_gpr = min_gpr + cfun->va_list_gpr_size;
7091 if (max_gpr > GP_ARG_NUM_REG)
7092 max_gpr = GP_ARG_NUM_REG;
7094 if (cfun_frame_layout.first_save_gpr == -1
7095 || cfun_frame_layout.first_save_gpr > 2 + min_gpr)
7097 cfun_frame_layout.first_save_gpr = 2 + min_gpr;
7098 cfun_frame_layout.first_save_gpr_slot = 2 + min_gpr;
7101 if (cfun_frame_layout.last_save_gpr == -1
7102 || cfun_frame_layout.last_save_gpr < 2 + max_gpr - 1)
7104 cfun_frame_layout.last_save_gpr = 2 + max_gpr - 1;
7105 cfun_frame_layout.last_save_gpr_slot = 2 + max_gpr - 1;
7109 /* Mark f0, f2 (for 31 bit) and f0-f4 (for 64 bit) as needing to be saved. */
7110 if (TARGET_HARD_FLOAT && cfun->va_list_fpr_size
7111 && crtl->args.info.fprs < FP_ARG_NUM_REG)
7113 int min_fpr = crtl->args.info.fprs;
7114 int max_fpr = min_fpr + cfun->va_list_fpr_size;
7115 if (max_fpr > FP_ARG_NUM_REG)
7116 max_fpr = FP_ARG_NUM_REG;
7118 /* ??? This is currently required to ensure proper location
7119 of the fpr save slots within the va_list save area. */
7120 if (TARGET_PACKED_STACK)
7123 for (i = min_fpr; i < max_fpr; i++)
7124 cfun_set_fpr_bit (i);
7129 for (i = 2; i < 4; i++)
7130 if (df_regs_ever_live_p (i + 16) && !global_regs[i + 16])
7131 cfun_set_fpr_bit (i);
7134 /* Fill cfun->machine with info about frame of current function. */
7137 s390_frame_info (void)
7141 cfun_frame_layout.frame_size = get_frame_size ();
7142 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000)
7143 fatal_error ("total size of local variables exceeds architecture limit");
7145 if (!TARGET_PACKED_STACK)
7147 cfun_frame_layout.backchain_offset = 0;
7148 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD;
7149 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8;
7150 cfun_frame_layout.f8_offset = -cfun_frame_layout.high_fprs * 8;
7151 cfun_frame_layout.gprs_offset = (cfun_frame_layout.first_save_gpr_slot
7154 else if (TARGET_BACKCHAIN) /* kernel stack layout */
7156 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET
7158 cfun_frame_layout.gprs_offset
7159 = (cfun_frame_layout.backchain_offset
7160 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1)
7165 cfun_frame_layout.f4_offset
7166 = (cfun_frame_layout.gprs_offset
7167 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7169 cfun_frame_layout.f0_offset
7170 = (cfun_frame_layout.f4_offset
7171 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7175 /* On 31 bit we have to take care of the alignment of the
7176 floating point reg save slots to provide fastest access. */
7177 cfun_frame_layout.f0_offset
7178 = ((cfun_frame_layout.gprs_offset
7179 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1))
7180 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7182 cfun_frame_layout.f4_offset
7183 = (cfun_frame_layout.f0_offset
7184 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7187 else /* no backchain */
7189 cfun_frame_layout.f4_offset
7190 = (STACK_POINTER_OFFSET
7191 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3)));
7193 cfun_frame_layout.f0_offset
7194 = (cfun_frame_layout.f4_offset
7195 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1)));
7197 cfun_frame_layout.gprs_offset
7198 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size;
7201 if (current_function_is_leaf
7202 && !TARGET_TPF_PROFILING
7203 && cfun_frame_layout.frame_size == 0
7204 && !cfun_save_high_fprs_p
7205 && !cfun->calls_alloca
7209 if (!TARGET_PACKED_STACK)
7210 cfun_frame_layout.frame_size += (STACK_POINTER_OFFSET
7211 + crtl->outgoing_args_size
7212 + cfun_frame_layout.high_fprs * 8);
7215 if (TARGET_BACKCHAIN)
7216 cfun_frame_layout.frame_size += UNITS_PER_WORD;
7218 /* No alignment trouble here because f8-f15 are only saved under
7219 64 bit. */
7220 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset,
7221 cfun_frame_layout.f4_offset),
7222 cfun_frame_layout.gprs_offset)
7223 - cfun_frame_layout.high_fprs * 8);
7225 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8;
7227 for (i = 0; i < 8; i++)
7228 if (cfun_fpr_bit_p (i))
7229 cfun_frame_layout.frame_size += 8;
7231 cfun_frame_layout.frame_size += cfun_gprs_save_area_size;
7233 /* If on 31 bit an odd number of gprs has to be saved, we have to adjust
7234 the frame size to maintain the 8 byte alignment of stack frames. */
7235 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size +
7236 STACK_BOUNDARY / BITS_PER_UNIT - 1)
7237 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1));
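/* Worked example (sketch): with a STACK_BOUNDARY of 64 bits the rounding
   above computes (size + 7) & ~7, so a raw frame size of 100 bytes is
   rounded up to 104, preserving 8 byte stack frame alignment.  */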
7239 cfun_frame_layout.frame_size += crtl->outgoing_args_size;
7243 /* Generate frame layout. Fills in register and frame data for the current
7244 function in cfun->machine. This routine can be called multiple times;
7245 it will re-do the complete frame layout every time. */
7248 s390_init_frame_layout (void)
7250 HOST_WIDE_INT frame_size;
7252 int clobbered_regs[16];
7254 /* On S/390 machines, we may need to perform branch splitting, which
7255 will require both base and return address register. We have no
7256 choice but to assume we're going to need them until right at the
7257 end of the machine dependent reorg phase. */
7258 if (!TARGET_CPU_ZARCH)
7259 cfun->machine->split_branches_pending_p = true;
7263 frame_size = cfun_frame_layout.frame_size;
7265 /* Try to predict whether we'll need the base register. */
7266 base_used = cfun->machine->split_branches_pending_p
7267 || crtl->uses_const_pool
7268 || (!DISP_IN_RANGE (frame_size)
7269 && !CONST_OK_FOR_K (frame_size));
7271 /* Decide which register to use as literal pool base. In small
7272 leaf functions, try to use an unused call-clobbered register
7273 as base register to avoid save/restore overhead. */
7275 cfun->machine->base_reg = NULL_RTX;
7276 else if (current_function_is_leaf && !df_regs_ever_live_p (5))
7277 cfun->machine->base_reg = gen_rtx_REG (Pmode, 5);
7279 cfun->machine->base_reg = gen_rtx_REG (Pmode, BASE_REGNUM);
7281 s390_register_info (clobbered_regs);
7284 while (frame_size != cfun_frame_layout.frame_size);
7287 /* Update frame layout. Recompute actual register save data based on
7288 current info and update regs_ever_live for the special registers.
7289 May be called multiple times, but may never cause *more* registers
7290 to be saved than s390_init_frame_layout allocated room for. */
7293 s390_update_frame_layout (void)
7295 int clobbered_regs[16];
7297 s390_register_info (clobbered_regs);
7299 df_set_regs_ever_live (BASE_REGNUM,
7300 clobbered_regs[BASE_REGNUM] ? true : false);
7301 df_set_regs_ever_live (RETURN_REGNUM,
7302 clobbered_regs[RETURN_REGNUM] ? true : false);
7303 df_set_regs_ever_live (STACK_POINTER_REGNUM,
7304 clobbered_regs[STACK_POINTER_REGNUM] ? true : false);
7306 if (cfun->machine->base_reg)
7307 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true);
7310 /* Return true if it is legal to put a value with MODE into REGNO. */
7313 s390_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
7315 switch (REGNO_REG_CLASS (regno))
7318 if (REGNO_PAIR_OK (regno, mode))
7320 if (mode == SImode || mode == DImode)
7323 if (FLOAT_MODE_P (mode) && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
7328 if (FRAME_REGNO_P (regno) && mode == Pmode)
7333 if (REGNO_PAIR_OK (regno, mode))
7336 || (mode != TFmode && mode != TCmode && mode != TDmode))
7341 if (GET_MODE_CLASS (mode) == MODE_CC)
7345 if (REGNO_PAIR_OK (regno, mode))
7347 if (mode == SImode || mode == Pmode)
7358 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
7361 s390_hard_regno_rename_ok (unsigned int old_reg, unsigned int new_reg)
7363 /* Once we've decided upon a register to use as base register, it must
7364 no longer be used for any other purpose. */
7365 if (cfun->machine->base_reg)
7366 if (REGNO (cfun->machine->base_reg) == old_reg
7367 || REGNO (cfun->machine->base_reg) == new_reg)
7373 /* Maximum number of registers to represent a value of mode MODE
7374 in a register of class RCLASS. */
7377 s390_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
7382 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7383 return 2 * ((GET_MODE_SIZE (mode) / 2 + 8 - 1) / 8);
7385 return (GET_MODE_SIZE (mode) + 8 - 1) / 8;
7387 return (GET_MODE_SIZE (mode) + 4 - 1) / 4;
7391 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
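/* Worked example (sketch): for FP_REGS the computation above divides by
   the 8 byte register size, so a 16 byte TFmode value needs
   (16 + 8 - 1) / 8 == 2 floating point registers, i.e. a register
   pair.  */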
7394 /* Return true if register FROM can be eliminated via register TO. */
7397 s390_can_eliminate (const int from, const int to)
7399 /* On zSeries machines, we have not marked the base register as fixed.
7400 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM.
7401 If a function requires the base register, we say here that this
7402 elimination cannot be performed. This will cause reload to free
7403 up the base register (as if it were fixed). On the other hand,
7404 if the current function does *not* require the base register, we
7405 say here the elimination succeeds, which in turn allows reload
7406 to allocate the base register for any other purpose. */
7407 if (from == BASE_REGNUM && to == BASE_REGNUM)
7409 if (TARGET_CPU_ZARCH)
7411 s390_init_frame_layout ();
7412 return cfun->machine->base_reg == NULL_RTX;
7418 /* Everything else must point into the stack frame. */
7419 gcc_assert (to == STACK_POINTER_REGNUM
7420 || to == HARD_FRAME_POINTER_REGNUM);
7422 gcc_assert (from == FRAME_POINTER_REGNUM
7423 || from == ARG_POINTER_REGNUM
7424 || from == RETURN_ADDRESS_POINTER_REGNUM);
7426 /* Make sure we actually saved the return address. */
7427 if (from == RETURN_ADDRESS_POINTER_REGNUM)
7428 if (!crtl->calls_eh_return
7430 && !cfun_frame_layout.save_return_addr_p)
7436 /* Return offset between register FROM and TO initially after prologue. */
7439 s390_initial_elimination_offset (int from, int to)
7441 HOST_WIDE_INT offset;
7444 /* ??? Why are we called for non-eliminable pairs? */
7445 if (!s390_can_eliminate (from, to))
7450 case FRAME_POINTER_REGNUM:
7451 offset = (get_frame_size()
7452 + STACK_POINTER_OFFSET
7453 + crtl->outgoing_args_size);
7456 case ARG_POINTER_REGNUM:
7457 s390_init_frame_layout ();
7458 offset = cfun_frame_layout.frame_size + STACK_POINTER_OFFSET;
7461 case RETURN_ADDRESS_POINTER_REGNUM:
7462 s390_init_frame_layout ();
7463 index = RETURN_REGNUM - cfun_frame_layout.first_save_gpr_slot;
7464 gcc_assert (index >= 0);
7465 offset = cfun_frame_layout.frame_size + cfun_frame_layout.gprs_offset;
7466 offset += index * UNITS_PER_WORD;
7480 /* Emit insn to save fpr REGNUM at offset OFFSET relative
7481 to register BASE. Return generated insn. */
7484 save_fpr (rtx base, int offset, int regnum)
7487 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7489 if (regnum >= 16 && regnum <= (16 + FP_ARG_NUM_REG))
7490 set_mem_alias_set (addr, get_varargs_alias_set ());
7492 set_mem_alias_set (addr, get_frame_alias_set ());
7494 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
7497 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
7498 to register BASE. Return generated insn. */
7501 restore_fpr (rtx base, int offset, int regnum)
7504 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
7505 set_mem_alias_set (addr, get_frame_alias_set ());
7507 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
7510 /* Return true if REGNO is a global register, but not one
7511 of the special ones that need to be saved/restored anyway. */
7514 global_not_special_regno_p (int regno)
7516 return (global_regs[regno]
7517 /* These registers are special and need to be
7518 restored in any case. */
7519 && !(regno == STACK_POINTER_REGNUM
7520 || regno == RETURN_REGNUM
7521 || regno == BASE_REGNUM
7522 || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM)));
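/* Example (sketch, assuming the usual global register variable syntax):

     register long cookie asm ("r7");

   makes regno 7 global but not special, so it is exempted from
   save/restore; the stack pointer, return and base registers and the
   PIC register remain special and are restored in any case.  */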
7525 /* Generate insn to save registers FIRST to LAST into
7526 the register save area located at offset OFFSET
7527 relative to register BASE. */
7530 save_gprs (rtx base, int offset, int first, int last)
7532 rtx addr, insn, note;
7535 addr = plus_constant (base, offset);
7536 addr = gen_rtx_MEM (Pmode, addr);
7538 set_mem_alias_set (addr, get_frame_alias_set ());
7540 /* Special-case single register. */
7544 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
7546 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
7548 if (!global_not_special_regno_p (first))
7549 RTX_FRAME_RELATED_P (insn) = 1;
7554 insn = gen_store_multiple (addr,
7555 gen_rtx_REG (Pmode, first),
7556 GEN_INT (last - first + 1));
7558 if (first <= 6 && cfun->stdarg)
7559 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
7561 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0);
7564 set_mem_alias_set (mem, get_varargs_alias_set ());
7567 /* We need to set the FRAME_RELATED flag on all SETs
7568 inside the store-multiple pattern.
7570 However, we must not emit DWARF records for registers 2..5
7571 if they are stored for use by variable arguments ...
7573 ??? Unfortunately, it is not enough to simply not set the
7574 FRAME_RELATED flags for those SETs, because the first SET
7575 of the PARALLEL is always treated as if it had the flag
7576 set, even if it does not.  Therefore we emit a new pattern
7577 without those registers as a REG_FRAME_RELATED_EXPR note. */
7579 if (first >= 6 && !global_not_special_regno_p (first))
7581 rtx pat = PATTERN (insn);
7583 for (i = 0; i < XVECLEN (pat, 0); i++)
7584 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
7585 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat,
7587 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
7589 RTX_FRAME_RELATED_P (insn) = 1;
7595 for (start = first >= 6 ? first : 6; start <= last; start++)
7596 if (!global_not_special_regno_p (start))
7602 addr = plus_constant (base, offset + (start - first) * UNITS_PER_WORD);
7603 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
7604 gen_rtx_REG (Pmode, start),
7605 GEN_INT (last - start + 1));
7606 note = PATTERN (note);
7608 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
7610 for (i = 0; i < XVECLEN (note, 0); i++)
7611 if (GET_CODE (XVECEXP (note, 0, i)) == SET
7612 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note,
7614 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
7616 RTX_FRAME_RELATED_P (insn) = 1;
7622 /* Generate insn to restore registers FIRST to LAST from
7623 the register save area located at offset OFFSET
7624 relative to register BASE. */
7627 restore_gprs (rtx base, int offset, int first, int last)
7631 addr = plus_constant (base, offset);
7632 addr = gen_rtx_MEM (Pmode, addr);
7633 set_mem_alias_set (addr, get_frame_alias_set ());
7635 /* Special-case single register. */
7639 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
7641 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
7646 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
7648 GEN_INT (last - first + 1));
7652 /* Return insn sequence to load the GOT register. */
7654 static GTY(()) rtx got_symbol;
7656 s390_load_got (void)
7662 got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
7663 SYMBOL_REF_FLAGS (got_symbol) = SYMBOL_FLAG_LOCAL;
7668 if (TARGET_CPU_ZARCH)
7670 emit_move_insn (pic_offset_table_rtx, got_symbol);
7676 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, got_symbol),
7677 UNSPEC_LTREL_OFFSET);
7678 offset = gen_rtx_CONST (Pmode, offset);
7679 offset = force_const_mem (Pmode, offset);
7681 emit_move_insn (pic_offset_table_rtx, offset);
7683 offset = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, XEXP (offset, 0)),
7685 offset = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, offset);
7687 emit_move_insn (pic_offset_table_rtx, offset);
7690 insns = get_insns ();
7695 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
7696 and the change to the stack pointer. */
7699 s390_emit_stack_tie (void)
7701 rtx mem = gen_frame_mem (BLKmode,
7702 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
7704 emit_insn (gen_stack_tie (mem));
7707 /* Expand the prologue into a bunch of separate insns. */
7710 s390_emit_prologue (void)
7718 /* Complete frame layout. */
7720 s390_update_frame_layout ();
7722 /* Annotate all constant pool references to let the scheduler know
7723 they implicitly use the base register. */
7725 push_topmost_sequence ();
7727 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
7730 annotate_constant_pool_refs (&PATTERN (insn));
7731 df_insn_rescan (insn);
7734 pop_topmost_sequence ();
7736 /* Choose best register to use for temp use within prologue.
7737 See below for why TPF must use register 1.
7739 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
7740 && !current_function_is_leaf
7741 && !TARGET_TPF_PROFILING)
7742 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
7744 temp_reg = gen_rtx_REG (Pmode, 1);
7746 /* Save call saved gprs. */
7747 if (cfun_frame_layout.first_save_gpr != -1)
7749 insn = save_gprs (stack_pointer_rtx,
7750 cfun_frame_layout.gprs_offset +
7751 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr
7752 - cfun_frame_layout.first_save_gpr_slot),
7753 cfun_frame_layout.first_save_gpr,
7754 cfun_frame_layout.last_save_gpr);
7758 /* Dummy insn to mark literal pool slot. */
7760 if (cfun->machine->base_reg)
7761 emit_insn (gen_main_pool (cfun->machine->base_reg));
7763 offset = cfun_frame_layout.f0_offset;
7765 /* Save f0 and f2. */
7766 for (i = 0; i < 2; i++)
7768 if (cfun_fpr_bit_p (i))
7770 save_fpr (stack_pointer_rtx, offset, i + 16);
7773 else if (!TARGET_PACKED_STACK)
7777 /* Save f4 and f6. */
7778 offset = cfun_frame_layout.f4_offset;
7779 for (i = 2; i < 4; i++)
7781 if (cfun_fpr_bit_p (i))
7783 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7786 /* If f4 and f6 are call clobbered they are saved due to stdargs and
7787 therefore are not frame related. */
7788 if (!call_really_used_regs[i + 16])
7789 RTX_FRAME_RELATED_P (insn) = 1;
7791 else if (!TARGET_PACKED_STACK)
7795 if (TARGET_PACKED_STACK
7796 && cfun_save_high_fprs_p
7797 && cfun_frame_layout.f8_offset + cfun_frame_layout.high_fprs * 8 > 0)
7799 offset = (cfun_frame_layout.f8_offset
7800 + (cfun_frame_layout.high_fprs - 1) * 8);
7802 for (i = 15; i > 7 && offset >= 0; i--)
7803 if (cfun_fpr_bit_p (i))
7805 insn = save_fpr (stack_pointer_rtx, offset, i + 16);
7807 RTX_FRAME_RELATED_P (insn) = 1;
7810 if (offset >= cfun_frame_layout.f8_offset)
7814 if (!TARGET_PACKED_STACK)
7815 next_fpr = cfun_save_high_fprs_p ? 31 : 0;
7817 /* Decrement stack pointer. */
7819 if (cfun_frame_layout.frame_size > 0)
7821 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7824 if (s390_stack_size)
7826 HOST_WIDE_INT stack_guard;
7828 if (s390_stack_guard)
7829 stack_guard = s390_stack_guard;
7832 /* If no value for stack guard is provided the smallest power of 2
7833 larger than the current frame size is chosen. */
7835 while (stack_guard < cfun_frame_layout.frame_size)
7839 if (cfun_frame_layout.frame_size >= s390_stack_size)
7841 warning (0, "frame size of function %qs is "
7842 HOST_WIDE_INT_PRINT_DEC
7843 " bytes exceeding user provided stack limit of "
7844 HOST_WIDE_INT_PRINT_DEC " bytes. "
7845 "An unconditional trap is added.",
7846 current_function_name(), cfun_frame_layout.frame_size,
7848 emit_insn (gen_trap ());
7852 /* stack_guard has to be smaller than s390_stack_size.
7853 Otherwise we would emit an AND with zero which would
7854 not match the test under mask pattern. */
7855 if (stack_guard >= s390_stack_size)
7857 warning (0, "frame size of function %qs is "
7858 HOST_WIDE_INT_PRINT_DEC
7859 " bytes which is more than half the stack size. "
7860 "The dynamic check would not be reliable. "
7861 "No check emitted for this function.",
7862 current_function_name(),
7863 cfun_frame_layout.frame_size);
7867 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1)
7868 & ~(stack_guard - 1));
7870 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx,
7871 GEN_INT (stack_check_mask));
7873 emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode,
7875 t, const0_rtx, const0_rtx));
7877 emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode,
7879 t, const0_rtx, const0_rtx));
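/* Worked example (sketch): with -mstack-size=65536 and
   -mstack-guard=4096 the mask above is (65536 - 1) & ~(4096 - 1)
   == 0xf000, and the conditional trap fires once the masked bits of
   the stack pointer read as zero, i.e. once the stack pointer has
   dropped into the last stack_guard bytes of the allowed stack
   area.  */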
7884 if (s390_warn_framesize > 0
7885 && cfun_frame_layout.frame_size >= s390_warn_framesize)
7886 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes",
7887 current_function_name (), cfun_frame_layout.frame_size);
7889 if (s390_warn_dynamicstack_p && cfun->calls_alloca)
7890 warning (0, "%qs uses dynamic stack allocation", current_function_name ());
7892 /* Save incoming stack pointer into temp reg. */
7893 if (TARGET_BACKCHAIN || next_fpr)
7894 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
7896 /* Subtract frame size from stack pointer. */
7898 if (DISP_IN_RANGE (INTVAL (frame_off)))
7900 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7901 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7903 insn = emit_insn (insn);
7907 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
7908 frame_off = force_const_mem (Pmode, frame_off);
7910 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
7911 annotate_constant_pool_refs (&PATTERN (insn));
7914 RTX_FRAME_RELATED_P (insn) = 1;
7915 real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
7916 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
7917 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
7918 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
7921 /* Set backchain. */
7923 if (TARGET_BACKCHAIN)
7925 if (cfun_frame_layout.backchain_offset)
7926 addr = gen_rtx_MEM (Pmode,
7927 plus_constant (stack_pointer_rtx,
7928 cfun_frame_layout.backchain_offset));
7930 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
7931 set_mem_alias_set (addr, get_frame_alias_set ());
7932 insn = emit_insn (gen_move_insn (addr, temp_reg));
7935 /* If we support asynchronous exceptions (e.g. for Java),
7936 we need to make sure the backchain pointer is set up
7937 before any possibly trapping memory access. */
7939 if (TARGET_BACKCHAIN && flag_non_call_exceptions)
7941 addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
7942 emit_clobber (addr);
7946 /* Save fprs 8 - 15 (64 bit ABI). */
7948 if (cfun_save_high_fprs_p && next_fpr)
7950 /* If the stack might be accessed through a different register
7951 we have to make sure that the stack pointer decrement is not
7952 moved below the use of the stack slots. */
7953 s390_emit_stack_tie ();
7955 insn = emit_insn (gen_add2_insn (temp_reg,
7956 GEN_INT (cfun_frame_layout.f8_offset)));
7960 for (i = 24; i <= next_fpr; i++)
7961 if (cfun_fpr_bit_p (i - 16))
7963 rtx addr = plus_constant (stack_pointer_rtx,
7964 cfun_frame_layout.frame_size
7965 + cfun_frame_layout.f8_offset
7968 insn = save_fpr (temp_reg, offset, i);
7970 RTX_FRAME_RELATED_P (insn) = 1;
7971 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
7972 gen_rtx_SET (VOIDmode,
7973 gen_rtx_MEM (DFmode, addr),
7974 gen_rtx_REG (DFmode, i)));
7978 /* Set frame pointer, if needed. */
7980 if (frame_pointer_needed)
7982 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
7983 RTX_FRAME_RELATED_P (insn) = 1;
7986 /* Set up got pointer, if needed. */
7988 if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
7990 rtx insns = s390_load_got ();
7992 for (insn = insns; insn; insn = NEXT_INSN (insn))
7993 annotate_constant_pool_refs (&PATTERN (insn));
7998 if (TARGET_TPF_PROFILING)
8000 /* Generate a BAS instruction to serve as a function
8001 entry intercept to facilitate the use of tracing
8002 algorithms located at the branch target. */
8003 emit_insn (gen_prologue_tpf ());
8005 /* Emit a blockage here so that all code
8006 lies between the profiling mechanisms. */
8007 emit_insn (gen_blockage ());
8011 /* Expand the epilogue into a bunch of separate insns. */
8014 s390_emit_epilogue (bool sibcall)
8016 rtx frame_pointer, return_reg, cfa_restores = NULL_RTX;
8017 int area_bottom, area_top, offset = 0;
8022 if (TARGET_TPF_PROFILING)
8025 /* Generate a BAS instruction to serve as a function
8026 entry intercept to facilitate the use of tracing
8027 algorithms located at the branch target. */
8029 /* Emit a blockage here so that all code
8030 lies between the profiling mechanisms. */
8031 emit_insn (gen_blockage ());
8033 emit_insn (gen_epilogue_tpf ());
8036 /* Check whether to use frame or stack pointer for restore. */
8038 frame_pointer = (frame_pointer_needed
8039 ? hard_frame_pointer_rtx : stack_pointer_rtx);
8041 s390_frame_area (&area_bottom, &area_top);
8043 /* Check whether we can access the register save area.
8044 If not, increment the frame pointer as required. */
8046 if (area_top <= area_bottom)
8048 /* Nothing to restore. */
8050 else if (DISP_IN_RANGE (cfun_frame_layout.frame_size + area_bottom)
8051 && DISP_IN_RANGE (cfun_frame_layout.frame_size + area_top - 1))
8053 /* Area is in range. */
8054 offset = cfun_frame_layout.frame_size;
8058 rtx insn, frame_off, cfa;
8060 offset = area_bottom < 0 ? -area_bottom : 0;
8061 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset);
8063 cfa = gen_rtx_SET (VOIDmode, frame_pointer,
8064 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
8065 if (DISP_IN_RANGE (INTVAL (frame_off)))
8067 insn = gen_rtx_SET (VOIDmode, frame_pointer,
8068 gen_rtx_PLUS (Pmode, frame_pointer, frame_off));
8069 insn = emit_insn (insn);
8073 if (!CONST_OK_FOR_K (INTVAL (frame_off)))
8074 frame_off = force_const_mem (Pmode, frame_off);
8076 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
8077 annotate_constant_pool_refs (&PATTERN (insn));
8079 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa);
8080 RTX_FRAME_RELATED_P (insn) = 1;
8083 /* Restore call saved fprs. */
8087 if (cfun_save_high_fprs_p)
8089 next_offset = cfun_frame_layout.f8_offset;
8090 for (i = 24; i < 32; i++)
8092 if (cfun_fpr_bit_p (i - 16))
8094 restore_fpr (frame_pointer,
8095 offset + next_offset, i);
8097 = alloc_reg_note (REG_CFA_RESTORE,
8098 gen_rtx_REG (DFmode, i), cfa_restores);
8107 next_offset = cfun_frame_layout.f4_offset;
8108 for (i = 18; i < 20; i++)
8110 if (cfun_fpr_bit_p (i - 16))
8112 restore_fpr (frame_pointer,
8113 offset + next_offset, i);
8115 = alloc_reg_note (REG_CFA_RESTORE,
8116 gen_rtx_REG (DFmode, i), cfa_restores);
8119 else if (!TARGET_PACKED_STACK)
8125 /* Return register. */
8127 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
8129 /* Restore call saved gprs. */
8131 if (cfun_frame_layout.first_restore_gpr != -1)
8136 /* Check for global registers and save them to the stack
8137 locations from which they will later be restored. */
8139 for (i = cfun_frame_layout.first_restore_gpr;
8140 i <= cfun_frame_layout.last_restore_gpr;
8143 if (global_not_special_regno_p (i))
8145 addr = plus_constant (frame_pointer,
8146 offset + cfun_frame_layout.gprs_offset
8147 + (i - cfun_frame_layout.first_save_gpr_slot)
8149 addr = gen_rtx_MEM (Pmode, addr);
8150 set_mem_alias_set (addr, get_frame_alias_set ());
8151 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
8155 = alloc_reg_note (REG_CFA_RESTORE,
8156 gen_rtx_REG (Pmode, i), cfa_restores);
8161 /* Fetch return address from stack before load multiple;
8162 this helps scheduling. */
8164 if (cfun_frame_layout.save_return_addr_p
8165 || (cfun_frame_layout.first_restore_gpr < BASE_REGNUM
8166 && cfun_frame_layout.last_restore_gpr > RETURN_REGNUM))
8168 int return_regnum = find_unused_clobbered_reg();
8171 return_reg = gen_rtx_REG (Pmode, return_regnum);
8173 addr = plus_constant (frame_pointer,
8174 offset + cfun_frame_layout.gprs_offset
8176 - cfun_frame_layout.first_save_gpr_slot)
8178 addr = gen_rtx_MEM (Pmode, addr);
8179 set_mem_alias_set (addr, get_frame_alias_set ());
8180 emit_move_insn (return_reg, addr);
8184 insn = restore_gprs (frame_pointer,
8185 offset + cfun_frame_layout.gprs_offset
8186 + (cfun_frame_layout.first_restore_gpr
8187 - cfun_frame_layout.first_save_gpr_slot)
8189 cfun_frame_layout.first_restore_gpr,
8190 cfun_frame_layout.last_restore_gpr);
8191 insn = emit_insn (insn);
8192 REG_NOTES (insn) = cfa_restores;
8193 add_reg_note (insn, REG_CFA_DEF_CFA,
8194 plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET));
8195 RTX_FRAME_RELATED_P (insn) = 1;
8201 /* Return to caller. */
8203 p = rtvec_alloc (2);
8205 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
8206 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
8207 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
8212 /* Return the size in bytes of a function argument of
8213 type TYPE and/or mode MODE. At least one of TYPE or
8214 MODE must be specified. */
8217 s390_function_arg_size (enum machine_mode mode, const_tree type)
8220 return int_size_in_bytes (type);
8222 /* No type info available for some library calls ... */
8223 if (mode != BLKmode)
8224 return GET_MODE_SIZE (mode);
8226 /* If we have neither type nor mode, abort */
8230 /* Return true if a function argument of type TYPE and mode MODE
8231 is to be passed in a floating-point register, if available. */
8234 s390_function_arg_float (enum machine_mode mode, tree type)
8236 int size = s390_function_arg_size (mode, type);
8240 /* Soft-float changes the ABI: no floating-point registers are used. */
8241 if (TARGET_SOFT_FLOAT)
8244 /* No type info available for some library calls ... */
8246 return mode == SFmode || mode == DFmode || mode == SDmode || mode == DDmode;
8248 /* The ABI says that record types with a single member are treated
8249 just like that member would be. */
8250 while (TREE_CODE (type) == RECORD_TYPE)
8252 tree field, single = NULL_TREE;
8254 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
8256 if (TREE_CODE (field) != FIELD_DECL)
8259 if (single == NULL_TREE)
8260 single = TREE_TYPE (field);
8265 if (single == NULL_TREE)
8271 return TREE_CODE (type) == REAL_TYPE;
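/* Example (sketch): by the unwrapping rule above, a type such as

     struct wrap { struct inner { double d; } i; };

   reduces to its single double member and is passed in a floating
   point register just like a plain double.  */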
8274 /* Return true if a function argument of type TYPE and mode MODE
8275 is to be passed in an integer register, or a pair of integer
8276 registers, if available. */
8279 s390_function_arg_integer (enum machine_mode mode, tree type)
8281 int size = s390_function_arg_size (mode, type);
8285 /* No type info available for some library calls ... */
8287 return GET_MODE_CLASS (mode) == MODE_INT
8288 || (TARGET_SOFT_FLOAT && SCALAR_FLOAT_MODE_P (mode));
8290 /* We accept small integral (and similar) types. */
8291 if (INTEGRAL_TYPE_P (type)
8292 || POINTER_TYPE_P (type)
8293 || TREE_CODE (type) == OFFSET_TYPE
8294 || (TARGET_SOFT_FLOAT && TREE_CODE (type) == REAL_TYPE))
8297 /* We also accept structs of size 1, 2, 4, 8 that are not
8298 passed in floating-point registers. */
8299 if (AGGREGATE_TYPE_P (type)
8300 && exact_log2 (size) >= 0
8301 && !s390_function_arg_float (mode, type))
8307 /* Return 1 if a function argument of type TYPE and mode MODE
8308 is to be passed by reference. The ABI specifies that only
8309 structures of size 1, 2, 4, or 8 bytes are passed by value,
8310 all other structures (and complex numbers) are passed by
8311 reference. */
8314 s390_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
8315 enum machine_mode mode, const_tree type,
8316 bool named ATTRIBUTE_UNUSED)
8318 int size = s390_function_arg_size (mode, type);
8324 if (AGGREGATE_TYPE_P (type) && exact_log2 (size) < 0)
8327 if (TREE_CODE (type) == COMPLEX_TYPE
8328 || TREE_CODE (type) == VECTOR_TYPE)
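/* Examples (sketch) of how these rules classify arguments:

     struct { char c[3]; }   size 3, not a power of two: by reference
     struct { char c[4]; }   size 4: by value in a gpr
     _Complex double         COMPLEX_TYPE: always by reference  */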
8335 /* Update the data in CUM to advance over an argument of mode MODE and
8336 data type TYPE. (TYPE is null for libcalls where that information
8337 may not be available.)  The boolean NAMED specifies whether the
8338 argument is a named argument (as opposed to an unnamed argument
8339 matching an ellipsis). */
8342 s390_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
8343 tree type, int named ATTRIBUTE_UNUSED)
8345 if (s390_function_arg_float (mode, type))
8349 else if (s390_function_arg_integer (mode, type))
8351 int size = s390_function_arg_size (mode, type);
8352 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
8358 /* Define where to put the arguments to a function.
8359 Value is zero to push the argument on the stack,
8360 or a hard register in which to store the argument.
8362 MODE is the argument's machine mode.
8363 TYPE is the data type of the argument (as a tree).
8364 This is null for libcalls where that information may
8365 not be available.
8366 CUM is a variable of type CUMULATIVE_ARGS which gives info about
8367 the preceding args and about the function being called.
8368 NAMED is nonzero if this argument is a named parameter
8369 (otherwise it is an extra parameter matching an ellipsis).
8371 On S/390, we use general purpose registers 2 through 6 to
8372 pass integer, pointer, and certain structure arguments, and
8373 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
8374 to pass floating point arguments. All remaining arguments
8375 are pushed to the stack. */
8378 s390_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
8379 int named ATTRIBUTE_UNUSED)
8381 if (s390_function_arg_float (mode, type))
8383 if (cum->fprs + 1 > FP_ARG_NUM_REG)
8386 return gen_rtx_REG (mode, cum->fprs + 16);
8388 else if (s390_function_arg_integer (mode, type))
8390 int size = s390_function_arg_size (mode, type);
8391 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
8393 if (cum->gprs + n_gprs > GP_ARG_NUM_REG)
8396 return gen_rtx_REG (mode, cum->gprs + 2);
8399 /* After the real arguments, expand_call calls us once again
8400 with a void_type_node type. Whatever we return here is
8401 passed as operand 2 to the call expanders.
8403 We don't need this feature ... */
8404 else if (type == void_type_node)
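/* Example (sketch, 64 bit): for a declaration

     void f (int a, double d, long b);

   a is passed in %r2, d in %f0 and b in %r3; the gpr and fpr
   sequences advance independently of each other.  */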
8410 /* Return true if return values of type TYPE should be returned
8411 in a memory buffer whose address is passed by the caller as
8412 hidden first argument. */
8415 s390_return_in_memory (const_tree type, const_tree fundecl ATTRIBUTE_UNUSED)
8417 /* We accept small integral (and similar) types. */
8418 if (INTEGRAL_TYPE_P (type)
8419 || POINTER_TYPE_P (type)
8420 || TREE_CODE (type) == OFFSET_TYPE
8421 || TREE_CODE (type) == REAL_TYPE)
8422 return int_size_in_bytes (type) > 8;
8424 /* Aggregates and similar constructs are always returned
8425 in memory. */
8426 if (AGGREGATE_TYPE_P (type)
8427 || TREE_CODE (type) == COMPLEX_TYPE
8428 || TREE_CODE (type) == VECTOR_TYPE)
8431 /* ??? We get called on all sorts of random stuff from
8432 aggregate_value_p. We can't abort, but it's not clear
8433 what's safe to return. Pretend it's a struct I guess. */
8437 /* Function arguments and return values are promoted to word size. */
8439 static enum machine_mode
8440 s390_promote_function_mode (const_tree type, enum machine_mode mode,
8442 const_tree fntype ATTRIBUTE_UNUSED,
8443 int for_return ATTRIBUTE_UNUSED)
8445 if (INTEGRAL_MODE_P (mode)
8446 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
8448 if (POINTER_TYPE_P (type))
8449 *punsignedp = POINTERS_EXTEND_UNSIGNED;
8456 /* Define where to return a (scalar) value of type TYPE.
8457 If TYPE is null, define where to return a (scalar)
8458 value of mode MODE from a libcall. */
8461 s390_function_value (const_tree type, const_tree fn, enum machine_mode mode)
8465 int unsignedp = TYPE_UNSIGNED (type);
8466 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, fn, 1);
8469 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode));
8470 gcc_assert (GET_MODE_SIZE (mode) <= 8);
8472 if (TARGET_HARD_FLOAT && SCALAR_FLOAT_MODE_P (mode))
8473 return gen_rtx_REG (mode, 16);
8475 return gen_rtx_REG (mode, 2);
8479 /* Create and return the va_list datatype.
8481 On S/390, va_list is an array type equivalent to
8483 typedef struct __va_list_tag
8484 {
8485 long __gpr;
8486 long __fpr;
8487 void *__overflow_arg_area;
8488 void *__reg_save_area;
8489 } va_list[1];
8491 where __gpr and __fpr hold the number of general purpose
8492 or floating point arguments used up to now, respectively,
8493 __overflow_arg_area points to the stack location of the
8494 next argument passed on the stack, and __reg_save_area
8495 always points to the start of the register area in the
8496 call frame of the current function. The function prologue
8497 saves all registers used for argument passing into this
8498 area if the function uses variable arguments. */
8501 s390_build_builtin_va_list (void)
8503 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
8505 record = lang_hooks.types.make_type (RECORD_TYPE);
8508 build_decl (BUILTINS_LOCATION,
8509 TYPE_DECL, get_identifier ("__va_list_tag"), record);
8511 f_gpr = build_decl (BUILTINS_LOCATION,
8512 FIELD_DECL, get_identifier ("__gpr"),
8513 long_integer_type_node);
8514 f_fpr = build_decl (BUILTINS_LOCATION,
8515 FIELD_DECL, get_identifier ("__fpr"),
8516 long_integer_type_node);
8517 f_ovf = build_decl (BUILTINS_LOCATION,
8518 FIELD_DECL, get_identifier ("__overflow_arg_area"),
8520 f_sav = build_decl (BUILTINS_LOCATION,
8521 FIELD_DECL, get_identifier ("__reg_save_area"),
8524 va_list_gpr_counter_field = f_gpr;
8525 va_list_fpr_counter_field = f_fpr;
8527 DECL_FIELD_CONTEXT (f_gpr) = record;
8528 DECL_FIELD_CONTEXT (f_fpr) = record;
8529 DECL_FIELD_CONTEXT (f_ovf) = record;
8530 DECL_FIELD_CONTEXT (f_sav) = record;
8532 TREE_CHAIN (record) = type_decl;
8533 TYPE_NAME (record) = type_decl;
8534 TYPE_FIELDS (record) = f_gpr;
8535 TREE_CHAIN (f_gpr) = f_fpr;
8536 TREE_CHAIN (f_fpr) = f_ovf;
8537 TREE_CHAIN (f_ovf) = f_sav;
8539 layout_type (record);
8541 /* The correct type is an array type of one element. */
8542 return build_array_type (record, build_index_type (size_zero_node));
8545 /* Implement va_start by filling the va_list structure VALIST.
8546 STDARG_P is always true, and ignored.
8547 NEXTARG points to the first anonymous stack argument.
8549 The following global variables are used to initialize
8550 the va_list structure:
8552 crtl->args.info:
8553 holds number of gprs and fprs used for named arguments.
8554 crtl->args.arg_offset_rtx:
8555 holds the offset of the first anonymous stack argument
8556 (relative to the virtual arg pointer). */
8559 s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
8561 HOST_WIDE_INT n_gpr, n_fpr;
8563 tree f_gpr, f_fpr, f_ovf, f_sav;
8564 tree gpr, fpr, ovf, sav, t;
8566 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8567 f_fpr = TREE_CHAIN (f_gpr);
8568 f_ovf = TREE_CHAIN (f_fpr);
8569 f_sav = TREE_CHAIN (f_ovf);
8571 valist = build_va_arg_indirect_ref (valist);
8572 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8573 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8574 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8575 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8577 /* Count number of gp and fp argument registers used. */
8579 n_gpr = crtl->args.info.gprs;
8580 n_fpr = crtl->args.info.fprs;
8582 if (cfun->va_list_gpr_size)
8584 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
8585 build_int_cst (NULL_TREE, n_gpr));
8586 TREE_SIDE_EFFECTS (t) = 1;
8587 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8590 if (cfun->va_list_fpr_size)
8592 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
8593 build_int_cst (NULL_TREE, n_fpr));
8594 TREE_SIDE_EFFECTS (t) = 1;
8595 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8598 /* Find the overflow area. */
8599 if (n_gpr + cfun->va_list_gpr_size > GP_ARG_NUM_REG
8600 || n_fpr + cfun->va_list_fpr_size > FP_ARG_NUM_REG)
8602 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8604 off = INTVAL (crtl->args.arg_offset_rtx);
8605 off = off < 0 ? 0 : off;
8606 if (TARGET_DEBUG_ARG)
8607 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
8608 (int)n_gpr, (int)n_fpr, off);
8610 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
8612 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
8613 TREE_SIDE_EFFECTS (t) = 1;
8614 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8617 /* Find the register save area. */
8618 if ((cfun->va_list_gpr_size && n_gpr < GP_ARG_NUM_REG)
8619 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
8621 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
8622 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8623 size_int (-RETURN_REGNUM * UNITS_PER_WORD));
8625 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
8626 TREE_SIDE_EFFECTS (t) = 1;
8627 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
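/* Conceptual effect (sketch, not the trees actually built): for a
   64 bit varargs function whose named arguments consumed two gprs and
   one fpr, va_start (ap, last) roughly performs

     ap->__gpr = 2;
     ap->__fpr = 1;
     ap->__overflow_arg_area = <virtual incoming args> + <arg offset>;
     ap->__reg_save_area = <start of the register save area>;  */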
8631 /* Implement va_arg by updating the va_list structure
8632 VALIST as required to retrieve an argument of type
8633 TYPE, and returning that argument.
8635 Generates code equivalent to:
8637 if (integral value) {
8638 if (size <= 4 && args.gpr < 5 ||
8639 size > 4 && args.gpr < 4 )
8640 ret = args.reg_save_area[args.gpr+8]
8641 else
8642 ret = *args.overflow_arg_area++;
8643 } else if (float value) {
8644 if (args.fpr < 2)
8645 ret = args.reg_save_area[args.fpr+64]
8646 else
8647 ret = *args.overflow_arg_area++;
8648 } else if (aggregate value) {
8649 if (args.gpr < 5)
8650 ret = *args.reg_save_area[args.gpr]
8651 else
8652 ret = **args.overflow_arg_area++;
8653 } */
8656 s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
8657 gimple_seq *post_p ATTRIBUTE_UNUSED)
8659 tree f_gpr, f_fpr, f_ovf, f_sav;
8660 tree gpr, fpr, ovf, sav, reg, t, u;
8661 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
8662 tree lab_false, lab_over, addr;
8664 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8665 f_fpr = TREE_CHAIN (f_gpr);
8666 f_ovf = TREE_CHAIN (f_fpr);
8667 f_sav = TREE_CHAIN (f_ovf);
8669 valist = build_va_arg_indirect_ref (valist);
8670 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8671 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
8672 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
8674 /* The tree for args* cannot be shared between gpr/fpr and ovf since
8675 both appear on a lhs. */
8676 valist = unshare_expr (valist);
8677 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
8679 size = int_size_in_bytes (type);
8681 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8683 if (TARGET_DEBUG_ARG)
8685 fprintf (stderr, "va_arg: aggregate type");
8689 /* Aggregates are passed by reference. */
8694 /* kernel stack layout on 31 bit: It is assumed here that no padding
8695 will be added by s390_frame_info because for va_args an even number
8696 of gprs always has to be saved (r15-r2 = 14 regs). */
8697 sav_ofs = 2 * UNITS_PER_WORD;
8698 sav_scale = UNITS_PER_WORD;
8699 size = UNITS_PER_WORD;
8700 max_reg = GP_ARG_NUM_REG - n_reg;
8702 else if (s390_function_arg_float (TYPE_MODE (type), type))
8704 if (TARGET_DEBUG_ARG)
8706 fprintf (stderr, "va_arg: float type");
8710 /* FP args go in FP registers, if present. */
8714 sav_ofs = 16 * UNITS_PER_WORD;
8716 max_reg = FP_ARG_NUM_REG - n_reg;
8720 if (TARGET_DEBUG_ARG)
8722 fprintf (stderr, "va_arg: other type");
8726 /* Otherwise into GP registers. */
8729 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
8731 /* kernel stack layout on 31 bit: It is assumed here that no padding
8732 will be added by s390_frame_info because for va_args an even number
8733 of gprs always has to be saved (r15-r2 = 14 regs). */
8734 sav_ofs = 2 * UNITS_PER_WORD;
8736 if (size < UNITS_PER_WORD)
8737 sav_ofs += UNITS_PER_WORD - size;
8739 sav_scale = UNITS_PER_WORD;
8740 max_reg = GP_ARG_NUM_REG - n_reg;
8743 /* Pull the value out of the saved registers ... */
8745 lab_false = create_artificial_label (UNKNOWN_LOCATION);
8746 lab_over = create_artificial_label (UNKNOWN_LOCATION);
8747 addr = create_tmp_var (ptr_type_node, "addr");
8749 t = fold_convert (TREE_TYPE (reg), size_int (max_reg));
8750 t = build2 (GT_EXPR, boolean_type_node, reg, t);
8751 u = build1 (GOTO_EXPR, void_type_node, lab_false);
8752 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
8753 gimplify_and_add (t, pre_p);
8755 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
8756 size_int (sav_ofs));
8757 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
8758 fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
8759 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
8761 gimplify_assign (addr, t, pre_p);
8763 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
8765 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
8768 /* ... Otherwise out of the overflow area. */
8771 if (size < UNITS_PER_WORD)
8772 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8773 size_int (UNITS_PER_WORD - size));
8775 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
8777 gimplify_assign (addr, t, pre_p);
8779 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
8781 gimplify_assign (ovf, t, pre_p);
8783 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
8786 /* Increment register save count. */
8788 u = build2 (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
8789 fold_convert (TREE_TYPE (reg), size_int (n_reg)));
8790 gimplify_and_add (u, pre_p);
8794 t = build_pointer_type_for_mode (build_pointer_type (type),
8796 addr = fold_convert (t, addr);
8797 addr = build_va_arg_indirect_ref (addr);
8801 t = build_pointer_type_for_mode (type, ptr_mode, true);
8802 addr = fold_convert (t, addr);
8805 return build_va_arg_indirect_ref (addr);
8813 S390_BUILTIN_THREAD_POINTER,
8814 S390_BUILTIN_SET_THREAD_POINTER,
8819 static enum insn_code const code_for_builtin_64[S390_BUILTIN_max] = {
8824 static enum insn_code const code_for_builtin_31[S390_BUILTIN_max] = {
8830 s390_init_builtins (void)
8834 ftype = build_function_type (ptr_type_node, void_list_node);
8835 add_builtin_function ("__builtin_thread_pointer", ftype,
8836 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
8839 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
8840 add_builtin_function ("__builtin_set_thread_pointer", ftype,
8841 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
8845 /* Expand an expression EXP that calls a built-in function,
8846 with result going to TARGET if that's convenient
8847 (and in mode MODE if that's convenient).
8848 SUBTARGET may be used as the target for computing one of EXP's operands.
8849 IGNORE is nonzero if the value is to be ignored. */
8852 s390_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
8853 enum machine_mode mode ATTRIBUTE_UNUSED,
8854 int ignore ATTRIBUTE_UNUSED)
8858 enum insn_code const *code_for_builtin =
8859 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
8861 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
8862 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
8863 enum insn_code icode;
8864 rtx op[MAX_ARGS], pat;
8868 call_expr_arg_iterator iter;
8870 if (fcode >= S390_BUILTIN_max)
8871 internal_error ("bad builtin fcode");
8872 icode = code_for_builtin[fcode];
8874 internal_error ("bad builtin fcode");
8876 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
8879 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
8881 const struct insn_operand_data *insn_op;
8883 if (arg == error_mark_node)
8885 if (arity > MAX_ARGS)
8888 insn_op = &insn_data[icode].operand[arity + nonvoid];
8890 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL);
8892 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
8893 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
8899 enum machine_mode tmode = insn_data[icode].operand[0].mode;
8901 || GET_MODE (target) != tmode
8902 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
8903 target = gen_reg_rtx (tmode);
8909 pat = GEN_FCN (icode) (target);
8913 pat = GEN_FCN (icode) (target, op[0]);
8915 pat = GEN_FCN (icode) (op[0]);
8918 pat = GEN_FCN (icode) (target, op[0], op[1]);
8934 /* Output assembly code for the trampoline template to
8935 stdio stream FILE.
8937 On S/390, we use gpr 1 internally in the trampoline code;
8938 gpr 0 is used to hold the static chain. */
8941 s390_asm_trampoline_template (FILE *file)
8944 op[0] = gen_rtx_REG (Pmode, 0);
8945 op[1] = gen_rtx_REG (Pmode, 1);
8949 output_asm_insn ("basr\t%1,0", op);
8950 output_asm_insn ("lmg\t%0,%1,14(%1)", op);
8951 output_asm_insn ("br\t%1", op);
8952 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 10));
8956 output_asm_insn ("basr\t%1,0", op);
8957 output_asm_insn ("lm\t%0,%1,6(%1)", op);
8958 output_asm_insn ("br\t%1", op);
8959 ASM_OUTPUT_SKIP (file, (HOST_WIDE_INT)(TRAMPOLINE_SIZE - 8));
8963 /* Emit RTL insns to initialize the variable parts of a trampoline.
8964 FNADDR is an RTX for the address of the function's pure code.
8965 CXT is an RTX for the static chain value for the function. */
8968 s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
8970 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
8973 emit_block_move (m_tramp, assemble_trampoline_template (),
8974 GEN_INT (2*UNITS_PER_WORD), BLOCK_OP_NORMAL);
8976 mem = adjust_address (m_tramp, Pmode, 2*UNITS_PER_WORD);
8977 emit_move_insn (mem, cxt);
8978 mem = adjust_address (m_tramp, Pmode, 3*UNITS_PER_WORD);
8979 emit_move_insn (mem, fnaddr);
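/* Resulting trampoline layout in the 31 bit case (sketch):

     offset  0:  basr %r1,0            %r1 = address of the lm insn
     offset  2:  lm   %r0,%r1,6(%r1)   load the two words at offset 8
     offset  6:  br   %r1              jump to the function
     offset  8:  <static chain>        written by s390_trampoline_init
     offset 12:  <function address>    written by s390_trampoline_init

   The 64 bit variant works analogously with lmg and 8 byte slots at
   offsets 16 and 24.  */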
8982 /* Output assembler code to FILE to increment profiler label # LABELNO
8983 for profiling a function entry. */
8986 s390_function_profiler (FILE *file, int labelno)
8991 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
8993 fprintf (file, "# function profiler \n");
8995 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
8996 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8997 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
8999 op[2] = gen_rtx_REG (Pmode, 1);
9000 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
9001 SYMBOL_REF_FLAGS (op[3]) = SYMBOL_FLAG_LOCAL;
9003 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
9006 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), UNSPEC_PLT);
9007 op[4] = gen_rtx_CONST (Pmode, op[4]);
9012 output_asm_insn ("stg\t%0,%1", op);
9013 output_asm_insn ("larl\t%2,%3", op);
9014 output_asm_insn ("brasl\t%0,%4", op);
9015 output_asm_insn ("lg\t%0,%1", op);
9019 op[6] = gen_label_rtx ();
9021 output_asm_insn ("st\t%0,%1", op);
9022 output_asm_insn ("bras\t%2,%l6", op);
9023 output_asm_insn (".long\t%4", op);
9024 output_asm_insn (".long\t%3", op);
9025 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
9026 output_asm_insn ("l\t%0,0(%2)", op);
9027 output_asm_insn ("l\t%2,4(%2)", op);
9028 output_asm_insn ("basr\t%0,%0", op);
9029 output_asm_insn ("l\t%0,%1", op);
9033 op[5] = gen_label_rtx ();
9034 op[6] = gen_label_rtx ();
9036 output_asm_insn ("st\t%0,%1", op);
9037 output_asm_insn ("bras\t%2,%l6", op);
9038 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[5]));
9039 output_asm_insn (".long\t%4-%l5", op);
9040 output_asm_insn (".long\t%3-%l5", op);
9041 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[6]));
9042 output_asm_insn ("lr\t%0,%2", op);
9043 output_asm_insn ("a\t%0,0(%2)", op);
9044 output_asm_insn ("a\t%2,4(%2)", op);
9045 output_asm_insn ("basr\t%0,%0", op);
9046 output_asm_insn ("l\t%0,%1", op);
9050 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
9051 into its SYMBOL_REF_FLAGS. */
9054 s390_encode_section_info (tree decl, rtx rtl, int first)
9056 default_encode_section_info (decl, rtl, first);
9058 if (TREE_CODE (decl) == VAR_DECL)
9060 /* If a variable has a forced alignment to < 2 bytes, mark it
9061 with SYMBOL_FLAG_ALIGN1 to prevent it from being used as LARL
9062 operand. */
9063 if (DECL_USER_ALIGN (decl) && DECL_ALIGN (decl) < 16)
9064 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_ALIGN1;
9065 if (!DECL_SIZE (decl)
9066 || !DECL_ALIGN (decl)
9067 || !host_integerp (DECL_SIZE (decl), 0)
9068 || (DECL_ALIGN (decl) <= 64
9069 && DECL_ALIGN (decl) != tree_low_cst (DECL_SIZE (decl), 0)))
9070 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
9073 /* Literal pool references don't have a decl so they are handled
9074 differently here. We rely on the information in the MEM_ALIGN
9075 entry to decide upon natural alignment. */
9077 && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
9078 && TREE_CONSTANT_POOL_ADDRESS_P (XEXP (rtl, 0))
9079 && (MEM_ALIGN (rtl) == 0
9080 || GET_MODE_BITSIZE (GET_MODE (rtl)) == 0
9081 || MEM_ALIGN (rtl) < GET_MODE_BITSIZE (GET_MODE (rtl))))
9082 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= SYMBOL_FLAG_NOT_NATURALLY_ALIGNED;
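/* Example (sketch): for

     char c __attribute__ ((aligned (1)));

   DECL_USER_ALIGN is set and DECL_ALIGN is below 16 bits, so the
   symbol is marked SYMBOL_FLAG_ALIGN1 and is never used as a LARL
   operand, LARL being able to address even addresses only.  */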
9085 /* Output thunk to FILE that implements a C++ virtual function call (with
9086 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
9087 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
9088 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
9089 relative to the resulting this pointer. */
9092 s390_output_mi_thunk (FILE *file, tree thunk ATTRIBUTE_UNUSED,
9093 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
9099 /* Make sure unwind info is emitted for the thunk if needed. */
9100 final_start_function (emit_barrier (), file, 1);
9102 /* Operand 0 is the target function. */
9103 op[0] = XEXP (DECL_RTL (function), 0);
9104 if (flag_pic && !SYMBOL_REF_LOCAL_P (op[0]))
9107 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]),
9108 TARGET_64BIT ? UNSPEC_PLT : UNSPEC_GOT);
9109 op[0] = gen_rtx_CONST (Pmode, op[0]);
9112 /* Operand 1 is the 'this' pointer. */
9113 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
9114 op[1] = gen_rtx_REG (Pmode, 3);
9116 op[1] = gen_rtx_REG (Pmode, 2);
9118 /* Operand 2 is the delta. */
9119 op[2] = GEN_INT (delta);
9121 /* Operand 3 is the vcall_offset. */
9122 op[3] = GEN_INT (vcall_offset);
9124 /* Operand 4 is the temporary register. */
9125 op[4] = gen_rtx_REG (Pmode, 1);
9127 /* Operands 5 to 8 can be used as labels. */
9133 /* Operand 9 can be used as a temporary register. */
9136 /* Generate code. */
9139 /* Setup literal pool pointer if required. */
9140 if ((!DISP_IN_RANGE (delta)
9141 && !CONST_OK_FOR_K (delta)
9142 && !CONST_OK_FOR_Os (delta))
9143 || (!DISP_IN_RANGE (vcall_offset)
9144 && !CONST_OK_FOR_K (vcall_offset)
9145 && !CONST_OK_FOR_Os (vcall_offset)))
9147 op[5] = gen_label_rtx ();
9148 output_asm_insn ("larl\t%4,%5", op);
9151 /* Add DELTA to this pointer. */
9154 if (CONST_OK_FOR_J (delta))
9155 output_asm_insn ("la\t%1,%2(%1)", op);
9156 else if (DISP_IN_RANGE (delta))
9157 output_asm_insn ("lay\t%1,%2(%1)", op);
9158 else if (CONST_OK_FOR_K (delta))
9159 output_asm_insn ("aghi\t%1,%2", op);
9160 else if (CONST_OK_FOR_Os (delta))
9161 output_asm_insn ("agfi\t%1,%2", op);
9164 op[6] = gen_label_rtx ();
9165 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
9169 /* Perform vcall adjustment. */
9172 if (DISP_IN_RANGE (vcall_offset))
9174 output_asm_insn ("lg\t%4,0(%1)", op);
9175 output_asm_insn ("ag\t%1,%3(%4)", op);
9177 else if (CONST_OK_FOR_K (vcall_offset))
9179 output_asm_insn ("lghi\t%4,%3", op);
9180 output_asm_insn ("ag\t%4,0(%1)", op);
9181 output_asm_insn ("ag\t%1,0(%4)", op);
9183 else if (CONST_OK_FOR_Os (vcall_offset))
9185 output_asm_insn ("lgfi\t%4,%3", op);
9186 output_asm_insn ("ag\t%4,0(%1)", op);
9187 output_asm_insn ("ag\t%1,0(%4)", op);
9191 op[7] = gen_label_rtx ();
9192 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
9193 output_asm_insn ("ag\t%4,0(%1)", op);
9194 output_asm_insn ("ag\t%1,0(%4)", op);
9198 /* Jump to target. */
9199 output_asm_insn ("jg\t%0", op);
9201 /* Output literal pool if required. */
9204 output_asm_insn (".align\t4", op);
9205 targetm.asm_out.internal_label (file, "L",
9206 CODE_LABEL_NUMBER (op[5]));
9210 targetm.asm_out.internal_label (file, "L",
9211 CODE_LABEL_NUMBER (op[6]));
9212 output_asm_insn (".long\t%2", op);
9216 targetm.asm_out.internal_label (file, "L",
9217 CODE_LABEL_NUMBER (op[7]));
9218 output_asm_insn (".long\t%3", op);
9223 /* Setup base pointer if required. */
9225 || (!DISP_IN_RANGE (delta)
9226 && !CONST_OK_FOR_K (delta)
9227 && !CONST_OK_FOR_Os (delta))
9228 || (!DISP_IN_RANGE (delta)
9229 && !CONST_OK_FOR_K (vcall_offset)
9230 && !CONST_OK_FOR_Os (vcall_offset)))
9232 op[5] = gen_label_rtx ();
9233 output_asm_insn ("basr\t%4,0", op);
9234 targetm.asm_out.internal_label (file, "L",
9235 CODE_LABEL_NUMBER (op[5]));
9238 /* Add DELTA to this pointer. */
9241 if (CONST_OK_FOR_J (delta))
9242 output_asm_insn ("la\t%1,%2(%1)", op);
9243 else if (DISP_IN_RANGE (delta))
9244 output_asm_insn ("lay\t%1,%2(%1)", op);
9245 else if (CONST_OK_FOR_K (delta))
9246 output_asm_insn ("ahi\t%1,%2", op);
9247 else if (CONST_OK_FOR_Os (delta))
9248 output_asm_insn ("afi\t%1,%2", op);
9251 op[6] = gen_label_rtx ();
9252 output_asm_insn ("a\t%1,%6-%5(%4)", op);
9256 /* Perform vcall adjustment. */
9259 if (CONST_OK_FOR_J (vcall_offset))
9261 output_asm_insn ("l\t%4,0(%1)", op);
9262 output_asm_insn ("a\t%1,%3(%4)", op);
9264 else if (DISP_IN_RANGE (vcall_offset))
9266 output_asm_insn ("l\t%4,0(%1)", op);
9267 output_asm_insn ("ay\t%1,%3(%4)", op);
9269 else if (CONST_OK_FOR_K (vcall_offset))
9271 output_asm_insn ("lhi\t%4,%3", op);
9272 output_asm_insn ("a\t%4,0(%1)", op);
9273 output_asm_insn ("a\t%1,0(%4)", op);
9275 else if (CONST_OK_FOR_Os (vcall_offset))
9277 output_asm_insn ("iilf\t%4,%3", op);
9278 output_asm_insn ("a\t%4,0(%1)", op);
9279 output_asm_insn ("a\t%1,0(%4)", op);
9283 op[7] = gen_label_rtx ();
9284 output_asm_insn ("l\t%4,%7-%5(%4)", op);
9285 output_asm_insn ("a\t%4,0(%1)", op);
9286 output_asm_insn ("a\t%1,0(%4)", op);
9289 /* We had to clobber the base pointer register.
9290 Re-setup the base pointer (with a different base). */
9291 op[5] = gen_label_rtx ();
9292 output_asm_insn ("basr\t%4,0", op);
9293 targetm.asm_out.internal_label (file, "L",
9294 CODE_LABEL_NUMBER (op[5]));
9297 /* Jump to target. */
9298 op[8] = gen_label_rtx ();
9301 output_asm_insn ("l\t%4,%8-%5(%4)", op);
9303 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9304 /* We cannot call through .plt, since .plt requires %r12 loaded. */
9305 else if (flag_pic == 1)
9307 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9308 output_asm_insn ("l\t%4,%0(%4)", op);
9310 else if (flag_pic == 2)
9312 op[9] = gen_rtx_REG (Pmode, 0);
9313 output_asm_insn ("l\t%9,%8-4-%5(%4)", op);
9314 output_asm_insn ("a\t%4,%8-%5(%4)", op);
9315 output_asm_insn ("ar\t%4,%9", op);
9316 output_asm_insn ("l\t%4,0(%4)", op);
9319 output_asm_insn ("br\t%4", op);
9321 /* Output literal pool. */
9322 output_asm_insn (".align\t4", op);
9324 if (nonlocal && flag_pic == 2)
9325 output_asm_insn (".long\t%0", op);
9328 op[0] = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
9329 SYMBOL_REF_FLAGS (op[0]) = SYMBOL_FLAG_LOCAL;
9332 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (op[8]));
9334 output_asm_insn (".long\t%0", op);
9336 output_asm_insn (".long\t%0-%5", op);
9340 targetm.asm_out.internal_label (file, "L",
9341 CODE_LABEL_NUMBER (op[6]));
9342 output_asm_insn (".long\t%2", op);
9346 targetm.asm_out.internal_label (file, "L",
9347 CODE_LABEL_NUMBER (op[7]));
9348 output_asm_insn (".long\t%3", op);
9351 final_end_function ();
9355 s390_valid_pointer_mode (enum machine_mode mode)
9357 return (mode == SImode || (TARGET_64BIT && mode == DImode));
9360 /* Checks whether the given CALL_EXPR would use a caller
9361 saved register. This is used to decide whether sibling call
9362 optimization could be performed on the respective function call. */
9366 s390_call_saved_register_used (tree call_expr)
9368 CUMULATIVE_ARGS cum;
9370 enum machine_mode mode;
9375 INIT_CUMULATIVE_ARGS (cum, NULL, NULL, 0, 0);
9377 for (i = 0; i < call_expr_nargs (call_expr); i++)
9379 parameter = CALL_EXPR_ARG (call_expr, i);
9380 gcc_assert (parameter);
9382 /* For an undeclared variable passed as parameter we will get
9383 an ERROR_MARK node here. */
9384 if (TREE_CODE (parameter) == ERROR_MARK)
9387 type = TREE_TYPE (parameter);
9390 mode = TYPE_MODE (type);
9393 if (pass_by_reference (&cum, mode, type, true))
9396 type = build_pointer_type (type);
9399 parm_rtx = s390_function_arg (&cum, mode, type, 0);
9401 s390_function_arg_advance (&cum, mode, type, 0);
9403 if (parm_rtx && REG_P (parm_rtx))
9406 reg < HARD_REGNO_NREGS (REGNO (parm_rtx), GET_MODE (parm_rtx));
9408 if (! call_used_regs[reg + REGNO (parm_rtx)])
9415 /* Return true if the given call expression can be
9416 turned into a sibling call.
9417 DECL holds the declaration of the function to be called whereas
9418 EXP is the call expression itself. */
9421 s390_function_ok_for_sibcall (tree decl, tree exp)
9423 /* The TPF epilogue uses register 1. */
9424 if (TARGET_TPF_PROFILING)
9427 /* The 31 bit PLT code uses register 12 (GOT pointer - caller saved)
9428 which would have to be restored before the sibcall. */
9429 if (!TARGET_64BIT && flag_pic && decl && !targetm.binds_local_p (decl))
9432 /* Register 6 on s390 is available as an argument register but unfortunately
9433 "caller saved". This makes functions needing this register for arguments
9434 not suitable for sibcalls. */
9435 return !s390_call_saved_register_used (exp);
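/* A hypothetical illustration of the rule above, assuming the s390 ELF
   ABI convention that the first five integer arguments are passed in
   %r2-%r6; the callee names below are made up:  */
#if 0
extern int f4 (int, int, int, int);
extern int f5 (int, int, int, int, int);

int
t4 (int a, int b, int c, int d)
{
  return f4 (a, b, c, d);       /* args in %r2-%r5: sibcall possible.  */
}

int
t5 (int a, int b, int c, int d, int e)
{
  return f5 (a, b, c, d, e);    /* fifth arg lands in call-saved %r6:
                                   sibcall rejected.  */
}
#endif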
9438 /* Return the fixed registers used for condition codes. */
9441 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2)
9444 *p2 = INVALID_REGNUM;
9449 /* This function is used by the call expanders of the machine description.
9450 It emits the call insn itself together with the necessary operations
9451 to adjust the target address and returns the emitted insn.
9452 ADDR_LOCATION is the target address rtx
9453 TLS_CALL the location of the thread-local symbol
9454 RESULT_REG the register where the result of the call should be stored
9455 RETADDR_REG the register where the return address should be stored
9456 If this parameter is NULL_RTX the call is considered
9457 to be a sibling call. */
9460 s390_emit_call (rtx addr_location, rtx tls_call, rtx result_reg,
9463 bool plt_call = false;
9469 /* Direct function calls need special treatment. */
9470 if (GET_CODE (addr_location) == SYMBOL_REF)
9472 /* When calling a global routine in PIC mode, we must
9473 replace the symbol itself with the PLT stub. */
9474 if (flag_pic && !SYMBOL_REF_LOCAL_P (addr_location))
9476 addr_location = gen_rtx_UNSPEC (Pmode,
9477 gen_rtvec (1, addr_location),
9479 addr_location = gen_rtx_CONST (Pmode, addr_location);
9483 /* Unless we can use the bras(l) insn, force the
9484 routine address into a register. */
9485 if (!TARGET_SMALL_EXEC && !TARGET_CPU_ZARCH)
9488 addr_location = legitimize_pic_address (addr_location, 0);
9490 addr_location = force_reg (Pmode, addr_location);
9494 /* If it is already an indirect call or the code above moved the
9495 SYMBOL_REF to somewhere else, make sure the address can be found in register 1. */
9497 if (retaddr_reg == NULL_RTX
9498 && GET_CODE (addr_location) != SYMBOL_REF
9501 emit_move_insn (gen_rtx_REG (Pmode, SIBCALL_REGNUM), addr_location);
9502 addr_location = gen_rtx_REG (Pmode, SIBCALL_REGNUM);
9505 addr_location = gen_rtx_MEM (QImode, addr_location);
9506 call = gen_rtx_CALL (VOIDmode, addr_location, const0_rtx);
9508 if (result_reg != NULL_RTX)
9509 call = gen_rtx_SET (VOIDmode, result_reg, call);
9511 if (retaddr_reg != NULL_RTX)
9513 clobber = gen_rtx_CLOBBER (VOIDmode, retaddr_reg);
9515 if (tls_call != NULL_RTX)
9516 vec = gen_rtvec (3, call, clobber,
9517 gen_rtx_USE (VOIDmode, tls_call));
9519 vec = gen_rtvec (2, call, clobber);
9521 call = gen_rtx_PARALLEL (VOIDmode, vec);
9524 insn = emit_call_insn (call);
9526 /* 31-bit PLT stubs and tls calls use the GOT register implicitly. */
9527 if ((!TARGET_64BIT && plt_call) || tls_call != NULL_RTX)
9529 /* s390_function_ok_for_sibcall should
9530 have denied sibcalls in this case. */
9531 gcc_assert (retaddr_reg != NULL_RTX);
9533 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx);
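/* A minimal usage sketch (the wrapper name is hypothetical; the real
   callers are the call expanders in s390.md and functions such as
   s390_emit_tpf_eh_return below). A normal call passes the return
   address register; a sibling call would pass NULL_RTX instead:  */
#if 0
static rtx
emit_plain_call (rtx addr, rtx result_reg)
{
  return s390_emit_call (addr, NULL_RTX, result_reg,
                         gen_rtx_REG (Pmode, RETURN_REGNUM));
}
#endif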
9538 /* Implement CONDITIONAL_REGISTER_USAGE. */
9541 s390_conditional_register_usage (void)
9547 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9548 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
9550 if (TARGET_CPU_ZARCH)
9552 fixed_regs[BASE_REGNUM] = 0;
9553 call_used_regs[BASE_REGNUM] = 0;
9554 fixed_regs[RETURN_REGNUM] = 0;
9555 call_used_regs[RETURN_REGNUM] = 0;
9559 for (i = 24; i < 32; i++)
9560 call_used_regs[i] = call_really_used_regs[i] = 0;
9564 for (i = 18; i < 20; i++)
9565 call_used_regs[i] = call_really_used_regs[i] = 0;
9568 if (TARGET_SOFT_FLOAT)
9570 for (i = 16; i < 32; i++)
9571 call_used_regs[i] = fixed_regs[i] = 1;
9575 /* Corresponding function to the eh_return expander. */
9577 static GTY(()) rtx s390_tpf_eh_return_symbol;
9579 s390_emit_tpf_eh_return (rtx target)
9583 if (!s390_tpf_eh_return_symbol)
9584 s390_tpf_eh_return_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tpf_eh_return");
9586 reg = gen_rtx_REG (Pmode, 2);
9588 emit_move_insn (reg, target);
9589 insn = s390_emit_call (s390_tpf_eh_return_symbol, NULL_RTX, reg,
9590 gen_rtx_REG (Pmode, RETURN_REGNUM));
9591 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), reg);
9593 emit_move_insn (EH_RETURN_HANDLER_RTX, reg);
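/* The sequence this emits boils down to the following (an illustrative
   sketch with assumed mnemonics, not verbatim output):

       lgr   %r2,<target>           # handler address into %r2
       brasl %r14,__tpf_eh_return   # let the TPF runtime adjust it
       lgr   <EH_RETURN_HANDLER_RTX>,%r2
*/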
9596 /* Rework the prologue/epilogue to avoid saving/restoring
9597 registers unnecessarily. */
9600 s390_optimize_prologue (void)
9602 rtx insn, new_insn, next_insn;
9604 /* Do a final recompute of the frame-related data. */
9606 s390_update_frame_layout ();
9608 /* If all special registers are in fact used, there's nothing we
9609 can do, so no point in walking the insn list. */
9611 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM
9612 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM
9613 && (TARGET_CPU_ZARCH
9614 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM
9615 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM)))
9618 /* Search for prologue/epilogue insns and replace them. */
9620 for (insn = get_insns (); insn; insn = next_insn)
9622 int first, last, off;
9623 rtx set, base, offset;
9625 next_insn = NEXT_INSN (insn);
9627 if (GET_CODE (insn) != INSN)
9630 if (GET_CODE (PATTERN (insn)) == PARALLEL
9631 && store_multiple_operation (PATTERN (insn), VOIDmode))
9633 set = XVECEXP (PATTERN (insn), 0, 0);
9634 first = REGNO (SET_SRC (set));
9635 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9636 offset = const0_rtx;
9637 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9638 off = INTVAL (offset);
9640 if (GET_CODE (base) != REG || off < 0)
9642 if (cfun_frame_layout.first_save_gpr != -1
9643 && (cfun_frame_layout.first_save_gpr < first
9644 || cfun_frame_layout.last_save_gpr > last))
9646 if (REGNO (base) != STACK_POINTER_REGNUM
9647 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9649 if (first > BASE_REGNUM || last < BASE_REGNUM)
9652 if (cfun_frame_layout.first_save_gpr != -1)
9654 new_insn = save_gprs (base,
9655 off + (cfun_frame_layout.first_save_gpr
9656 - first) * UNITS_PER_WORD,
9657 cfun_frame_layout.first_save_gpr,
9658 cfun_frame_layout.last_save_gpr);
9659 new_insn = emit_insn_before (new_insn, insn);
9660 INSN_ADDRESSES_NEW (new_insn, -1);
9667 if (cfun_frame_layout.first_save_gpr == -1
9668 && GET_CODE (PATTERN (insn)) == SET
9669 && GET_CODE (SET_SRC (PATTERN (insn))) == REG
9670 && (REGNO (SET_SRC (PATTERN (insn))) == BASE_REGNUM
9671 || (!TARGET_CPU_ZARCH
9672 && REGNO (SET_SRC (PATTERN (insn))) == RETURN_REGNUM))
9673 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
9675 set = PATTERN (insn);
9676 first = REGNO (SET_SRC (set));
9677 offset = const0_rtx;
9678 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
9679 off = INTVAL (offset);
9681 if (GET_CODE (base) != REG || off < 0)
9683 if (REGNO (base) != STACK_POINTER_REGNUM
9684 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9691 if (GET_CODE (PATTERN (insn)) == PARALLEL
9692 && load_multiple_operation (PATTERN (insn), VOIDmode))
9694 set = XVECEXP (PATTERN (insn), 0, 0);
9695 first = REGNO (SET_DEST (set));
9696 last = first + XVECLEN (PATTERN (insn), 0) - 1;
9697 offset = const0_rtx;
9698 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9699 off = INTVAL (offset);
9701 if (GET_CODE (base) != REG || off < 0)
9703 if (cfun_frame_layout.first_restore_gpr != -1
9704 && (cfun_frame_layout.first_restore_gpr < first
9705 || cfun_frame_layout.last_restore_gpr > last))
9707 if (REGNO (base) != STACK_POINTER_REGNUM
9708 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
9710 if (first > BASE_REGNUM || last < BASE_REGNUM)
9713 if (cfun_frame_layout.first_restore_gpr != -1)
9715 new_insn = restore_gprs (base,
9716 off + (cfun_frame_layout.first_restore_gpr
9717 - first) * UNITS_PER_WORD,
9718 cfun_frame_layout.first_restore_gpr,
9719 cfun_frame_layout.last_restore_gpr);
9720 new_insn = emit_insn_before (new_insn, insn);
9721 INSN_ADDRESSES_NEW (new_insn, -1);
9728 if (cfun_frame_layout.first_restore_gpr == -1
9729 && GET_CODE (PATTERN (insn)) == SET
9730 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
9731 && (REGNO (SET_DEST (PATTERN (insn))) == BASE_REGNUM
9732 || (!TARGET_CPU_ZARCH
9733 && REGNO (SET_DEST (PATTERN (insn))) == RETURN_REGNUM))
9734 && GET_CODE (SET_SRC (PATTERN (insn))) == MEM)
9736 set = PATTERN (insn);
9737 first = REGNO (SET_DEST (set));
9738 offset = const0_rtx;
9739 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
9740 off = INTVAL (offset);
9742 if (GET_CODE (base) != REG || off < 0)
9744 if (REGNO (base) != STACK_POINTER_REGNUM
9745 && REGNO (base) != HARD_FRAME_POINTER_REGNUM)
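/* Illustration of the effect (31-bit mnemonics, offsets assumed): a
   prologue store-multiple emitted while all call-saved GPRs were still
   presumed live,

       stm   %r6,%r15,24(%r15)

   is replaced by one covering only the registers the final frame layout
   actually saves, e.g.

       stm   %r13,%r15,52(%r15)

   and likewise for the epilogue load-multiple handled just above.  */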
9754 /* On z10 the dynamic branch prediction must see the backward jump in
9755 a window of 384 bytes. If not, it falls back to the static
9756 prediction. This function rearranges the loop backward branch in a
9757 way which makes the static prediction always correct. The function
9758 returns true if it added an instruction. */
9760 s390_z10_fix_long_loop_prediction (rtx insn)
9762 rtx set = single_set (insn);
9763 rtx code_label, label_ref, new_label;
9769 /* This will exclude branch on count and branch on index patterns
9770 since these are correctly statically predicted. */
9772 || SET_DEST (set) != pc_rtx
9773 || GET_CODE (SET_SRC(set)) != IF_THEN_ELSE)
9776 label_ref = (GET_CODE (XEXP (SET_SRC (set), 1)) == LABEL_REF ?
9777 XEXP (SET_SRC (set), 1) : XEXP (SET_SRC (set), 2));
9779 gcc_assert (GET_CODE (label_ref) == LABEL_REF);
9781 code_label = XEXP (label_ref, 0);
9783 if (INSN_ADDRESSES (INSN_UID (code_label)) == -1
9784 || INSN_ADDRESSES (INSN_UID (insn)) == -1
9785 || (INSN_ADDRESSES (INSN_UID (insn))
9786 - INSN_ADDRESSES (INSN_UID (code_label)) < Z10_PREDICT_DISTANCE))
9789 for (distance = 0, cur_insn = PREV_INSN (insn);
9790 distance < Z10_PREDICT_DISTANCE - 6;
9791 distance += get_attr_length (cur_insn), cur_insn = PREV_INSN (cur_insn))
9792 if (!cur_insn || JUMP_P (cur_insn) || LABEL_P (cur_insn))
9795 new_label = gen_label_rtx ();
9796 uncond_jump = emit_jump_insn_after (
9797 gen_rtx_SET (VOIDmode, pc_rtx,
9798 gen_rtx_LABEL_REF (VOIDmode, code_label)),
9800 emit_label_after (new_label, uncond_jump);
9802 tmp = XEXP (SET_SRC (set), 1);
9803 XEXP (SET_SRC (set), 1) = XEXP (SET_SRC (set), 2);
9804 XEXP (SET_SRC (set), 2) = tmp;
9805 INSN_CODE (insn) = -1;
9807 XEXP (label_ref, 0) = new_label;
9808 JUMP_LABEL (insn) = new_label;
9809 JUMP_LABEL (uncond_jump) = code_label;
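/* The rewrite above, sketched at the assembly level (label names are
   illustrative):

       loop:                            loop:
          ...                              ...
          jne   loop         becomes       je    .Lskip   # inverted cond
                                           j     loop     # uncond backward
                                        .Lskip:

   The unconditional backward jump needs no direction prediction, and
   the inverted conditional branch now jumps forward, which matches the
   static predictor.  */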
9814 /* Returns 1 if INSN reads the value of REG for purposes not related
9815 to addressing of memory, and 0 otherwise. */
9817 s390_non_addr_reg_read_p (rtx reg, rtx insn)
9819 return reg_referenced_p (reg, PATTERN (insn))
9820 && !reg_used_in_mem_p (REGNO (reg), PATTERN (insn));
9823 /* Starting from INSN find_cond_jump looks downwards in the insn
9824 stream for a single jump insn which is the last user of the
9825 condition code set in INSN. */
9827 find_cond_jump (rtx insn)
9829 for (; insn; insn = NEXT_INSN (insn))
9838 if (reg_mentioned_p (gen_rtx_REG (CCmode, CC_REGNUM), insn))
9843 /* This will be triggered by a return. */
9844 if (GET_CODE (PATTERN (insn)) != SET)
9847 gcc_assert (SET_DEST (PATTERN (insn)) == pc_rtx);
9848 ite = SET_SRC (PATTERN (insn));
9850 if (GET_CODE (ite) != IF_THEN_ELSE)
9853 cc = XEXP (XEXP (ite, 0), 0);
9854 if (!REG_P (cc) || !CC_REGNO_P (REGNO (cc)))
9857 if (find_reg_note (insn, REG_DEAD, cc))
9865 /* Swap the condition in COND and the operands in OP0 and OP1 so that
9866 the semantics do not change. If NULL_RTX is passed as COND the
9867 function tries to find the conditional jump starting with INSN. */
9869 s390_swap_cmp (rtx cond, rtx *op0, rtx *op1, rtx insn)
9873 if (cond == NULL_RTX)
9875 rtx jump = find_cond_jump (NEXT_INSN (insn));
9876 jump = jump ? single_set (jump) : NULL_RTX;
9878 if (jump == NULL_RTX)
9881 cond = XEXP (XEXP (jump, 1), 0);
9886 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
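/* In isolation (a hypothetical example): for COND == (gt (reg A) (reg B)),
   the call below exchanges the two operands and rewrites the code,
   leaving the equivalent (lt (reg B) (reg A)) in place:  */
#if 0
s390_swap_cmp (cond, &XEXP (cond, 0), &XEXP (cond, 1), NULL_RTX);
#endif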
9889 /* On z10, instructions of the compare-and-branch family have the
9890 property to access the register occurring as second operand with
9891 its bits complemented. If such a compare is grouped with a second
9892 instruction that accesses the same register non-complemented, and
9893 if that register's value is delivered via a bypass, then the
9894 pipeline recycles, thereby causing significant performance decline.
9895 This function locates such situations and exchanges the two
9896 operands of the compare. The function returns true whenever it added an insn. */
9899 s390_z10_optimize_cmp (rtx insn)
9901 rtx prev_insn, next_insn;
9902 bool insn_added_p = false;
9903 rtx cond, *op0, *op1;
9905 if (GET_CODE (PATTERN (insn)) == PARALLEL)
9907 /* Handle compare and branch and branch on count patterns. */
9909 rtx pattern = single_set (insn);
9912 || SET_DEST (pattern) != pc_rtx
9913 || GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
9916 cond = XEXP (SET_SRC (pattern), 0);
9917 op0 = &XEXP (cond, 0);
9918 op1 = &XEXP (cond, 1);
9920 else if (GET_CODE (PATTERN (insn)) == SET)
9924 /* Handle normal compare instructions. */
9925 src = SET_SRC (PATTERN (insn));
9926 dest = SET_DEST (PATTERN (insn));
9929 || !CC_REGNO_P (REGNO (dest))
9930 || GET_CODE (src) != COMPARE)
9933 /* s390_swap_cmp will try to find the conditional
9934 jump when passing NULL_RTX as condition. */
9936 op0 = &XEXP (src, 0);
9937 op1 = &XEXP (src, 1);
9942 if (!REG_P (*op0) || !REG_P (*op1))
9945 if (GET_MODE_CLASS (GET_MODE (*op0)) != MODE_INT)
9948 /* Swap the COMPARE arguments and its mask if there is a
9949 conflicting access in the previous insn. */
9950 prev_insn = prev_active_insn (insn);
9951 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
9952 && reg_referenced_p (*op1, PATTERN (prev_insn)))
9953 s390_swap_cmp (cond, op0, op1, insn);
9955 /* Check if there is a conflict with the next insn. If there
9956 was no conflict with the previous insn, then swap the
9957 COMPARE arguments and its mask. If we already swapped
9958 the operands, or if swapping them would cause a conflict
9959 with the previous insn, issue a NOP after the COMPARE in
9960 order to separate the two instructions. */
9961 next_insn = next_active_insn (insn);
9962 if (next_insn != NULL_RTX && INSN_P (next_insn)
9963 && s390_non_addr_reg_read_p (*op1, next_insn))
9965 if (prev_insn != NULL_RTX && INSN_P (prev_insn)
9966 && s390_non_addr_reg_read_p (*op0, prev_insn))
9968 if (REGNO (*op1) == 0)
9969 emit_insn_after (gen_nop1 (), insn);
9971 emit_insn_after (gen_nop (), insn);
9972 insn_added_p = true;
9975 s390_swap_cmp (cond, op0, op1, insn);
9977 return insn_added_p;
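/* At the instruction level the transformation looks like this
   (illustrative; mask 2 branches on "high", mask 4 on "low"):

       crj   %r2,%r3,2,label     # branch if %r2 > %r3; %r3 read complemented

   becomes, when a neighbouring insn also reads %r3,

       crj   %r3,%r2,4,label     # branch if %r3 < %r2 -- same predicate

   or, if swapping would merely move the conflict to the other
   neighbour, a "nop" is inserted after the compare instead.  */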
9980 /* Perform machine-dependent processing. */
9985 bool pool_overflow = false;
9987 /* Make sure all splits have been performed; splits after
9988 machine_dependent_reorg might confuse insn length counts. */
9989 split_all_insns_noflow ();
9991 /* Install the main literal pool and the associated base
9992 register load insns.
9994 In addition, there are two problematic situations we need to handle:
9997 - the literal pool might be > 4096 bytes in size, so that
9998 some of its elements cannot be directly accessed
10000 - a branch target might be > 64K away from the branch, so that
10001 it is not possible to use a PC-relative instruction.
10003 To fix those, we split the single literal pool into multiple
10004 pool chunks, reloading the pool base register at various
10005 points throughout the function to ensure it always points to
10006 the pool chunk the following code expects, and / or replace
10007 PC-relative branches by absolute branches.
10009 However, the two problems are interdependent: splitting the
10010 literal pool can move a branch further away from its target,
10011 causing the 64K limit to overflow, and on the other hand,
10012 replacing a PC-relative branch by an absolute branch means
10013 we need to put the branch target address into the literal
10014 pool, possibly causing it to overflow.
10016 So, we loop trying to fix up both problems until we manage
10017 to satisfy both conditions at the same time. Note that the
10018 loop is guaranteed to terminate as every pass of the loop
10019 strictly decreases the total number of PC-relative branches
10020 in the function. (This is not completely true as there
10021 might be branch-over-pool insns introduced by chunkify_start.
10022 Those never need to be split, however.) */
10026 struct constant_pool *pool = NULL;
10028 /* Collect the literal pool. */
10029 if (!pool_overflow)
10031 pool = s390_mainpool_start ();
10033 pool_overflow = true;
10036 /* If literal pool overflowed, start to chunkify it. */
10038 pool = s390_chunkify_start ();
10040 /* Split out-of-range branches. If this has created new
10041 literal pool entries, cancel current chunk list and
10042 recompute it. zSeries machines have large branch
10043 instructions, so we never need to split a branch. */
10044 if (!TARGET_CPU_ZARCH && s390_split_branches ())
10047 s390_chunkify_cancel (pool);
10049 s390_mainpool_cancel (pool);
10054 /* If we made it up to here, both conditions are satisfied.
10055 Finish up literal pool related changes. */
10057 s390_chunkify_finish (pool);
10059 s390_mainpool_finish (pool);
10061 /* We're done splitting branches. */
10062 cfun->machine->split_branches_pending_p = false;
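/* The loop above with its elided control flow spelled out -- a condensed
   sketch, not the verbatim code:  */
#if 0
for (;;)
  {
    struct constant_pool *pool = NULL;

    if (!pool_overflow)
      pool = s390_mainpool_start ();
    if (!pool)
      pool_overflow = true;           /* single pool impossible: chunkify.  */
    if (pool_overflow)
      pool = s390_chunkify_start ();

    if (!TARGET_CPU_ZARCH && s390_split_branches ())
      {
        /* Splitting created new literals: drop this layout and retry.  */
        if (pool_overflow)
          s390_chunkify_cancel (pool);
        else
          s390_mainpool_cancel (pool);
        continue;
      }

    /* Both constraints hold: commit the layout.  */
    if (pool_overflow)
      s390_chunkify_finish (pool);
    else
      s390_mainpool_finish (pool);
    break;
  }
#endif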
10066 /* Generate out-of-pool execute target insns. */
10067 if (TARGET_CPU_ZARCH)
10069 rtx insn, label, target;
10071 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10073 label = s390_execute_label (insn);
10077 gcc_assert (label != const0_rtx);
10079 target = emit_label (XEXP (label, 0));
10080 INSN_ADDRESSES_NEW (target, -1);
10082 target = emit_insn (s390_execute_target (insn));
10083 INSN_ADDRESSES_NEW (target, -1);
10087 /* Try to optimize prologue and epilogue further. */
10088 s390_optimize_prologue ();
10090 /* Walk over the insns and do some z10 specific changes. */
10091 if (s390_tune == PROCESSOR_2097_Z10)
10094 bool insn_added_p = false;
10096 /* The insn lengths and addresses have to be up to date for the
10097 following manipulations. */
10098 shorten_branches (get_insns ());
10100 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10102 if (!INSN_P (insn) || INSN_CODE (insn) <= 0)
10106 insn_added_p |= s390_z10_fix_long_loop_prediction (insn);
10108 if (GET_CODE (PATTERN (insn)) == PARALLEL
10109 || GET_CODE (PATTERN (insn)) == SET)
10110 insn_added_p |= s390_z10_optimize_cmp (insn);
10113 /* Adjust branches if we added new instructions. */
10115 shorten_branches (get_insns ());
10119 /* Return true if INSN is an fp load insn writing register REGNO. */
10121 s390_fpload_toreg (rtx insn, unsigned int regno)
10124 enum attr_type flag = s390_safe_attr_type (insn);
10126 if (flag != TYPE_FLOADSF && flag != TYPE_FLOADDF)
10129 set = single_set (insn);
10131 if (set == NULL_RTX)
10134 if (!REG_P (SET_DEST (set)) || !MEM_P (SET_SRC (set)))
10137 if (REGNO (SET_DEST (set)) != regno)
10143 /* This value describes the distance to be avoided between an
10144 arithmetic fp instruction and an fp load writing the same register.
10145 Z10_EARLYLOAD_DISTANCE - 1 as well as Z10_EARLYLOAD_DISTANCE + 1 are
10146 fine, but the exact value has to be avoided. Otherwise the FP
10147 pipeline will throw an exception causing a major penalty. */
10148 #define Z10_EARLYLOAD_DISTANCE 7
10150 /* Rearrange the ready list in order to avoid the situation described
10151 for Z10_EARLYLOAD_DISTANCE. A problematic load instruction is
10152 moved to the very end of the ready list. */
10154 s390_z10_prevent_earlyload_conflicts (rtx *ready, int *nready_p)
10156 unsigned int regno;
10157 int nready = *nready_p;
10162 enum attr_type flag;
10165 /* Skip DISTANCE - 1 active insns. */
10166 for (insn = last_scheduled_insn, distance = Z10_EARLYLOAD_DISTANCE - 1;
10167 distance > 0 && insn != NULL_RTX;
10168 distance--, insn = prev_active_insn (insn))
10169 if (CALL_P (insn) || JUMP_P (insn))
10172 if (insn == NULL_RTX)
10175 set = single_set (insn);
10177 if (set == NULL_RTX || !REG_P (SET_DEST (set))
10178 || GET_MODE_CLASS (GET_MODE (SET_DEST (set))) != MODE_FLOAT)
10181 flag = s390_safe_attr_type (insn);
10183 if (flag == TYPE_FLOADSF || flag == TYPE_FLOADDF)
10186 regno = REGNO (SET_DEST (set));
10189 while (!s390_fpload_toreg (ready[i], regno) && i > 0)
10196 memmove (&ready[1], &ready[0], sizeof (rtx) * i);
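/* The rotation above in isolation, assuming (as in the elided lines)
   that READY[I] was saved in a temporary beforehand. Slot 0 of the
   ready list is issued last, so the problematic load gets delayed:

       tmp = ready[i];
       memmove (&ready[1], &ready[0], sizeof (rtx) * i);
       ready[0] = tmp;
*/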
10200 /* This function is called via hook TARGET_SCHED_REORDER before
10201 issuing one insn from list READY which contains *NREADYP entries.
10202 For target z10 it reorders load instructions to avoid early load
10203 conflicts in the floating point pipeline. */
10205 s390_sched_reorder (FILE *file ATTRIBUTE_UNUSED, int verbose ATTRIBUTE_UNUSED,
10206 rtx *ready, int *nreadyp, int clock ATTRIBUTE_UNUSED)
10208 if (s390_tune == PROCESSOR_2097_Z10)
10209 if (reload_completed && *nreadyp > 1)
10210 s390_z10_prevent_earlyload_conflicts (ready, nreadyp);
10212 return s390_issue_rate ();
10215 /* This function is called via hook TARGET_SCHED_VARIABLE_ISSUE after
10216 the scheduler has issued INSN. It stores the last issued insn into
10217 last_scheduled_insn in order to make it available for
10218 s390_sched_reorder. */
10220 s390_sched_variable_issue (FILE *file ATTRIBUTE_UNUSED,
10221 int verbose ATTRIBUTE_UNUSED,
10222 rtx insn, int more)
10224 last_scheduled_insn = insn;
10226 if (GET_CODE (PATTERN (insn)) != USE
10227 && GET_CODE (PATTERN (insn)) != CLOBBER)
10234 s390_sched_init (FILE *file ATTRIBUTE_UNUSED,
10235 int verbose ATTRIBUTE_UNUSED,
10236 int max_ready ATTRIBUTE_UNUSED)
10238 last_scheduled_insn = NULL_RTX;
10241 /* Initialize GCC target structure. */
10243 #undef TARGET_ASM_ALIGNED_HI_OP
10244 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10245 #undef TARGET_ASM_ALIGNED_DI_OP
10246 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
10247 #undef TARGET_ASM_INTEGER
10248 #define TARGET_ASM_INTEGER s390_assemble_integer
10250 #undef TARGET_ASM_OPEN_PAREN
10251 #define TARGET_ASM_OPEN_PAREN ""
10253 #undef TARGET_ASM_CLOSE_PAREN
10254 #define TARGET_ASM_CLOSE_PAREN ""
10256 #undef TARGET_DEFAULT_TARGET_FLAGS
10257 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT | MASK_FUSED_MADD)
10258 #undef TARGET_HANDLE_OPTION
10259 #define TARGET_HANDLE_OPTION s390_handle_option
10261 #undef TARGET_ENCODE_SECTION_INFO
10262 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
10265 #undef TARGET_HAVE_TLS
10266 #define TARGET_HAVE_TLS true
10268 #undef TARGET_CANNOT_FORCE_CONST_MEM
10269 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
10271 #undef TARGET_DELEGITIMIZE_ADDRESS
10272 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
10274 #undef TARGET_LEGITIMIZE_ADDRESS
10275 #define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address
10277 #undef TARGET_RETURN_IN_MEMORY
10278 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory
10280 #undef TARGET_INIT_BUILTINS
10281 #define TARGET_INIT_BUILTINS s390_init_builtins
10282 #undef TARGET_EXPAND_BUILTIN
10283 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
10285 #undef TARGET_ASM_OUTPUT_MI_THUNK
10286 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
10287 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
10288 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
10290 #undef TARGET_SCHED_ADJUST_PRIORITY
10291 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
10292 #undef TARGET_SCHED_ISSUE_RATE
10293 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
10294 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
10295 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD s390_first_cycle_multipass_dfa_lookahead
10297 #undef TARGET_SCHED_VARIABLE_ISSUE
10298 #define TARGET_SCHED_VARIABLE_ISSUE s390_sched_variable_issue
10299 #undef TARGET_SCHED_REORDER
10300 #define TARGET_SCHED_REORDER s390_sched_reorder
10301 #undef TARGET_SCHED_INIT
10302 #define TARGET_SCHED_INIT s390_sched_init
10304 #undef TARGET_CANNOT_COPY_INSN_P
10305 #define TARGET_CANNOT_COPY_INSN_P s390_cannot_copy_insn_p
10306 #undef TARGET_RTX_COSTS
10307 #define TARGET_RTX_COSTS s390_rtx_costs
10308 #undef TARGET_ADDRESS_COST
10309 #define TARGET_ADDRESS_COST s390_address_cost
10311 #undef TARGET_MACHINE_DEPENDENT_REORG
10312 #define TARGET_MACHINE_DEPENDENT_REORG s390_reorg
10314 #undef TARGET_VALID_POINTER_MODE
10315 #define TARGET_VALID_POINTER_MODE s390_valid_pointer_mode
10317 #undef TARGET_BUILD_BUILTIN_VA_LIST
10318 #define TARGET_BUILD_BUILTIN_VA_LIST s390_build_builtin_va_list
10319 #undef TARGET_EXPAND_BUILTIN_VA_START
10320 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start
10321 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
10322 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg
10324 #undef TARGET_PROMOTE_FUNCTION_MODE
10325 #define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode
10326 #undef TARGET_PASS_BY_REFERENCE
10327 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference
10329 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10330 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall
10332 #undef TARGET_FIXED_CONDITION_CODE_REGS
10333 #define TARGET_FIXED_CONDITION_CODE_REGS s390_fixed_condition_code_regs
10335 #undef TARGET_CC_MODES_COMPATIBLE
10336 #define TARGET_CC_MODES_COMPATIBLE s390_cc_modes_compatible
10338 #undef TARGET_INVALID_WITHIN_DOLOOP
10339 #define TARGET_INVALID_WITHIN_DOLOOP hook_constcharptr_const_rtx_null
10342 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
10343 #define TARGET_ASM_OUTPUT_DWARF_DTPREL s390_output_dwarf_dtprel
10346 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
10347 #undef TARGET_MANGLE_TYPE
10348 #define TARGET_MANGLE_TYPE s390_mangle_type
10351 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10352 #define TARGET_SCALAR_MODE_SUPPORTED_P s390_scalar_mode_supported_p
10354 #undef TARGET_SECONDARY_RELOAD
10355 #define TARGET_SECONDARY_RELOAD s390_secondary_reload
10357 #undef TARGET_LIBGCC_CMP_RETURN_MODE
10358 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode
10360 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE
10361 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode
10363 #undef TARGET_LEGITIMATE_ADDRESS_P
10364 #define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p
10366 #undef TARGET_CAN_ELIMINATE
10367 #define TARGET_CAN_ELIMINATE s390_can_eliminate
10369 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
10370 #define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template
10371 #undef TARGET_TRAMPOLINE_INIT
10372 #define TARGET_TRAMPOLINE_INIT s390_trampoline_init
10374 struct gcc_target targetm = TARGET_INITIALIZER;
10376 #include "gt-s390.h"