1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
42 #include "basic-block.h"
43 #include "integrate.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* Forward declarations for the target hook implementations that are
   installed into the targetm vector below via TARGET_INITIALIZER.  */
51 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
52 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
53 static int s390_adjust_priority PARAMS ((rtx, int));
54 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
55 unsigned HOST_WIDE_INT));
56 static void s390_encode_section_info PARAMS ((tree, int));
57 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT, tree));
/* Override the generic target macro defaults with the S/390-specific
   assembler directives and hook functions, then instantiate the target
   vector.  Each #undef removes the default before redefinition.  */
59 #undef TARGET_ASM_ALIGNED_HI_OP
60 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
61 #undef TARGET_ASM_ALIGNED_DI_OP
62 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
63 #undef TARGET_ASM_INTEGER
64 #define TARGET_ASM_INTEGER s390_assemble_integer
/* The S/390 assembler syntax uses no grouping parentheses in expressions.  */
66 #undef TARGET_ASM_OPEN_PAREN
67 #define TARGET_ASM_OPEN_PAREN ""
69 #undef TARGET_ASM_CLOSE_PAREN
70 #define TARGET_ASM_CLOSE_PAREN ""
72 #undef TARGET_ASM_SELECT_RTX_SECTION
73 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
75 #undef TARGET_SCHED_ADJUST_COST
76 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
78 #undef TARGET_SCHED_ADJUST_PRIORITY
79 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
81 #undef TARGET_ENCODE_SECTION_INFO
82 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
84 #undef TARGET_ASM_OUTPUT_MI_THUNK
85 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
/* The one-and-only target hook vector for this backend.  */
87 struct gcc_target targetm = TARGET_INITIALIZER;
89 extern int reload_completed;
91 /* The alias set for prologue/epilogue register save/restore.  */
92 static int s390_sr_alias_set = 0;
94 /* Save information from a "cmpxx" operation until the branch or scc is
96 rtx s390_compare_op0, s390_compare_op1;
98 /* Structure used to hold the components of a S/390 memory
99 address. A legitimate address on S/390 is of the general
101 base + index + displacement
102 where any of the components is optional.
104 base and index are registers of the class ADDR_REGS,
105 displacement is an unsigned 12-bit immediate constant.  */
115 /* Structure containing information for prologue and epilogue.  */
/* NOTE(review): these three members belong to the frame-layout structure
   declared just above (the struct header and remaining members are not
   visible in this excerpt) -- confirm against the full file.  */
122 int first_restore_gpr;
124 int arg_frame_offset;
126 HOST_WIDE_INT frame_size;
/* Prototypes for the file-local helper functions defined below.  */
129 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
130 static int s390_branch_condition_mask PARAMS ((rtx));
131 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
132 static int check_mode PARAMS ((rtx, enum machine_mode *));
133 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
134 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
135 static int reg_used_in_mem_p PARAMS ((int, rtx));
136 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
137 static int s390_split_branches PARAMS ((rtx, bool *));
138 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
139 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
140 static int find_base_register_in_addr PARAMS ((struct s390_address *));
141 static bool find_base_register_ref PARAMS ((rtx));
142 static void replace_base_register_ref PARAMS ((rtx *, rtx));
143 static void s390_optimize_prolog PARAMS ((int));
144 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
145 static int find_unused_clobbered_reg PARAMS ((void));
146 static void s390_frame_info PARAMS ((struct s390_frame *));
147 static rtx save_fpr PARAMS ((rtx, int, int));
148 static rtx restore_fpr PARAMS ((rtx, int, int));
149 static rtx save_gprs PARAMS ((rtx, int, int, int));
150 static rtx restore_gprs PARAMS ((rtx, int, int, int));
151 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
154 /* Return true if SET either doesn't set the CC register, or else
155 the source and destination have matching CC modes and that
156 CC mode is at least as constrained as REQ_MODE.  */
159 s390_match_ccmode_set (set, req_mode)
161 enum machine_mode req_mode;
163 enum machine_mode set_mode;
/* Only plain SETs of the condition-code register are of interest.  */
165 if (GET_CODE (set) != SET)
168 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
171 set_mode = GET_MODE (SET_DEST (set));
/* Dispatch on the CC mode actually produced; the mode-specific
   comparisons below decide whether REQ_MODE is compatible.  */
184 if (req_mode != set_mode)
189 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
190 && req_mode != CCSRmode && req_mode != CCURmode)
196 if (req_mode != CCAmode)
/* The SET is acceptable only if source and destination agree on mode.  */
204 return (GET_MODE (SET_SRC (set)) == set_mode);
207 /* Return true if every SET in INSN that sets the CC register
208 has source and destination with matching CC modes and that
209 CC mode is at least as constrained as REQ_MODE.
210 If REQ_MODE is VOIDmode, always return false.  */
213 s390_match_ccmode (insn, req_mode)
215 enum machine_mode req_mode;
219 /* s390_tm_ccmode returns VOIDmode to indicate failure.  */
220 if (req_mode == VOIDmode)
/* Simple case: a single SET pattern.  */
223 if (GET_CODE (PATTERN (insn)) == SET)
224 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL: every contained SET must individually match.  */
226 if (GET_CODE (PATTERN (insn)) == PARALLEL)
227 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
229 rtx set = XVECEXP (PATTERN (insn), 0, i);
230 if (GET_CODE (set) == SET)
231 if (!s390_match_ccmode_set (set, req_mode))
238 /* If a test-under-mask instruction can be used to implement
239 (compare (and ... OP1) OP2), return the CC mode required
240 to do that. Otherwise, return VOIDmode.
241 MIXED is true if the instruction can distinguish between
242 CC1 and CC2 for mixed selected bits (TMxx), it is false
243 if the instruction cannot (TM).  */
246 s390_tm_ccmode (op1, op2, mixed)
253 /* ??? Fixme: should work on CONST_DOUBLE as well.  */
254 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
257 /* Selected bits all zero: CC0.  */
258 if (INTVAL (op2) == 0)
261 /* Selected bits all one: CC3.  */
262 if (INTVAL (op2) == INTVAL (op1))
265 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2.  */
/* exact_log2 returns -1 unless its argument is a power of two, i.e.
   exactly one bit set -- so both checks below require single bits.  */
268 bit1 = exact_log2 (INTVAL (op2));
269 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
270 if (bit0 != -1 && bit1 != -1)
271 return bit0 > bit1 ? CCT1mode : CCT2mode;
277 /* Given a comparison code OP (EQ, NE, etc.) and the operands
278 OP0 and OP1 of a COMPARE, return the mode to be used for the
282 s390_select_ccmode (code, op0, op1)
/* An add of a 'K'-constraint immediate can set the CC directly.  */
291 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
292 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
294 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
295 || GET_CODE (op1) == NEG)
298 if (GET_CODE (op0) == AND)
300 /* Check whether we can potentially do it via TM.  */
301 enum machine_mode ccmode;
302 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
303 if (ccmode != VOIDmode)
305 /* Relax CCTmode to CCZmode to allow fall-back to AND
306 if that turns out to be beneficial.  */
307 return ccmode == CCTmode ? CCZmode : ccmode;
/* -1 / 65535 (HImode) and -1 / 255 (QImode) are the all-ones values
   of the narrow mode under sign- resp. zero-extension.  */
311 if (register_operand (op0, HImode)
312 && GET_CODE (op1) == CONST_INT
313 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
315 if (register_operand (op0, QImode)
316 && GET_CODE (op1) == CONST_INT
317 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
326 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
327 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
329 if (INTVAL (XEXP((op0), 1)) < 0)
/* Sign/zero extensions compared against a non-constant use the
   register-vs-register CC modes.  */
342 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
343 && GET_CODE (op1) != CONST_INT)
349 if (GET_CODE (op0) == PLUS)
352 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
353 && GET_CODE (op1) != CONST_INT)
359 if (GET_CODE (op0) == MINUS)
362 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
363 && GET_CODE (op1) != CONST_INT)
372 /* Return branch condition mask to implement a branch
373 specified by CODE.  */
376 s390_branch_condition_mask (code)
/* The four S/390 condition-code values, encoded as a 4-bit mask in
   the order used by the BRC/BC instructions (CC0 is the MSB).  */
379 const int CC0 = 1 << 3;
380 const int CC1 = 1 << 2;
381 const int CC2 = 1 << 1;
382 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero.  */
384 if (GET_CODE (XEXP (code, 0)) != REG
385 || REGNO (XEXP (code, 0)) != CC_REGNUM
386 || XEXP (code, 1) != const0_rtx)
/* The meaning of each CC value depends on the CC mode that produced
   it, so dispatch first on the mode, then on the comparison code.  */
389 switch (GET_MODE (XEXP (code, 0)))
392 switch (GET_CODE (code))
395 case NE: return CC1 | CC2 | CC3;
402 switch (GET_CODE (code))
405 case NE: return CC0 | CC2 | CC3;
412 switch (GET_CODE (code))
415 case NE: return CC0 | CC1 | CC3;
422 switch (GET_CODE (code))
425 case NE: return CC0 | CC1 | CC2;
432 switch (GET_CODE (code))
434 case EQ: return CC0 | CC2;
435 case NE: return CC1 | CC3;
442 switch (GET_CODE (code))
444 case LTU: return CC2 | CC3; /* carry */
445 case GEU: return CC0 | CC1; /* no carry */
452 switch (GET_CODE (code))
454 case GTU: return CC0 | CC1; /* borrow */
455 case LEU: return CC2 | CC3; /* no borrow */
462 switch (GET_CODE (code))
465 case NE: return CC1 | CC2 | CC3;
466 case LTU: return CC1;
467 case GTU: return CC2;
468 case LEU: return CC0 | CC1;
469 case GEU: return CC0 | CC2;
/* Reversed-operand unsigned mode: LTU/GTU masks are swapped
   relative to the block above.  */
476 switch (GET_CODE (code))
479 case NE: return CC2 | CC1 | CC3;
480 case LTU: return CC2;
481 case GTU: return CC1;
482 case LEU: return CC0 | CC2;
483 case GEU: return CC0 | CC1;
490 switch (GET_CODE (code))
493 case NE: return CC1 | CC2 | CC3;
494 case LT: return CC1 | CC3;
496 case LE: return CC0 | CC1 | CC3;
497 case GE: return CC0 | CC2;
504 switch (GET_CODE (code))
507 case NE: return CC1 | CC2 | CC3;
509 case GT: return CC2 | CC3;
510 case LE: return CC0 | CC1;
511 case GE: return CC0 | CC2 | CC3;
/* Signed/float modes including the IEEE "unordered" outcome, which
   is reported as CC3.  */
518 switch (GET_CODE (code))
521 case NE: return CC1 | CC2 | CC3;
524 case LE: return CC0 | CC1;
525 case GE: return CC0 | CC2;
526 case UNORDERED: return CC3;
527 case ORDERED: return CC0 | CC1 | CC2;
528 case UNEQ: return CC0 | CC3;
529 case UNLT: return CC1 | CC3;
530 case UNGT: return CC2 | CC3;
531 case UNLE: return CC0 | CC1 | CC3;
532 case UNGE: return CC0 | CC2 | CC3;
533 case LTGT: return CC1 | CC2;
540 switch (GET_CODE (code))
543 case NE: return CC2 | CC1 | CC3;
546 case LE: return CC0 | CC2;
547 case GE: return CC0 | CC1;
548 case UNORDERED: return CC3;
549 case ORDERED: return CC0 | CC2 | CC1;
550 case UNEQ: return CC0 | CC3;
551 case UNLT: return CC2 | CC3;
552 case UNGT: return CC1 | CC3;
553 case UNLE: return CC0 | CC2 | CC3;
554 case UNGE: return CC0 | CC1 | CC3;
555 case LTGT: return CC2 | CC1;
566 /* If INV is false, return assembler mnemonic string to implement
567 a branch specified by CODE. If INV is true, return mnemonic
568 for the corresponding inverted branch.  */
571 s390_branch_condition_mnemonic (code, inv)
/* Table indexed by the 4-bit condition mask computed below; masks 0
   (never) and 15 (always) have no conditional mnemonic, hence NULL.  */
575 static const char *const mnemonic[16] =
577 NULL, "o", "h", "nle",
578 "l", "nhe", "lh", "ne",
579 "e", "nlh", "he", "nl",
580 "le", "nh", "no", NULL
583 int mask = s390_branch_condition_mask (code);
/* Reject the degenerate always/never masks.  */
588 if (mask < 1 || mask > 14)
591 return mnemonic[mask];
594 /* If OP is an integer constant of mode MODE with exactly one
595 HImode subpart unequal to DEF, return the number of that
596 subpart. As a special case, all HImode subparts of OP are
597 equal to DEF, return zero. Otherwise, return -1.  */
600 s390_single_hi (op, mode, def)
602 enum machine_mode mode;
605 if (GET_CODE (op) == CONST_INT)
607 unsigned HOST_WIDE_INT value;
/* Each HImode part is 2 bytes wide.  */
608 int n_parts = GET_MODE_SIZE (mode) / 2;
611 for (i = 0; i < n_parts; i++)
614 value = (unsigned HOST_WIDE_INT) INTVAL (op);
618 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* Part numbering is from the most significant end, hence the
   n_parts - 1 - part conversion.  */
627 return part == -1 ? 0 : (n_parts - 1 - part);
/* A VOIDmode CONST_DOUBLE holds a wide integer in its low/high words.  */
630 else if (GET_CODE (op) == CONST_DOUBLE
631 && GET_MODE (op) == VOIDmode)
633 unsigned HOST_WIDE_INT value;
634 int n_parts = GET_MODE_SIZE (mode) / 2;
637 for (i = 0; i < n_parts; i++)
640 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
641 else if (i == HOST_BITS_PER_WIDE_INT / 16)
642 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
646 if ((value & 0xffff) != (unsigned)(def & 0xffff))
655 return part == -1 ? 0 : (n_parts - 1 - part);
661 /* Extract the HImode part number PART from integer
662 constant OP of mode MODE.  */
665 s390_extract_hi (op, mode, part)
667 enum machine_mode mode;
670 int n_parts = GET_MODE_SIZE (mode) / 2;
671 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to a shift count.  */
674 part = n_parts - 1 - part;
676 if (GET_CODE (op) == CONST_INT)
678 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
679 return ((value >> (16 * part)) & 0xffff);
681 else if (GET_CODE (op) == CONST_DOUBLE
682 && GET_MODE (op) == VOIDmode)
684 unsigned HOST_WIDE_INT value;
685 if (part < HOST_BITS_PER_WIDE_INT / 16)
686 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
688 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
689 part -= HOST_BITS_PER_WIDE_INT / 16;
691 return ((value >> (16 * part)) & 0xffff);
697 /* If OP is an integer constant of mode MODE with exactly one
698 QImode subpart unequal to DEF, return the number of that
699 subpart. As a special case, all QImode subparts of OP are
700 equal to DEF, return zero. Otherwise, return -1.  */
703 s390_single_qi (op, mode, def)
705 enum machine_mode mode;
708 if (GET_CODE (op) == CONST_INT)
710 unsigned HOST_WIDE_INT value;
/* One QImode part per byte of the mode.  */
711 int n_parts = GET_MODE_SIZE (mode);
714 for (i = 0; i < n_parts; i++)
717 value = (unsigned HOST_WIDE_INT) INTVAL (op);
721 if ((value & 0xff) != (unsigned)(def & 0xff))
/* Part numbering is from the most significant end.  */
730 return part == -1 ? 0 : (n_parts - 1 - part);
/* A VOIDmode CONST_DOUBLE holds a wide integer in its low/high words.  */
733 else if (GET_CODE (op) == CONST_DOUBLE
734 && GET_MODE (op) == VOIDmode)
736 unsigned HOST_WIDE_INT value;
737 int n_parts = GET_MODE_SIZE (mode);
740 for (i = 0; i < n_parts; i++)
743 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
744 else if (i == HOST_BITS_PER_WIDE_INT / 8)
745 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
749 if ((value & 0xff) != (unsigned)(def & 0xff))
758 return part == -1 ? 0 : (n_parts - 1 - part);
764 /* Extract the QImode part number PART from integer
765 constant OP of mode MODE.  */
768 s390_extract_qi (op, mode, part)
770 enum machine_mode mode;
773 int n_parts = GET_MODE_SIZE (mode);
774 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to a shift count.  */
777 part = n_parts - 1 - part;
779 if (GET_CODE (op) == CONST_INT)
781 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
782 return ((value >> (8 * part)) & 0xff);
784 else if (GET_CODE (op) == CONST_DOUBLE
785 && GET_MODE (op) == VOIDmode)
787 unsigned HOST_WIDE_INT value;
788 if (part < HOST_BITS_PER_WIDE_INT / 8)
789 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
791 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
792 part -= HOST_BITS_PER_WIDE_INT / 8;
794 return ((value >> (8 * part)) & 0xff);
801 /* Change optimizations to be performed, depending on the
804 LEVEL is the optimization level specified; 2 if `-O2' is
805 specified, 1 if `-O' is specified, and 0 if neither is specified.
807 SIZE is nonzero if `-Os' is specified and zero otherwise.  */
810 optimization_options (level, size)
811 int level ATTRIBUTE_UNUSED;
812 int size ATTRIBUTE_UNUSED;
819 /* Acquire a unique set number for our register saves and restores.  */
820 s390_sr_alias_set = new_alias_set ();
824 /* Map for smallest class containing reg regno.
   GPRs 0-15 (with r0 as GENERAL_REGS since it cannot serve as an
   address base), FPRs 16-31, then the remaining special registers.  */
826 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
827 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
828 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
829 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
830 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
831 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
832 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
833 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
834 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
835 ADDR_REGS, NO_REGS, ADDR_REGS
839 /* Return true if OP a (const_int 0) operand.
840 OP is the current operation.
841 MODE is the current operation mode.  */
844 const0_operand (op, mode)
846 enum machine_mode mode;
848 return op == CONST0_RTX (mode);
851 /* Return true if OP is constant.
852 OP is the current operation.
853 MODE is the current operation mode.  */
856 consttable_operand (op, mode)
858 enum machine_mode mode ATTRIBUTE_UNUSED;
860 return CONSTANT_P (op);
863 /* Return true if the mode of operand OP matches MODE.
864 If MODE is set to VOIDmode, set it to the mode of OP.  */
867 check_mode (op, mode)
869 enum machine_mode *mode;
/* VOIDmode acts as a wildcard: adopt OP's mode on first use.  */
871 if (*mode == VOIDmode)
872 *mode = GET_MODE (op);
/* VOIDmode operands (e.g. constants) are accepted in any mode.  */
875 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
881 /* Return true if OP a valid operand for the LARL instruction.
882 OP is the current operation.
883 MODE is the current operation mode.  */
886 larl_operand (op, mode)
888 enum machine_mode mode;
890 if (! check_mode (op, &mode))
893 /* Allow labels and local symbols.  */
894 if (GET_CODE (op) == LABEL_REF)
896 if (GET_CODE (op) == SYMBOL_REF
897 && (!flag_pic || SYMBOL_REF_FLAG (op)
898 || CONSTANT_POOL_ADDRESS_P (op)))
901 /* Everything else must have a CONST, so strip it.  */
902 if (GET_CODE (op) != CONST)
906 /* Allow adding *even* constants.  LARL addresses are halfword
   scaled, so an odd offset cannot be encoded.  */
907 if (GET_CODE (op) == PLUS)
909 if (GET_CODE (XEXP (op, 1)) != CONST_INT
910 || (INTVAL (XEXP (op, 1)) & 1) != 0)
915 /* Labels and local symbols allowed here as well.  */
916 if (GET_CODE (op) == LABEL_REF)
918 if (GET_CODE (op) == SYMBOL_REF
919 && (!flag_pic || SYMBOL_REF_FLAG (op)
920 || CONSTANT_POOL_ADDRESS_P (op)))
923 /* Now we must have a @GOTENT offset or @PLT stub.
   NOTE(review): 111 / 113 are magic UNSPEC numbers used throughout
   this backend for GOTENT resp. PLT references -- confirm against
   the UNSPEC definitions in s390.md.  */
924 if (GET_CODE (op) == UNSPEC
925 && XINT (op, 1) == 111)
927 if (GET_CODE (op) == UNSPEC
928 && XINT (op, 1) == 113)
934 /* Return true if OP is a valid FP-Register.
935 OP is the current operation.
936 MODE is the current operation mode.  */
939 fp_operand (op, mode)
941 enum machine_mode mode;
943 register enum rtx_code code = GET_CODE (op);
944 if (! check_mode (op, &mode))
946 if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
952 /* Helper routine to implement s_operand and s_imm_operand.
953 OP is the current operation.
954 MODE is the current operation mode.
955 ALLOW_IMMEDIATE specifies whether immediate operands should
956 be accepted or not.  */
959 general_s_operand (op, mode, allow_immediate)
961 enum machine_mode mode;
964 struct s390_address addr;
966 /* Call general_operand first, so that we don't have to
967 check for many special cases.  */
968 if (!general_operand (op, mode))
971 /* Just like memory_operand, allow (subreg (mem ...))
974 && GET_CODE (op) == SUBREG
975 && GET_CODE (SUBREG_REG (op)) == MEM)
976 op = SUBREG_REG (op);
978 switch (GET_CODE (op))
980 /* Constants that we are sure will be forced to the
981 literal pool in reload are OK as s-operand. Note
982 that we cannot call s390_preferred_reload_class here
983 because it might not be known yet at this point
984 whether the current function is a leaf or not.  */
987 if (!allow_immediate || reload_completed)
989 if (!legitimate_reload_constant_p (op))
995 /* Memory operands are OK unless they already use an
998 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1000 if (s390_decompose_address (XEXP (op, 0), &addr)
1012 /* Return true if OP is a valid S-type operand.
1013 OP is the current operation.
1014 MODE is the current operation mode.  */
1017 s_operand (op, mode)
1019 enum machine_mode mode;
/* S-type operand proper: no immediates allowed.  */
1021 return general_s_operand (op, mode, 0);
1024 /* Return true if OP is a valid S-type operand or an immediate
1025 operand that can be addressed as S-type operand by forcing
1026 it into the literal pool.
1027 OP is the current operation.
1028 MODE is the current operation mode.  */
1031 s_imm_operand (op, mode)
1033 enum machine_mode mode;
1035 return general_s_operand (op, mode, 1);
1038 /* Return true if OP is a valid operand for a 'Q' constraint.
1039 This differs from s_operand in that only memory operands
1040 without index register are accepted, nothing else.  */
1046 struct s390_address addr;
1048 if (GET_CODE (op) != MEM)
1051 if (!s390_decompose_address (XEXP (op, 0), &addr))
1060 /* Return the cost of an address rtx ADDR.  */
1063 s390_address_cost (addr)
1066 struct s390_address ad;
1067 if (!s390_decompose_address (addr, &ad))
/* An address that also needs an index register costs a little more.  */
1070 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1073 /* Return true if OP is a valid operand for the BRAS instruction.
1074 OP is the current operation.
1075 MODE is the current operation mode.  */
1078 bras_sym_operand (op, mode)
1080 enum machine_mode mode ATTRIBUTE_UNUSED;
1082 register enum rtx_code code = GET_CODE (op);
1084 /* Allow SYMBOL_REFs.  */
1085 if (code == SYMBOL_REF)
1088 /* Allow @PLT stubs.
   NOTE(review): 113 is this backend's UNSPEC number for PLT
   references -- confirm against s390.md.  */
1090 && GET_CODE (XEXP (op, 0)) == UNSPEC
1091 && XINT (XEXP (op, 0), 1) == 113)
1097 /* Return true if OP is a load multiple operation. It is known to be a
1098 PARALLEL and the first section will be tested.
1099 OP is the current operation.
1100 MODE is the current operation mode.  */
1103 load_multiple_operation (op, mode)
1105 enum machine_mode mode ATTRIBUTE_UNUSED;
1107 int count = XVECLEN (op, 0);
1108 unsigned int dest_regno;
1113 /* Perform a quick check so we don't blow up below.  */
1115 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1116 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1117 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1120 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1121 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1123 /* Check, is base, or base + displacement.  */
1125 if (GET_CODE (src_addr) == REG)
1127 else if (GET_CODE (src_addr) == PLUS
1128 && GET_CODE (XEXP (src_addr, 0)) == REG
1129 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1131 off = INTVAL (XEXP (src_addr, 1));
1132 src_addr = XEXP (src_addr, 0);
/* Eliminable registers would change the layout later, so refuse them.  */
1137 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Each further element must load the next consecutive register from
   the next consecutive word at the same base.  */
1140 for (i = 1; i < count; i++)
1142 rtx elt = XVECEXP (op, 0, i);
1144 if (GET_CODE (elt) != SET
1145 || GET_CODE (SET_DEST (elt)) != REG
1146 || GET_MODE (SET_DEST (elt)) != Pmode
1147 || REGNO (SET_DEST (elt)) != dest_regno + i
1148 || GET_CODE (SET_SRC (elt)) != MEM
1149 || GET_MODE (SET_SRC (elt)) != Pmode
1150 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1151 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1152 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1153 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1154 != off + i * UNITS_PER_WORD)
1161 /* Return true if OP is a store multiple operation. It is known to be a
1162 PARALLEL and the first section will be tested.
1163 OP is the current operation.
1164 MODE is the current operation mode.  */
1167 store_multiple_operation (op, mode)
1169 enum machine_mode mode ATTRIBUTE_UNUSED;
1171 int count = XVECLEN (op, 0);
1172 unsigned int src_regno;
1176 /* Perform a quick check so we don't blow up below.  */
1178 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1179 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1180 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1183 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1184 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1186 /* Check, is base, or base + displacement.  */
1188 if (GET_CODE (dest_addr) == REG)
1190 else if (GET_CODE (dest_addr) == PLUS
1191 && GET_CODE (XEXP (dest_addr, 0)) == REG
1192 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1194 off = INTVAL (XEXP (dest_addr, 1));
1195 dest_addr = XEXP (dest_addr, 0);
/* Eliminable registers would change the layout later, so refuse them.  */
1200 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
/* Mirror of load_multiple_operation: consecutive registers stored to
   consecutive words.  */
1203 for (i = 1; i < count; i++)
1205 rtx elt = XVECEXP (op, 0, i);
1207 if (GET_CODE (elt) != SET
1208 || GET_CODE (SET_SRC (elt)) != REG
1209 || GET_MODE (SET_SRC (elt)) != Pmode
1210 || REGNO (SET_SRC (elt)) != src_regno + i
1211 || GET_CODE (SET_DEST (elt)) != MEM
1212 || GET_MODE (SET_DEST (elt)) != Pmode
1213 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1214 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1215 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1216 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1217 != off + i * UNITS_PER_WORD)
1224 /* Return true if OP contains a symbol reference */
1227 symbolic_reference_mentioned_p (op)
1230 register const char *fmt;
1233 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Walk all sub-rtxes recursively, using the rtx format string to
   distinguish expression ('e') from vector ('E') operands.  */
1236 fmt = GET_RTX_FORMAT (GET_CODE (op));
1237 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1243 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1244 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1248 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1256 /* Return true if OP is a legitimate general operand when
1257 generating PIC code. It is given that flag_pic is on
1258 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */
1261 legitimate_pic_operand_p (op)
1264 /* Accept all non-symbolic constants.  */
1265 if (!SYMBOLIC_CONST (op))
1268 /* Reject everything else; must be handled
1269 via emit_pic_move.  */
1273 /* Returns true if the constant value OP is a legitimate general operand.
1274 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE.  */
1277 legitimate_constant_p (op)
1280 /* Accept all non-symbolic constants.  */
1281 if (!SYMBOLIC_CONST (op))
1284 /* In the PIC case, symbolic constants must *not* be
1285 forced into the literal pool. We accept them here,
1286 so that they will be handled by emit_pic_move.  */
1290 /* Even in the non-PIC case, we can accept immediate
1291 LARL operands here.  */
1293 return larl_operand (op, VOIDmode);
1295 /* All remaining non-PIC symbolic constants are
1296 forced into the literal pool.  */
1300 /* Returns true if the constant value OP is a legitimate general
1301 operand during and after reload. The difference to
1302 legitimate_constant_p is that this function will not accept
1303 a constant that would need to be forced to the literal pool
1304 before it can be used as operand.  */
1307 legitimate_reload_constant_p (op)
1310 /* Accept l(g)hi operands.  */
1311 if (GET_CODE (op) == CONST_INT
1312 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1315 /* Accept lliXX operands.  */
1317 && s390_single_hi (op, DImode, 0) >= 0)
1320 /* Accept larl operands.  */
1322 && larl_operand (op, VOIDmode))
1325 /* Everything else cannot be handled without reload.  */
1329 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1330 return the class of reg to actually use.  */
1333 s390_preferred_reload_class (op, class)
1335 enum reg_class class;
1337 /* This can happen if a floating point constant is being
1338 reloaded into an integer register. Leave well alone.  */
1339 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1340 && class != FP_REGS)
1343 switch (GET_CODE (op))
1345 /* Constants we cannot reload must be forced into the
1346 literal pool. For constants we *could* handle directly,
1347 it might still be preferable to put them in the pool and
1348 use a memory-to-memory instruction.
1350 However, try to avoid needlessly allocating a literal
1351 pool in a routine that wouldn't otherwise need any.
1352 Heuristically, we assume that 64-bit leaf functions
1353 typically don't need a literal pool, all others do.  */
1356 if (!legitimate_reload_constant_p (op))
1359 if (TARGET_64BIT && current_function_is_leaf)
1364 /* If a symbolic constant or a PLUS is reloaded,
1365 it is most likely being used as an address, so
1366 prefer ADDR_REGS. If 'class' is not a superset
1367 of ADDR_REGS, e.g. FP_REGS, reject this reload.  */
1372 if (reg_class_subset_p (ADDR_REGS, class))
1384 /* Return the register class of a scratch register needed to
1385 load IN into a register of class CLASS in MODE.
1387 We need a temporary when loading a PLUS expression which
1388 is not a legitimate operand of the LOAD ADDRESS instruction.  */
1391 s390_secondary_input_reload_class (class, mode, in)
1392 enum reg_class class ATTRIBUTE_UNUSED;
1393 enum machine_mode mode;
1396 if (s390_plus_operand (in, mode))
1402 /* Return true if OP is a PLUS that is not a legitimate
1403 operand for the LA instruction.
1404 OP is the current operation.
1405 MODE is the current operation mode.  */
1408 s390_plus_operand (op, mode)
1410 enum machine_mode mode;
/* Only Pmode addresses are candidates for LA at all.  */
1412 if (!check_mode (op, &mode) || mode != Pmode)
1415 if (GET_CODE (op) != PLUS)
1418 if (legitimate_la_operand_p (op))
1424 /* Generate code to load SRC, which is PLUS that is not a
1425 legitimate operand for the LA instruction, into TARGET.
1426 SCRATCH may be used as scratch register.  */
1429 s390_expand_plus_operand (target, src, scratch)
1430 register rtx target;
1432 register rtx scratch;
1435 struct s390_address ad;
1437 /* src must be a PLUS; get its two operands.  */
1438 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1441 /* Check if any of the two operands is already scheduled
1442 for replacement by reload. This can happen e.g. when
1443 float registers occur in an address.  */
1444 sum1 = find_replacement (&XEXP (src, 0));
1445 sum2 = find_replacement (&XEXP (src, 1));
1446 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1448 /* If the address is already strictly valid, there's nothing to do.  */
1449 if (!s390_decompose_address (src, &ad)
1450 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1451 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1453 /* Otherwise, one of the operands cannot be an address register;
1454 we reload its value into the scratch register.  */
/* Hard regs 1..15 are the usable address registers (r0 cannot serve
   as a base), hence the range test on true_regnum.  */
1455 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1457 emit_move_insn (scratch, sum1);
1460 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1462 emit_move_insn (scratch, sum2);
1466 /* According to the way these invalid addresses are generated
1467 in reload.c, it should never happen (at least on s390) that
1468 *neither* of the PLUS components, after find_replacements
1469 was applied, is an address register.  */
1470 if (sum1 == scratch && sum2 == scratch)
1476 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1479 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1480 is only ever performed on addresses, so we can mark the
1481 sum as legitimate for LA in any case.  */
1482 s390_load_address (target, src);
1486 /* Decompose a RTL expression ADDR for a memory address into
1487 its components, returned in OUT.
1489 Returns 0 if ADDR is not a valid memory address, nonzero
1490 otherwise. If OUT is NULL, don't return the components,
1491 but check for validity only.
1493 Note: Only addresses in canonical form are recognized.
1494 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1495 canonical form so that they will be recognized. */
/* NOTE(review): this listing appears to have lines elided (the embedded
   line numbering is discontinuous); several statements, braces and the
   failure-return paths of this function are not visible here.  */
1498 s390_decompose_address (addr, out)
1500 struct s390_address *out;
1502 rtx base = NULL_RTX;
1503 rtx indx = NULL_RTX;
1504 rtx disp = NULL_RTX;
1505 int pointer = FALSE;
1507 /* Decompose address into base + index + displacement. */
1509 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1512 else if (GET_CODE (addr) == PLUS)
1514 rtx op0 = XEXP (addr, 0);
1515 rtx op1 = XEXP (addr, 1);
1516 enum rtx_code code0 = GET_CODE (op0);
1517 enum rtx_code code1 = GET_CODE (op1);
1519 if (code0 == REG || code0 == UNSPEC)
1521 if (code1 == REG || code1 == UNSPEC)
1523 indx = op0; /* index + base */
1529 base = op0; /* base + displacement */
1534 else if (code0 == PLUS)
1536 indx = XEXP (op0, 0); /* index + base + disp */
1537 base = XEXP (op0, 1);
1548 disp = addr; /* displacement */
1551 /* Validate base register. */
/* An UNSPEC 101 wrapper around a register marks it (presumably as the
   literal-pool base) elsewhere in this file; strip it to get at the
   plain REG.  */
1554 if (GET_CODE (base) == UNSPEC)
1556 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1558 base = XVECEXP (base, 0, 0);
1562 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* A base drawn from one of the known pointer registers lets us mark
   the whole address as a pointer (see out->pointer below).  */
1565 if (REGNO (base) == BASE_REGISTER
1566 || REGNO (base) == STACK_POINTER_REGNUM
1567 || REGNO (base) == FRAME_POINTER_REGNUM
1568 || ((reload_completed || reload_in_progress)
1569 && frame_pointer_needed
1570 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1571 || REGNO (base) == ARG_POINTER_REGNUM
1572 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1573 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1575 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1579 /* Validate index register. */
1582 if (GET_CODE (indx) == UNSPEC)
1584 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1586 indx = XVECEXP (indx, 0, 0);
1590 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1593 if (REGNO (indx) == BASE_REGISTER
1594 || REGNO (indx) == STACK_POINTER_REGNUM
1595 || REGNO (indx) == FRAME_POINTER_REGNUM
1596 || ((reload_completed || reload_in_progress)
1597 && frame_pointer_needed
1598 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1599 || REGNO (indx) == ARG_POINTER_REGNUM
1600 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1601 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1603 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1607 /* Validate displacement. */
1610 /* Allow integer constant in range (the unsigned 12-bit
   displacement field: 0 .. 4095). */
1611 if (GET_CODE (disp) == CONST_INT)
1613 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1617 /* In the small-PIC case, the linker converts @GOT12
1618 offsets (UNSPEC 110) to possible displacements. */
1619 else if (GET_CODE (disp) == CONST
1620 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1621 && XINT (XEXP (disp, 0), 1) == 110)
1629 /* Accept chunkified literal pool symbol references
   (a LABEL_REF minus LABEL_REF difference). */
1630 else if (GET_CODE (disp) == CONST
1631 && GET_CODE (XEXP (disp, 0)) == MINUS
1632 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1633 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1638 /* Likewise if a constant offset is present. */
1639 else if (GET_CODE (disp) == CONST
1640 && GET_CODE (XEXP (disp, 0)) == PLUS
1641 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
1642 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
1643 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1644 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1649 /* We can convert literal pool addresses to
1650 displacements by basing them off the base register. */
1653 /* In some cases, we can accept an additional
1654 small constant offset. Split these off here. */
1656 unsigned int offset = 0;
1658 if (GET_CODE (disp) == CONST
1659 && GET_CODE (XEXP (disp, 0)) == PLUS
1660 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1662 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1663 disp = XEXP (XEXP (disp, 0), 0);
1666 /* Now we must have a literal pool address. */
1667 if (GET_CODE (disp) != SYMBOL_REF
1668 || !CONSTANT_POOL_ADDRESS_P (disp))
1671 /* In 64-bit PIC mode we cannot accept symbolic
1672 constants in the constant pool. */
1673 if (TARGET_64BIT && flag_pic
1674 && SYMBOLIC_CONST (get_pool_constant (disp)))
1677 /* If we have an offset, make sure it does not
1678 exceed the size of the constant pool entry. */
1679 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1682 /* Either base or index must be free to
1683 hold the base register. */
1687 /* Convert the address: rebase the pool symbol off
   BASE_REGISTER and wrap it in UNSPEC 100 (lt-relative). */
1689 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1691 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1693 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1694 disp = gen_rtx_CONST (Pmode, disp);
1697 disp = plus_constant (disp, offset);
1711 out->pointer = pointer;
1717 /* Return nonzero if ADDR is a valid memory address.
1718 STRICT specifies whether strict register checking applies. */
/* NOTE(review): interior lines (return statements, braces) appear to be
   elided from this listing.  The visible logic decomposes ADDR and then
   applies either the strict or the non-strict base/index register
   predicates.  */
1721 legitimate_address_p (mode, addr, strict)
1722 enum machine_mode mode ATTRIBUTE_UNUSED;
1726 struct s390_address ad;
1727 if (!s390_decompose_address (addr, &ad))
1732 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1734 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
1739 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1741 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1748 /* Return 1 if OP is a valid operand for the LA instruction.
1749 In 31-bit, we need to prove that the result is used as an
1750 address, as LA performs only a 31-bit addition. */
/* NOTE(review): lines elided in this listing; the returns for the two
   outcomes are not visible.  */
1753 legitimate_la_operand_p (op)
1756 struct s390_address addr;
1757 if (!s390_decompose_address (op, &addr))
/* On 64-bit, or when the decomposed address is known to be a pointer,
   the LA result is safe.  */
1760 if (TARGET_64BIT || addr.pointer)
1766 /* Return 1 if OP is a valid operand for the LA instruction,
1767 and we prefer to use LA over addition to compute it.
1768 If STRICT is true, only accept operands that will never
1769 change to something we cannot recognize as preferred. */
/* NOTE(review): lines elided in this listing; the branch bodies and
   returns are not visible.  */
1772 preferred_la_operand_p (op, strict)
1776 struct s390_address addr;
1777 if (!s390_decompose_address (op, &addr))
1780 if (!TARGET_64BIT && !addr.pointer)
/* Prefer LA when either component register is already known to
   carry a pointer value.  */
1787 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
1788 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
1794 /* Emit a forced load-address operation to load SRC into DST.
1795 This will use the LOAD ADDRESS instruction even in situations
1796 where legitimate_la_operand_p (SRC) returns false. */
/* NOTE(review): the condition selecting between the two emit calls is
   elided in this listing — presumably a TARGET_64BIT test (TODO
   confirm); the 31-bit path forces LA via a special pattern.  */
1799 s390_load_address (dst, src)
1804 emit_move_insn (dst, src);
1806 emit_insn (gen_force_la_31 (dst, src));
1809 /* Return a legitimate reference for ORIG (an address) using the
1810 register REG. If REG is 0, a new pseudo is generated.
1812 There are two types of references that must be handled:
1814 1. Global data references must load the address from the GOT, via
1815 the PIC reg. An insn is emitted to do this load, and the reg is
1818 2. Static data references, constant pool addresses, and code labels
1819 compute the address as an offset from the GOT, whose base is in
1820 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1821 differentiate them from global data objects. The returned
1822 address is the PIC reg + an unspec constant.
1824 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1825 reg also appears in the address. */
/* NOTE(review): this listing elides many interior lines (conditions,
   braces, returns).  The UNSPEC codes used below correspond to the
   relocations printed by s390_output_symbolic_const: 100 = lt-relative,
   101 = literal-pool base marker, 110 = @GOT12, 111 = @GOTENT,
   112 = @GOT, 114 = @PLT-relative-to-LT.  */
1828 legitimize_pic_address (orig, reg)
1836 if (GET_CODE (addr) == LABEL_REF
1837 || (GET_CODE (addr) == SYMBOL_REF
1838 && (SYMBOL_REF_FLAG (addr)
1839 || CONSTANT_POOL_ADDRESS_P (addr))))
1841 /* This is a local symbol. */
1844 /* Access local symbols PC-relative via LARL.
1845 This is the same as in the non-PIC case, so it is
1846 handled automatically ... */
1850 /* Access local symbols relative to the literal pool. */
1852 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1854 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1855 addr = gen_rtx_CONST (SImode, addr);
1856 addr = force_const_mem (SImode, addr);
1857 emit_move_insn (temp, addr);
1859 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1860 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1861 new = gen_rtx_PLUS (Pmode, base, temp);
1865 emit_move_insn (reg, new);
1870 else if (GET_CODE (addr) == SYMBOL_REF)
1873 reg = gen_reg_rtx (Pmode);
1877 /* Assume GOT offset < 4k. This is handled the same way
1878 in both 31- and 64-bit code (@GOT12). */
1880 if (reload_in_progress || reload_completed)
1881 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
1883 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1884 new = gen_rtx_CONST (Pmode, new);
1885 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1886 new = gen_rtx_MEM (Pmode, new);
/* The GOT slot itself never changes once relocated, so the MEM may
   be marked unchanging.  */
1887 RTX_UNCHANGING_P (new) = 1;
1888 emit_move_insn (reg, new);
1891 else if (TARGET_64BIT)
1893 /* If the GOT offset might be >= 4k, we determine the position
1894 of the GOT entry via a PC-relative LARL (@GOTENT). */
1896 rtx temp = gen_reg_rtx (Pmode);
1898 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1899 new = gen_rtx_CONST (Pmode, new);
1900 emit_move_insn (temp, new);
1902 new = gen_rtx_MEM (Pmode, temp);
1903 RTX_UNCHANGING_P (new) = 1;
1904 emit_move_insn (reg, new);
1909 /* If the GOT offset might be >= 4k, we have to load it
1910 from the literal pool (@GOT). */
1912 rtx temp = gen_reg_rtx (Pmode);
1914 if (reload_in_progress || reload_completed)
1915 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
1917 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
1918 addr = gen_rtx_CONST (SImode, addr);
1919 addr = force_const_mem (SImode, addr);
1920 emit_move_insn (temp, addr);
1922 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
1923 new = gen_rtx_MEM (Pmode, new);
1924 RTX_UNCHANGING_P (new) = 1;
1925 emit_move_insn (reg, new);
1931 if (GET_CODE (addr) == CONST)
1933 addr = XEXP (addr, 0);
1934 if (GET_CODE (addr) == UNSPEC)
1936 if (XVECLEN (addr, 0) != 1)
1938 switch (XINT (addr, 1))
1940 /* If someone moved an @GOT or lt-relative UNSPEC
1941 out of the literal pool, force them back in. */
1945 new = force_const_mem (SImode, orig);
1948 /* @GOTENT is OK as is. */
1952 /* @PLT is OK as is on 64-bit, must be converted to
1953 lt-relative PLT on 31-bit. */
1957 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1959 addr = XVECEXP (addr, 0, 0);
1960 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
1961 addr = gen_rtx_CONST (SImode, addr);
1962 addr = force_const_mem (SImode, addr);
1963 emit_move_insn (temp, addr);
1965 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1966 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1967 new = gen_rtx_PLUS (Pmode, base, temp);
1971 emit_move_insn (reg, new);
1977 /* Everything else cannot happen. */
1982 else if (GET_CODE (addr) != PLUS)
1985 if (GET_CODE (addr) == PLUS)
1987 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1988 /* Check first to see if this is a constant offset
1989 from a local symbol reference. */
1990 if ((GET_CODE (op0) == LABEL_REF
1991 || (GET_CODE (op0) == SYMBOL_REF
1992 && (SYMBOL_REF_FLAG (op0)
1993 || CONSTANT_POOL_ADDRESS_P (op0))))
1994 && GET_CODE (op1) == CONST_INT)
1998 if (INTVAL (op1) & 1)
2000 /* LARL can't handle odd offsets, so emit a
2001 pair of LARL and LA. */
2002 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* For out-of-range offsets, fold all but the final odd unit into
   the LARL target so the remaining LA addend stays small.  */
2004 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
2006 int even = INTVAL (op1) - 1;
2007 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2008 op0 = gen_rtx_CONST (Pmode, op0);
2012 emit_move_insn (temp, op0);
2013 new = gen_rtx_PLUS (Pmode, temp, op1);
2017 emit_move_insn (reg, new);
2023 /* If the offset is even, we can just use LARL.
2024 This will happen automatically. */
2029 /* Access local symbols relative to the literal pool. */
2031 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2033 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
2034 addr = gen_rtx_PLUS (SImode, addr, op1);
2035 addr = gen_rtx_CONST (SImode, addr);
2036 addr = force_const_mem (SImode, addr);
2037 emit_move_insn (temp, addr);
2039 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2040 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2041 new = gen_rtx_PLUS (Pmode, base, temp);
2045 emit_move_insn (reg, new);
2051 /* Now, check whether it is an LT-relative symbol plus offset
2052 that was pulled out of the literal pool. Force it back in. */
2054 else if (GET_CODE (op0) == UNSPEC
2055 && GET_CODE (op1) == CONST_INT)
2057 if (XVECLEN (op0, 0) != 1)
2059 if (XINT (op0, 1) != 100)
2062 new = force_const_mem (SImode, orig);
2065 /* Otherwise, compute the sum recursively: legitimize each
   PLUS operand, then recombine, keeping any constant part
   on the outside so it can become a displacement.  */
2068 base = legitimize_pic_address (XEXP (addr, 0), reg);
2069 new = legitimize_pic_address (XEXP (addr, 1),
2070 base == reg ? NULL_RTX : reg);
2071 if (GET_CODE (new) == CONST_INT)
2072 new = plus_constant (base, INTVAL (new));
2075 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2077 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2078 new = XEXP (new, 1);
2080 new = gen_rtx_PLUS (Pmode, base, new);
2083 if (GET_CODE (new) == CONST)
2084 new = XEXP (new, 0);
2085 new = force_operand (new, 0);
2092 /* Emit insns to move operands[1] into operands[0],
   legitimizing any symbolic source address for PIC first. */
/* NOTE(review): lines elided in this listing (the SYMBOLIC_CONST guard
   around the legitimize call is not fully visible).  */
2095 emit_pic_move (operands, mode)
2097 enum machine_mode mode ATTRIBUTE_UNUSED;
/* During/after reload no new pseudos may be created; reuse the
   destination as the scratch in that case.  */
2099 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2101 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2102 operands[1] = force_reg (Pmode, operands[1]);
2104 operands[1] = legitimize_pic_address (operands[1], temp);
2107 /* Try machine-dependent ways of modifying an illegitimate address X
2108 to be legitimate. If we find one, return the new, valid address.
2110 OLDX is the address as it was before break_out_memory_refs was called.
2111 In some cases it is useful to look at this to decide what needs to be done.
2113 MODE is the mode of the operand pointed to by X.
2115 When -fpic is used, special handling is needed for symbolic references.
2116 See comments by legitimize_pic_address for details. */
/* NOTE(review): lines elided in this listing (the flag_pic guard and the
   final return are not visible).  */
2119 legitimize_address (x, oldx, mode)
2121 register rtx oldx ATTRIBUTE_UNUSED;
2122 enum machine_mode mode ATTRIBUTE_UNUSED;
2124 rtx constant_term = const0_rtx;
2128 if (SYMBOLIC_CONST (x)
2129 || (GET_CODE (x) == PLUS
2130 && (SYMBOLIC_CONST (XEXP (x, 0))
2131 || SYMBOLIC_CONST (XEXP (x, 1)))))
2132 x = legitimize_pic_address (x, 0);
2134 if (legitimate_address_p (mode, x, FALSE))
2138 x = eliminate_constant_term (x, &constant_term);
2140 /* Optimize loading of large displacements by splitting them
2141 into the multiple of 4K and the rest; this allows the
2142 former to be CSE'd if possible.
2144 Don't do this if the displacement is added to a register
2145 pointing into the stack frame, as the offsets will
2146 change later anyway. */
2148 if (GET_CODE (constant_term) == CONST_INT
2149 && (INTVAL (constant_term) < 0
2150 || INTVAL (constant_term) >= 4096)
2151 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower = displacement mod 4K (fits the 12-bit field);
   upper = the 4K-aligned remainder (XOR strips the low bits).  */
2153 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2154 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2156 rtx temp = gen_reg_rtx (Pmode);
2157 rtx val = force_operand (GEN_INT (upper), temp);
2159 emit_move_insn (temp, val);
2161 x = gen_rtx_PLUS (Pmode, x, temp);
2162 constant_term = GEN_INT (lower);
/* Force the non-register half of a PLUS into a register so the
   result is a base+index form.  */
2165 if (GET_CODE (x) == PLUS)
2167 if (GET_CODE (XEXP (x, 0)) == REG)
2169 register rtx temp = gen_reg_rtx (Pmode);
2170 register rtx val = force_operand (XEXP (x, 1), temp);
2172 emit_move_insn (temp, val);
2174 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2177 else if (GET_CODE (XEXP (x, 1)) == REG)
2179 register rtx temp = gen_reg_rtx (Pmode);
2180 register rtx val = force_operand (XEXP (x, 0), temp);
2182 emit_move_insn (temp, val);
2184 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2188 if (constant_term != const0_rtx)
2189 x = gen_rtx_PLUS (Pmode, x, constant_term);
2194 /* Emit code to move LEN bytes from SRC to DST (note: the original
   comment has the operands reversed; the generated patterns below
   move src into dst).  Three strategies: a single MVC-style block
   move for constant LEN <= 256, MVCLE when available, otherwise a
   256-byte-block loop.  */
/* NOTE(review): lines elided in this listing (parameter declarations,
   braces, loop close).  */
2197 s390_expand_movstr (dst, src, len)
2202 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2203 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2204 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2205 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Constant small length: one instruction, length encoded as LEN-1.  */
2208 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2210 if (INTVAL (len) > 0)
2211 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
2214 else if (TARGET_MVCLE)
/* MVCLE takes even/odd register pairs: high part = address,
   low part = length.  */
2216 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2217 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2218 rtx reg0 = gen_reg_rtx (double_mode);
2219 rtx reg1 = gen_reg_rtx (double_mode);
2221 emit_move_insn (gen_highpart (single_mode, reg0),
2222 force_operand (XEXP (dst, 0), NULL_RTX));
2223 emit_move_insn (gen_highpart (single_mode, reg1),
2224 force_operand (XEXP (src, 0), NULL_RTX));
2226 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2227 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2229 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* General case: loop moving 256-byte blocks, then a final partial
   block of (count mod 256) bytes.  */
2234 rtx dst_addr, src_addr, count, blocks, temp;
2235 rtx end_label = gen_label_rtx ();
2236 enum machine_mode mode;
2239 mode = GET_MODE (len);
2240 if (mode == VOIDmode)
2243 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2247 dst_addr = gen_reg_rtx (Pmode);
2248 src_addr = gen_reg_rtx (Pmode);
2249 count = gen_reg_rtx (mode);
2250 blocks = gen_reg_rtx (mode);
2252 convert_move (count, len, 1);
2253 emit_cmp_and_jump_insns (count, const0_rtx,
2254 EQ, NULL_RTX, mode, 1, end_label);
2256 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2257 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2258 dst = change_address (dst, VOIDmode, dst_addr);
2259 src = change_address (src, VOIDmode, src_addr);
2261 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2263 emit_move_insn (count, temp);
2265 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2267 emit_move_insn (blocks, temp);
2269 expand_start_loop (1);
2270 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2271 make_tree (type, blocks),
2272 make_tree (type, const0_rtx)));
2274 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2275 s390_load_address (dst_addr,
2276 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2277 s390_load_address (src_addr,
2278 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2280 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2282 emit_move_insn (blocks, temp);
2286 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2287 emit_label (end_label);
2291 /* Emit code to clear LEN bytes at DST.  Mirrors s390_expand_movstr:
   one short clear for constant LEN <= 256, MVCLE (with a zero-length
   source) when available, otherwise a 256-byte-block loop.  */
/* NOTE(review): lines elided in this listing (parameter declarations,
   braces, loop close).  */
2294 s390_expand_clrstr (dst, len)
2298 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2299 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2300 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2301 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2304 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2306 if (INTVAL (len) > 0)
2307 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2310 else if (TARGET_MVCLE)
2312 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2313 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2314 rtx reg0 = gen_reg_rtx (double_mode);
2315 rtx reg1 = gen_reg_rtx (double_mode);
2317 emit_move_insn (gen_highpart (single_mode, reg0),
2318 force_operand (XEXP (dst, 0), NULL_RTX));
2319 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* Source operand pair is all zero: address 0, length 0 — MVCLE
   then pads the destination with the (zero) pad byte.  */
2321 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2322 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2324 emit_insn ((*gen_long) (reg0, reg1, reg0));
2329 rtx dst_addr, src_addr, count, blocks, temp;
2330 rtx end_label = gen_label_rtx ();
2331 enum machine_mode mode;
2334 mode = GET_MODE (len);
2335 if (mode == VOIDmode)
2338 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2342 dst_addr = gen_reg_rtx (Pmode);
2343 src_addr = gen_reg_rtx (Pmode);
2344 count = gen_reg_rtx (mode);
2345 blocks = gen_reg_rtx (mode);
2347 convert_move (count, len, 1);
2348 emit_cmp_and_jump_insns (count, const0_rtx,
2349 EQ, NULL_RTX, mode, 1, end_label);
2351 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2352 dst = change_address (dst, VOIDmode, dst_addr);
2354 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2356 emit_move_insn (count, temp);
2358 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2360 emit_move_insn (blocks, temp);
2362 expand_start_loop (1);
2363 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2364 make_tree (type, blocks),
2365 make_tree (type, const0_rtx)));
2367 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2368 s390_load_address (dst_addr,
2369 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2371 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2373 emit_move_insn (blocks, temp);
2377 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2378 emit_label (end_label);
2382 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2383 and return the result in TARGET.  Same three-way strategy as
   s390_expand_movstr; the block loop additionally exits early as
   soon as a 256-byte chunk compares unequal. */
/* NOTE(review): lines elided in this listing (parameter declarations,
   braces, loop close).  */
2386 s390_expand_cmpstr (target, op0, op1, len)
2392 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2393 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2394 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2395 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
2396 rtx (*gen_result) PARAMS ((rtx)) =
2397 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
2399 op0 = protect_from_queue (op0, 0);
2400 op1 = protect_from_queue (op1, 0);
2401 len = protect_from_queue (len, 0);
2403 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2405 if (INTVAL (len) > 0)
2407 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2408 emit_insn ((*gen_result) (target));
/* Zero-length compare is trivially equal.  */
2411 emit_move_insn (target, const0_rtx);
2414 else if (TARGET_MVCLE)
2416 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2417 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2418 rtx reg0 = gen_reg_rtx (double_mode);
2419 rtx reg1 = gen_reg_rtx (double_mode);
2421 emit_move_insn (gen_highpart (single_mode, reg0),
2422 force_operand (XEXP (op0, 0), NULL_RTX));
2423 emit_move_insn (gen_highpart (single_mode, reg1),
2424 force_operand (XEXP (op1, 0), NULL_RTX));
2426 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2427 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2429 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2430 emit_insn ((*gen_result) (target));
2435 rtx addr0, addr1, count, blocks, temp;
2436 rtx end_label = gen_label_rtx ();
2437 enum machine_mode mode;
2440 mode = GET_MODE (len);
2441 if (mode == VOIDmode)
2444 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2448 addr0 = gen_reg_rtx (Pmode);
2449 addr1 = gen_reg_rtx (Pmode);
2450 count = gen_reg_rtx (mode);
2451 blocks = gen_reg_rtx (mode);
2453 convert_move (count, len, 1);
2454 emit_cmp_and_jump_insns (count, const0_rtx,
2455 EQ, NULL_RTX, mode, 1, end_label);
2457 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
2458 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
2459 op0 = change_address (op0, VOIDmode, addr0);
2460 op1 = change_address (op1, VOIDmode, addr1);
2462 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2464 emit_move_insn (count, temp);
2466 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2468 emit_move_insn (blocks, temp);
2470 expand_start_loop (1);
2471 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2472 make_tree (type, blocks),
2473 make_tree (type, const0_rtx)));
2475 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump on the CC register (reg 33, CCSmode):
   if the chunk compared unequal, branch straight to end_label so the
   result insn sees the deciding condition code.  */
2476 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
2477 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
2478 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
2479 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
2480 emit_jump_insn (temp);
2482 s390_load_address (addr0,
2483 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
2484 s390_load_address (addr1,
2485 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
2487 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2489 emit_move_insn (blocks, temp);
2493 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
2494 emit_label (end_label);
2496 emit_insn ((*gen_result) (target));
2500 /* In the name of slightly smaller debug output, and to cater to
2501 general assembler lossage, recognize various UNSPEC sequences
2502 and turn them back into a direct symbol reference. */
/* NOTE(review): lines elided in this listing (the assignment of X from
   ORIG_X and the fallback return are not visible).  */
2505 s390_simplify_dwarf_addr (orig_x)
2510 if (GET_CODE (x) != MEM)
/* PIC-reg + CONST(UNSPEC 110) is a @GOT12 reference; unwrap it.  */
2514 if (GET_CODE (x) == PLUS
2515 && GET_CODE (XEXP (x, 1)) == CONST
2516 && GET_CODE (XEXP (x, 0)) == REG
2517 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2519 y = XEXP (XEXP (x, 1), 0);
2520 if (GET_CODE (y) == UNSPEC
2521 && XINT (y, 1) == 110)
2522 return XVECEXP (y, 0, 0);
/* CONST(UNSPEC 111) is a @GOTENT reference; unwrap likewise.  */
2526 if (GET_CODE (x) == CONST)
2529 if (GET_CODE (y) == UNSPEC
2530 && XINT (y, 1) == 111)
2531 return XVECEXP (y, 0, 0);
2538 /* Output symbolic constant X in assembler syntax to
2539 stdio stream FILE.  Recurses through CONST/PLUS/MINUS and maps
   the target-private UNSPEC codes to their assembler relocation
   suffixes (@GOT12, @GOTENT, @GOT, @PLT, and the .LT literal-pool
   label forms). */
/* NOTE(review): the switch's case labels and break statements are
   elided from this listing; the pairing of UNSPEC numbers to suffixes
   below is inferred from the visible output strings.  */
2542 s390_output_symbolic_const (file, x)
2546 switch (GET_CODE (x))
2551 s390_output_symbolic_const (file, XEXP (x, 0));
2555 s390_output_symbolic_const (file, XEXP (x, 0));
2556 fprintf (file, "+");
2557 s390_output_symbolic_const (file, XEXP (x, 1));
2561 s390_output_symbolic_const (file, XEXP (x, 0));
2562 fprintf (file, "-");
2563 s390_output_symbolic_const (file, XEXP (x, 1));
2570 output_addr_const (file, x);
2574 if (XVECLEN (x, 0) != 1)
2575 output_operand_lossage ("invalid UNSPEC as operand (1)");
2576 switch (XINT (x, 1))
/* lt-relative: symbol minus the per-function literal-pool label.  */
2580 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2581 fprintf (file, "-.LT%d", current_function_funcdef_no);
2584 fprintf (file, ".LT%d-", current_function_funcdef_no);
2585 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2588 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2589 fprintf (file, "@GOT12");
2592 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2593 fprintf (file, "@GOTENT");
2596 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2597 fprintf (file, "@GOT");
2600 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2601 fprintf (file, "@PLT");
2604 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2605 fprintf (file, "@PLT-.LT%d", current_function_funcdef_no);
2608 output_operand_lossage ("invalid UNSPEC as operand (2)");
2614 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2619 /* Output address operand ADDR in assembler syntax to
2620 stdio stream FILE, as "disp(indx,base)", "disp(base)" or bare
   displacement. */
/* NOTE(review): lines elided in this listing (branch structure around
   the disp/base/indx output is not fully visible).  */
2623 print_operand_address (file, addr)
2627 struct s390_address ad;
/* Addresses printed here must pass the strict register checks.  */
2629 if (!s390_decompose_address (addr, &ad)
2630 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2631 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2632 output_operand_lossage ("Cannot decompose address.");
2635 s390_output_symbolic_const (file, ad.disp);
2637 fprintf (file, "0");
2639 if (ad.base && ad.indx)
2640 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2641 reg_names[REGNO (ad.base)]);
2643 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2646 /* Output operand X in assembler syntax to stdio stream FILE.
2647 CODE specified the format flag. The following format flags
2650 'C': print opcode suffix for branch condition.
2651 'D': print opcode suffix for inverse branch condition.
2652 'O': print only the displacement of a memory reference.
2653 'R': print only the base register of a memory reference.
2654 'N': print the second word of a DImode operand.
2655 'M': print the second word of a TImode operand.
2657 'b': print integer X as if it's an unsigned byte.
2658 'x': print integer X as if it's an unsigned word.
2659 'h': print integer X as if it's a signed word. */
/* NOTE(review): the switch's case labels and break statements are
   elided from this listing.  */
2662 print_operand (file, x, code)
2670 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2674 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
2679 struct s390_address ad;
2681 if (GET_CODE (x) != MEM
2682 || !s390_decompose_address (XEXP (x, 0), &ad)
2683 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2688 s390_output_symbolic_const (file, ad.disp);
2690 fprintf (file, "0");
2696 struct s390_address ad;
2698 if (GET_CODE (x) != MEM
2699 || !s390_decompose_address (XEXP (x, 0), &ad)
2700 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2705 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2707 fprintf (file, "0");
/* 'N': second word of DImode — next register, or memory at +4.  */
2712 if (GET_CODE (x) == REG)
2713 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2714 else if (GET_CODE (x) == MEM)
2715 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of TImode — next register, or memory at +8.  */
2721 if (GET_CODE (x) == REG)
2722 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2723 else if (GET_CODE (x) == MEM)
2724 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
2730 switch (GET_CODE (x))
2733 fprintf (file, "%s", reg_names[REGNO (x)]);
2737 output_address (XEXP (x, 0));
2744 s390_output_symbolic_const (file, x);
2749 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2750 else if (code == 'x')
2751 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
2752 else if (code == 'h')
/* Sign-extend the low 16 bits: mask, flip bit 15, re-bias.  */
2753 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2755 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2759 if (GET_MODE (x) != VOIDmode)
2762 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2763 else if (code == 'x')
2764 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2765 else if (code == 'h')
2766 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2772 fatal_insn ("UNKNOWN in print_operand !?", x);
2777 /* Target hook for assembling integer objects. We need to define it
2778 here to work around a bug in some versions of GAS, which couldn't
2779 handle values smaller than INT_MIN when printed in decimal. */
2782 s390_assemble_integer (x, size, aligned_p)
/* For aligned 8-byte constants below INT_MIN, emit .quad with a hex
   literal instead of letting the default path print decimal.  */
2787 if (size == 8 && aligned_p
2788 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN
2790 fputs ("\t.quad\t", asm_out_file);
2791 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2792 putc ('\n', asm_out_file);
2795 return default_assemble_integer (x, size, aligned_p);
/* Set to 1 to dump address-dependency cost adjustments to stderr
   (used by s390_adjust_cost below).  */
2799 #define DEBUG_SCHED 0
2801 /* Returns true if register REGNO is used for forming
2802 a memory address in expression X. */
/* NOTE(review): lines elided in this listing (the MEM case label, some
   returns and braces are not visible).  */
2805 reg_used_in_mem_p (regno, x)
2809 enum rtx_code code = GET_CODE (x);
2815 if (refers_to_regno_p (regno, regno+1,
/* A SET of PC (a branch) — the jump target counts as an address use.  */
2819 else if (code == SET
2820 && GET_CODE (SET_DEST (x)) == PC)
2822 if (refers_to_regno_p (regno, regno+1,
/* Recurse over all sub-expressions and vectors.  */
2827 fmt = GET_RTX_FORMAT (code);
2828 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2831 && reg_used_in_mem_p (regno, XEXP (x, i)))
2834 else if (fmt[i] == 'E')
2835 for (j = 0; j < XVECLEN (x, i); j++)
2836 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2842 /* Returns true if expression DEP_RTX sets an address register
2843 used by instruction INSN to address memory. */
/* NOTE(review): lines elided in this listing (variable declarations and
   some returns are not visible).  */
2846 addr_generation_dependency_p (dep_rtx, insn)
2852 if (GET_CODE (dep_rtx) == SET)
/* Strip STRICT_LOW_PART/SUBREG wrappers to find the real target reg.  */
2854 target = SET_DEST (dep_rtx);
2855 if (GET_CODE (target) == STRICT_LOW_PART)
2856 target = XEXP (target, 0);
2857 while (GET_CODE (target) == SUBREG)
2858 target = SUBREG_REG (target);
2860 if (GET_CODE (target) == REG)
2862 int regno = REGNO (target);
/* For LA, the whole source of the SET is address arithmetic, so any
   reference to REGNO there is an address use.  */
2864 if (get_attr_type (insn) == TYPE_LA)
2866 pat = PATTERN (insn);
2867 if (GET_CODE (pat) == PARALLEL)
2869 if (XVECLEN (pat, 0) != 2)
2871 pat = XVECEXP (pat, 0, 0);
2873 if (GET_CODE (pat) == SET)
2874 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* For memory-type insns, scan the pattern for address uses.  */
2878 else if (get_attr_atype (insn) == ATYPE_MEM)
2879 return reg_used_in_mem_p (regno, PATTERN (insn));
2886 /* Return the modified cost of the dependency of instruction INSN
2887 on instruction DEP_INSN through the link LINK. COST is the
2888 default cost of that dependency.
2890 Data dependencies are all handled without delay. However, if a
2891 register is modified and subsequently used as base or index
2892 register of a memory reference, at least 4 cycles need to pass
2893 between setting and using the register to avoid pipeline stalls.
2894 An exception is the LA instruction. An address generated by LA can
2895 be used by introducing only a one cycle stall on the pipeline. */
/* NOTE(review): lines elided in this listing (parameter declarations,
   DEBUG_SCHED conditionals, returns and braces are not fully visible).  */
2898 s390_adjust_cost (insn, link, dep_insn, cost)
2907 /* If the dependence is an anti-dependence, there is no cost. For an
2908 output dependence, there is sometimes a cost, but it doesn't seem
2909 worth handling those few cases. */
2911 if (REG_NOTE_KIND (link) != 0)
2914 /* If we can't recognize the insns, we can't really do anything. */
2915 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2918 dep_rtx = PATTERN (dep_insn);
2920 if (GET_CODE (dep_rtx) == SET)
2922 if (addr_generation_dependency_p (dep_rtx, insn))
/* LA results are bypassed (1 extra cycle); everything else pays
   the full 4-cycle address-generation interlock.  */
2924 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2927 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2929 debug_rtx (dep_insn);
2934 else if (GET_CODE (dep_rtx) == PARALLEL)
2936 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2938 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2941 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2944 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2946 debug_rtx (dep_insn);
2957 /* A C statement (sans semicolon) to update the integer scheduling priority
2958 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2959 increase the priority to execute INSN later. Do not define this macro if
2960 you do not need to adjust the scheduling priorities of insns.
2962 A LA instruction may be scheduled later, since the pipeline bypasses the
2963 calculated value. */
/* NOTE(review): case labels of the switch and the return are elided
   from this listing.  */
2966 s390_adjust_priority (insn, priority)
2967 rtx insn ATTRIBUTE_UNUSED;
2970 if (! INSN_P (insn))
2973 if (GET_CODE (PATTERN (insn)) == USE
2974 || GET_CODE (PATTERN (insn)) == CLOBBER)
2977 switch (get_attr_type (insn))
2983 if (priority >= 0 && priority < 0x01000000)
2987 /* LM in epilogue should never be scheduled. This
2988 is due to literal access done in function body.
2989 The usage of register 13 is not mentioned explicitly,
2990 leading to scheduling 'LM' across this instruction.
2992 priority = 0x7fffffff;
3000 /* Split all branches that exceed the maximum distance.
3001 Returns true if this created a new literal pool entry.
3003 Code generated by this routine is allowed to use
3004 TEMP_REG as temporary scratch register. If this is
3005 done, TEMP_USED is set to true. */
3008 s390_split_branches (temp_reg, temp_used)
3012 int new_literal = 0;
3013 rtx insn, pat, tmp, target;
3016 /* We need correct insn addresses. */
3018 shorten_branches (get_insns ());
3020 /* Find all branches that exceed 64KB, and split them. */
3022 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3024 if (GET_CODE (insn) != JUMP_INSN)
3027 pat = PATTERN (insn);
/* For PARALLEL jump patterns the branch itself is element 0.  */
3028 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3029 pat = XVECEXP (pat, 0, 0);
3030 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the label operand: either a direct jump, or one of the
   two arms of a conditional branch.  */
3033 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3035 label = &SET_SRC (pat);
3037 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3039 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3040 label = &XEXP (SET_SRC (pat), 1);
3041 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3042 label = &XEXP (SET_SRC (pat), 2);
/* Short branches are within range and need no splitting.  */
3049 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
3056 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3057 INSN_ADDRESSES_NEW (tmp, -1);
/* Load the target address from the literal pool ...  */
3064 tmp = force_const_mem (Pmode, *label);
3065 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3066 INSN_ADDRESSES_NEW (tmp, -1);
/* ... or, for 31-bit PIC, a pool-relative offset (unspec 104) that
   is later added to the pool base register.  */
3073 tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3074 tmp = gen_rtx_CONST (SImode, tmp);
3075 tmp = force_const_mem (SImode, tmp);
3076 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3077 INSN_ADDRESSES_NEW (tmp, -1);
3079 target = gen_rtx_REG (Pmode, BASE_REGISTER);
3080 target = gen_rtx_PLUS (Pmode, target, temp_reg);
/* Retarget the branch at the computed (register) address.  */
3083 if (!validate_change (insn, label, target, 0))
3091 /* Find a literal pool symbol referenced in RTX X, and store
3092 it at REF. Will abort if X contains references to more than
3093 one such pool symbol; multiple references to the same symbol
3094 are allowed, however.
3096 The rtx pointed to by REF must be initialized to NULL_RTX
3097 by the caller before calling this routine. */
3100 find_constant_pool_ref (x, ref)
/* A pool reference is a SYMBOL_REF with CONSTANT_POOL_ADDRESS_P set.  */
3107 if (GET_CODE (x) == SYMBOL_REF
3108 && CONSTANT_POOL_ADDRESS_P (x))
3110 if (*ref == NULL_RTX)
/* Recurse over all sub-rtxs ('e' = single rtx, 'E' = rtx vector).  */
3116 fmt = GET_RTX_FORMAT (GET_CODE (x));
3117 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3121 find_constant_pool_ref (XEXP (x, i), ref);
3123 else if (fmt[i] == 'E')
3125 for (j = 0; j < XVECLEN (x, i); j++)
3126 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3131 /* Replace every reference to the literal pool symbol REF
3132 in X by the address ADDR. Fix up MEMs as required. */
3135 replace_constant_pool_ref (x, ref, addr)
3146 /* Literal pool references can only occur inside a MEM ... */
3147 if (GET_CODE (*x) == MEM)
3149 rtx memref = XEXP (*x, 0);
3153 *x = replace_equiv_address (*x, addr);
/* Also handle a reference with an attached constant offset:
   (const (plus REF (const_int OFF))).  */
3157 if (GET_CODE (memref) == CONST
3158 && GET_CODE (XEXP (memref, 0)) == PLUS
3159 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3160 && XEXP (XEXP (memref, 0), 0) == ref)
3162 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3163 *x = replace_equiv_address (*x, plus_constant (addr, off));
3168 /* ... or a load-address type pattern. */
3169 if (GET_CODE (*x) == SET)
3171 rtx addrref = SET_SRC (*x);
3175 SET_SRC (*x) = addr;
/* Same offset handling for the load-address form.  */
3179 if (GET_CODE (addrref) == CONST
3180 && GET_CODE (XEXP (addrref, 0)) == PLUS
3181 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3182 && XEXP (XEXP (addrref, 0), 0) == ref)
3184 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3185 SET_SRC (*x) = plus_constant (addr, off);
/* Recurse over remaining sub-rtxs.  */
3190 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3191 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3195 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3197 else if (fmt[i] == 'E')
3199 for (j = 0; j < XVECLEN (*x, i); j++)
3200 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3205 /* Check whether ADDR is an address that uses the base register,
3206 without actually constituting a literal pool access. (This happens
3207 in 31-bit PIC mode, where the base register is used as anchor for
3208 relative addressing of local symbols.)
3210 Returns 1 if the base register occupies the base slot,
3211 returns 2 if the base register occupies the index slot,
3212 returns 0 if the address is not of this form. */
3215 find_base_register_in_addr (addr)
3216 struct s390_address *addr;
3218 /* If DISP is complex, we might have a literal pool reference. */
3219 if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
/* Check both address slots for the literal pool base register.  */
3222 if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3225 if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3231 /* Return true if X contains an address that uses the base register,
3232 without actually constituting a literal pool access. */
3235 find_base_register_ref (x)
3239 struct s390_address addr;
3243 /* Addresses can only occur inside a MEM ... */
3244 if (GET_CODE (x) == MEM)
3246 if (s390_decompose_address (XEXP (x, 0), &addr)
3247 && find_base_register_in_addr (&addr))
3251 /* ... or a load-address type pattern. */
3252 if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
3254 if (s390_decompose_address (SET_SRC (x), &addr)
3255 && find_base_register_in_addr (&addr))
/* Recurse, OR-ing together results from all sub-rtxs.  */
3259 fmt = GET_RTX_FORMAT (GET_CODE (x));
3260 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3264 retv |= find_base_register_ref (XEXP (x, i));
3266 else if (fmt[i] == 'E')
3268 for (j = 0; j < XVECLEN (x, i); j++)
3269 retv |= find_base_register_ref (XVECEXP (x, i, j));
3276 /* If X contains an address that uses the base register,
3277 without actually constituting a literal pool access,
3278 replace the base register with REPL in all such cases.
3280 Handles both MEMs and load address patterns. */
3283 replace_base_register_ref (x, repl)
3287 struct s390_address addr;
3292 /* Addresses can only occur inside a MEM ... */
3293 if (GET_CODE (*x) == MEM)
3295 if (s390_decompose_address (XEXP (*x, 0), &addr)
3296 && (pos = find_base_register_in_addr (&addr)))
/* Rebuild the address with REPL substituted for the base register,
   preserving any index and displacement parts.  */
3303 new_addr = addr.base;
3305 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3307 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3309 *x = replace_equiv_address (*x, new_addr);
3314 /* ... or a load-address type pattern. */
3315 if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
3317 if (s390_decompose_address (SET_SRC (*x), &addr)
3318 && (pos = find_base_register_in_addr (&addr)))
/* Same rebuild as above for the load-address form.  */
3325 new_addr = addr.base;
3327 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3329 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3331 SET_SRC (*x) = new_addr;
/* Recurse over remaining sub-rtxs.  */
3336 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3337 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3341 replace_base_register_ref (&XEXP (*x, i), repl);
3343 else if (fmt[i] == 'E')
3345 for (j = 0; j < XVECLEN (*x, i); j++)
3346 replace_base_register_ref (&XVECEXP (*x, i, j), repl);
3352 /* We keep a list of constants which we have to add to internal
3353 constant tables in the middle of large functions. */
3355 #define NR_C_MODES 6
3356 enum machine_mode constant_modes[NR_C_MODES] =
/* Emitter functions, indexed in one-to-one correspondence with
   constant_modes[] (see the parallel indexing in s390_dump_pool).  */
3364 rtx (*gen_consttable[NR_C_MODES])(rtx) =
3366 gen_consttable_df, gen_consttable_di,
3367 gen_consttable_sf, gen_consttable_si,
3374 struct constant *next;
/* One pool chunk: its constants (one list per mode), the label that
   marks its start, and a bitmap of the insn UIDs it covers.  */
3379 struct constant_pool
3381 struct constant_pool *next;
3386 struct constant *constants[NR_C_MODES];
3392 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
3393 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
3394 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
3396 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3397 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3398 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
3399 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3400 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3401 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3402 static void s390_add_anchor PARAMS ((struct constant_pool *));
3403 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3404 static void s390_free_pool PARAMS ((struct constant_pool *));
3406 /* Create new constant pool covering instructions starting at INSN
3407 and chain it to the end of POOL_LIST. */
3409 static struct constant_pool *
3410 s390_start_pool (pool_list, insn)
3411 struct constant_pool **pool_list;
3414 struct constant_pool *pool, **prev;
3417 pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* Start with an empty constant list for every supported mode.  */
3419 for (i = 0; i < NR_C_MODES; i++)
3420 pool->constants[i] = NULL;
3422 pool->label = gen_label_rtx ();
3423 pool->first_insn = insn;
3424 pool->pool_insn = NULL_RTX;
3425 pool->insns = BITMAP_XMALLOC ();
3427 pool->anchor = FALSE;
/* Append the new pool at the tail of POOL_LIST.  */
3429 for (prev = pool_list; *prev; prev = &(*prev)->next)
3436 /* End range of instructions covered by POOL at INSN and emit
3437 placeholder insn representing the pool. */
3440 s390_end_pool (pool, insn)
3441 struct constant_pool *pool;
/* Reserve 8 extra bytes of slack for alignment padding.  */
3444 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
/* A NULL insn means: end the pool at the very end of the function.  */
3447 insn = get_last_insn ();
3449 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
3450 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
3453 /* Add INSN to the list of insns covered by POOL. */
3456 s390_add_pool_insn (pool, insn)
3457 struct constant_pool *pool;
/* Membership is tracked by insn UID in the pool's bitmap.  */
3460 bitmap_set_bit (pool->insns, INSN_UID (insn));
3463 /* Return pool out of POOL_LIST that covers INSN. */
3465 static struct constant_pool *
3466 s390_find_pool (pool_list, insn)
3467 struct constant_pool *pool_list;
3470 struct constant_pool *pool;
/* Linear search over the chunk list, matching by insn UID.  */
3472 for (pool = pool_list; pool; pool = pool->next)
3473 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
3479 /* Add constant VAL of mode MODE to the constant pool POOL. */
3482 s390_add_constant (pool, val, mode)
3483 struct constant_pool *pool;
3485 enum machine_mode mode;
/* Translate MODE into an index into constant_modes[].  */
3490 for (i = 0; i < NR_C_MODES; i++)
3491 if (constant_modes[i] == mode)
3493 if (i == NR_C_MODES)
/* Re-use an existing entry if VAL is already in this pool.  */
3496 for (c = pool->constants[i]; c != NULL; c = c->next)
3497 if (rtx_equal_p (val, c->value))
/* Otherwise prepend a fresh entry and grow the pool size.  */
3502 c = (struct constant *) xmalloc (sizeof *c);
3504 c->label = gen_label_rtx ();
3505 c->next = pool->constants[i];
3506 pool->constants[i] = c;
3507 pool->size += GET_MODE_SIZE (mode);
3511 /* Find constant VAL of mode MODE in the constant pool POOL.
3512 Return an RTX describing the distance from the start of
3513 the pool to the location of the new constant. */
3516 s390_find_constant (pool, val, mode)
3517 struct constant_pool *pool;
3519 enum machine_mode mode;
/* Translate MODE into a constant_modes[] index, then look VAL up.  */
3525 for (i = 0; i < NR_C_MODES; i++)
3526 if (constant_modes[i] == mode)
3528 if (i == NR_C_MODES)
3531 for (c = pool->constants[i]; c != NULL; c = c->next)
3532 if (rtx_equal_p (val, c->value))
/* Express the offset as (const (minus constant-label pool-label)).  */
3538 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
3539 gen_rtx_LABEL_REF (Pmode, pool->label));
3540 offset = gen_rtx_CONST (Pmode, offset);
3544 /* Set 'anchor' flag in POOL. */
3547 s390_add_anchor (pool)
3548 struct constant_pool *pool;
/* Requests that s390_dump_pool emit an anchor entry for this pool.  */
3552 pool->anchor = TRUE;
3557 /* Dump out the constants in POOL. */
3560 s390_dump_pool (pool)
3561 struct constant_pool *pool;
3567 /* Pool start insn switches to proper section
3568 and guarantees necessary alignment. */
3570 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
3572 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
3573 INSN_ADDRESSES_NEW (insn, -1);
3575 insn = emit_label_after (pool->label, insn);
3576 INSN_ADDRESSES_NEW (insn, -1);
3578 /* Emit anchor if we need one. */
/* The anchor entry is marked with unspec 105 and refers to the
   pool's own start label.  */
3581 rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
3582 anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
3583 anchor = gen_rtx_CONST (VOIDmode, anchor);
3584 insn = emit_insn_after (gen_consttable_si (anchor), insn);
3585 INSN_ADDRESSES_NEW (insn, -1);
3588 /* Dump constants in descending alignment requirement order,
3589 ensuring proper alignment for every constant. */
3590 for (i = 0; i < NR_C_MODES; i++)
3591 for (c = pool->constants[i]; c; c = c->next)
3593 /* Convert 104 unspecs to pool-relative references. */
3594 rtx value = c->value;
3595 if (GET_CODE (value) == CONST
3596 && GET_CODE (XEXP (value, 0)) == UNSPEC
3597 && XINT (XEXP (value, 0), 1) == 104
3598 && XVECLEN (XEXP (value, 0), 0) == 1)
3600 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
3601 gen_rtx_LABEL_REF (VOIDmode, pool->label));
3602 value = gen_rtx_CONST (VOIDmode, value);
/* Each constant gets its own label, then its table entry.  */
3605 insn = emit_label_after (c->label, insn);
3606 INSN_ADDRESSES_NEW (insn, -1);
3607 insn = emit_insn_after (gen_consttable[i] (value), insn);
3608 INSN_ADDRESSES_NEW (insn, -1);
3611 /* Pool end insn switches back to previous section
3612 and guarantees necessary alignment. */
3614 insn = emit_insn_after (gen_pool_end_64 (), insn);
3616 insn = emit_insn_after (gen_pool_end_31 (), insn);
3617 INSN_ADDRESSES_NEW (insn, -1);
3619 insn = emit_barrier_after (insn);
3620 INSN_ADDRESSES_NEW (insn, -1);
3622 /* Remove placeholder insn. */
3623 remove_insn (pool->pool_insn);
3628 /* Free all memory used by POOL. */
3631 s390_free_pool (pool)
3632 struct constant_pool *pool;
/* Release every per-mode constant list, then the insn bitmap.  */
3636 for (i = 0; i < NR_C_MODES; i++)
3638 struct constant *c = pool->constants[i];
3641 struct constant *next = c->next;
3647 BITMAP_XFREE (pool->insns);
3652 /* Chunkify the literal pool if required.
3654 Code generated by this routine is allowed to use
3655 TEMP_REG as temporary scratch register. If this is
3656 done, TEMP_USED is set to true. */
3658 #define S390_POOL_CHUNK_MIN 0xc00
3659 #define S390_POOL_CHUNK_MAX 0xe00
3661 static struct constant_pool *
3662 s390_chunkify_start (temp_reg, temp_used)
3666 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3668 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
/* Select the base-reload pattern for the current ABI.  */
3673 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
3674 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
3677 /* Do we need to chunkify the literal pool? */
3679 if (get_pool_size () < S390_POOL_CHUNK_MAX)
3682 /* We need correct insn addresses. */
3684 shorten_branches (get_insns ());
3686 /* Scan all insns and move literals to pool chunks.
3687 Also, emit anchor reload insns before every insn that uses
3688 the literal pool base register as anchor pointer. */
3690 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3692 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3694 rtx pool_ref = NULL_RTX;
3695 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Open a new chunk if none is current, then record both the
   constant and the insn that uses it.  */
3699 curr_pool = s390_start_pool (&pool_list, insn);
3701 s390_add_constant (curr_pool, get_pool_constant (pool_ref),
3702 get_pool_mode (pool_ref));
3703 s390_add_pool_insn (curr_pool, insn);
3706 else if (!TARGET_64BIT && flag_pic
3707 && find_base_register_ref (PATTERN (insn)))
/* 31-bit PIC anchor use: reload the anchor into TEMP_REG just
   before the insn.  */
3709 rtx new = gen_reload_anchor (temp_reg, base_reg);
3710 new = emit_insn_before (new, insn);
3711 INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
3716 curr_pool = s390_start_pool (&pool_list, new);
3718 s390_add_anchor (curr_pool);
3719 s390_add_pool_insn (curr_pool, insn);
3723 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
3725 s390_add_pool_insn (curr_pool, insn);
3728 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3729 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3734 if (curr_pool->size < S390_POOL_CHUNK_MAX)
3737 s390_end_pool (curr_pool, NULL_RTX);
/* Distance covered by the current chunk so far.  */
3742 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3743 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3746 /* We will later have to insert base register reload insns.
3747 Those will have an effect on code size, which we need to
3748 consider here. This calculation makes rather pessimistic
3749 worst-case assumptions. */
3750 if (GET_CODE (insn) == CODE_LABEL)
3753 if (chunk_size < S390_POOL_CHUNK_MIN
3754 && curr_pool->size < S390_POOL_CHUNK_MIN)
3757 /* Pool chunks can only be inserted after BARRIERs ... */
3758 if (GET_CODE (insn) == BARRIER)
3760 s390_end_pool (curr_pool, insn)
3765 /* ... so if we don't find one in time, create one. */
3766 else if ((chunk_size > S390_POOL_CHUNK_MAX
3767 || curr_pool->size > S390_POOL_CHUNK_MAX))
3769 rtx label, jump, barrier;
3771 /* We can insert the barrier only after a 'real' insn. */
3772 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
3774 if (get_attr_length (insn) == 0)
3777 /* Don't separate insns created by s390_split_branches. */
3778 if (GET_CODE (insn) == INSN
3779 && GET_CODE (PATTERN (insn)) == SET
3780 && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
/* Synthesize a jump-over-pool: jump, barrier, landing label.  */
3783 label = gen_label_rtx ();
3784 jump = emit_jump_insn_after (gen_jump (label), insn);
3785 barrier = emit_barrier_after (jump);
3786 insn = emit_label_after (label, barrier);
3787 JUMP_LABEL (jump) = label;
3788 LABEL_NUSES (label) = 1;
3790 INSN_ADDRESSES_NEW (jump, -1);
3791 INSN_ADDRESSES_NEW (barrier, -1);
3792 INSN_ADDRESSES_NEW (insn, -1);
3794 s390_end_pool (curr_pool, barrier);
3802 s390_end_pool (curr_pool, NULL_RTX);
3805 /* Find all labels that are branched into
3806 from an insn belonging to a different chunk. */
3808 far_labels = BITMAP_XMALLOC ();
3810 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3812 /* Labels marked with LABEL_PRESERVE_P can be target
3813 of non-local jumps, so we have to mark them.
3814 The same holds for named labels.
3816 Don't do that, however, if it is the label before
3819 if (GET_CODE (insn) == CODE_LABEL
3820 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3822 rtx vec_insn = next_real_insn (insn);
3823 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3824 PATTERN (vec_insn) : NULL_RTX;
3826 || !(GET_CODE (vec_pat) == ADDR_VEC
3827 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3828 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3831 /* If we have a direct jump (conditional or unconditional)
3832 or a casesi jump, check all potential targets. */
3833 else if (GET_CODE (insn) == JUMP_INSN)
3835 rtx pat = PATTERN (insn);
3836 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3837 pat = XVECEXP (pat, 0, 0);
3839 if (GET_CODE (pat) == SET)
3841 rtx label = JUMP_LABEL (insn);
/* A target in a different chunk needs a base reload there.  */
3844 if (s390_find_pool (pool_list, label)
3845 != s390_find_pool (pool_list, insn))
3846 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3849 else if (GET_CODE (pat) == PARALLEL
3850 && XVECLEN (pat, 0) == 2
3851 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3852 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3853 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3855 /* Find the jump table used by this casesi jump. */
3856 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3857 rtx vec_insn = next_real_insn (vec_label);
3858 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3859 PATTERN (vec_insn) : NULL_RTX;
3861 && (GET_CODE (vec_pat) == ADDR_VEC
3862 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
/* Mark every table entry that lands in a different chunk.  */
3864 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
3866 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3868 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3870 if (s390_find_pool (pool_list, label)
3871 != s390_find_pool (pool_list, insn))
3872 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3879 /* Insert base register reload insns before every pool. */
3881 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3883 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3884 rtx insn = curr_pool->first_insn;
3885 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3888 /* Insert base register reload insns at every far label. */
3890 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3891 if (GET_CODE (insn) == CODE_LABEL
3892 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3894 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3897 rtx new_insn = gen_reload_base (base_reg, pool->label);
3898 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3903 BITMAP_XFREE (far_labels);
3906 /* Recompute insn addresses. */
3908 init_insn_lengths ();
3909 shorten_branches (get_insns ());
3914 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3915 After we have decided to use this list, finish implementing
3916 all changes to the current function as required.
3918 Code generated by this routine is allowed to use
3919 TEMP_REG as temporary scratch register. */
3922 s390_chunkify_finish (pool_list, temp_reg)
3923 struct constant_pool *pool_list;
3926 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3927 struct constant_pool *curr_pool = NULL;
3931 /* Replace all literal pool references. */
3933 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3935 curr_pool = s390_find_pool (pool_list, insn);
3939 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3941 rtx addr, pool_ref = NULL_RTX;
3942 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Rewrite the reference as base register plus pool-relative offset,
   and force re-recognition of the modified insn.  */
3945 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
3946 get_pool_mode (pool_ref));
3947 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
3948 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
3949 INSN_CODE (insn) = -1;
3952 else if (!TARGET_64BIT && flag_pic
3953 && find_base_register_ref (PATTERN (insn)))
/* Anchor uses now go through the reloaded TEMP_REG.  */
3955 replace_base_register_ref (&PATTERN (insn), temp_reg);
3960 /* Dump out all literal pools. */
3962 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3963 s390_dump_pool (curr_pool);
3965 /* Free pool list. */
3969 struct constant_pool *next = pool_list->next;
3970 s390_free_pool (pool_list);
3975 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3976 We have decided we cannot use this list, so revert all changes
3977 to the current function that were done by s390_chunkify_start. */
3980 s390_chunkify_cancel (pool_list)
3981 struct constant_pool *pool_list;
3983 struct constant_pool *curr_pool = NULL;
3986 /* Remove all pool placeholder insns. */
3988 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3990 /* Did we insert an extra barrier? Remove it. */
3991 rtx barrier = PREV_INSN (curr_pool->pool_insn);
3992 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
3993 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Recognize the synthesized jump-over-pool sequence:
   jump to LABEL, barrier, LABEL itself.  */
3995 if (jump && GET_CODE (jump) == JUMP_INSN
3996 && barrier && GET_CODE (barrier) == BARRIER
3997 && label && GET_CODE (label) == CODE_LABEL
3998 && GET_CODE (PATTERN (jump)) == SET
3999 && SET_DEST (PATTERN (jump)) == pc_rtx
4000 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4001 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4004 remove_insn (barrier);
4005 remove_insn (label);
4008 remove_insn (curr_pool->pool_insn);
4011 /* Remove all base/anchor register reload insns. */
/* Reload insns are recognized by their unspec numbers 210/211.  */
4013 for (insn = get_insns (); insn; )
4015 rtx next_insn = NEXT_INSN (insn);
4017 if (GET_CODE (insn) == INSN
4018 && GET_CODE (PATTERN (insn)) == SET
4019 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4020 && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4021 || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4027 /* Free pool list. */
4031 struct constant_pool *next = pool_list->next;
4032 s390_free_pool (pool_list);
4038 /* Index of constant pool chunk that is currently being processed.
4039 Set to -1 before function output has started. */
4040 int s390_pool_count = -1;
4042 /* Number of elements of current constant pool. */
4043 int s390_nr_constants;
4045 /* Output main constant pool to stdio stream FILE. */
4048 s390_output_constant_pool (file)
4051 /* Output constant pool. */
4052 if (s390_nr_constants)
/* 64-bit: address the pool via LARL into the base register ...  */
4056 fprintf (file, "\tlarl\t%s,.LT%d\n", reg_names[BASE_REGISTER],
4057 current_function_funcdef_no);
4058 readonly_data_section ();
4059 ASM_OUTPUT_ALIGN (file, 3);
/* ... 31-bit: set up the base register with BRAS branching over
   the inline pool.  */
4063 fprintf (file, "\tbras\t%s,.LTN%d\n", reg_names[BASE_REGISTER],
4064 current_function_funcdef_no);
4066 fprintf (file, ".LT%d:\n", current_function_funcdef_no);
/* s390_pool_count is nonnegative only while the pool is emitted.  */
4068 s390_pool_count = 0;
4069 output_constant_pool (current_function_name, current_function_decl);
4070 s390_pool_count = -1;
4073 function_section (current_function_decl);
4075 fprintf (file, ".LTN%d:\n", current_function_funcdef_no);
4078 /* If no pool required, at least output the anchor label. */
4079 else if (!TARGET_64BIT && flag_pic)
4080 fprintf (file, ".LT%d:\n", current_function_funcdef_no);
4084 /* Rework the prolog/epilog to avoid saving/restoring
4085 registers unnecessarily. If TEMP_REGNO is nonnegative,
4086 it specifies the number of a caller-saved register used
4087 as temporary scratch register by code emitted during
4088 machine dependent reorg. */
4091 s390_optimize_prolog (temp_regno)
4094 int save_first, save_last, restore_first, restore_last;
4096 rtx insn, new_insn, next_insn;
4098 struct s390_frame frame;
4099 s390_frame_info (&frame);
4101 /* Recompute regs_ever_live data for special registers. */
4102 regs_ever_live[BASE_REGISTER] = 0;
4103 regs_ever_live[RETURN_REGNUM] = 0;
4104 regs_ever_live[STACK_POINTER_REGNUM] = frame.frame_size > 0;
4106 /* If there is (possibly) any pool entry, we need to
4107 load the base register.
4108 ??? FIXME: this should be more precise. */
4109 if (get_pool_size ())
4110 regs_ever_live[BASE_REGISTER] = 1;
4112 /* In non-leaf functions, the prolog/epilog code relies
4113 on RETURN_REGNUM being saved in any case. */
4114 if (!current_function_is_leaf)
4115 regs_ever_live[RETURN_REGNUM] = 1;
4117 /* We need to save/restore the temporary register. */
4118 if (temp_regno >= 0)
4119 regs_ever_live[temp_regno] = 1;
4122 /* Find first and last gpr to be saved. */
4124 for (i = 6; i < 16; i++)
4125 if (regs_ever_live[i])
4128 for (j = 15; j > i; j--)
4129 if (regs_ever_live[j])
4134 /* Nothing to save/restore. */
4135 save_first = restore_first = -1;
4136 save_last = restore_last = -1;
4140 /* Save/restore from i to j. */
4141 save_first = restore_first = i;
4142 save_last = restore_last = j;
4145 /* Varargs functions need to save gprs 2 to 6. */
4146 if (current_function_stdarg)
4154 /* If all special registers are in fact used, there's nothing we
4155 can do, so no point in walking the insn list. */
4156 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4157 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4161 /* Search for prolog/epilog insns and replace them. */
4163 for (insn = get_insns (); insn; insn = next_insn)
4165 int first, last, off;
4166 rtx set, base, offset;
4168 next_insn = NEXT_INSN (insn);
4170 if (GET_CODE (insn) != INSN)
4172 if (GET_CODE (PATTERN (insn)) != PARALLEL)
/* A store-multiple PARALLEL: candidate prolog GPR save.  */
4175 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4177 set = XVECEXP (PATTERN (insn), 0, 0);
4178 first = REGNO (SET_SRC (set));
4179 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4180 offset = const0_rtx;
4181 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4182 off = INTVAL (offset) - first * UNITS_PER_WORD;
4184 if (GET_CODE (base) != REG || off < 0)
4186 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4188 if (last < BASE_REGISTER && last < RETURN_REGNUM)
/* Replace with a save of only the truly needed range.  */
4191 if (save_first != -1)
4193 new_insn = save_gprs (base, off, save_first, save_last);
4194 new_insn = emit_insn_before (new_insn, insn);
4195 INSN_ADDRESSES_NEW (new_insn, -1);
/* A load-multiple PARALLEL: candidate epilog GPR restore.  */
4201 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4203 set = XVECEXP (PATTERN (insn), 0, 0);
4204 first = REGNO (SET_DEST (set));
4205 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4206 offset = const0_rtx;
4207 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4208 off = INTVAL (offset) - first * UNITS_PER_WORD;
4210 if (GET_CODE (base) != REG || off < 0)
4212 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4214 if (last < BASE_REGISTER && last < RETURN_REGNUM)
/* Replace with a restore of only the truly needed range.  */
4217 if (restore_first != -1)
4219 new_insn = restore_gprs (base, off, restore_first, restore_last);
4220 new_insn = emit_insn_before (new_insn, insn);
4221 INSN_ADDRESSES_NEW (new_insn, -1);
4229 /* Check whether any insn in the function makes use of the original
4230 value of RETURN_REG (e.g. for __builtin_return_address).
4231 If so, insert an insn reloading that value.
4233 Return true if any such insn was found. */
4236 s390_fixup_clobbered_return_reg (return_reg)
4239 bool replacement_done = 0;
4242 struct s390_frame frame;
4243 s390_frame_info (&frame);
4245 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4247 rtx reg, off, new_insn;
4249 if (GET_CODE (insn) != INSN)
4251 if (!reg_referenced_p (return_reg, PATTERN (insn)))
4253 if (GET_CODE (PATTERN (insn)) == PARALLEL
4254 && store_multiple_operation (PATTERN (insn), VOIDmode))
/* Reload the saved return address from the register save area,
   addressed off the frame or stack pointer.  */
4257 if (frame.frame_pointer_p)
4258 reg = hard_frame_pointer_rtx;
4260 reg = stack_pointer_rtx;
4262 off = GEN_INT (frame.frame_size + REGNO (return_reg) * UNITS_PER_WORD);
/* Displacements of 4096 or more do not fit the addressing mode;
   load the offset value from the literal pool first.  */
4263 if (INTVAL (off) >= 4096)
4265 off = force_const_mem (Pmode, off);
4266 new_insn = gen_rtx_SET (Pmode, return_reg, off);
4267 new_insn = emit_insn_before (new_insn, insn);
4268 INSN_ADDRESSES_NEW (new_insn, -1);
4272 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4273 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4274 new_insn = emit_insn_before (new_insn, insn);
4275 INSN_ADDRESSES_NEW (new_insn, -1);
4277 replacement_done = 1;
4280 return replacement_done;
4283 /* Perform machine-dependent processing. */
4286 s390_machine_dependent_reorg (first)
4287 rtx first ATTRIBUTE_UNUSED;
4289 bool fixed_up_clobbered_return_reg = 0;
4290 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4293 /* Make sure all splits have been performed; splits after
4294 machine_dependent_reorg might confuse insn length counts. */
4295 split_all_insns_noflow ();
4298 /* There are two problematic situations we need to correct:
4300 - the literal pool might be > 4096 bytes in size, so that
4301 some of its elements cannot be directly accessed
4303 - a branch target might be > 64K away from the branch, so that
4304 it is not possible to use a PC-relative instruction.
4306 To fix those, we split the single literal pool into multiple
4307 pool chunks, reloading the pool base register at various
4308 points throughout the function to ensure it always points to
4309 the pool chunk the following code expects, and / or replace
4310 PC-relative branches by absolute branches.
4312 However, the two problems are interdependent: splitting the
4313 literal pool can move a branch further away from its target,
4314 causing the 64K limit to overflow, and on the other hand,
4315 replacing a PC-relative branch by an absolute branch means
4316 we need to put the branch target address into the literal
4317 pool, possibly causing it to overflow.
4319 So, we loop trying to fix up both problems until we manage
4320 to satisfy both conditions at the same time. Note that the
4321 loop is guaranteed to terminate as every pass of the loop
4322 strictly decreases the total number of PC-relative branches
4323 in the function. (This is not completely true as there
4324 might be branch-over-pool insns introduced by chunkify_start.
4325 Those never need to be split however.) */
4329 struct constant_pool *pool_list;
4331 /* Try to chunkify the literal pool. */
4332 pool_list = s390_chunkify_start (temp_reg, &temp_used);
4334 /* Split out-of-range branches. If this has created new
4335 literal pool entries, cancel current chunk list and
4337 if (s390_split_branches (temp_reg, &temp_used))
4340 s390_chunkify_cancel (pool_list);
4345 /* Check whether we have clobbered a use of the return
4346 register (e.g. for __builtin_return_address). If so,
4347 add insns reloading the register where necessary. */
4348 if (temp_used && !fixed_up_clobbered_return_reg
4349 && s390_fixup_clobbered_return_reg (temp_reg))
4351 fixed_up_clobbered_return_reg = 1;
4353 /* The fixup insns might have caused a jump to overflow. */
4355 s390_chunkify_cancel (pool_list);
4360 /* If we made it up to here, both conditions are satisfied.
4361 Finish up pool chunkification if required. */
4363 s390_chunkify_finish (pool_list, temp_reg);
/* Finally, shrink the prolog/epilog save/restore ranges; the scratch
   register (if used) must be preserved too.  */
4368 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
4372 /* Find first call clobbered register unused in a function.
4373 This could be used as base register in a leaf function
4374 or for holding the return address before epilogue. */
/* NOTE(review): only gprs 0..5 are scanned here — presumably the
   call-clobbered gprs on this target; confirm against the register
   definitions in s390.h.  */
4377 find_unused_clobbered_reg ()
4380 for (i = 0; i < 6; i++)
4381 if (!regs_ever_live[i])
4386 /* Fill FRAME with info about frame of current function. */
4389 s390_frame_info (frame)
4390 struct s390_frame *frame;
4392 char gprs_ever_live[16];
4394 HOST_WIDE_INT fsize = get_frame_size ();
4396 if (fsize > 0x7fff0000)
4397 fatal_error ("Total size of local variables exceeds architecture limit.");
4399 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
/* NOTE(review): the loop below makes the *current* function save
   fprs 24..31 (f8-f15), i.e. it treats them as registers this
   function must preserve — the "caller saved" wording above looks
   inverted; confirm against the s390x ELF ABI.  */
4400 frame->save_fprs_p = 0;
4402 for (i = 24; i < 32; i++)
4403 if (regs_ever_live[i])
4405 frame->save_fprs_p = 1;
4409 frame->frame_size = fsize + frame->save_fprs_p * 64;
4411 /* Does the function need to set up a frame and save area?  */
4413 if (! current_function_is_leaf
4414 || frame->frame_size > 0
4415 || current_function_calls_alloca
4416 || current_function_stdarg)
4417 frame->frame_size += STARTING_FRAME_OFFSET;
4419 /* Frame pointer needed. */
4421 frame->frame_pointer_p = frame_pointer_needed;
4423 /* Find first and last gpr to be saved. Note that at this point,
4424 we assume the return register and the base register always
4425 need to be saved. This is done because the usage of these
4426 registers might change even after the prolog was emitted.
4427 If it turns out later that we really don't need them, the
4428 prolog/epilog code is modified again. */
4430 for (i = 0; i < 16; i++)
4431 gprs_ever_live[i] = regs_ever_live[i];
4433 gprs_ever_live[BASE_REGISTER] = 1;
4434 gprs_ever_live[RETURN_REGNUM] = 1;
4435 gprs_ever_live[STACK_POINTER_REGNUM] = frame->frame_size > 0;
4437 for (i = 6; i < 16; i++)
4438 if (gprs_ever_live[i])
4441 for (j = 15; j > i; j--)
4442 if (gprs_ever_live[j])
4446 /* Save / Restore from gpr i to j. */
4447 frame->first_save_gpr = i;
4448 frame->first_restore_gpr = i;
4449 frame->last_save_gpr = j;
4451 /* Varargs functions need to save gprs 2 to 6. */
4452 if (current_function_stdarg)
4453 frame->first_save_gpr = 2;
4456 /* Return offset between argument pointer and frame pointer
4457 initially after prologue. */
4460 s390_arg_frame_offset ()
4462 struct s390_frame frame;
4464 /* Compute the frame layout for the current function. */
4466 s390_frame_info (&frame);
4468 return frame.frame_size + STACK_POINTER_OFFSET;
4471 /* Emit insn to save fpr REGNUM at offset OFFSET relative
4472 to register BASE. Return generated insn. */
4475 save_fpr (base, offset, regnum)
/* The store is done in DFmode (8 bytes) and tagged with the
   save-area alias set so it does not conflict with user memory.  */
4481 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4482 set_mem_alias_set (addr, s390_sr_alias_set);
4484 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
4487 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
4488 to register BASE. Return generated insn. */
4491 restore_fpr (base, offset, regnum)
/* Mirror image of save_fpr: DFmode load from the save area.  */
4497 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4498 set_mem_alias_set (addr, s390_sr_alias_set);
4500 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4503 /* Generate insn to save registers FIRST to LAST into
4504 the register save area located at offset OFFSET
4505 relative to register BASE. */
4508 save_gprs (base, offset, first, last)
4514 rtx addr, insn, note;
4517 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4518 addr = gen_rtx_MEM (Pmode, addr);
4519 set_mem_alias_set (addr, s390_sr_alias_set);
4521 /* Special-case single register. */
4525 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
4527 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
4529 RTX_FRAME_RELATED_P (insn) = 1;
4534 insn = gen_store_multiple (addr,
4535 gen_rtx_REG (Pmode, first),
4536 GEN_INT (last - first + 1));
4539 /* We need to set the FRAME_RELATED flag on all SETs
4540 inside the store-multiple pattern.
4542 However, we must not emit DWARF records for registers 2..5
4543 if they are stored for use by variable arguments ...
4545 ??? Unfortunately, it is not enough to simply not set the
4546 FRAME_RELATED flags for those SETs, because the first SET
4547 of the PARALLEL is always treated as if it had the flag
4548 set, even if it does not. Therefore we emit a new pattern
4549 without those registers as REG_FRAME_RELATED_EXPR note. */
4553 rtx pat = PATTERN (insn);
4555 for (i = 0; i < XVECLEN (pat, 0); i++)
4556 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
4557 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
4559 RTX_FRAME_RELATED_P (insn) = 1;
/* Build a substitute store-multiple starting at gpr 6 (skipping the
   varargs registers 2..5) and attach it as the DWARF note.  */
4563 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
4564 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
4565 gen_rtx_REG (Pmode, 6),
4566 GEN_INT (last - 6 + 1));
4567 note = PATTERN (note);
4570 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4571 note, REG_NOTES (insn));
4573 for (i = 0; i < XVECLEN (note, 0); i++)
4574 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
4575 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
4577 RTX_FRAME_RELATED_P (insn) = 1;
4583 /* Generate insn to restore registers FIRST to LAST from
4584 the register save area located at offset OFFSET
4585 relative to register BASE. */
4588 restore_gprs (base, offset, first, last)
4596 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4597 addr = gen_rtx_MEM (Pmode, addr);
4598 set_mem_alias_set (addr, s390_sr_alias_set);
4600 /* Special-case single register. */
4604 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
4606 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
/* Otherwise use a load-multiple covering FIRST..LAST.  */
4611 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
4613 GEN_INT (last - first + 1));
4617 /* Expand the prologue into a bunch of separate insns. */
4620 s390_emit_prologue ()
4622 struct s390_frame frame;
4627 /* Compute the frame layout for the current function. */
4629 s390_frame_info (&frame);
4631 /* Choose best register to use for temp use within prologue. */
4633 if (!current_function_is_leaf
4634 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
4635 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
4636 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4638 temp_reg = gen_rtx_REG (Pmode, 1);
4640 /* Save call saved gprs. */
4642 insn = save_gprs (stack_pointer_rtx, 0,
4643 frame.first_save_gpr, frame.last_save_gpr);
4646 /* Dump constant pool and set constant pool register (13). */
4648 insn = emit_insn (gen_lit ());
4650 /* Save fprs for variable args. */
4652 if (current_function_stdarg)
4654 /* Save fpr 0 and 2. */
4656 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
4657 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
4661 /* Save fpr 4 and 6. */
4663 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4664 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4668 /* Save fprs 4 and 6 if used (31 bit ABI). */
4672 /* Save fpr 4 and 6. */
4673 if (regs_ever_live[18])
4675 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4676 RTX_FRAME_RELATED_P (insn) = 1;
4678 if (regs_ever_live[19])
4680 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4681 RTX_FRAME_RELATED_P (insn) = 1;
4685 /* Decrement stack pointer. */
4687 if (frame.frame_size > 0)
4689 rtx frame_off = GEN_INT (-frame.frame_size);
4691 /* Save incoming stack pointer into temp reg. */
4693 if (TARGET_BACKCHAIN || frame.save_fprs_p)
4695 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
4698 /* Subtract frame size from stack pointer. */
4700 frame_off = GEN_INT (-frame.frame_size);
4701 if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
4702 frame_off = force_const_mem (Pmode, frame_off);
4704 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
4705 RTX_FRAME_RELATED_P (insn) = 1;
/* Attach an explicit sp = sp - frame_size note so DWARF CFI is
   correct even when the offset came from the literal pool.  */
4707 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4708 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
4709 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4710 GEN_INT (-frame.frame_size))),
4713 /* Set backchain. */
4715 if (TARGET_BACKCHAIN)
4717 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
4718 set_mem_alias_set (addr, s390_sr_alias_set);
4719 insn = emit_insn (gen_move_insn (addr, temp_reg));
4723 /* Save fprs 8 - 15 (64 bit ABI). */
4725 if (frame.save_fprs_p)
4727 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
4729 for (i = 24; i < 32; i++)
4730 if (regs_ever_live[i])
4732 rtx addr = plus_constant (stack_pointer_rtx,
4733 frame.frame_size - 64 + (i-24)*8);
4735 insn = save_fpr (temp_reg, (i-24)*8, i);
4736 RTX_FRAME_RELATED_P (insn) = 1;
4738 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4739 gen_rtx_SET (VOIDmode,
4740 gen_rtx_MEM (DFmode, addr),
4741 gen_rtx_REG (DFmode, i)),
4746 /* Set frame pointer, if needed. */
4748 if (frame.frame_pointer_p)
4750 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
4751 RTX_FRAME_RELATED_P (insn) = 1;
4754 /* Set up got pointer, if needed. */
4756 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4758 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4759 SYMBOL_REF_FLAG (got_symbol) = 1;
4763 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
4766 /* It can happen that the GOT pointer isn't really needed ... */
4767 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit path: load the GOT offset from the literal pool and add
   the base register to form the GOT pointer.  */
4772 got_symbol = gen_rtx_UNSPEC (VOIDmode,
4773 gen_rtvec (1, got_symbol), 100);
4774 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
4775 got_symbol = force_const_mem (Pmode, got_symbol);
4776 insn = emit_move_insn (pic_offset_table_rtx,
4778 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4781 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
4782 gen_rtx_REG (Pmode, BASE_REGISTER)));
4783 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4789 /* Expand the epilogue into a bunch of separate insns. */
4792 s390_emit_epilogue ()
4794 struct s390_frame frame;
4795 rtx frame_pointer, return_reg;
4796 int area_bottom, area_top, offset;
4799 /* Compute the frame layout for the current function. */
4801 s390_frame_info (&frame);
4803 /* Check whether to use frame or stack pointer for restore. */
4805 frame_pointer = frame.frame_pointer_p ?
4806 hard_frame_pointer_rtx : stack_pointer_rtx;
4808 /* Compute which parts of the save area we need to access. */
4810 if (frame.first_restore_gpr != -1)
4812 area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
4813 area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
4817 area_bottom = INT_MAX;
4823 if (frame.save_fprs_p)
4825 if (area_bottom > -64)
4833 if (regs_ever_live[18])
4835 if (area_bottom > STACK_POINTER_OFFSET - 16)
4836 area_bottom = STACK_POINTER_OFFSET - 16;
4837 if (area_top < STACK_POINTER_OFFSET - 8)
4838 area_top = STACK_POINTER_OFFSET - 8;
4840 if (regs_ever_live[19])
4842 if (area_bottom > STACK_POINTER_OFFSET - 8)
4843 area_bottom = STACK_POINTER_OFFSET - 8;
4844 if (area_top < STACK_POINTER_OFFSET)
4845 area_top = STACK_POINTER_OFFSET;
4849 /* Check whether we can access the register save area.
4850 If not, increment the frame pointer as required. */
4852 if (area_top <= area_bottom)
4854 /* Nothing to restore. */
4856 else if (frame.frame_size + area_bottom >= 0
4857 && frame.frame_size + area_top <= 4096)
4859 /* Area is in range. */
4860 offset = frame.frame_size;
/* Otherwise bump FRAME_POINTER so the area falls inside the
   0..4096 displacement range of a single memory operand.  */
4864 rtx insn, frame_off;
4866 offset = area_bottom < 0 ? -area_bottom : 0;
4867 frame_off = GEN_INT (frame.frame_size - offset);
4869 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
4870 frame_off = force_const_mem (Pmode, frame_off);
4872 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
4875 /* Restore call saved fprs. */
4881 if (frame.save_fprs_p)
4882 for (i = 24; i < 32; i++)
4883 if (regs_ever_live[i] && !global_regs[i])
4884 restore_fpr (frame_pointer,
4885 offset - 64 + (i-24) * 8, i);
4889 if (regs_ever_live[18] && !global_regs[18])
4890 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
4891 if (regs_ever_live[19] && !global_regs[19])
4892 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
4895 /* Return register. */
4897 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4899 /* Restore call saved gprs. */
4901 if (frame.first_restore_gpr != -1)
4906 /* Check for global registers and save them
4907 to the stack location from where they get restored. */
4909 for (i = frame.first_restore_gpr;
4910 i <= frame.last_save_gpr;
4913 /* These registers are special and need to be
4914 restored in any case. */
4915 if (i == STACK_POINTER_REGNUM
4916 || i == RETURN_REGNUM
4917 || i == BASE_REGISTER
4918 || (flag_pic && i == PIC_OFFSET_TABLE_REGNUM))
4923 addr = plus_constant (frame_pointer,
4924 offset + i * UNITS_PER_WORD);
4925 addr = gen_rtx_MEM (Pmode, addr);
4926 set_mem_alias_set (addr, s390_sr_alias_set);
4927 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
4931 /* Fetch return address from stack before load multiple,
4932 this is good for scheduling. */
4934 if (!current_function_is_leaf)
4936 int return_regnum = find_unused_clobbered_reg();
4939 return_reg = gen_rtx_REG (Pmode, return_regnum);
4941 addr = plus_constant (frame_pointer,
4942 offset + RETURN_REGNUM * UNITS_PER_WORD);
4943 addr = gen_rtx_MEM (Pmode, addr);
4944 set_mem_alias_set (addr, s390_sr_alias_set);
4945 emit_move_insn (return_reg, addr);
4948 /* ??? As references to the base register are not made
4949 explicit in insn RTX code, we have to add a barrier here
4950 to prevent incorrect scheduling. */
4952 emit_insn (gen_blockage());
4954 insn = restore_gprs (frame_pointer, offset,
4955 frame.first_restore_gpr, frame.last_save_gpr);
4959 /* Return to caller. */
4961 p = rtvec_alloc (2);
4963 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
4964 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
4965 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
4969 /* Return the size in bytes of a function argument of
4970 type TYPE and/or mode MODE. At least one of TYPE or
4971 MODE must be specified. */
4974 s390_function_arg_size (mode, type)
4975 enum machine_mode mode;
/* Prefer the tree type's size when available.  */
4979 return int_size_in_bytes (type);
4981 /* No type info available for some library calls ... */
4982 if (mode != BLKmode)
4983 return GET_MODE_SIZE (mode);
4985 /* If we have neither type nor mode, abort.  */
4989 /* Return 1 if a function argument of type TYPE and mode MODE
4990 is to be passed by reference. The ABI specifies that only
4991 structures of size 1, 2, 4, or 8 bytes are passed by value,
4992 all other structures (and complex numbers) are passed by
4996 s390_function_arg_pass_by_reference (mode, type)
4997 enum machine_mode mode;
5000 int size = s390_function_arg_size (mode, type);
/* Aggregates of any size other than 1/2/4/8 bytes, and all
   complex types, go by reference.  */
5004 if (AGGREGATE_TYPE_P (type) &&
5005 size != 1 && size != 2 && size != 4 && size != 8)
5008 if (TREE_CODE (type) == COMPLEX_TYPE)
5015 /* Update the data in CUM to advance over an argument of mode MODE and
5016 data type TYPE. (TYPE is null for libcalls where that information
5017 may not be available.). The boolean NAMED specifies whether the
5018 argument is a named argument (as opposed to an unnamed argument
5019 matching an ellipsis). */
5022 s390_function_arg_advance (cum, mode, type, named)
5023 CUMULATIVE_ARGS *cum;
5024 enum machine_mode mode;
5026 int named ATTRIBUTE_UNUSED;
/* Float args consume an fpr; by-reference args consume one gpr
   (the pointer); everything else consumes one gpr per word.  */
5028 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5032 else if (s390_function_arg_pass_by_reference (mode, type))
5038 int size = s390_function_arg_size (mode, type);
5039 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5043 /* Define where to put the arguments to a function.
5044 Value is zero to push the argument on the stack,
5045 or a hard register in which to store the argument.
5047 MODE is the argument's machine mode.
5048 TYPE is the data type of the argument (as a tree).
5049 This is null for libcalls where that information may
5051 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5052 the preceding args and about the function being called.
5053 NAMED is nonzero if this argument is a named parameter
5054 (otherwise it is an extra parameter matching an ellipsis).
5056 On S/390, we use general purpose registers 2 through 6 to
5057 pass integer, pointer, and certain structure arguments, and
5058 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5059 to pass floating point arguments. All remaining arguments
5060 are pushed to the stack. */
5063 s390_function_arg (cum, mode, type, named)
5064 CUMULATIVE_ARGS *cum;
5065 enum machine_mode mode;
5067 int named ATTRIBUTE_UNUSED;
5069 if (s390_function_arg_pass_by_reference (mode, type))
5072 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
/* fprs available for argument passing: 2 (31-bit) or 4 (64-bit);
   fpr argument registers start at hard reg 16.  */
5074 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5077 return gen_rtx (REG, mode, cum->fprs + 16);
5081 int size = s390_function_arg_size (mode, type);
5082 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
/* gpr argument registers are 2..6; overflow goes to the stack.  */
5084 if (cum->gprs + n_gprs > 5)
5087 return gen_rtx (REG, mode, cum->gprs + 2);
5092 /* Create and return the va_list datatype.
5094 On S/390, va_list is an array type equivalent to
5096 typedef struct __va_list_tag
5100 void *__overflow_arg_area;
5101 void *__reg_save_area;
5105 where __gpr and __fpr hold the number of general purpose
5106 or floating point arguments used up to now, respectively,
5107 __overflow_arg_area points to the stack location of the
5108 next argument passed on the stack, and __reg_save_area
5109 always points to the start of the register area in the
5110 call frame of the current function. The function prologue
5111 saves all registers used for argument passing into this
5112 area if the function uses variable arguments. */
5115 s390_build_va_list ()
5117 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5119 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5122 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5124 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5125 long_integer_type_node);
5126 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5127 long_integer_type_node);
5128 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5130 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
5133 DECL_FIELD_CONTEXT (f_gpr) = record;
5134 DECL_FIELD_CONTEXT (f_fpr) = record;
5135 DECL_FIELD_CONTEXT (f_ovf) = record;
5136 DECL_FIELD_CONTEXT (f_sav) = record;
/* Chain the fields in declaration order: gpr, fpr, ovf, sav.
   s390_va_start/s390_va_arg rely on this exact order.  */
5138 TREE_CHAIN (record) = type_decl;
5139 TYPE_NAME (record) = type_decl;
5140 TYPE_FIELDS (record) = f_gpr;
5141 TREE_CHAIN (f_gpr) = f_fpr;
5142 TREE_CHAIN (f_fpr) = f_ovf;
5143 TREE_CHAIN (f_ovf) = f_sav;
5145 layout_type (record);
5147 /* The correct type is an array type of one element. */
5148 return build_array_type (record, build_index_type (size_zero_node));
5151 /* Implement va_start by filling the va_list structure VALIST.
5152 STDARG_P is always true, and ignored.
5153 NEXTARG points to the first anonymous stack argument.
5155 The following global variables are used to initialize
5156 the va_list structure:
5158 current_function_args_info:
5159 holds number of gprs and fprs used for named arguments.
5160 current_function_arg_offset_rtx:
5161 holds the offset of the first anonymous stack argument
5162 (relative to the virtual arg pointer). */
5165 s390_va_start (valist, nextarg)
5167 rtx nextarg ATTRIBUTE_UNUSED;
5169 HOST_WIDE_INT n_gpr, n_fpr;
5171 tree f_gpr, f_fpr, f_ovf, f_sav;
5172 tree gpr, fpr, ovf, sav, t;
/* Field order matches s390_build_va_list: gpr, fpr, ovf, sav.  */
5174 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5175 f_fpr = TREE_CHAIN (f_gpr);
5176 f_ovf = TREE_CHAIN (f_fpr);
5177 f_sav = TREE_CHAIN (f_ovf);
5179 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5180 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5181 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5182 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5183 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5185 /* Count number of gp and fp argument registers used. */
5187 n_gpr = current_function_args_info.gprs;
5188 n_fpr = current_function_args_info.fprs;
5190 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5191 TREE_SIDE_EFFECTS (t) = 1;
5192 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5194 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5195 TREE_SIDE_EFFECTS (t) = 1;
5196 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5198 /* Find the overflow area. */
5199 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
5201 off = INTVAL (current_function_arg_offset_rtx);
5202 off = off < 0 ? 0 : off;
5203 if (TARGET_DEBUG_ARG)
5204 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5205 (int)n_gpr, (int)n_fpr, off);
5207 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5209 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5210 TREE_SIDE_EFFECTS (t) = 1;
5211 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5213 /* Find the register save area. */
5214 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5215 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5216 build_int_2 (-STACK_POINTER_OFFSET, -1));
5217 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5218 TREE_SIDE_EFFECTS (t) = 1;
5219 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5222 /* Implement va_arg by updating the va_list structure
5223 VALIST as required to retrieve an argument of type
5224 TYPE, and returning that argument.
5226 Generates code equivalent to:
5228 if (integral value) {
5229 if (size <= 4 && args.gpr < 5 ||
5230 size > 4 && args.gpr < 4 )
5231 ret = args.reg_save_area[args.gpr+8]
5233 ret = *args.overflow_arg_area++;
5234 } else if (float value) {
5236 ret = args.reg_save_area[args.fpr+64]
5238 ret = *args.overflow_arg_area++;
5239 } else if (aggregate value) {
5241 ret = *args.reg_save_area[args.gpr]
5243 ret = **args.overflow_arg_area++;
5247 s390_va_arg (valist, type)
5251 tree f_gpr, f_fpr, f_ovf, f_sav;
5252 tree gpr, fpr, ovf, sav, reg, t, u;
5253 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
5254 rtx lab_false, lab_over, addr_rtx, r;
/* Field order matches s390_build_va_list: gpr, fpr, ovf, sav.  */
5256 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5257 f_fpr = TREE_CHAIN (f_gpr);
5258 f_ovf = TREE_CHAIN (f_fpr);
5259 f_sav = TREE_CHAIN (f_ovf);
5261 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5262 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5263 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5264 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5265 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5267 size = int_size_in_bytes (type);
5269 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
5271 if (TARGET_DEBUG_ARG)
5273 fprintf (stderr, "va_arg: aggregate type");
5277 /* Aggregates are passed by reference. */
5281 sav_ofs = 2 * UNITS_PER_WORD;
5282 sav_scale = UNITS_PER_WORD;
5283 size = UNITS_PER_WORD;
5286 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
5288 if (TARGET_DEBUG_ARG)
5290 fprintf (stderr, "va_arg: float type");
5294 /* FP args go in FP registers, if present. */
5298 sav_ofs = 16 * UNITS_PER_WORD;
5300 /* TARGET_64BIT has up to 4 parameters in fprs.  */
5301 max_reg = TARGET_64BIT ? 3 : 1;
5305 if (TARGET_DEBUG_ARG)
5307 fprintf (stderr, "va_arg: other type");
5311 /* Otherwise into GP registers. */
5314 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
5315 sav_ofs = 2 * UNITS_PER_WORD;
/* Small scalars are right-justified in their word slot; adjust
   the save-area offset accordingly (64-bit vs. 31-bit cases).  */
5317 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
5318 TYPE_MODE (type) == HImode ? 6 :
5319 TYPE_MODE (type) == QImode ? 7 : 0;
5321 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
5322 TYPE_MODE (type) == QImode ? 3 : 0;
5324 sav_scale = UNITS_PER_WORD;
5331 /* Pull the value out of the saved registers ... */
5333 lab_false = gen_label_rtx ();
5334 lab_over = gen_label_rtx ();
5335 addr_rtx = gen_reg_rtx (Pmode);
5337 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
5339 GT, const1_rtx, Pmode, 0, lab_false);
5342 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5346 u = build (MULT_EXPR, long_integer_type_node,
5347 reg, build_int_2 (sav_scale, 0));
5348 TREE_SIDE_EFFECTS (u) = 1;
5350 t = build (PLUS_EXPR, ptr_type_node, t, u);
5351 TREE_SIDE_EFFECTS (t) = 1;
5353 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5355 emit_move_insn (addr_rtx, r);
5358 emit_jump_insn (gen_jump (lab_over));
5360 emit_label (lab_false);
5362 /* ... Otherwise out of the overflow area. */
5364 t = save_expr (ovf);
5367 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
5368 if (size < UNITS_PER_WORD)
5370 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
5371 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5372 TREE_SIDE_EFFECTS (t) = 1;
5373 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5375 t = save_expr (ovf);
5378 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5380 emit_move_insn (addr_rtx, r);
5382 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5383 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5384 TREE_SIDE_EFFECTS (t) = 1;
5385 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5387 emit_label (lab_over);
5389 /* If less than max_reg registers are retrieved out
5390 of the register save area, increment the register count.  */
5392 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
5393 build_int_2 (n_reg, 0));
5394 TREE_SIDE_EFFECTS (u) = 1;
5395 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* By-reference arguments: dereference the pointer we fetched.  */
5399 r = gen_rtx_MEM (Pmode, addr_rtx);
5400 set_mem_alias_set (r, get_varargs_alias_set ());
5401 emit_move_insn (addr_rtx, r);
5409 /* Output assembly code for the trampoline template to
5412 On S/390, we use gpr 1 internally in the trampoline code;
5413 gpr 0 is used to hold the static chain. */
5416 s390_trampoline_template (file)
/* 64-bit template: load static chain and target address from the
   two .quad slots following the code, then branch.  */
5421 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
5422 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
5423 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
5424 fprintf (file, "br\t%s\n", reg_names[1]);
5425 fprintf (file, "0:\t.quad\t0\n");
5426 fprintf (file, ".quad\t0\n");
/* 31-bit template: same idea with basr/.long slots.  */
5430 fprintf (file, "basr\t%s,0\n", reg_names[1]);
5431 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
5432 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
5433 fprintf (file, "br\t%s\n", reg_names[1]);
5434 fprintf (file, ".long\t0\n");
5435 fprintf (file, ".long\t0\n");
5439 /* Emit RTL insns to initialize the variable parts of a trampoline.
5440 FNADDR is an RTX for the address of the function's pure code.
5441 CXT is an RTX for the static chain value for the function. */
5444 s390_initialize_trampoline (addr, fnaddr, cxt)
/* Offsets 20/28 (64-bit) and 12/16 (31-bit) are the data slots laid
   out by s390_trampoline_template above.  */
5449 emit_move_insn (gen_rtx
5451 memory_address (Pmode,
5452 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
5453 emit_move_insn (gen_rtx
5455 memory_address (Pmode,
5456 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
5459 /* Return rtx for 64-bit constant formed from the 32-bit subwords
5460 LOW and HIGH, independent of the host word size. */
5463 s390_gen_rtx_const_DI (high, low)
/* On 64-bit hosts, build the value directly in a HOST_WIDE_INT.  */
5467 #if HOST_BITS_PER_WIDE_INT >= 64
5469 val = (HOST_WIDE_INT)high;
5471 val |= (HOST_WIDE_INT)low;
5473 return GEN_INT (val);
/* On 32-bit hosts, use a CONST_DOUBLE via immed_double_const.  */
5475 #if HOST_BITS_PER_WIDE_INT >= 32
5476 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
5483 /* Output assembler code to FILE to increment profiler label # LABELNO
5484 for profiling a function entry. */
5487 s390_function_profiler (file, labelno)
5494 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
5496 fprintf (file, "# function profiler \n");
/* op[0] = return-address reg, op[1] = its save slot on the stack,
   op[2] = scratch reg 1, op[3] = the profiler label,
   op[4] = _mcount (possibly wrapped in a PLT/GOT unspec for PIC).  */
5498 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
5499 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
5500 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
5502 op[2] = gen_rtx_REG (Pmode, 1);
5503 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
5504 SYMBOL_REF_FLAG (op[3]) = 1;
5506 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
5509 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
5510 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit sequence: spill return reg, call _mcount, reload.  */
5515 output_asm_insn ("stg\t%0,%1", op);
5516 output_asm_insn ("larl\t%2,%3", op);
5517 output_asm_insn ("brasl\t%0,%4", op);
5518 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit PIC sequence: addresses are loaded from inline literals.  */
5522 op[6] = gen_label_rtx ();
5524 output_asm_insn ("st\t%0,%1", op);
5525 output_asm_insn ("bras\t%2,%l6", op);
5526 output_asm_insn (".long\t%4", op);
5527 output_asm_insn (".long\t%3", op);
5528 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5529 output_asm_insn ("l\t%0,0(%2)", op);
5530 output_asm_insn ("l\t%2,4(%2)", op);
5531 output_asm_insn ("basr\t%0,%0", op);
5532 output_asm_insn ("l\t%0,%1", op);
/* 31-bit non-PIC sequence: PC-relative literals added to base.  */
5536 op[5] = gen_label_rtx ();
5537 op[6] = gen_label_rtx ();
5539 output_asm_insn ("st\t%0,%1", op);
5540 output_asm_insn ("bras\t%2,%l6", op);
5541 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5542 output_asm_insn (".long\t%4-%l5", op);
5543 output_asm_insn (".long\t%3-%l5", op);
5544 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5545 output_asm_insn ("lr\t%0,%2", op);
5546 output_asm_insn ("a\t%0,0(%2)", op);
5547 output_asm_insn ("a\t%2,4(%2)", op);
5548 output_asm_insn ("basr\t%0,%0", op);
5549 output_asm_insn ("l\t%0,%1", op);
5553 /* Select section for constant in constant pool. In 32-bit mode,
5554 constants go in the function section; in 64-bit mode in .rodata. */
5557 s390_select_rtx_section (mode, x, align)
5558 enum machine_mode mode ATTRIBUTE_UNUSED;
5559 rtx x ATTRIBUTE_UNUSED;
5560 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
5563 readonly_data_section ();
5565 function_section (current_function_decl);
5568 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
5569 may access it directly in the GOT. */
5572 s390_encode_section_info (decl, first)
5574 int first ATTRIBUTE_UNUSED;
/* Constants ('c'-class trees) keep their RTL in TREE_CST_RTL;
   declarations use DECL_RTL.  */
5578 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5579 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
5581 if (GET_CODE (rtl) == MEM)
/* Flag set = symbol is local (a constant, or a non-public decl).  */
5583 SYMBOL_REF_FLAG (XEXP (rtl, 0))
5584 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5585 || ! TREE_PUBLIC (decl));
5591 s390_output_mi_thunk (file, thunk, delta, function)
5593 tree thunk ATTRIBUTE_UNUSED;
5594 HOST_WIDE_INT delta;
5601 fprintf (file, "\tlarl 1,0f\n");
5602 fprintf (file, "\tagf %d,0(1)\n",
5603 aggregate_value_p (TREE_TYPE
5604 (TREE_TYPE (function))) ? 3 :2 );
5605 fprintf (file, "\tlarl 1,");
5606 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
5607 fprintf (file, "@GOTENT\n");
5608 fprintf (file, "\tlg 1,0(1)\n");
5609 fprintf (file, "\tbr 1\n");
5610 fprintf (file, "0:\t.long ");
5611 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (delta));
5612 fprintf (file, "\n");
5616 fprintf (file, "\tlarl 1,0f\n");
5617 fprintf (file, "\tagf %d,0(1)\n",
5618 aggregate_value_p (TREE_TYPE
5619 (TREE_TYPE (function))) ? 3 :2 );
5620 fprintf (file, "\tjg ");
5621 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
5622 fprintf (file, "\n");
5623 fprintf (file, "0:\t.long ");
5624 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (delta));
5625 fprintf (file, "\n");
5632 fprintf (file, "\tbras 1,0f\n");
5633 fprintf (file, "\t.long _GLOBAL_OFFSET_TABLE_-.\n");
5634 fprintf (file, "\t.long ");
5635 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
5636 fprintf (file, "@GOT\n");
5637 fprintf (file, "\t.long ");
5638 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (delta));
5639 fprintf (file, "\n");
5640 fprintf (file, "0:\tal %d,8(1)\n",
5641 aggregate_value_p (TREE_TYPE
5642 (TREE_TYPE (function))) ? 3 : 2 );
5643 fprintf (file, "\tl 0,4(1)\n");
5644 fprintf (file, "\tal 1,0(1)\n");
5645 fprintf (file, "\talr 1,0\n");
5646 fprintf (file, "\tl 1,0(1)\n");
5647 fprintf (file, "\tbr 1\n");
5649 fprintf (file, "\tbras 1,0f\n");
5650 fprintf (file, "\t.long ");
5651 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
5652 fprintf (file, "-.\n");
5653 fprintf (file, "\t.long ");
5654 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (delta));
5655 fprintf (file, "\n");
5656 fprintf (file, "0:\tal %d,4(1)\n",
5657 aggregate_value_p (TREE_TYPE
5658 (TREE_TYPE (function))) ? 3 : 2 );
5659 fprintf (file, "\tal 1,0(1)\n");
5660 fprintf (file, "\tbr 1\n");