1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
42 #include "basic-block.h"
43 #include "integrate.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* Forward declarations for the target-hook implementations that are
   installed into `targetm' via the TARGET_* macro overrides below.  */
51 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
52 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
53 static int s390_adjust_priority PARAMS ((rtx, int));
54 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
55 unsigned HOST_WIDE_INT));
56 static void s390_encode_section_info PARAMS ((tree, int));
/* Initialize the GCC target structure: override the generic hooks with
   the S/390-specific implementations declared above.  */
58 #undef TARGET_ASM_ALIGNED_HI_OP
59 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
60 #undef TARGET_ASM_ALIGNED_DI_OP
61 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
62 #undef TARGET_ASM_INTEGER
63 #define TARGET_ASM_INTEGER s390_assemble_integer
/* The S/390 assembler does not accept parentheses around operands;
   emit nothing for open/close paren.  */
65 #undef TARGET_ASM_OPEN_PAREN
66 #define TARGET_ASM_OPEN_PAREN ""
68 #undef TARGET_ASM_CLOSE_PAREN
69 #define TARGET_ASM_CLOSE_PAREN ""
71 #undef TARGET_ASM_SELECT_RTX_SECTION
72 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
74 #undef TARGET_SCHED_ADJUST_COST
75 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
77 #undef TARGET_SCHED_ADJUST_PRIORITY
78 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
80 #undef TARGET_ENCODE_SECTION_INFO
81 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
/* The single global target structure, built from the overrides above.  */
83 struct gcc_target targetm = TARGET_INITIALIZER;
/* NOTE(review): `reload_completed' is a core compiler global; declaring it
   here suggests it was not exported via a header at the time — confirm.  */
85 extern int reload_completed;
87 /* The alias set for prologue/epilogue register save/restore. */
88 static int s390_sr_alias_set = 0;
90 /* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
92 rtx s390_compare_op0, s390_compare_op1;
94 /* Structure used to hold the components of a S/390 memory
95 address. A legitimate address on S/390 is of the general
97 base + index + displacement
98 where any of the components is optional.
100 base and index are registers of the class ADDR_REGS,
101 displacement is an unsigned 12-bit immediate constant. */
111 /* Structure containing information for prologue and epilogue. */
/* NOTE(review): the struct bodies are only partially visible in this chunk;
   the fields below belong to the prologue/epilogue information structure.  */
118 int first_restore_gpr;
120 int arg_frame_offset;
/* Total size of the stack frame, in bytes — presumably; confirm against
   s390_frame_info which fills this structure in.  */
122 HOST_WIDE_INT frame_size;
/* Forward declarations for the file-local helper functions defined below.  */
125 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
126 static int s390_branch_condition_mask PARAMS ((rtx));
127 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
128 static int check_mode PARAMS ((rtx, enum machine_mode *));
129 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
130 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
131 static int reg_used_in_mem_p PARAMS ((int, rtx));
132 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
133 static int s390_split_branches PARAMS ((rtx, bool *));
134 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
135 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
136 static int find_base_register_in_addr PARAMS ((struct s390_address *));
137 static bool find_base_register_ref PARAMS ((rtx));
138 static void replace_base_register_ref PARAMS ((rtx *, rtx));
139 static void s390_optimize_prolog PARAMS ((int));
140 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
141 static int find_unused_clobbered_reg PARAMS ((void));
142 static void s390_frame_info PARAMS ((struct s390_frame *));
143 static rtx save_fpr PARAMS ((rtx, int, int));
144 static rtx restore_fpr PARAMS ((rtx, int, int));
145 static rtx save_gprs PARAMS ((rtx, int, int, int));
146 static rtx restore_gprs PARAMS ((rtx, int, int, int));
147 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
150 /* Return true if SET either doesn't set the CC register, or else
151 the source and destination have matching CC modes and that
152 CC mode is at least as constrained as REQ_MODE. */
/* NOTE(review): several interior lines (return statements, braces) are not
   visible in this chunk; the checks below are the visible skeleton.  */
155 s390_match_ccmode_set (set, req_mode)
157 enum machine_mode req_mode;
159 enum machine_mode set_mode;
/* Only plain SET patterns are examined.  */
161 if (GET_CODE (set) != SET)
/* Ignore sets whose destination is not the condition-code register.  */
164 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
167 set_mode = GET_MODE (SET_DEST (set));
180 if (req_mode != set_mode)
/* These CC modes form a related family; handling for when REQ_MODE is
   outside this family appears elided here.  */
185 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
186 && req_mode != CCSRmode && req_mode != CCURmode)
192 if (req_mode != CCAmode)
/* The SET source must carry the same CC mode as the destination.  */
200 return (GET_MODE (SET_SRC (set)) == set_mode);
203 /* Return true if every SET in INSN that sets the CC register
204 has source and destination with matching CC modes and that
205 CC mode is at least as constrained as REQ_MODE.
206 If REQ_MODE is VOIDmode, always return false. */
209 s390_match_ccmode (insn, req_mode)
211 enum machine_mode req_mode;
215 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
216 if (req_mode == VOIDmode)
/* Single SET: delegate directly to the per-SET checker.  */
219 if (GET_CODE (PATTERN (insn)) == SET)
220 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL: every SET element must pass the per-SET check.  */
222 if (GET_CODE (PATTERN (insn)) == PARALLEL)
223 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
225 rtx set = XVECEXP (PATTERN (insn), 0, i);
226 if (GET_CODE (set) == SET)
227 if (!s390_match_ccmode_set (set, req_mode))
234 /* If a test-under-mask instruction can be used to implement
235 (compare (and ... OP1) OP2), return the CC mode required
236 to do that. Otherwise, return VOIDmode.
237 MIXED is true if the instruction can distinguish between
238 CC1 and CC2 for mixed selected bits (TMxx), it is false
239 if the instruction cannot (TM). */
242 s390_tm_ccmode (op1, op2, mixed)
249 /* ??? Fixme: should work on CONST_DOUBLE as well. */
250 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
253 /* Selected bits all zero: CC0. */
254 if (INTVAL (op2) == 0)
257 /* Selected bits all one: CC3. */
258 if (INTVAL (op2) == INTVAL (op1))
261 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
/* bit1 = position of the single one bit; bit0 = position of the single
   zero bit among the selected bits.  exact_log2 returns -1 unless its
   argument is an exact power of two.  */
264 bit1 = exact_log2 (INTVAL (op2));
265 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
266 if (bit0 != -1 && bit1 != -1)
267 return bit0 > bit1 ? CCT1mode : CCT2mode;
273 /* Given a comparison code OP (EQ, NE, etc.) and the operands
274 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* NOTE(review): the outer switch on CODE and its case labels are elided in
   this chunk; each visible group below handles one comparison class.  */
278 s390_select_ccmode (code, op0, op1)
/* Addition of an in-range immediate ('K' constraint) — presumably selects
   an add-logical CC mode; the returned mode lines are elided.  */
287 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
288 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
290 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
291 || GET_CODE (op1) == NEG)
294 if (GET_CODE (op0) == AND)
296 /* Check whether we can potentially do it via TM. */
297 enum machine_mode ccmode;
298 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
299 if (ccmode != VOIDmode)
301 /* Relax CCTmode to CCZmode to allow fall-back to AND
302 if that turns out to be beneficial. */
303 return ccmode == CCTmode ? CCZmode : ccmode;
/* Comparing a HImode/QImode register against all-ones in either signed
   (-1) or unsigned (65535/255) representation.  */
307 if (register_operand (op0, HImode)
308 && GET_CODE (op1) == CONST_INT
309 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
311 if (register_operand (op0, QImode)
312 && GET_CODE (op1) == CONST_INT
313 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
322 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
323 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
/* Sign of the added constant distinguishes the CC mode chosen.  */
325 if (INTVAL (XEXP((op0), 1)) < 0)
/* Comparisons involving sign/zero extension of OP0 against a
   non-constant need the reversed-operand CC modes — presumably.  */
338 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
339 && GET_CODE (op1) != CONST_INT)
345 if (GET_CODE (op0) == PLUS)
348 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
349 && GET_CODE (op1) != CONST_INT)
355 if (GET_CODE (op0) == MINUS)
358 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
359 && GET_CODE (op1) != CONST_INT)
368 /* Return branch condition mask to implement a branch
369 specified by CODE. */
372 s390_branch_condition_mask (code)
/* The four S/390 condition codes map to a 4-bit mask, CC0 being the most
   significant bit.  Each case below ORs together the CCs for which the
   branch must be taken.  */
375 const int CC0 = 1 << 3;
376 const int CC1 = 1 << 2;
377 const int CC2 = 1 << 1;
378 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero.  */
380 if (GET_CODE (XEXP (code, 0)) != REG
381 || REGNO (XEXP (code, 0)) != CC_REGNUM
382 || XEXP (code, 1) != const0_rtx)
/* NOTE(review): the CC-mode case labels of this outer switch are elided;
   each inner switch maps an rtx comparison code to a condition mask for
   one CC mode.  */
385 switch (GET_MODE (XEXP (code, 0)))
388 switch (GET_CODE (code))
391 case NE: return CC1 | CC2 | CC3;
398 switch (GET_CODE (code))
401 case NE: return CC0 | CC2 | CC3;
408 switch (GET_CODE (code))
411 case NE: return CC0 | CC1 | CC3;
418 switch (GET_CODE (code))
421 case NE: return CC0 | CC1 | CC2;
428 switch (GET_CODE (code))
430 case EQ: return CC0 | CC2;
431 case NE: return CC1 | CC3;
438 switch (GET_CODE (code))
440 case LTU: return CC2 | CC3; /* carry */
441 case GEU: return CC0 | CC1; /* no carry */
448 switch (GET_CODE (code))
450 case GTU: return CC0 | CC1; /* borrow */
451 case LEU: return CC2 | CC3; /* no borrow */
458 switch (GET_CODE (code))
461 case NE: return CC1 | CC2 | CC3;
462 case LTU: return CC1;
463 case GTU: return CC2;
464 case LEU: return CC0 | CC1;
465 case GEU: return CC0 | CC2;
/* Reversed-operand unsigned mode: LTU/GTU masks swap vs. the above.  */
472 switch (GET_CODE (code))
475 case NE: return CC2 | CC1 | CC3;
476 case LTU: return CC2;
477 case GTU: return CC1;
478 case LEU: return CC0 | CC2;
479 case GEU: return CC0 | CC1;
486 switch (GET_CODE (code))
489 case NE: return CC1 | CC2 | CC3;
490 case LT: return CC1 | CC3;
492 case LE: return CC0 | CC1 | CC3;
493 case GE: return CC0 | CC2;
500 switch (GET_CODE (code))
503 case NE: return CC1 | CC2 | CC3;
505 case GT: return CC2 | CC3;
506 case LE: return CC0 | CC1;
507 case GE: return CC0 | CC2 | CC3;
/* Signed/FP mode including the unordered comparison codes (CC3 encodes
   "unordered" here).  */
514 switch (GET_CODE (code))
517 case NE: return CC1 | CC2 | CC3;
520 case LE: return CC0 | CC1;
521 case GE: return CC0 | CC2;
522 case UNORDERED: return CC3;
523 case ORDERED: return CC0 | CC1 | CC2;
524 case UNEQ: return CC0 | CC3;
525 case UNLT: return CC1 | CC3;
526 case UNGT: return CC2 | CC3;
527 case UNLE: return CC0 | CC1 | CC3;
528 case UNGE: return CC0 | CC2 | CC3;
529 case LTGT: return CC1 | CC2;
/* Reversed-operand variant of the previous mode: CC1/CC2 swap roles.  */
536 switch (GET_CODE (code))
539 case NE: return CC2 | CC1 | CC3;
542 case LE: return CC0 | CC2;
543 case GE: return CC0 | CC1;
544 case UNORDERED: return CC3;
545 case ORDERED: return CC0 | CC2 | CC1;
546 case UNEQ: return CC0 | CC3;
547 case UNLT: return CC2 | CC3;
548 case UNGT: return CC1 | CC3;
549 case UNLE: return CC0 | CC2 | CC3;
550 case UNGE: return CC0 | CC1 | CC3;
551 case LTGT: return CC2 | CC1;
562 /* If INV is false, return assembler mnemonic string to implement
563 a branch specified by CODE. If INV is true, return mnemonic
564 for the corresponding inverted branch. */
567 s390_branch_condition_mnemonic (code, inv)
/* Mnemonics indexed by the 4-bit condition mask from
   s390_branch_condition_mask; entries 0 ("never") and 15 ("always")
   are invalid here.  */
571 static const char *const mnemonic[16] =
573 NULL, "o", "h", "nle",
574 "l", "nhe", "lh", "ne",
575 "e", "nlh", "he", "nl",
576 "le", "nh", "no", NULL
579 int mask = s390_branch_condition_mask (code);
/* NOTE(review): the inversion of MASK when INV is set appears elided here.  */
584 if (mask < 1 || mask > 14)
587 return mnemonic[mask];
590 /* If OP is an integer constant of mode MODE with exactly one
591 HImode subpart unequal to DEF, return the number of that
592 subpart. As a special case, all HImode subparts of OP are
593 equal to DEF, return zero. Otherwise, return -1. */
596 s390_single_hi (op, mode, def)
598 enum machine_mode mode;
601 if (GET_CODE (op) == CONST_INT)
603 unsigned HOST_WIDE_INT value;
/* Number of 16-bit subparts in MODE.  */
604 int n_parts = GET_MODE_SIZE (mode) / 2;
607 for (i = 0; i < n_parts; i++)
610 value = (unsigned HOST_WIDE_INT) INTVAL (op);
/* Record subparts that differ from DEF; more than one means failure
   (bookkeeping lines elided).  */
614 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* Subparts are numbered from the most significant end.  */
623 return part == -1 ? 0 : (n_parts - 1 - part);
/* Wide constants are represented as VOIDmode CONST_DOUBLEs; the value is
   split across CONST_DOUBLE_LOW and CONST_DOUBLE_HIGH.  */
626 else if (GET_CODE (op) == CONST_DOUBLE
627 && GET_MODE (op) == VOIDmode)
629 unsigned HOST_WIDE_INT value;
630 int n_parts = GET_MODE_SIZE (mode) / 2;
633 for (i = 0; i < n_parts; i++)
636 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
637 else if (i == HOST_BITS_PER_WIDE_INT / 16)
638 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
642 if ((value & 0xffff) != (unsigned)(def & 0xffff))
651 return part == -1 ? 0 : (n_parts - 1 - part);
657 /* Extract the HImode part number PART from integer
658 constant OP of mode MODE. */
661 s390_extract_hi (op, mode, part)
663 enum machine_mode mode;
666 int n_parts = GET_MODE_SIZE (mode) / 2;
/* Out-of-range part numbers are invalid (error handling elided).  */
667 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to shift count.  */
670 part = n_parts - 1 - part;
672 if (GET_CODE (op) == CONST_INT)
674 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
675 return ((value >> (16 * part)) & 0xffff);
677 else if (GET_CODE (op) == CONST_DOUBLE
678 && GET_MODE (op) == VOIDmode)
680 unsigned HOST_WIDE_INT value;
/* Low parts come from CONST_DOUBLE_LOW, higher ones from
   CONST_DOUBLE_HIGH with the part index rebased.  */
681 if (part < HOST_BITS_PER_WIDE_INT / 16)
682 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
684 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
685 part -= HOST_BITS_PER_WIDE_INT / 16;
687 return ((value >> (16 * part)) & 0xffff);
693 /* If OP is an integer constant of mode MODE with exactly one
694 QImode subpart unequal to DEF, return the number of that
695 subpart. As a special case, all QImode subparts of OP are
696 equal to DEF, return zero. Otherwise, return -1. */
/* Byte-granular analogue of s390_single_hi above.  */
699 s390_single_qi (op, mode, def)
701 enum machine_mode mode;
704 if (GET_CODE (op) == CONST_INT)
706 unsigned HOST_WIDE_INT value;
707 int n_parts = GET_MODE_SIZE (mode);
710 for (i = 0; i < n_parts; i++)
713 value = (unsigned HOST_WIDE_INT) INTVAL (op);
717 if ((value & 0xff) != (unsigned)(def & 0xff))
/* Subparts are numbered from the most significant end.  */
726 return part == -1 ? 0 : (n_parts - 1 - part);
729 else if (GET_CODE (op) == CONST_DOUBLE
730 && GET_MODE (op) == VOIDmode)
732 unsigned HOST_WIDE_INT value;
733 int n_parts = GET_MODE_SIZE (mode);
736 for (i = 0; i < n_parts; i++)
739 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
740 else if (i == HOST_BITS_PER_WIDE_INT / 8)
741 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
745 if ((value & 0xff) != (unsigned)(def & 0xff))
754 return part == -1 ? 0 : (n_parts - 1 - part);
760 /* Extract the QImode part number PART from integer
761 constant OP of mode MODE. */
/* Byte-granular analogue of s390_extract_hi above.  */
764 s390_extract_qi (op, mode, part)
766 enum machine_mode mode;
769 int n_parts = GET_MODE_SIZE (mode);
770 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to shift count.  */
773 part = n_parts - 1 - part;
775 if (GET_CODE (op) == CONST_INT)
777 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
778 return ((value >> (8 * part)) & 0xff);
780 else if (GET_CODE (op) == CONST_DOUBLE
781 && GET_MODE (op) == VOIDmode)
783 unsigned HOST_WIDE_INT value;
784 if (part < HOST_BITS_PER_WIDE_INT / 8)
785 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
787 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
788 part -= HOST_BITS_PER_WIDE_INT / 8;
790 return ((value >> (8 * part)) & 0xff);
797 /* Change optimizations to be performed, depending on the
800 LEVEL is the optimization level specified; 2 if `-O2' is
801 specified, 1 if `-O' is specified, and 0 if neither is specified.
803 SIZE is nonzero if `-Os' is specified and zero otherwise. */
806 optimization_options (level, size)
807 int level ATTRIBUTE_UNUSED;
808 int size ATTRIBUTE_UNUSED;
815 /* Acquire a unique set number for our register saves and restores. */
816 s390_sr_alias_set = new_alias_set ();
820 /* Map for smallest class containing reg regno. */
/* Layout: GPR 0 (general), GPRs 1-15 (address), FPRs 16-31, then the
   argument pointer, CC register (NO_REGS), and return-address register.  */
822 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
823 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
824 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
825 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
826 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
827 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
828 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
829 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
830 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
831 ADDR_REGS, NO_REGS, ADDR_REGS
835 /* Return true if OP a (const_int 0) operand.
836 OP is the current operation.
837 MODE is the current operation mode. */
840 const0_operand (op, mode)
842 enum machine_mode mode;
844 return op == CONST0_RTX (mode);
847 /* Return true if OP is constant.
848 OP is the current operation.
849 MODE is the current operation mode. */
852 consttable_operand (op, mode)
854 enum machine_mode mode ATTRIBUTE_UNUSED;
856 return CONSTANT_P (op);
859 /* Return true if the mode of operand OP matches MODE.
860 If MODE is set to VOIDmode, set it to the mode of OP. */
863 check_mode (op, mode)
865 enum machine_mode *mode;
/* VOIDmode acts as a wildcard: adopt OP's mode.  */
867 if (*mode == VOIDmode)
868 *mode = GET_MODE (op);
/* A VOIDmode operand (e.g. a CONST_INT) matches any requested mode.  */
871 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
877 /* Return true if OP a valid operand for the LARL instruction.
878 OP is the current operation.
879 MODE is the current operation mode. */
882 larl_operand (op, mode)
884 enum machine_mode mode;
886 if (! check_mode (op, &mode))
889 /* Allow labels and local symbols. */
890 if (GET_CODE (op) == LABEL_REF)
/* Under PIC, only local symbols (SYMBOL_REF_FLAG) and constant-pool
   references are directly addressable via LARL.  */
892 if (GET_CODE (op) == SYMBOL_REF
893 && (!flag_pic || SYMBOL_REF_FLAG (op)
894 || CONSTANT_POOL_ADDRESS_P (op)))
897 /* Everything else must have a CONST, so strip it. */
898 if (GET_CODE (op) != CONST)
902 /* Allow adding *even* constants. */
/* LARL addresses are halfword-aligned, hence the offset must be even.  */
903 if (GET_CODE (op) == PLUS)
905 if (GET_CODE (XEXP (op, 1)) != CONST_INT
906 || (INTVAL (XEXP (op, 1)) & 1) != 0)
911 /* Labels and local symbols allowed here as well. */
912 if (GET_CODE (op) == LABEL_REF)
914 if (GET_CODE (op) == SYMBOL_REF
915 && (!flag_pic || SYMBOL_REF_FLAG (op)
916 || CONSTANT_POOL_ADDRESS_P (op)))
919 /* Now we must have a @GOTENT offset or @PLT stub. */
/* NOTE(review): 111 and 113 are magic UNSPEC numbers for @GOTENT and @PLT
   respectively — worth replacing with named constants; confirm against the
   machine description.  */
920 if (GET_CODE (op) == UNSPEC
921 && XINT (op, 1) == 111
923 if (GET_CODE (op) == UNSPEC
924 && XINT (op, 1) == 113
930 /* Return true if OP is a valid FP-Register.
931 OP is the current operation.
932 MODE is the current operation mode. */
935 fp_operand (op, mode)
937 enum machine_mode mode;
939 register enum rtx_code code = GET_CODE (op);
940 if (! check_mode (op, &mode))
/* Accept only hard registers in the floating-point register range.  */
942 if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
948 /* Helper routine to implement s_operand and s_imm_operand.
949 OP is the current operation.
950 MODE is the current operation mode.
951 ALLOW_IMMEDIATE specifies whether immediate operands should
952 be accepted or not. */
955 general_s_operand (op, mode, allow_immediate)
957 enum machine_mode mode;
960 struct s390_address addr;
962 /* Call general_operand first, so that we don't have to
963 check for many special cases. */
964 if (!general_operand (op, mode))
967 /* Just like memory_operand, allow (subreg (mem ...))
   after reload — presumably; the guarding condition is elided here.  */
970 && GET_CODE (op) == SUBREG
971 && GET_CODE (SUBREG_REG (op)) == MEM
972 op = SUBREG_REG (op);
974 switch (GET_CODE (op))
976 /* Constants that we are sure will be forced to the
977 literal pool in reload are OK as s-operand. Note
978 that we cannot call s390_preferred_reload_class here
979 because it might not be known yet at this point
980 whether the current function is a leaf or not. */
983 if (!allow_immediate || reload_completed)
985 if (!legitimate_reload_constant_p (op))
991 /* Memory operands are OK unless they already use an
   index register — presumably; the decomposed-address check below
   rejects addresses s390_decompose_address cannot handle.  */
994 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
996 if (s390_decompose_address (XEXP (op, 0), &addr)
1008 /* Return true if OP is a valid S-type operand.
1009 OP is the current operation.
1010 MODE is the current operation mode. */
1013 s_operand (op, mode)
1015 enum machine_mode mode;
1017 return general_s_operand (op, mode, 0);
1020 /* Return true if OP is a valid S-type operand or an immediate
1021 operand that can be addressed as S-type operand by forcing
1022 it into the literal pool.
1023 OP is the current operation.
1024 MODE is the current operation mode. */
1027 s_imm_operand (op, mode)
1029 enum machine_mode mode;
1031 return general_s_operand (op, mode, 1);
1034 /* Return true if OP is a valid operand for a 'Q' constraint.
1035 This differs from s_operand in that only memory operands
1036 without index register are accepted, nothing else. */
/* NOTE(review): the function header itself is not visible in this chunk;
   only the body fragment below is.  */
1042 struct s390_address addr;
1044 if (GET_CODE (op) != MEM)
1047 if (!s390_decompose_address (XEXP (op, 0), &addr))
1056 /* Return the cost of an address rtx ADDR. */
1059 s390_address_cost (addr)
1062 struct s390_address ad;
1063 if (!s390_decompose_address (addr, &ad))
/* An address with an index register costs one extra unit.  */
1066 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1069 /* Return true if OP is a valid operand for the BRAS instruction.
1070 OP is the current operation.
1071 MODE is the current operation mode. */
1074 bras_sym_operand (op, mode)
1076 enum machine_mode mode ATTRIBUTE_UNUSED;
1078 register enum rtx_code code = GET_CODE (op);
1080 /* Allow SYMBOL_REFs. */
1081 if (code == SYMBOL_REF)
1084 /* Allow @PLT stubs. */
/* NOTE(review): 113 is the magic UNSPEC number for @PLT (see larl_operand);
   the CONST wrapper check preceding this appears elided.  */
1086 && GET_CODE (XEXP (op, 0)) == UNSPEC
1087 && XINT (XEXP (op, 0), 1) == 113
1093 /* Return true if OP is a load multiple operation. It is known to be a
1094 PARALLEL and the first section will be tested.
1095 OP is the current operation.
1096 MODE is the current operation mode. */
1099 load_multiple_operation (op, mode)
1101 enum machine_mode mode ATTRIBUTE_UNUSED;
1103 int count = XVECLEN (op, 0);
1104 unsigned int dest_regno;
1109 /* Perform a quick check so we don't blow up below. */
/* The first element must be (set (reg) (mem ...)).  */
1111 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1112 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1113 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM
1116 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1117 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1119 /* Check, is base, or base + displacement. */
1121 if (GET_CODE (src_addr) == REG)
1123 else if (GET_CODE (src_addr) == PLUS
1124 && GET_CODE (XEXP (src_addr, 0)) == REG
1125 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1127 off = INTVAL (XEXP (src_addr, 1));
1128 src_addr = XEXP (src_addr, 0);
/* Frame/arg pointer based addresses will be eliminated later; reject
   them here — presumably to avoid invalid LM after elimination.  */
1133 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Each following element must load the next consecutive register from
   the next consecutive word.  */
1136 for (i = 1; i < count; i++)
1138 rtx elt = XVECEXP (op, 0, i);
1140 if (GET_CODE (elt) != SET
1141 || GET_CODE (SET_DEST (elt)) != REG
1142 || GET_MODE (SET_DEST (elt)) != Pmode
1143 || REGNO (SET_DEST (elt)) != dest_regno + i
1144 || GET_CODE (SET_SRC (elt)) != MEM
1145 || GET_MODE (SET_SRC (elt)) != Pmode
1146 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1147 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1148 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1149 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1150 != off + i * UNITS_PER_WORD)
1157 /* Return true if OP is a store multiple operation. It is known to be a
1158 PARALLEL and the first section will be tested.
1159 OP is the current operation.
1160 MODE is the current operation mode. */
/* Mirror image of load_multiple_operation above, for STM.  */
1163 store_multiple_operation (op, mode)
1165 enum machine_mode mode ATTRIBUTE_UNUSED;
1167 int count = XVECLEN (op, 0);
1168 unsigned int src_regno;
1172 /* Perform a quick check so we don't blow up below. */
/* The first element must be (set (mem ...) (reg)).  */
1174 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1175 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1176 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG
1179 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1180 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1182 /* Check, is base, or base + displacement. */
1184 if (GET_CODE (dest_addr) == REG)
1186 else if (GET_CODE (dest_addr) == PLUS
1187 && GET_CODE (XEXP (dest_addr, 0)) == REG
1188 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1190 off = INTVAL (XEXP (dest_addr, 1));
1191 dest_addr = XEXP (dest_addr, 0);
/* Reject frame/arg pointer based addresses (eliminated later).  */
1196 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
/* Each following element must store the next consecutive register into
   the next consecutive word.  */
1199 for (i = 1; i < count; i++)
1201 rtx elt = XVECEXP (op, 0, i);
1203 if (GET_CODE (elt) != SET
1204 || GET_CODE (SET_SRC (elt)) != REG
1205 || GET_MODE (SET_SRC (elt)) != Pmode
1206 || REGNO (SET_SRC (elt)) != src_regno + i
1207 || GET_CODE (SET_DEST (elt)) != MEM
1208 || GET_MODE (SET_DEST (elt)) != Pmode
1209 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1210 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1211 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1212 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1213 != off + i * UNITS_PER_WORD)
1220 /* Return true if OP contains a symbol reference */
/* Recursively walks the rtx using its format string: 'E' entries are
   vectors, 'e' entries are sub-expressions.  */
1223 symbolic_reference_mentioned_p (op)
1226 register const char *fmt;
1229 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1232 fmt = GET_RTX_FORMAT (GET_CODE (op));
1233 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1239 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1240 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1244 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1252 /* Return true if OP is a legitimate general operand when
1253 generating PIC code. It is given that flag_pic is on
1254 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1257 legitimate_pic_operand_p (op)
1260 /* Accept all non-symbolic constants. */
1261 if (!SYMBOLIC_CONST (op))
1264 /* Reject everything else; must be handled
1265 via emit_pic_move. */
1269 /* Returns true if the constant value OP is a legitimate general operand.
1270 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1273 legitimate_constant_p (op)
1276 /* Accept all non-symbolic constants. */
1277 if (!SYMBOLIC_CONST (op))
1280 /* In the PIC case, symbolic constants must *not* be
1281 forced into the literal pool. We accept them here,
1282 so that they will be handled by emit_pic_move. */
1286 /* Even in the non-PIC case, we can accept immediate
1287 LARL operands here. */
1289 return larl_operand (op, VOIDmode);
1291 /* All remaining non-PIC symbolic constants are
1292 forced into the literal pool. */
1296 /* Returns true if the constant value OP is a legitimate general
1297 operand during and after reload. The difference to
1298 legitimate_constant_p is that this function will not accept
1299 a constant that would need to be forced to the literal pool
1300 before it can be used as operand. */
1303 legitimate_reload_constant_p (op)
1306 /* Accept l(g)hi operands. */
/* 'K' is the constraint letter for 16-bit signed immediates.  */
1307 if (GET_CODE (op) == CONST_INT
1308 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K')
1311 /* Accept lliXX operands. */
/* Load-logical-immediate: a constant with at most one nonzero HImode
   subpart; the 64-bit guard preceding this line appears elided.  */
1313 && s390_single_hi (op, DImode, 0) >= 0
1316 /* Accept larl operands. */
1318 && larl_operand (op, VOIDmode)
1321 /* Everything else cannot be handled without reload. */
1325 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1326 return the class of reg to actually use. */
1329 s390_preferred_reload_class (op, class)
1331 enum reg_class class;
1333 /* This can happen if a floating point constant is being
1334 reloaded into an integer register. Leave well alone. */
1335 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1336 && class != FP_REGS)
/* NOTE(review): the case labels of this switch are elided; the groups
   below handle constants and symbolic/PLUS operands respectively.  */
1339 switch (GET_CODE (op))
1341 /* Constants we cannot reload must be forced into the
1342 literal pool. For constants we *could* handle directly,
1343 it might still be preferable to put them in the pool and
1344 use a memory-to-memory instruction.
1346 However, try to avoid needlessly allocating a literal
1347 pool in a routine that wouldn't otherwise need any.
1348 Heuristically, we assume that 64-bit leaf functions
1349 typically don't need a literal pool, all others do. */
1352 if (!legitimate_reload_constant_p (op))
1355 if (TARGET_64BIT && current_function_is_leaf)
1360 /* If a symbolic constant or a PLUS is reloaded,
1361 it is most likely being used as an address, so
1362 prefer ADDR_REGS. If 'class' is not a superset
1363 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1368 if (reg_class_subset_p (ADDR_REGS, class))
1380 /* Return the register class of a scratch register needed to
1381 load IN into a register of class CLASS in MODE.
1383 We need a temporary when loading a PLUS expression which
1384 is not a legitimate operand of the LOAD ADDRESS instruction. */
1387 s390_secondary_input_reload_class (class, mode, in)
1388 enum reg_class class ATTRIBUTE_UNUSED;
1389 enum machine_mode mode;
/* Such a PLUS is fixed up by s390_expand_plus_operand, which needs a
   scratch register (class returned on the elided line).  */
1392 if (s390_plus_operand (in, mode))
1398 /* Return true if OP is a PLUS that is not a legitimate
1399 operand for the LA instruction.
1400 OP is the current operation.
1401 MODE is the current operation mode. */
1404 s390_plus_operand (op, mode)
1406 enum machine_mode mode;
/* Only Pmode addresses are of interest here.  */
1408 if (!check_mode (op, &mode) || mode != Pmode)
1411 if (GET_CODE (op) != PLUS)
/* A PLUS that LA can already handle needs no fixup.  */
1414 if (legitimate_la_operand_p (op))
1420 /* Generate code to load SRC, which is PLUS that is not a
1421 legitimate operand for the LA instruction, into TARGET.
1422 SCRATCH may be used as scratch register. */
1425 s390_expand_plus_operand (target, src, scratch)
1426 register rtx target;
1428 register rtx scratch;
1431 struct s390_address ad;
1433 /* src must be a PLUS; get its two operands. */
1434 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1437 /* Check if any of the two operands is already scheduled
1438 for replacement by reload. This can happen e.g. when
1439 float registers occur in an address. */
1440 sum1 = find_replacement (&XEXP (src, 0));
1441 sum2 = find_replacement (&XEXP (src, 1));
1442 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1444 /* If the address is already strictly valid, there's nothing to do. */
1445 if (!s390_decompose_address (src, &ad)
1446 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1447 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1449 /* Otherwise, one of the operands cannot be an address register;
1450 we reload its value into the scratch register. */
/* Hard registers 1..15 are the valid address registers; register 0
   cannot serve as a base/index.  */
1451 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1453 emit_move_insn (scratch, sum1);
1456 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1458 emit_move_insn (scratch, sum2);
1462 /* According to the way these invalid addresses are generated
1463 in reload.c, it should never happen (at least on s390) that
1464 *neither* of the PLUS components, after find_replacements
1465 was applied, is an address register. */
1466 if (sum1 == scratch && sum2 == scratch)
1472 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1475 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1476 is only ever performed on addresses, so we can mark the
1477 sum as legitimate for LA in any case. */
1478 s390_load_address (target, src);
1482 /* Decompose a RTL expression ADDR for a memory address into
1483 its components, returned in OUT.
1485 Returns 0 if ADDR is not a valid memory address, nonzero
1486 otherwise. If OUT is NULL, don't return the components,
1487 but check for validity only.
1489 Note: Only addresses in canonical form are recognized.
1490 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1491 canonical form so that they will be recognized. */
1494 s390_decompose_address (addr, out)
1496 struct s390_address *out;
/* Components of an S/390 effective address: base register,
   index register, and a 12-bit unsigned displacement. */
1498 rtx base = NULL_RTX;
1499 rtx indx = NULL_RTX;
1500 rtx disp = NULL_RTX;
/* Nonzero when the address is known to be a genuine pointer value
   (relevant for 31-bit LA validity; stored into out->pointer below). */
1501 int pointer = FALSE;
1503 /* Decompose address into base + index + displacement. */
1505 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1508 else if (GET_CODE (addr) == PLUS)
1510 rtx op0 = XEXP (addr, 0);
1511 rtx op1 = XEXP (addr, 1);
1512 enum rtx_code code0 = GET_CODE (op0);
1513 enum rtx_code code1 = GET_CODE (op1);
1515 if (code0 == REG || code0 == UNSPEC)
1517 if (code1 == REG || code1 == UNSPEC)
1519 indx = op0; /* index + base */
1525 base = op0; /* base + displacement */
1530 else if (code0 == PLUS)
1532 indx = XEXP (op0, 0); /* index + base + disp */
1533 base = XEXP (op0, 1);
1544 disp = addr; /* displacement */
1547 /* Validate base register. */
/* The literal-pool base may arrive wrapped in (unspec [reg] 101);
   accept only that exact shape and unwrap it. */
1550 if (GET_CODE (base) == UNSPEC)
1552 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1554 base = XVECEXP (base, 0, 0);
1558 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* NOTE(review): the body for this register-class test is elided in
   this view; presumably it sets 'pointer' for registers that always
   hold addresses (stack/frame/arg/base/GOT pointer) -- confirm. */
1561 if (REGNO (base) == BASE_REGISTER
1562 || REGNO (base) == STACK_POINTER_REGNUM
1563 || REGNO (base) == FRAME_POINTER_REGNUM
1564 || ((reload_completed || reload_in_progress)
1565 && frame_pointer_needed
1566 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1567 || REGNO (base) == ARG_POINTER_REGNUM
1568 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1569 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1571 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1575 /* Validate index register. */
/* Same unwrap-and-check logic as for the base register above. */
1578 if (GET_CODE (indx) == UNSPEC)
1580 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1582 indx = XVECEXP (indx, 0, 0)
1586 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1589 if (REGNO (indx) == BASE_REGISTER
1590 || REGNO (indx) == STACK_POINTER_REGNUM
1591 || REGNO (indx) == FRAME_POINTER_REGNUM
1592 || ((reload_completed || reload_in_progress)
1593 && frame_pointer_needed
1594 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1595 || REGNO (indx) == ARG_POINTER_REGNUM
1596 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1597 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1599 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1603 /* Validate displacement. */
1606 /* Allow integer constant in range. */
1607 if (GET_CODE (disp) == CONST_INT)
/* Hardware D-field is 12 bits unsigned: 0 <= disp < 4096. */
1609 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1613 /* In the small-PIC case, the linker converts @GOT12
1614 offsets to possible displacements. */
1615 else if (GET_CODE (disp) == CONST
1616 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1617 && XINT (XEXP (disp, 0), 1) == 110)
1625 /* Accept chunkfied literal pool symbol references. */
1626 else if (GET_CODE (disp) == CONST
1627 && GET_CODE (XEXP (disp, 0)) == MINUS
1628 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1629 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1634 /* Likewise if a constant offset is present. */
1635 else if (GET_CODE (disp) == CONST
1636 && GET_CODE (XEXP (disp, 0)) == PLUS
1637 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
1638 && GET_CODE (XEXP (disp, 0), 0)) == MINUS
1639 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1640 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1645 /* We can convert literal pool addresses to
1646 displacements by basing them off the base register. */
1649 /* In some cases, we can accept an additional
1650 small constant offset. Split these off here. */
1652 unsigned int offset = 0;
1654 if (GET_CODE (disp) == CONST
1655 && GET_CODE (XEXP (disp, 0)) == PLUS
1656 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1658 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1659 disp = XEXP (XEXP (disp, 0), 0);
1662 /* Now we must have a literal pool address. */
1663 if (GET_CODE (disp) != SYMBOL_REF
1664 || !CONSTANT_POOL_ADDRESS_P (disp))
1667 /* In 64-bit PIC mode we cannot accept symbolic
1668 constants in the constant pool. */
1669 if (TARGET_64BIT && flag_pic
1670 && SYMBOLIC_CONST (get_pool_constant (disp)))
1673 /* If we have an offset, make sure it does not
1674 exceed the size of the constant pool entry. */
1675 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1678 /* Either base or index must be free to
1679 hold the base register. */
1683 /* Convert the address. */
/* Rebase the pool symbol: put the literal pool base register into
   whichever of base/index is still free, and wrap the symbol in
   (unspec ... 100), i.e. a pool-relative (".LTn"-relative) offset. */
1685 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1687 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1689 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1690 disp = gen_rtx_CONST (Pmode, disp);
1693 disp = plus_constant (disp, offset);
1707 out->pointer = pointer;
1713 /* Return nonzero if ADDR is a valid memory address.
1714 STRICT specifies whether strict register checking applies. */
1717 legitimate_address_p (mode, addr, strict)
1718 enum machine_mode mode ATTRIBUTE_UNUSED;
1722 struct s390_address ad;
/* Structural check first: must decompose into base/index/disp. */
1723 if (!s390_decompose_address (addr, &ad)
/* Strict variant: base/index must be hard registers valid in their
   role (after reload). */
1728 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1730 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
/* Non-strict variant: pseudos are still acceptable (before reload). */
1735 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1737 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1744 /* Return 1 if OP is a valid operand for the LA instruction.
1745 In 31-bit, we need to prove that the result is used as an
1746 address, as LA performs only a 31-bit addition. */
1749 legitimate_la_operand_p (op)
1752 struct s390_address addr;
/* Must be a decomposable address at all. */
1753 if (!s390_decompose_address (op, &addr))
/* On 64-bit LA is always safe; on 31-bit only when the value is
   known to be a pointer (top bit irrelevant). */
1756 if (TARGET_64BIT || addr.pointer)
1762 /* Return 1 if OP is a valid operand for the LA instruction,
1763 and we prefer to use LA over addition to compute it.
1764 If STRICT is true, only accept operands that will never
1765 change to something we cannot recognize as preferred. */
1768 preferred_la_operand_p (op, strict)
1772 struct s390_address addr;
/* Must first be a valid LA operand (see legitimate_la_operand_p). */
1773 if (!s390_decompose_address (op, &addr))
1776 if (!TARGET_64BIT && !addr.pointer)
/* Prefer LA when an operand register is already marked as holding
   a pointer -- the result is then an address too. */
1783 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
1784 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
1790 /* Emit a forced load-address operation to load SRC into DST.
1791 This will use the LOAD ADDRESS instruction even in situations
1792 where legitimate_la_operand_p (SRC) returns false. */
1795 s390_load_address (dst, src)
/* 64-bit: a plain move suffices (LA semantics match a full add). */
1800 emit_move_insn (dst, src);
/* 31-bit: use the special force_la_31 pattern to force LA. */
1802 emit_insn (gen_force_la_31 (dst, src));
1805 /* Return a legitimate reference for ORIG (an address) using the
1806 register REG. If REG is 0, a new pseudo is generated.
1808 There are two types of references that must be handled:
1810 1. Global data references must load the address from the GOT, via
1811 the PIC reg. An insn is emitted to do this load, and the reg is
1814 2. Static data references, constant pool addresses, and code labels
1815 compute the address as an offset from the GOT, whose base is in
1816 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1817 differentiate them from global data objects. The returned
1818 address is the PIC reg + an unspec constant.
1820 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1821 reg also appears in the address. */
1824 legitimize_pic_address (orig, reg)
/* Case 1: local symbol (label, static data, or pool entry). */
1832 if (GET_CODE (addr) == LABEL_REF
1833 || (GET_CODE (addr) == SYMBOL_REF
1834 && (SYMBOL_REF_FLAG (addr)
1835 || CONSTANT_POOL_ADDRESS_P (addr))))
1837 /* This is a local symbol. */
1840 /* Access local symbols PC-relative via LARL.
1841 This is the same as in the non-PIC case, so it is
1842 handled automatically ... */
1846 /* Access local symbols relative to the literal pool. */
1848 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Store the pool-relative offset (unspec 100) as a pool constant,
   load it, then add the literal pool base (unspec 101 wrapper). */
1850 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1851 addr = gen_rtx_CONST (SImode, addr);
1852 addr = force_const_mem (SImode, addr);
1853 emit_move_insn (temp, addr);
1855 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1856 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1857 new = gen_rtx_PLUS (Pmode, base, temp);
1861 emit_move_insn (reg, new);
/* Case 2: global symbol -- must go through the GOT. */
1866 else if (GET_CODE (addr) == SYMBOL_REF)
1869 reg = gen_reg_rtx (Pmode);
1873 /* Assume GOT offset < 4k. This is handled the same way
1874 in both 31- and 64-bit code (@GOT12). */
/* The PIC register is used implicitly; make sure reload/DF know. */
1876 if (reload_in_progress || reload_completed)
1877 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* Unspec 110 == @GOT12 relocation (see s390_output_symbolic_const). */
1879 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1880 new = gen_rtx_CONST (Pmode, new);
1881 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1882 new = gen_rtx_MEM (Pmode, new);
/* GOT slots are fixed once relocated; mark the load unchanging. */
1883 RTX_UNCHANGING_P (new) = 1;
1884 emit_move_insn (reg, new);
1887 else if (TARGET_64BIT)
1889 /* If the GOT offset might be >= 4k, we determine the position
1890 of the GOT entry via a PC-relative LARL (@GOTENT). */
1892 rtx temp = gen_reg_rtx (Pmode);
/* Unspec 111 == @GOTENT relocation. */
1894 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1895 new = gen_rtx_CONST (Pmode, new);
1896 emit_move_insn (temp, new);
1898 new = gen_rtx_MEM (Pmode, temp);
1899 RTX_UNCHANGING_P (new) = 1;
1900 emit_move_insn (reg, new);
1905 /* If the GOT offset might be >= 4k, we have to load it
1906 from the literal pool (@GOT). */
1908 rtx temp = gen_reg_rtx (Pmode);
1910 if (reload_in_progress || reload_completed)
1911 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* Unspec 112 == @GOT relocation, materialized via the pool. */
1913 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
1914 addr = gen_rtx_CONST (SImode, addr);
1915 addr = force_const_mem (SImode, addr);
1916 emit_move_insn (temp, addr);
1918 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
1919 new = gen_rtx_MEM (Pmode, new);
1920 RTX_UNCHANGING_P (new) = 1;
1921 emit_move_insn (reg, new);
/* Case 3: a CONST wrapper -- re-legitimize whatever is inside. */
1927 if (GET_CODE (addr) == CONST)
1929 addr = XEXP (addr, 0);
1930 if (GET_CODE (addr) == UNSPEC)
1932 if (XVECLEN (addr, 0) != 1)
1934 switch (XINT (addr, 1))
1936 /* If someone moved an @GOT or lt-relative UNSPEC
1937 out of the literal pool, force them back in. */
1941 new = force_const_mem (SImode, orig);
1944 /* @GOTENT is OK as is. */
1948 /* @PLT is OK as is on 64-bit, must be converted to
1949 lt-relative PLT on 31-bit. */
1953 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Unspec 114 == pool-relative @PLT (@PLT-.LTn). */
1955 addr = XVECEXP (addr, 0, 0);
1956 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
1957 addr = gen_rtx_CONST (SImode, addr);
1958 addr = force_const_mem (SImode, addr);
1959 emit_move_insn (temp, addr);
1961 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1962 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1963 new = gen_rtx_PLUS (Pmode, base, temp);
1967 emit_move_insn (reg, new);
1973 /* Everything else cannot happen. */
1978 else if (GET_CODE (addr) != PLUS)
/* Case 4: symbol + offset and other PLUS forms. */
1981 if (GET_CODE (addr) == PLUS)
1983 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1984 /* Check first to see if this is a constant offset
1985 from a local symbol reference. */
1986 if ((GET_CODE (op0) == LABEL_REF
1987 || (GET_CODE (op0) == SYMBOL_REF
1988 && (SYMBOL_REF_FLAG (op0)
1989 || CONSTANT_POOL_ADDRESS_P (op0))))
1990 && GET_CODE (op1) == CONST_INT)
1994 if (INTVAL (op1) & 1)
1996 /* LARL can't handle odd offsets, so emit a
1997 pair of LARL and LA. */
1998 rtx temp = reg? reg : gen_reg_rtx (Pmode)
/* Large odd offset: LARL the even part, LA the rest. */
2000 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
2002 int even = INTVAL (op1) - 1;
2003 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2004 op0 = gen_rtx_CONST (Pmode, op0);
2008 emit_move_insn (temp, op0);
2009 new = gen_rtx_PLUS (Pmode, temp, op1);
2013 emit_move_insn (reg, new);
2019 /* If the offset is even, we can just use LARL.
2020 This will happen automatically. */
2025 /* Access local symbols relative to the literal pool. */
2027 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2029 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
2030 addr = gen_rtx_PLUS (SImode, addr, op1);
2031 addr = gen_rtx_CONST (SImode, addr);
2032 addr = force_const_mem (SImode, addr);
2033 emit_move_insn (temp, addr);
2035 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2036 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2037 new = gen_rtx_PLUS (Pmode, base, temp);
2041 emit_move_insn (reg, new);
2047 /* Now, check whether it is an LT-relative symbol plus offset
2048 that was pulled out of the literal pool. Force it back in. */
2050 else if (GET_CODE (op0) == UNSPEC
2051 && GET_CODE (op1) == CONST_INT)
2053 if (XVECLEN (op0, 0) != 1)
2055 if (XINT (op0, 1) != 100)
2058 new = force_const_mem (SImode, orig);
2061 /* Otherwise, compute the sum. */
2064 base = legitimize_pic_address (XEXP (addr, 0), reg);
2065 new = legitimize_pic_address (XEXP (addr, 1),
2066 base == reg ? NULL_RTX : reg);
2067 if (GET_CODE (new) == CONST_INT)
2068 new = plus_constant (base, INTVAL (new));
/* Re-associate so the constant term ends up outermost. */
2071 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2073 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2074 new = XEXP (new, 1);
2076 new = gen_rtx_PLUS (Pmode, base, new);
2079 if (GET_CODE (new) == CONST)
2080 new = XEXP (new, 0);
2081 new = force_operand (new, 0);
2088 /* Emit insns to move operands[1] into operands[0]. */
2091 emit_pic_move (operands, mode)
2093 enum machine_mode mode ATTRIBUTE_UNUSED;
/* During/after reload we must not create new pseudos; reuse the
   destination as the scratch register instead. */
2095 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
/* Storing a symbolic constant directly into memory is not possible;
   force the source into a register first. */
2097 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2098 operands[1] = force_reg (Pmode, operands[1]);
2100 operands[1] = legitimize_pic_address (operands[1], temp);
2103 /* Try machine-dependent ways of modifying an illegitimate address X
2104 to be legitimate. If we find one, return the new, valid address.
2106 OLDX is the address as it was before break_out_memory_refs was called.
2107 In some cases it is useful to look at this to decide what needs to be done.
2109 MODE is the mode of the operand pointed to by X.
2111 When -fpic is used, special handling is needed for symbolic references.
2112 See comments by legitimize_pic_address for details. */
2115 legitimize_address (x, oldx, mode)
2117 register rtx oldx ATTRIBUTE_UNUSED;
2118 enum machine_mode mode ATTRIBUTE_UNUSED;
2120 rtx constant_term = const0_rtx;
/* PIC: symbolic (parts of) addresses must go through the GOT/pool. */
2124 if (SYMBOLIC_CONST (x)
2125 || (GET_CODE (x) == PLUS
2126 && (SYMBOLIC_CONST (XEXP (x, 0))
2127 || SYMBOLIC_CONST (XEXP (x, 1)))))
2128 x = legitimize_pic_address (x, 0);
/* Already fine? Nothing to do. */
2130 if (legitimate_address_p (mode, x, FALSE))
/* Separate X into a (possibly out-of-range) constant term and rest. */
2134 x = eliminate_constant_term (x, &constant_term);
2136 /* Optimize loading of large displacements by splitting them
2137 into the multiple of 4K and the rest; this allows the
2138 former to be CSE'd if possible.
2140 Don't do this if the displacement is added to a register
2141 pointing into the stack frame, as the offsets will
2142 change later anyway. */
2144 if (GET_CODE (constant_term) == CONST_INT
2145 && (INTVAL (constant_term) < 0
2146 || INTVAL (constant_term) >= 4096)
2147 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower = displacement mod 4096 (fits the 12-bit D-field);
   upper = the 4K-aligned remainder, loaded into a register. */
2149 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2150 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2152 rtx temp = gen_reg_rtx (Pmode);
2153 rtx val = force_operand (GEN_INT (upper), temp);
2155 emit_move_insn (temp, val);
2157 x = gen_rtx_PLUS (Pmode, x, temp);
2158 constant_term = GEN_INT (lower);
/* Force the non-register side of a PLUS into a register so the
   result is of the form reg + reg (+ disp). */
2161 if (GET_CODE (x) == PLUS)
2163 if (GET_CODE (XEXP (x, 0)) == REG)
2165 register rtx temp = gen_reg_rtx (Pmode);
2166 register rtx val = force_operand (XEXP (x, 1), temp);
2168 emit_move_insn (temp, val);
2170 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2173 else if (GET_CODE (XEXP (x, 1)) == REG)
2175 register rtx temp = gen_reg_rtx (Pmode);
2176 register rtx val = force_operand (XEXP (x, 0), temp);
2178 emit_move_insn (temp, val);
2180 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
/* Re-attach the (now in-range) constant displacement. */
2184 if (constant_term != const0_rtx)
2185 x = gen_rtx_PLUS (Pmode, x, constant_term);
2190 /* Emit code to move LEN bytes from DST to SRC. */
2193 s390_expand_movstr (dst, src, len)
/* Pick the 31- or 64-bit variants of the MVC (short) and
   MVCLE (long) expander patterns. */
2198 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2199 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2200 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2201 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Strategy 1: constant length 0..256 -- a single MVC, whose length
   field encodes LEN-1. Zero length emits nothing. */
2204 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2206 if (INTVAL (len) > 0)
2207 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Strategy 2: MVCLE with address/length register pairs. */
2210 else if (TARGET_MVCLE)
2212 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2213 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2214 rtx reg0 = gen_reg_rtx (double_mode);
2215 rtx reg1 = gen_reg_rtx (double_mode);
/* High part of each pair = address, low part = length. */
2217 emit_move_insn (gen_highpart (single_mode, reg0),
2218 force_operand (XEXP (dst, 0), NULL_RTX));
2219 emit_move_insn (gen_highpart (single_mode, reg1),
2220 force_operand (XEXP (src, 0), NULL_RTX));
2222 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2223 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2225 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* Strategy 3: runtime loop of 256-byte MVC blocks plus a final
   partial MVC with an execute-style variable length. */
2230 rtx dst_addr, src_addr, count, blocks, temp;
2231 rtx end_label = gen_label_rtx ();
2232 enum machine_mode mode;
2235 mode = GET_MODE (len);
2236 if (mode == VOIDmode)
2239 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2243 dst_addr = gen_reg_rtx (Pmode);
2244 src_addr = gen_reg_rtx (Pmode);
2245 count = gen_reg_rtx (mode);
2246 blocks = gen_reg_rtx (mode);
2248 convert_move (count, len, 1);
/* LEN == 0: skip everything. */
2249 emit_cmp_and_jump_insns (count, const0_rtx,
2250 EQ, NULL_RTX, mode, 1, end_label);
2252 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2253 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2254 dst = change_address (dst, VOIDmode, dst_addr);
2255 src = change_address (src, VOIDmode, src_addr);
/* count = LEN - 1; blocks = count >> 8 (number of full 256-byte
   chunks); the low 8 bits of count drive the trailing MVC. */
2257 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2259 emit_move_insn (count, temp);
2261 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2263 emit_move_insn (blocks, temp);
2265 expand_start_loop (1);
2266 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2267 make_tree (type, blocks),
2268 make_tree (type, const0_rtx)));
2270 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2271 s390_load_address (dst_addr,
2272 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2273 s390_load_address (src_addr,
2274 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2276 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2278 emit_move_insn (blocks, temp);
/* Final partial block of (count mod 256) + 1 bytes. */
2282 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2283 emit_label (end_label);
2287 /* Emit code to clear LEN bytes at DST. */
2290 s390_expand_clrstr (dst, len)
/* 31- vs 64-bit variants of the XC (short) and MVCLE-based
   (long) clear patterns. */
2294 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2295 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2296 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2297 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
/* Strategy 1: constant length 0..256 -- single instruction. */
2300 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2302 if (INTVAL (len) > 0)
2303 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
/* Strategy 2: MVCLE with a zero-length source pair (pads with 0). */
2306 else if (TARGET_MVCLE)
2308 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2309 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2310 rtx reg0 = gen_reg_rtx (double_mode);
2311 rtx reg1 = gen_reg_rtx (double_mode);
/* reg0 = destination address + length. */
2313 emit_move_insn (gen_highpart (single_mode, reg0),
2314 force_operand (XEXP (dst, 0), NULL_RTX));
2315 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* reg1 = empty source (address 0, length 0). */
2317 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2318 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2320 emit_insn ((*gen_long) (reg0, reg1, reg0));
/* Strategy 3: runtime loop over 256-byte blocks, same structure as
   s390_expand_movstr but with only a destination stream. */
2325 rtx dst_addr, src_addr, count, blocks, temp;
2326 rtx end_label = gen_label_rtx ();
2327 enum machine_mode mode;
2330 mode = GET_MODE (len);
2331 if (mode == VOIDmode)
2334 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2338 dst_addr = gen_reg_rtx (Pmode);
2339 src_addr = gen_reg_rtx (Pmode);
2340 count = gen_reg_rtx (mode);
2341 blocks = gen_reg_rtx (mode);
2343 convert_move (count, len, 1);
2344 emit_cmp_and_jump_insns (count, const0_rtx,
2345 EQ, NULL_RTX, mode, 1, end_label);
2347 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2348 dst = change_address (dst, VOIDmode, dst_addr);
/* count = LEN - 1; blocks = full 256-byte chunks. */
2350 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2352 emit_move_insn (count, temp);
2354 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2356 emit_move_insn (blocks, temp);
2358 expand_start_loop (1);
2359 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2360 make_tree (type, blocks),
2361 make_tree (type, const0_rtx)));
2363 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2364 s390_load_address (dst_addr,
2365 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2367 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2369 emit_move_insn (blocks, temp);
/* Trailing partial block. */
2373 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2374 emit_label (end_label);
2378 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2379 and return the result in TARGET. */
2382 s390_expand_cmpstr (target, op0, op1, len)
/* 31-/64-bit variants of CLC (short), CLCLE (long), and the
   condition-code-to-integer conversion. */
2388 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2389 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2390 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2391 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
2392 rtx (*gen_result) PARAMS ((rtx)) =
2393 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
/* Flush any pending queued operations on the operands. */
2395 op0 = protect_from_queue (op0, 0);
2396 op1 = protect_from_queue (op1, 0);
2397 len = protect_from_queue (len, 0);
/* Strategy 1: constant length 0..256 -- single CLC, then convert
   the condition code into TARGET. Zero length compares equal. */
2399 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2401 if (INTVAL (len) > 0)
2403 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2404 emit_insn ((*gen_result) (target));
2407 emit_move_insn (target, const0_rtx);
/* Strategy 2: CLCLE with address/length register pairs. */
2410 else if (TARGET_MVCLE)
2412 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2413 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2414 rtx reg0 = gen_reg_rtx (double_mode);
2415 rtx reg1 = gen_reg_rtx (double_mode);
2417 emit_move_insn (gen_highpart (single_mode, reg0),
2418 force_operand (XEXP (op0, 0), NULL_RTX));
2419 emit_move_insn (gen_highpart (single_mode, reg1),
2420 force_operand (XEXP (op1, 0), NULL_RTX));
2422 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2423 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2425 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2426 emit_insn ((*gen_result) (target));
/* Strategy 3: loop of 256-byte CLCs; exit early on first
   mismatching block (CC != 0). */
2431 rtx addr0, addr1, count, blocks, temp;
2432 rtx end_label = gen_label_rtx ();
2433 enum machine_mode mode;
2436 mode = GET_MODE (len);
2437 if (mode == VOIDmode)
2440 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2444 addr0 = gen_reg_rtx (Pmode);
2445 addr1 = gen_reg_rtx (Pmode);
2446 count = gen_reg_rtx (mode);
2447 blocks = gen_reg_rtx (mode);
2449 convert_move (count, len, 1);
2450 emit_cmp_and_jump_insns (count, const0_rtx,
2451 EQ, NULL_RTX, mode, 1, end_label);
2453 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
2454 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
2455 op0 = change_address (op0, VOIDmode, addr0);
2456 op1 = change_address (op1, VOIDmode, addr1);
/* count = LEN - 1; blocks = full 256-byte chunks. */
2458 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2460 emit_move_insn (count, temp);
2462 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2464 emit_move_insn (blocks, temp);
2466 expand_start_loop (1);
2467 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2468 make_tree (type, blocks),
2469 make_tree (type, const0_rtx)));
2471 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump on the CC register (reg 33, CCSmode):
   if the last CLC found a difference, leave the loop. */
2472 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
2473 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
2474 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
2475 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
2476 emit_jump_insn (temp);
2478 s390_load_address (addr0,
2479 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
2480 s390_load_address (addr1,
2481 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
2483 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2485 emit_move_insn (blocks, temp);
/* Trailing partial block, then convert the final CC to an int. */
2489 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
2490 emit_label (end_label);
2492 emit_insn ((*gen_result) (target));
2496 /* In the name of slightly smaller debug output, and to cater to
2497 general assembler lossage, recognize various UNSPEC sequences
2498 and turn them back into a direct symbol reference. */
2501 s390_simplify_dwarf_addr (orig_x)
/* Only GOT-style memory references are simplified. */
2506 if (GET_CODE (x) != MEM)
/* Pattern 1: (mem (plus pic_reg (const (unspec [sym] 110))))
   -- an @GOT12 access; strip down to the bare symbol. */
2510 if (GET_CODE (x) == PLUS
2511 && GET_CODE (XEXP (x, 1)) == CONST
2512 && GET_CODE (XEXP (x, 0)) == REG
2513 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2515 y = XEXP (XEXP (x, 1), 0);
2516 if (GET_CODE (y) == UNSPEC
2517 && XINT (y, 1) == 110)
2518 return XVECEXP (y, 0, 0);
/* Pattern 2: (mem (const (unspec [sym] 111))) -- an @GOTENT access. */
2522 if (GET_CODE (x) == CONST)
2525 if (GET_CODE (y) == UNSPEC
2526 && XINT (y, 1) == 111)
2527 return XVECEXP (y, 0, 0);
2534 /* Output symbolic constant X in assembler syntax to
2535 stdio stream FILE. */
2538 s390_output_symbolic_const (file, x)
2542 switch (GET_CODE (x))
/* CONST: print the wrapped expression. */
2547 s390_output_symbolic_const (file, XEXP (x, 0));
/* PLUS: operand0 '+' operand1. */
2551 s390_output_symbolic_const (file, XEXP (x, 0));
2552 fprintf (file, "+");
2553 s390_output_symbolic_const (file, XEXP (x, 1));
/* MINUS: operand0 '-' operand1. */
2557 s390_output_symbolic_const (file, XEXP (x, 0));
2558 fprintf (file, "-");
2559 s390_output_symbolic_const (file, XEXP (x, 1));
/* Plain constants / symbols: generic output. */
2566 output_addr_const (file, x);
/* UNSPEC: map each code to its relocation/pool-label syntax.
   Codes seen here mirror those built in legitimize_pic_address. */
2570 if (XVECLEN (x, 0) != 1)
2571 output_operand_lossage ("invalid UNSPEC as operand (1)");
2572 switch (XINT (x, 1))
/* Pool-relative: "sym-.LTn". */
2576 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2577 fprintf (file, "-.LT%d", current_function_funcdef_no);
/* Reverse pool-relative: ".LTn-sym". */
2580 fprintf (file, ".LT%d-", current_function_funcdef_no);
2581 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
/* Small GOT offset: "sym@GOT12". */
2584 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2585 fprintf (file, "@GOT12");
/* PC-relative GOT entry: "sym@GOTENT". */
2588 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2589 fprintf (file, "@GOTENT");
/* Full GOT offset: "sym@GOT". */
2592 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2593 fprintf (file, "@GOT");
/* PLT entry: "sym@PLT". */
2596 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2597 fprintf (file, "@PLT");
/* Pool-relative PLT: "sym@PLT-.LTn". */
2600 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2601 fprintf (file, "@PLT-.LT%d", current_function_funcdef_no);
2604 output_operand_lossage ("invalid UNSPEC as operand (2)");
2610 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2615 /* Output address operand ADDR in assembler syntax to
2616 stdio stream FILE. Format: "disp(index,base)", "disp(base)",
2617 or bare "disp"; a missing displacement prints as 0. */
2619 print_operand_address (file, addr)
2623 struct s390_address ad;
/* Strict decomposition: at output time every register must be a
   valid hard base/index register. */
2625 if (!s390_decompose_address (addr, &ad)
2626 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2627 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2628 output_operand_lossage ("Cannot decompose address.");
2631 s390_output_symbolic_const (file, ad.disp);
2633 fprintf (file, "0");
2635 if (ad.base && ad.indx)
2636 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2637 reg_names[REGNO (ad.base)]);
2639 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2642 /* Output operand X in assembler syntax to stdio stream FILE.
2643 CODE specified the format flag. The following format flags
2646 'C': print opcode suffix for branch condition.
2647 'D': print opcode suffix for inverse branch condition.
2648 'O': print only the displacement of a memory reference.
2649 'R': print only the base register of a memory reference.
2650 'N': print the second word of a DImode operand.
2651 'M': print the second word of a TImode operand.
2653 'b': print integer X as if it's an unsigned byte.
2654 'x': print integer X as if it's an unsigned word.
2655 'h': print integer X as if it's a signed word. */
2658 print_operand (file, x, code)
/* 'C'/'D': branch mnemonic suffix, normal vs inverted condition. */
2666 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2670 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'O': displacement part only; register-free address required. */
2675 struct s390_address ad;
2677 if (GET_CODE (x) != MEM
2678 || !s390_decompose_address (XEXP (x, 0), &ad)
2679 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2684 s390_output_symbolic_const (file, ad.disp);
2686 fprintf (file, "0");
/* 'R': base register only ("0" when absent). */
2692 struct s390_address ad;
2694 if (GET_CODE (x) != MEM
2695 || !s390_decompose_address (XEXP (x, 0), &ad)
2696 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2701 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2703 fprintf (file, "0");
/* 'N': second word of a DImode value -- next register, or the
   memory location 4 bytes further on. */
2708 if (GET_CODE (x) == REG)
2709 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2710 else if (GET_CODE (x) == MEM)
2711 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode value -- next register, or the
   memory location 8 bytes further on. */
2717 if (GET_CODE (x) == REG)
2718 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2719 else if (GET_CODE (x) == MEM)
2720 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
/* Default handling by operand code. */
2726 switch (GET_CODE (x))
2729 fprintf (file, "%s", reg_names[REGNO (x)]);
2733 output_address (XEXP (x, 0));
2740 s390_output_symbolic_const (file, x);
/* CONST_INT with 'b'/'x'/'h': mask to byte/halfword; 'h' also
   sign-extends the low 16 bits via the xor/sub trick. */
2745 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2746 else if (code == 'x')
2747 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
2748 else if (code == 'h')
2749 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2751 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE: same masking applied to the low word. */
2755 if (GET_MODE (x) != VOIDmode)
2758 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2759 else if (code == 'x')
2760 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2761 else if (code == 'h')
2762 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2768 fatal_insn ("UNKNOWN in print_operand !?", x);
2773 /* Target hook for assembling integer objects. We need to define it
2774 here to work around a bug in some versions of GAS, which couldn't
2775 handle values smaller than INT_MIN when printed in decimal. */
2778 s390_assemble_integer (x, size, aligned_p)
/* Only the problematic case -- an aligned 8-byte integer below
   INT_MIN -- is special-cased, printed in hex instead of decimal. */
2783 if (size == 8 && aligned_p
2784 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2786 fputs ("\t.quad\t", asm_out_file);
2787 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2788 putc ('\n', asm_out_file);
/* Everything else goes through the generic implementation. */
2791 return default_assemble_integer (x, size, aligned_p);
2795 #define DEBUG_SCHED 0
2797 /* Returns true if register REGNO is used for forming
2798 a memory address in expression X. */
2801 reg_used_in_mem_p (regno, x)
2805 enum rtx_code code = GET_CODE (x);
/* A MEM whose address mentions REGNO is a hit. */
2811 if (refers_to_regno_p (regno, regno+1,
/* A jump (SET with PC destination) counts as an address use too. */
2815 else if (code == SET
2816 && GET_CODE (SET_DEST (x)) == PC)
2818 if (refers_to_regno_p (regno, regno+1,
/* Otherwise recurse into all sub-expressions ('e') and vectors ('E'). */
2823 fmt = GET_RTX_FORMAT (code);
2824 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2827 && reg_used_in_mem_p (regno, XEXP (x, i)))
2830 else if (fmt[i] == 'E')
2831 for (j = 0; j < XVECLEN (x, i); j++)
2832 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2838 /* Returns true if expression DEP_RTX sets an address register
2839 used by instruction INSN to address memory. */
2842 addr_generation_dependency_p (dep_rtx, insn)
2848 if (GET_CODE (dep_rtx) == SET)
2850 target = SET_DEST (dep_rtx);
/* Look through STRICT_LOW_PART and SUBREGs to the real register. */
2851 if (GET_CODE (target) == STRICT_LOW_PART)
2852 target = XEXP (target, 0);
2853 while (GET_CODE (target) == SUBREG)
2854 target = SUBREG_REG (target);
2856 if (GET_CODE (target) == REG)
2858 int regno = REGNO (target);
/* For an LA consumer, the whole source expression counts as
   address arithmetic; unwrap a two-element PARALLEL first. */
2860 if (get_attr_type (insn) == TYPE_LA)
2862 pat = PATTERN (insn);
2863 if (GET_CODE (pat) == PARALLEL)
2865 if (XVECLEN (pat, 0) != 2)
2867 pat = XVECEXP (pat, 0, 0);
2869 if (GET_CODE (pat) == SET)
2870 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* Memory-type consumers: check actual address formation. */
2874 else if (get_attr_atype (insn) == ATYPE_MEM)
2875 return reg_used_in_mem_p (regno, PATTERN (insn));
2882 /* Return the modified cost of the dependency of instruction INSN
2883 on instruction DEP_INSN through the link LINK. COST is the
2884 default cost of that dependency.
2886 Data dependencies are all handled without delay. However, if a
2887 register is modified and subsequently used as base or index
2888 register of a memory reference, at least 4 cycles need to pass
2889 between setting and using the register to avoid pipeline stalls.
2890 An exception is the LA instruction. An address generated by LA can
2891 be used by introducing only a one cycle stall on the pipeline. */
2894 s390_adjust_cost (insn, link, dep_insn, cost)
2903 /* If the dependence is an anti-dependence, there is no cost. For an
2904 output dependence, there is sometimes a cost, but it doesn't seem
2905 worth handling those few cases. */
2907 if (REG_NOTE_KIND (link) != 0)
2910 /* If we can't recognize the insns, we can't really do anything. */
2911 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2914 dep_rtx = PATTERN (dep_insn);
/* Single SET producer: penalize address-generation dependencies
   (+1 cycle after LA, +4 otherwise). */
2916 if (GET_CODE (dep_rtx) == SET)
2918 if (addr_generation_dependency_p (dep_rtx, insn))
2920 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2923 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2925 debug_rtx (dep_insn);
/* PARALLEL producer: check each element for the same dependency. */
2930 else if (GET_CODE (dep_rtx) == PARALLEL)
2932 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2934 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2937 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2940 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2942 debug_rtx (dep_insn);
2953 /* A C statement (sans semicolon) to update the integer scheduling priority
2954 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2955 increase the priority to execute INSN later. Do not define this macro if
2956 you do not need to adjust the scheduling priorities of insns.
2958 A LA instruction maybe scheduled later, since the pipeline bypasses the
2959 calculated value. */
2962 s390_adjust_priority (insn, priority)
2963 rtx insn ATTRIBUTE_UNUSED;
/* Non-instructions and USE/CLOBBER markers keep their priority. */
2966 if (! INSN_P (insn))
2969 if (GET_CODE (PATTERN (insn)) == USE
2970 || GET_CODE (PATTERN (insn)) == CLOBBER)
2973 switch (get_attr_type (insn))
/* NOTE(review): the scaling applied here is elided in this view. */
2979 if (priority >= 0 && priority < 0x01000000)
2983 /* LM in epilogue should never be scheduled. This
2984 is due to literal access done in function body.
2985 The usage of register 13 is not mentioned explicitly,
2986 leading to scheduling 'LM' across these instructions.
2988 priority = 0x7fffffff;
2996 /* Split all branches that exceed the maximum distance.
2997    Returns true if this created a new literal pool entry.
2999    Code generated by this routine is allowed to use
3000    TEMP_REG as temporary scratch register.  If this is
3001    done, TEMP_USED is set to true.  */
3004 s390_split_branches (temp_reg, temp_used)
3008   int new_literal = 0;
3009   rtx insn, pat, tmp, target;
3012   /* We need correct insn addresses. */
3014   shorten_branches (get_insns ());
3016   /* Find all branches that exceed 64KB, and split them. */
3018   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3020       if (GET_CODE (insn) != JUMP_INSN)
     /* Strip a surrounding PARALLEL (e.g. branch with clobbers)
        down to the branch SET itself.  */
3023       pat = PATTERN (insn);
3024       if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3025 	pat = XVECEXP (pat, 0, 0);
3026       if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
     /* Locate the LABEL_REF: either a plain jump or one arm of a
        conditional IF_THEN_ELSE jump.  */
3029       if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3031 	  label = &SET_SRC (pat);
3033       else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3035 	  if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3036 	    label = &XEXP (SET_SRC (pat), 1);
3037 	  else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3038 	    label = &XEXP (SET_SRC (pat), 2);
     /* Short branches (within PC-relative range) need no splitting.  */
3045       if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
3052 	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3053 	  INSN_ADDRESSES_NEW (tmp, -1);
     /* Otherwise load the target address from the literal pool ...  */
3060 	  tmp = force_const_mem (Pmode, *label);
3061 	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3062 	  INSN_ADDRESSES_NEW (tmp, -1);
     /* ... or, for 31-bit PIC, a pool-relative offset (unspec 104)
        added to the pool base register.  */
3069 	  tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3070 	  tmp = gen_rtx_CONST (SImode, tmp);
3071 	  tmp = force_const_mem (SImode, tmp);
3072 	  tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3073 	  INSN_ADDRESSES_NEW (tmp, -1);
3075 	  target = gen_rtx_REG (Pmode, BASE_REGISTER);
3076 	  target = gen_rtx_PLUS (Pmode, target, temp_reg);
     /* Retarget the branch at the computed absolute address.  */
3079       if (!validate_change (insn, label, target, 0))
3087 /* Find a literal pool symbol referenced in RTX X, and store
3088    it at REF.  Will abort if X contains references to more than
3089    one such pool symbol; multiple references to the same symbol
3090    are allowed, however.
3092    The rtx pointed to by REF must be initialized to NULL_RTX
3093    by the caller before calling this routine.  */
3096 find_constant_pool_ref (x, ref)
     /* A pool reference is a SYMBOL_REF flagged as a pool address.  */
3103   if (GET_CODE (x) == SYMBOL_REF
3104       && CONSTANT_POOL_ADDRESS_P (x))
3106       if (*ref == NULL_RTX)
     /* Recurse over all sub-rtxes ('e') and rtx vectors ('E').  */
3112   fmt = GET_RTX_FORMAT (GET_CODE (x));
3113   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3117 	  find_constant_pool_ref (XEXP (x, i), ref);
3119       else if (fmt[i] == 'E')
3121 	  for (j = 0; j < XVECLEN (x, i); j++)
3122 	    find_constant_pool_ref (XVECEXP (x, i, j), ref);
3127 /* Replace every reference to the literal pool symbol REF
3128    in X by the address ADDR.  Fix up MEMs as required.  */
3131 replace_constant_pool_ref (x, ref, addr)
3142   /* Literal pool references can only occur inside a MEM ...  */
3143   if (GET_CODE (*x) == MEM)
3145       rtx memref = XEXP (*x, 0);
     /* Direct reference: just substitute the new address.  */
3149 	  *x = replace_equiv_address (*x, addr);
     /* Reference plus constant offset: keep the offset.  */
3153       if (GET_CODE (memref) == CONST
3154 	  && GET_CODE (XEXP (memref, 0)) == PLUS
3155 	  && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3156 	  && XEXP (XEXP (memref, 0), 0) == ref)
3158 	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3159 	  *x = replace_equiv_address (*x, plus_constant (addr, off));
3164   /* ... or a load-address type pattern.  */
3165   if (GET_CODE (*x) == SET)
3167       rtx addrref = SET_SRC (*x);
3171 	  SET_SRC (*x) = addr;
3175       if (GET_CODE (addrref) == CONST
3176 	  && GET_CODE (XEXP (addrref, 0)) == PLUS
3177 	  && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3178 	  && XEXP (XEXP (addrref, 0), 0) == ref)
3180 	  HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3181 	  SET_SRC (*x) = plus_constant (addr, off);
     /* Recurse over remaining sub-rtxes and vectors.  */
3186   fmt = GET_RTX_FORMAT (GET_CODE (*x));
3187   for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3191 	  replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3193       else if (fmt[i] == 'E')
3195 	  for (j = 0; j < XVECLEN (*x, i); j++)
3196 	    replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3201 /* Check whether ADDR is an address that uses the base register,
3202    without actually constituting a literal pool access.  (This happens
3203    in 31-bit PIC mode, where the base register is used as anchor for
3204    relative addressing of local symbols.)
3206    Returns 1 if the base register occupies the base slot,
3207    returns 2 if the base register occupies the index slot,
3208    returns 0 if the address is not of this form.  */
3211 find_base_register_in_addr (addr)
3212      struct s390_address *addr;
3214   /* If DISP is complex, we might have a literal pool reference.  */
3215   if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
3218   if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3221   if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3227 /* Return true if X contains an address that uses the base register,
3228    without actually constituting a literal pool access.  */
3231 find_base_register_ref (x)
3235   struct s390_address addr;
3239   /* Addresses can only occur inside a MEM ...  */
3240   if (GET_CODE (x) == MEM)
3242       if (s390_decompose_address (XEXP (x, 0), &addr)
3243 	  && find_base_register_in_addr (&addr))
3247   /* ... or a load-address type pattern.  */
3248   if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
3250       if (s390_decompose_address (SET_SRC (x), &addr)
3251 	  && find_base_register_in_addr (&addr))
     /* Recurse over sub-rtxes, OR-ing the per-element results.  */
3255   fmt = GET_RTX_FORMAT (GET_CODE (x));
3256   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3260 	  retv |= find_base_register_ref (XEXP (x, i));
3262       else if (fmt[i] == 'E')
3264 	  for (j = 0; j < XVECLEN (x, i); j++)
3265 	    retv |= find_base_register_ref (XVECEXP (x, i, j));
3272 /* If X contains an address that uses the base register,
3273    without actually constituting a literal pool access,
3274    replace the base register with REPL in all such cases.
3276    Handles both MEMs and load address patterns.  */
3279 replace_base_register_ref (x, repl)
3283   struct s390_address addr;
3288   /* Addresses can only occur inside a MEM ...  */
3289   if (GET_CODE (*x) == MEM)
3291       if (s390_decompose_address (XEXP (*x, 0), &addr)
3292 	  && (pos = find_base_register_in_addr (&addr)))
     /* Rebuild the address as base [+ index] [+ disp] with the base
        register replaced by REPL (pos tells which slot it was in).  */
3299 	  new_addr = addr.base;
3301 	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3303 	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3305 	  *x = replace_equiv_address (*x, new_addr);
3310   /* ... or a load-address type pattern.  */
3311   if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
3313       if (s390_decompose_address (SET_SRC (*x), &addr)
3314 	  && (pos = find_base_register_in_addr (&addr)))
3321 	  new_addr = addr.base;
3323 	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3325 	    new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3327 	  SET_SRC (*x) = new_addr;
     /* Recurse over remaining sub-rtxes and vectors.  */
3332   fmt = GET_RTX_FORMAT (GET_CODE (*x));
3333   for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3337 	  replace_base_register_ref (&XEXP (*x, i), repl);
3339       else if (fmt[i] == 'E')
3341 	  for (j = 0; j < XVECLEN (*x, i); j++)
3342 	    replace_base_register_ref (&XVECEXP (*x, i, j), repl);
3348 /* We keep a list of constants which we have to add to internal
3349    constant tables in the middle of large functions.  */
     /* Machine modes supported in pool chunks, and matching
        consttable-emitter generators (one per mode).  */
3351 #define NR_C_MODES 6
3352 enum machine_mode constant_modes[NR_C_MODES] =
3360 rtx (*gen_consttable[NR_C_MODES])(rtx) =
3362     gen_consttable_df, gen_consttable_di,
3363     gen_consttable_sf, gen_consttable_si,
     /* One pool entry: constant value plus the label addressing it.  */
3370   struct constant *next;
     /* One pool chunk: chained in a list covering the whole function;
        per-mode constant lists plus bookkeeping for the covered insns.  */
3375 struct constant_pool
3377   struct constant_pool *next;
3382   struct constant *constants[NR_C_MODES];
3388 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
3389 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
3390 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
3392 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3393 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3394 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
3395 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3396 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3397 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3398 static void s390_add_anchor PARAMS ((struct constant_pool *));
3399 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3400 static void s390_free_pool PARAMS ((struct constant_pool *));
3402 /* Create new constant pool covering instructions starting at INSN
3403    and chain it to the end of POOL_LIST.  */
3405 static struct constant_pool *
3406 s390_start_pool (pool_list, insn)
3407      struct constant_pool **pool_list;
3410   struct constant_pool *pool, **prev;
3413   pool = (struct constant_pool *) xmalloc (sizeof *pool);
     /* Start with empty per-mode constant lists.  */
3415   for (i = 0; i < NR_C_MODES; i++)
3416     pool->constants[i] = NULL;
3418   pool->label = gen_label_rtx ();
3419   pool->first_insn = insn;
3420   pool->pool_insn = NULL_RTX;
3421   pool->insns = BITMAP_XMALLOC ();
3423   pool->anchor = FALSE;
     /* Append to the tail of POOL_LIST.  */
3425   for (prev = pool_list; *prev; prev = &(*prev)->next)
3432 /* End range of instructions covered by POOL at INSN and emit
3433    placeholder insn representing the pool.  */
3436 s390_end_pool (pool, insn)
3437      struct constant_pool *pool;
     /* Reserve alignment slop in the size fed to the pool insn.  */
3440   rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
     /* NULL INSN means: end the pool after the very last insn.  */
3443     insn = get_last_insn ();
3445   pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
3446   INSN_ADDRESSES_NEW (pool->pool_insn, -1);
3449 /* Add INSN to the list of insns covered by POOL.
     Membership is tracked as a bitmap over INSN_UIDs.  */
3452 s390_add_pool_insn (pool, insn)
3453      struct constant_pool *pool;
3456   bitmap_set_bit (pool->insns, INSN_UID (insn));
3459 /* Return pool out of POOL_LIST that covers INSN.
     Linear search over the chunk list; uses the per-pool insn bitmap.  */
3461 static struct constant_pool *
3462 s390_find_pool (pool_list, insn)
3463      struct constant_pool *pool_list;
3466   struct constant_pool *pool;
3468   for (pool = pool_list; pool; pool = pool->next)
3469     if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
3475 /* Add constant VAL of mode MODE to the constant pool POOL.  */
3478 s390_add_constant (pool, val, mode)
3479      struct constant_pool *pool;
3481      enum machine_mode mode;
     /* Map MODE to its slot in constant_modes[]; unknown modes are
        a fatal internal error.  */
3486   for (i = 0; i < NR_C_MODES; i++)
3487     if (constant_modes[i] == mode)
3489   if (i == NR_C_MODES)
     /* Avoid duplicate entries: reuse an existing equal constant.  */
3492   for (c = pool->constants[i]; c != NULL; c = c->next)
3493     if (rtx_equal_p (val, c->value))
     /* Not found: prepend a fresh entry and grow the pool size.  */
3498       c = (struct constant *) xmalloc (sizeof *c);
3500       c->label = gen_label_rtx ();
3501       c->next = pool->constants[i];
3502       pool->constants[i] = c;
3503       pool->size += GET_MODE_SIZE (mode);
3507 /* Find constant VAL of mode MODE in the constant pool POOL.
3508    Return an RTX describing the distance from the start of
3509    the pool to the location of the new constant.  */
3512 s390_find_constant (pool, val, mode)
3513      struct constant_pool *pool;
3515      enum machine_mode mode;
     /* Map MODE to its constant_modes[] slot, as in s390_add_constant.  */
3521   for (i = 0; i < NR_C_MODES; i++)
3522     if (constant_modes[i] == mode)
3524   if (i == NR_C_MODES)
3527   for (c = pool->constants[i]; c != NULL; c = c->next)
3528     if (rtx_equal_p (val, c->value))
     /* Offset = (label of constant) - (label of pool start).  */
3534   offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
3535 				 gen_rtx_LABEL_REF (Pmode, pool->label));
3536   offset = gen_rtx_CONST (Pmode, offset);
3540 /* Set 'anchor' flag in POOL.
     Causes s390_dump_pool to emit an anchor entry for this chunk.  */
3543 s390_add_anchor (pool)
3544      struct constant_pool *pool;
3548       pool->anchor = TRUE;
3553 /* Dump out the constants in POOL.  */
3556 s390_dump_pool (pool)
3557      struct constant_pool *pool;
3563   /* Pool start insn switches to proper section
3564      and guarantees necessary alignment.  */
3566     insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
3568     insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
3569   INSN_ADDRESSES_NEW (insn, -1);
3571   insn = emit_label_after (pool->label, insn);
3572   INSN_ADDRESSES_NEW (insn, -1);
3574   /* Emit anchor if we need one.  */
     /* The anchor is a pool-start-relative reference (unspec 105).  */
3577       rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
3578       anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
3579       anchor = gen_rtx_CONST (VOIDmode, anchor);
3580       insn = emit_insn_after (gen_consttable_si (anchor), insn);
3581       INSN_ADDRESSES_NEW (insn, -1);
3584   /* Dump constants in descending alignment requirement order,
3585      ensuring proper alignment for every constant.  */
3586   for (i = 0; i < NR_C_MODES; i++)
3587     for (c = pool->constants[i]; c; c = c->next)
3589 	/* Convert 104 unspecs to pool-relative references.  */
3590 	rtx value = c->value;
3591 	if (GET_CODE (value) == CONST
3592 	    && GET_CODE (XEXP (value, 0)) == UNSPEC
3593 	    && XINT (XEXP (value, 0), 1) == 104
3594 	    && XVECLEN (XEXP (value, 0), 0) == 1)
3596 	    value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
3597 				   gen_rtx_LABEL_REF (VOIDmode, pool->label));
3598 	    value = gen_rtx_CONST (VOIDmode, value);
     	/* Each constant gets its own label followed by its table entry.  */
3601 	insn = emit_label_after (c->label, insn);
3602 	INSN_ADDRESSES_NEW (insn, -1);
3603 	insn = emit_insn_after (gen_consttable[i] (value), insn);
3604 	INSN_ADDRESSES_NEW (insn, -1);
3607   /* Pool end insn switches back to previous section
3608      and guarantees necessary alignment.  */
3610     insn = emit_insn_after (gen_pool_end_64 (), insn);
3612     insn = emit_insn_after (gen_pool_end_31 (), insn);
3613   INSN_ADDRESSES_NEW (insn, -1);
3615   insn = emit_barrier_after (insn);
3616   INSN_ADDRESSES_NEW (insn, -1);
3618   /* Remove placeholder insn.  */
3619   remove_insn (pool->pool_insn);
3624 /* Free all memory used by POOL.
     Walks every per-mode constant list, then releases the insn bitmap.  */
3627 s390_free_pool (pool)
3628      struct constant_pool *pool;
3632   for (i = 0; i < NR_C_MODES; i++)
3634       struct constant *c = pool->constants[i];
3637 	  struct constant *next = c->next;
3643   BITMAP_XFREE (pool->insns);
3648 /* Chunkify the literal pool if required.
3650    Code generated by this routine is allowed to use
3651    TEMP_REG as temporary scratch register.  If this is
3652    done, TEMP_USED is set to true.  */
     /* Target chunk size window: start looking for a split point once a
        chunk reaches MIN, and force a split before exceeding MAX.  */
3654 #define S390_POOL_CHUNK_MIN	0xc00
3655 #define S390_POOL_CHUNK_MAX	0xe00
3657 static struct constant_pool *
3658 s390_chunkify_start (temp_reg, temp_used)
3662   rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3664   struct constant_pool *curr_pool = NULL, *pool_list = NULL;
     /* Base-reload generator differs between 31- and 64-bit mode.  */
3669   rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
3670     TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
3673   /* Do we need to chunkify the literal pool? */
3675   if (get_pool_size () < S390_POOL_CHUNK_MAX)
3678   /* We need correct insn addresses. */
3680   shorten_branches (get_insns ());
3682   /* Scan all insns and move literals to pool chunks.
3683      Also, emit anchor reload insns before every insn that uses
3684      the literal pool base register as anchor pointer. */
3686   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3688       if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3690 	  rtx pool_ref = NULL_RTX;
3691 	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
     	  /* Insn references the pool: record its constant in the
     	     currently open chunk (opening one if necessary).  */
3695 		curr_pool = s390_start_pool (&pool_list, insn);
3697 	      s390_add_constant (curr_pool, get_pool_constant (pool_ref),
3698 				 get_pool_mode (pool_ref));
3699 	      s390_add_pool_insn (curr_pool, insn);
     	  /* 31-bit PIC anchor use: reload the anchor into TEMP_REG
     	     just before the insn.  */
3702 	  else if (!TARGET_64BIT && flag_pic
3703 		   && find_base_register_ref (PATTERN (insn)))
3705 	      rtx new = gen_reload_anchor (temp_reg, base_reg);
3706 	      new = emit_insn_before (new, insn);
3707 	      INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
3712 		  curr_pool = s390_start_pool (&pool_list, new);
3714 	      s390_add_anchor (curr_pool);
3715 	      s390_add_pool_insn (curr_pool, insn);
3719       if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
3721 	    s390_add_pool_insn (curr_pool, insn);
     	  /* Insns without valid addresses cannot bound a chunk.  */
3724 	  || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3725 	  || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3730 	  if (curr_pool->size < S390_POOL_CHUNK_MAX)
3733 	  s390_end_pool (curr_pool, NULL_RTX);
     	  /* Extent of the current chunk in bytes of code covered.  */
3738 	  int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3739 			   - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3742 	  /* We will later have to insert base register reload insns.
3743 	     Those will have an effect on code size, which we need to
3744 	     consider here.  This calculation makes rather pessimistic
3745 	     worst-case assumptions.  */
3746 	  if (GET_CODE (insn) == CODE_LABEL)
3749 	  if (chunk_size < S390_POOL_CHUNK_MIN
3750 	      && curr_pool->size < S390_POOL_CHUNK_MIN)
3753 	  /* Pool chunks can only be inserted after BARRIERs ...  */
3754 	  if (GET_CODE (insn) == BARRIER)
3756 	      s390_end_pool (curr_pool, insn);
3761 	  /* ... so if we don't find one in time, create one.  */
3762 	  else if ((chunk_size > S390_POOL_CHUNK_MAX
3763 		   || curr_pool->size > S390_POOL_CHUNK_MAX))
3765 	      rtx label, jump, barrier;
3767 	      /* We can insert the barrier only after a 'real' insn.  */
3768 	      if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
3770 	      if (get_attr_length (insn) == 0)
3773 	      /* Don't separate insns created by s390_split_branches.  */
3774 	      if (GET_CODE (insn) == INSN
3775 		  && GET_CODE (PATTERN (insn)) == SET
3776 		  && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
     	      /* Emit jump-around-pool: jump over the barrier to LABEL,
     	         then place the pool chunk after the barrier.  */
3779 	      label = gen_label_rtx ();
3780 	      jump = emit_jump_insn_after (gen_jump (label), insn);
3781 	      barrier = emit_barrier_after (jump);
3782 	      insn = emit_label_after (label, barrier);
3783 	      JUMP_LABEL (jump) = label;
3784 	      LABEL_NUSES (label) = 1;
3786 	      INSN_ADDRESSES_NEW (jump, -1);
3787 	      INSN_ADDRESSES_NEW (barrier, -1);
3788 	      INSN_ADDRESSES_NEW (insn, -1);
3790 	      s390_end_pool (curr_pool, barrier);
     /* Close the final chunk, if still open.  */
3798     s390_end_pool (curr_pool, NULL_RTX);
3801   /* Find all labels that are branched into
3802      from an insn belonging to a different chunk. */
3804   far_labels = BITMAP_XMALLOC ();
3806   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3808       /* Labels marked with LABEL_PRESERVE_P can be target
3809 	 of non-local jumps, so we have to mark them.
3810 	 The same holds for named labels.
3812 	 Don't do that, however, if it is the label before
3815       if (GET_CODE (insn) == CODE_LABEL
3816 	  && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3818 	  rtx vec_insn = next_real_insn (insn);
3819 	  rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3820 			PATTERN (vec_insn) : NULL_RTX;
3822 	      || !(GET_CODE (vec_pat) == ADDR_VEC
3823 		   || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3824 	    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3827       /* If we have a direct jump (conditional or unconditional)
3828 	 or a casesi jump, check all potential targets.  */
3829       else if (GET_CODE (insn) == JUMP_INSN)
3831 	  rtx pat = PATTERN (insn);
3832 	  if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3833 	    pat = XVECEXP (pat, 0, 0);
3835 	  if (GET_CODE (pat) == SET)
3837 	      rtx label = JUMP_LABEL (insn);
     	      /* Target in a different chunk -> label needs a base reload.  */
3840 		  if (s390_find_pool (pool_list, label)
3841 		      != s390_find_pool (pool_list, insn))
3842 		    bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3845 	  else if (GET_CODE (pat) == PARALLEL
3846 		   && XVECLEN (pat, 0) == 2
3847 		   && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3848 		   && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3849 		   && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3851 	      /* Find the jump table used by this casesi jump.  */
3852 	      rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3853 	      rtx vec_insn = next_real_insn (vec_label);
3854 	      rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3855 			    PATTERN (vec_insn) : NULL_RTX;
3857 		  && (GET_CODE (vec_pat) == ADDR_VEC
3858 		      || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3860 		  int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
3862 		  for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3864 		      rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3866 		      if (s390_find_pool (pool_list, label)
3867 			  != s390_find_pool (pool_list, insn))
3868 			bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3875   /* Insert base register reload insns before every pool.  */
3877   for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3879       rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3880       rtx insn = curr_pool->first_insn;
3881       INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3884   /* Insert base register reload insns at every far label.  */
3886   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3887     if (GET_CODE (insn) == CODE_LABEL
3888 	&& bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3890 	struct constant_pool *pool = s390_find_pool (pool_list, insn);
3893 	    rtx new_insn = gen_reload_base (base_reg, pool->label);
3894 	    INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3899   BITMAP_XFREE (far_labels);
3902   /* Recompute insn addresses.  */
3904   init_insn_lengths ();
3905   shorten_branches (get_insns ());
3910 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3911    After we have decided to use this list, finish implementing
3912    all changes to the current function as required.
3914    Code generated by this routine is allowed to use
3915    TEMP_REG as temporary scratch register.  */
3918 s390_chunkify_finish (pool_list, temp_reg)
3919      struct constant_pool *pool_list;
3922   rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3923   struct constant_pool *curr_pool = NULL;
3927   /* Replace all literal pool references.  */
3929   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3931       curr_pool = s390_find_pool (pool_list, insn);
3935       if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3937 	  rtx addr, pool_ref = NULL_RTX;
3938 	  find_constant_pool_ref (PATTERN (insn), &pool_ref);
     	  /* Rewrite the reference as base_reg + chunk-relative offset
     	     and force the insn to be re-recognized.  */
3941 	      addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
3942 						    get_pool_mode (pool_ref));
3943 	      addr = gen_rtx_PLUS (Pmode, base_reg, addr);
3944 	      replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
3945 	      INSN_CODE (insn) = -1;
     	  /* 31-bit PIC anchor uses go through TEMP_REG instead.  */
3948 	  else if (!TARGET_64BIT && flag_pic
3949 		   && find_base_register_ref (PATTERN (insn)))
3951 	      replace_base_register_ref (&PATTERN (insn), temp_reg);
3956   /* Dump out all literal pools.  */
3958   for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3959     s390_dump_pool (curr_pool);
3961   /* Free pool list.  */
3965       struct constant_pool *next = pool_list->next;
3966       s390_free_pool (pool_list);
3971 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3972    We have decided we cannot use this list, so revert all changes
3973    to the current function that were done by s390_chunkify_start.  */
3976 s390_chunkify_cancel (pool_list)
3977      struct constant_pool *pool_list;
3979   struct constant_pool *curr_pool = NULL;
3982   /* Remove all pool placeholder insns.  */
3984   for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3986       /* Did we insert an extra barrier?  Remove it.  */
3987       rtx barrier = PREV_INSN (curr_pool->pool_insn);
3988       rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
3989       rtx label = NEXT_INSN (curr_pool->pool_insn);
     /* Only remove the jump/barrier/label trio if it is exactly the
        jump-around-pool sequence emitted by s390_chunkify_start.  */
3991       if (jump && GET_CODE (jump) == JUMP_INSN
3992 	  && barrier && GET_CODE (barrier) == BARRIER
3993 	  && label && GET_CODE (label) == CODE_LABEL
3994 	  && GET_CODE (PATTERN (jump)) == SET
3995 	  && SET_DEST (PATTERN (jump)) == pc_rtx
3996 	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
3997 	  && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4000 	  remove_insn (barrier);
4001 	  remove_insn (label);
4004       remove_insn (curr_pool->pool_insn);
4007   /* Remove all base/anchor register reload insns.  */
4009   for (insn = get_insns (); insn; )
4011       rtx next_insn = NEXT_INSN (insn);
     /* Reload insns are identified by their unspec numbers (210/211).  */
4013       if (GET_CODE (insn) == INSN
4014 	  && GET_CODE (PATTERN (insn)) == SET
4015 	  && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4016 	  && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4017 	      || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4023   /* Free pool list.  */
4027       struct constant_pool *next = pool_list->next;
4028       s390_free_pool (pool_list);
4034 /* Index of constant pool chunk that is currently being processed.
4035    Set to -1 before function output has started.  */
4036 int s390_pool_count = -1;
4038 /* Number of elements of current constant pool.  */
4039 int s390_nr_constants;
4041 /* Output main constant pool to stdio stream FILE.  */
4044 s390_output_constant_pool (file)
4047   /* Output constant pool.  */
4048   if (s390_nr_constants)
     /* 64-bit: address the pool via LARL into the base register and
        place it in the read-only data section, 8-byte aligned.  */
4052 	  fprintf (file, "\tlarl\t%s,.LT%d\n", reg_names[BASE_REGISTER],
4053 		   current_function_funcdef_no);
4054 	  readonly_data_section ();
4055 	  ASM_OUTPUT_ALIGN (file, 3);
     /* 31-bit: branch around the inline pool with BRAS, which also
        loads the pool address into the base register.  */
4059 	  fprintf (file, "\tbras\t%s,.LTN%d\n", reg_names[BASE_REGISTER],
4060 		   current_function_funcdef_no);
4062       fprintf (file, ".LT%d:\n", current_function_funcdef_no);
4064       s390_pool_count = 0;
4065       output_constant_pool (current_function_name, current_function_decl);
4066       s390_pool_count = -1;
4069 	  function_section (current_function_decl);
4071       fprintf (file, ".LTN%d:\n", current_function_funcdef_no);
4074   /* If no pool required, at least output the anchor label.  */
4075   else if (!TARGET_64BIT && flag_pic)
4076     fprintf (file, ".LT%d:\n", current_function_funcdef_no);
4080 /* Rework the prolog/epilog to avoid saving/restoring
4081    registers unnecessarily.  If TEMP_REGNO is nonnegative,
4082    it specifies the number of a caller-saved register used
4083    as temporary scratch register by code emitted during
4084    machine dependent reorg.  */
4087 s390_optimize_prolog (temp_regno)
4090   int save_first, save_last, restore_first, restore_last;
4092   rtx insn, new_insn, next_insn;
4094   struct s390_frame frame;
4095   s390_frame_info (&frame);
4097   /* Recompute regs_ever_live data for special registers.  */
4098   regs_ever_live[BASE_REGISTER] = 0;
4099   regs_ever_live[RETURN_REGNUM] = 0;
4100   regs_ever_live[STACK_POINTER_REGNUM] = frame.frame_size > 0;
4102   /* If there is (possibly) any pool entry, we need to
4103      load the base register.
4104      ??? FIXME: this should be more precise.  */
4105   if (get_pool_size ())
4106     regs_ever_live[BASE_REGISTER] = 1;
4108   /* In non-leaf functions, the prolog/epilog code relies
4109      on RETURN_REGNUM being saved in any case.  */
4110   if (!current_function_is_leaf)
4111     regs_ever_live[RETURN_REGNUM] = 1;
4113   /* We need to save/restore the temporary register.  */
4114   if (temp_regno >= 0)
4115     regs_ever_live[temp_regno] = 1;
4118   /* Find first and last gpr to be saved.  */
     /* Call-saved GPRs are 6..15; scan inward from both ends.  */
4120   for (i = 6; i < 16; i++)
4121     if (regs_ever_live[i])
4124   for (j = 15; j > i; j--)
4125     if (regs_ever_live[j])
4130       /* Nothing to save/restore.  */
4131       save_first = restore_first = -1;
4132       save_last = restore_last = -1;
4136       /* Save/restore from i to j.  */
4137       save_first = restore_first = i;
4138       save_last = restore_last = j;
4141   /* Varargs functions need to save gprs 2 to 6.  */
4142   if (current_function_stdarg)
4150   /* If all special registers are in fact used, there's nothing we
4151      can do, so no point in walking the insn list.  */
4152   if (i <= BASE_REGISTER && j >= BASE_REGISTER
4153       && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4157   /* Search for prolog/epilog insns and replace them.  */
4159   for (insn = get_insns (); insn; insn = next_insn)
4161       int first, last, off;
4162       rtx set, base, offset;
4164       next_insn = NEXT_INSN (insn);
4166       if (GET_CODE (insn) != INSN)
4168       if (GET_CODE (PATTERN (insn)) != PARALLEL)
     /* Prolog STM: decode the stored register range and stack offset,
        then re-emit a (possibly narrower) save_gprs sequence.  */
4171       if (store_multiple_operation (PATTERN (insn), VOIDmode))
4173 	  set = XVECEXP (PATTERN (insn), 0, 0);
4174 	  first = REGNO (SET_SRC (set));
4175 	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
4176 	  offset = const0_rtx;
4177 	  base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4178 	  off = INTVAL (offset) - first * UNITS_PER_WORD;
4180 	  if (GET_CODE (base) != REG || off < 0)
4182 	  if (first > BASE_REGISTER && first > RETURN_REGNUM)
4184 	  if (last < BASE_REGISTER && last < RETURN_REGNUM)
4187 	  if (save_first != -1)
4189 	      new_insn = save_gprs (base, off, save_first, save_last);
4190 	      new_insn = emit_insn_before (new_insn, insn);
4191 	      INSN_ADDRESSES_NEW (new_insn, -1);
     /* Epilog LM: same treatment for the restore side.  */
4197       if (load_multiple_operation (PATTERN (insn), VOIDmode))
4199 	  set = XVECEXP (PATTERN (insn), 0, 0);
4200 	  first = REGNO (SET_DEST (set));
4201 	  last = first + XVECLEN (PATTERN (insn), 0) - 1;
4202 	  offset = const0_rtx;
4203 	  base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4204 	  off = INTVAL (offset) - first * UNITS_PER_WORD;
4206 	  if (GET_CODE (base) != REG || off < 0)
4208 	  if (first > BASE_REGISTER && first > RETURN_REGNUM)
4210 	  if (last < BASE_REGISTER && last < RETURN_REGNUM)
4213 	  if (restore_first != -1)
4215 	      new_insn = restore_gprs (base, off, restore_first, restore_last);
4216 	      new_insn = emit_insn_before (new_insn, insn);
4217 	      INSN_ADDRESSES_NEW (new_insn, -1);
4225 /* Check whether any insn in the function makes use of the original
4226    value of RETURN_REG (e.g. for __builtin_return_address).
4227    If so, insert an insn reloading that value.
4229    Return true if any such insn was found.  */
4232 s390_fixup_clobbered_return_reg (return_reg)
4235   bool replacement_done = 0;
4238   struct s390_frame frame;
4239   s390_frame_info (&frame);
4241   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4243       rtx reg, off, new_insn;
4245       if (GET_CODE (insn) != INSN)
4247       if (!reg_referenced_p (return_reg, PATTERN (insn)))
     /* Skip the prolog store-multiple that saves the register.  */
4249       if (GET_CODE (PATTERN (insn)) == PARALLEL
4250 	  && store_multiple_operation (PATTERN (insn), VOIDmode))
     /* Reload the saved value from its stack save slot, addressed off
        the frame or stack pointer.  */
4253       if (frame.frame_pointer_p)
4254 	reg = hard_frame_pointer_rtx;
4256 	reg = stack_pointer_rtx;
4258       off = GEN_INT (frame.frame_size + REGNO (return_reg) * UNITS_PER_WORD);
     /* Offsets >= 4096 don't fit a 12-bit displacement: materialize
        the offset via the literal pool first.  */
4259       if (INTVAL (off) >= 4096)
4261 	  off = force_const_mem (Pmode, off);
4262 	  new_insn = gen_rtx_SET (Pmode, return_reg, off);
4263 	  new_insn = emit_insn_before (new_insn, insn);
4264 	  INSN_ADDRESSES_NEW (new_insn, -1);
4268       new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4269       new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4270       new_insn = emit_insn_before (new_insn, insn);
4271       INSN_ADDRESSES_NEW (new_insn, -1);
4273       replacement_done = 1;
4276   return replacement_done;
4279 /* Perform machine-dependent processing.
     Fixed-point driver: alternately chunkifies the literal pool and
     splits out-of-range branches until both constraints hold.  */
4282 s390_machine_dependent_reorg (first)
4283      rtx first ATTRIBUTE_UNUSED;
4285   bool fixed_up_clobbered_return_reg = 0;
4286   rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4289   /* Make sure all splits have been performed; splits after
4290      machine_dependent_reorg might confuse insn length counts.  */
4291   split_all_insns_noflow ();
4294   /* There are two problematic situations we need to correct:
4296      - the literal pool might be > 4096 bytes in size, so that
4297        some of its elements cannot be directly accessed
4299      - a branch target might be > 64K away from the branch, so that
4300        it is not possible to use a PC-relative instruction.
4302      To fix those, we split the single literal pool into multiple
4303      pool chunks, reloading the pool base register at various
4304      points throughout the function to ensure it always points to
4305      the pool chunk the following code expects, and / or replace
4306      PC-relative branches by absolute branches.
4308      However, the two problems are interdependent: splitting the
4309      literal pool can move a branch further away from its target,
4310      causing the 64K limit to overflow, and on the other hand,
4311      replacing a PC-relative branch by an absolute branch means
4312      we need to put the branch target address into the literal
4313      pool, possibly causing it to overflow.
4315      So, we loop trying to fix up both problems until we manage
4316      to satisfy both conditions at the same time.  Note that the
4317      loop is guaranteed to terminate as every pass of the loop
4318      strictly decreases the total number of PC-relative branches
4319      in the function.  (This is not completely true as there
4320      might be branch-over-pool insns introduced by chunkify_start.
4321      Those never need to be split however.)  */
4325       struct constant_pool *pool_list;
4327       /* Try to chunkify the literal pool.  */
4328       pool_list = s390_chunkify_start (temp_reg, &temp_used);
4330       /* Split out-of-range branches.  If this has created new
4331 	 literal pool entries, cancel current chunk list and
4333       if (s390_split_branches (temp_reg, &temp_used))
4336 	    s390_chunkify_cancel (pool_list);
4341       /* Check whether we have clobbered a use of the return
4342 	 register (e.g. for __builtin_return_address).  If so,
4343 	 add insns reloading the register where necessary.  */
4344       if (temp_used && !fixed_up_clobbered_return_reg
4345 	  && s390_fixup_clobbered_return_reg (temp_reg))
4347 	  fixed_up_clobbered_return_reg = 1;
4349 	  /* The fixup insns might have caused a jump to overflow.  */
4351 	      s390_chunkify_cancel (pool_list);
4356       /* If we made it up to here, both conditions are satisfied.
4357 	 Finish up pool chunkification if required.  */
4359 	s390_chunkify_finish (pool_list, temp_reg);
     /* Finally shrink the prolog/epilog save/restore ranges.  */
4364   s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
4368 /* Find first call clobbered register unused in a function.
4369 This could be used as base register in a leaf function
4370 or for holding the return address before epilogue. */
/* NOTE(review): this excerpt is line-sampled; the return type, local
   declarations and the return statement are missing here.  The visible
   loop scans the call-clobbered gprs 0..5 for one not in
   regs_ever_live[].  */
4373 find_unused_clobbered_reg ()
4376 for (i = 0; i < 6; i++)
4377 if (!regs_ever_live[i])
4382 /* Fill FRAME with info about frame of current function. */
/* NOTE(review): line-sampled excerpt — braces, declarations and some
   statements are missing between the numbered lines.  */
4385 s390_frame_info (frame)
4386 struct s390_frame *frame;
4388 char gprs_ever_live[16];
4390 HOST_WIDE_INT fsize = get_frame_size ();
/* Hard limit imposed by the architecture's displacement range.  */
4392 if (fsize > 0x7fff0000)
4393 fatal_error ("Total size of local variables exceeds architecture limit.");
4395 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
4396 frame->save_fprs_p = 0;
4398 for (i = 24; i < 32; i++)
4399 if (regs_ever_live[i])
4401 frame->save_fprs_p = 1;
/* Eight fprs at 8 bytes each -> 64 extra bytes of save area.  */
4405 frame->frame_size = fsize + frame->save_fprs_p * 64;
4407 /* Does function need to setup frame and save area. */
4409 if (! current_function_is_leaf
4410 || frame->frame_size > 0
4411 || current_function_calls_alloca
4412 || current_function_stdarg)
4413 frame->frame_size += STARTING_FRAME_OFFSET;
4415 /* Frame pointer needed. */
4417 frame->frame_pointer_p = frame_pointer_needed;
4419 /* Find first and last gpr to be saved. Note that at this point,
4420 we assume the return register and the base register always
4421 need to be saved. This is done because the usage of these
4422 register might change even after the prolog was emitted.
4423 If it turns out later that we really don't need them, the
4424 prolog/epilog code is modified again. */
4426 for (i = 0; i < 16; i++)
4427 gprs_ever_live[i] = regs_ever_live[i];
/* Conservatively force the base and return registers live; the stack
   pointer only needs saving when a frame is actually allocated.  */
4429 gprs_ever_live[BASE_REGISTER] = 1;
4430 gprs_ever_live[RETURN_REGNUM] = 1;
4431 gprs_ever_live[STACK_POINTER_REGNUM] = frame->frame_size > 0;
/* gprs 6..15 are the call-saved general registers on S/390.  */
4433 for (i = 6; i < 16; i++)
4434 if (gprs_ever_live[i])
4437 for (j = 15; j > i; j--)
4438 if (gprs_ever_live[j])
4442 /* Save / Restore from gpr i to j. */
4443 frame->first_save_gpr = i;
4444 frame->first_restore_gpr = i;
4445 frame->last_save_gpr = j;
4447 /* Varargs functions need to save gprs 2 to 6. */
4448 if (current_function_stdarg)
4449 frame->first_save_gpr = 2;
4452 /* Return offset between argument pointer and frame pointer
4453 initially after prologue. */
4456 s390_arg_frame_offset ()
4458 struct s390_frame frame;
4460 /* Compute frame_info. */
4462 s390_frame_info (&frame);
/* Arguments live above the register save area, hence the fixed
   STACK_POINTER_OFFSET on top of the computed frame size.  */
4464 return frame.frame_size + STACK_POINTER_OFFSET;
4467 /* Emit insn to save fpr REGNUM at offset OFFSET relative
4468 to register BASE. Return generated insn. */
4471 save_fpr (base, offset, regnum)
/* The store uses the special save-area alias set so the scheduler
   does not confuse it with ordinary memory traffic.  */
4477 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4478 set_mem_alias_set (addr, s390_sr_alias_set);
4480 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
4483 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
4484 to register BASE. Return generated insn. */
4487 restore_fpr (base, offset, regnum)
/* Mirror image of save_fpr: load DFmode value back from the
   save-area slot (same alias set).  */
4493 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4494 set_mem_alias_set (addr, s390_sr_alias_set);
4496 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4499 /* Generate insn to save registers FIRST to LAST into
4500 the register save area located at offset OFFSET
4501 relative to register BASE. */
4504 save_gprs (base, offset, first, last)
4510 rtx addr, insn, note;
4513 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4514 addr = gen_rtx_MEM (Pmode, addr);
4515 set_mem_alias_set (addr, s390_sr_alias_set);
4517 /* Special-case single register. */
/* NOTE(review): the #if/else gating between the movdi (64-bit) and
   movsi (31-bit) forms is not visible in this sampled excerpt.  */
4521 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
4523 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
4525 RTX_FRAME_RELATED_P (insn) = 1;
/* General case: one store-multiple covering FIRST..LAST.  */
4530 insn = gen_store_multiple (addr,
4531 gen_rtx_REG (Pmode, first),
4532 GEN_INT (last - first + 1));
4535 /* We need to set the FRAME_RELATED flag on all SETs
4536 inside the store-multiple pattern.
4538 However, we must not emit DWARF records for registers 2..5
4539 if they are stored for use by variable arguments ...
4541 ??? Unfortunately, it is not enough to simply not set the
4542 FRAME_RELATED flags for those SETs, because the first SET
4543 of the PARALLEL is always treated as if it had the flag
4544 set, even if it does not. Therefore we emit a new pattern
4545 without those registers as REG_FRAME_RELATED_EXPR note. */
4549 rtx pat = PATTERN (insn);
4551 for (i = 0; i < XVECLEN (pat, 0); i++)
4552 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
4553 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
4555 RTX_FRAME_RELATED_P (insn) = 1;
/* Build a shadow store-multiple starting at gpr 6 (the first
   call-saved gpr) and attach it as the DWARF note, so the vararg
   registers 2..5 never show up in the unwind info.  */
4559 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
4560 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
4561 gen_rtx_REG (Pmode, 6),
4562 GEN_INT (last - 6 + 1));
4563 note = PATTERN (note);
4566 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4567 note, REG_NOTES (insn));
4569 for (i = 0; i < XVECLEN (note, 0); i++)
4570 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
4571 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
4573 RTX_FRAME_RELATED_P (insn) = 1;
4579 /* Generate insn to restore registers FIRST to LAST from
4580 the register save area located at offset OFFSET
4581 relative to register BASE. */
4584 restore_gprs (base, offset, first, last)
4592 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4593 addr = gen_rtx_MEM (Pmode, addr);
4594 set_mem_alias_set (addr, s390_sr_alias_set);
4596 /* Special-case single register. */
/* NOTE(review): as in save_gprs, the 64-bit/31-bit conditional
   selecting movdi vs. movsi is not visible in this sampled excerpt;
   the multi-register path uses a single load-multiple.  */
4600 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
4602 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
4607 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
4609 GEN_INT (last - first + 1));
4613 /* Expand the prologue into a bunch of separate insns. */
4616 s390_emit_prologue ()
4618 struct s390_frame frame;
4623 /* Compute frame_info. */
4625 s390_frame_info (&frame);
4627 /* Choose best register to use for temp use within prologue. */
/* Prefer the return register when it is free (non-leaf, not needed
   for __builtin_return_address, pool small enough); fall back to
   gpr 1 otherwise.  */
4629 if (!current_function_is_leaf
4630 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
4631 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
4632 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM)
4634 temp_reg = gen_rtx_REG (Pmode, 1);
4636 /* Save call saved gprs. */
4638 insn = save_gprs (stack_pointer_rtx, 0,
4639 frame.first_save_gpr, frame.last_save_gpr);
4642 /* Dump constant pool and set constant pool register (13). */
4644 insn = emit_insn (gen_lit ());
4646 /* Save fprs for variable args. */
4648 if (current_function_stdarg)
4650 /* Save fpr 0 and 2. */
4652 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
4653 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
4657 /* Save fpr 4 and 6. */
4659 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4660 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4664 /* Save fprs 4 and 6 if used (31 bit ABI). */
4668 /* Save fpr 4 and 6. */
4669 if (regs_ever_live[18])
4671 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4672 RTX_FRAME_RELATED_P (insn) = 1;
4674 if (regs_ever_live[19])
4676 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4677 RTX_FRAME_RELATED_P (insn) = 1;
4681 /* Decrement stack pointer. */
4683 if (frame.frame_size > 0)
4685 rtx frame_off = GEN_INT (-frame.frame_size);
4687 /* Save incoming stack pointer into temp reg. */
/* Needed later for setting the backchain word and/or addressing
   the fpr save area above the new stack pointer.  */
4689 if (TARGET_BACKCHAIN || frame.save_fprs_p)
4691 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
4694 /* Subtract frame size from stack pointer. */
4696 frame_off = GEN_INT (-frame.frame_size);
/* Large offsets do not fit the add-immediate constraint 'K';
   spill them to the literal pool.  */
4697 if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
4698 frame_off = force_const_mem (Pmode, frame_off);
4700 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
4701 RTX_FRAME_RELATED_P (insn) = 1;
/* Record the canonical sp = sp - frame_size form for DWARF, even
   when the actual insn loaded the offset from the pool.  */
4703 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4704 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
4705 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4706 GEN_INT (-frame.frame_size))),
4709 /* Set backchain. */
4711 if (TARGET_BACKCHAIN)
4713 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
4714 set_mem_alias_set (addr, s390_sr_alias_set);
4715 insn = emit_insn (gen_move_insn (addr, temp_reg));
4719 /* Save fprs 8 - 15 (64 bit ABI). */
4721 if (frame.save_fprs_p)
4723 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
4725 for (i = 24; i < 32; i++)
4726 if (regs_ever_live[i])
4728 rtx addr = plus_constant (stack_pointer_rtx,
4729 frame.frame_size - 64 + (i-24)*8);
4731 insn = save_fpr (temp_reg, (i-24)*8, i);
4732 RTX_FRAME_RELATED_P (insn) = 1;
/* DWARF note expressed relative to the NEW stack pointer, while
   the store itself went through temp_reg (old sp - 64).  */
4734 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4735 gen_rtx_SET (VOIDmode,
4736 gen_rtx_MEM (DFmode, addr),
4737 gen_rtx_REG (DFmode, i)),
4742 /* Set frame pointer, if needed. */
4744 if (frame.frame_pointer_p)
4746 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
4747 RTX_FRAME_RELATED_P (insn) = 1;
4750 /* Set up got pointer, if needed. */
4752 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4754 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4755 SYMBOL_REF_FLAG (got_symbol) = 1;
4759 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
4762 /* It can happen that the GOT pointer isn't really needed ... */
4763 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit path: load the GOT offset from the literal pool (wrapped
   in UNSPEC 100) and add the pool base register.  */
4768 got_symbol = gen_rtx_UNSPEC (VOIDmode,
4769 gen_rtvec (1, got_symbol), 100);
4770 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
4771 got_symbol = force_const_mem (Pmode, got_symbol);
4772 insn = emit_move_insn (pic_offset_table_rtx,
4774 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4777 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
4778 gen_rtx_REG (Pmode, BASE_REGISTER)));
4779 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4785 /* Expand the epilogue into a bunch of separate insns. */
4788 s390_emit_epilogue ()
4790 struct s390_frame frame;
4791 rtx frame_pointer, return_reg;
4792 int area_bottom, area_top, offset;
4795 /* Compute frame_info. */
4797 s390_frame_info (&frame);
4799 /* Check whether to use frame or stack pointer for restore. */
4801 frame_pointer = frame.frame_pointer_p ?
4802 hard_frame_pointer_rtx : stack_pointer_rtx;
4804 /* Compute which parts of the save area we need to access. */
4806 if (frame.first_restore_gpr != -1)
4808 area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
4809 area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
/* No gprs to restore: start with an empty (inverted) range and let
   the fpr checks below widen it as needed.  */
4813 area_bottom = INT_MAX;
4819 if (frame.save_fprs_p)
4821 if (area_bottom > -64)
/* 31-bit ABI: extend the accessed range over fpr 4/6 slots.  */
4829 if (regs_ever_live[18])
4831 if (area_bottom > STACK_POINTER_OFFSET - 16)
4832 area_bottom = STACK_POINTER_OFFSET - 16;
4833 if (area_top < STACK_POINTER_OFFSET - 8)
4834 area_top = STACK_POINTER_OFFSET - 8;
4836 if (regs_ever_live[19])
4838 if (area_bottom > STACK_POINTER_OFFSET - 8)
4839 area_bottom = STACK_POINTER_OFFSET - 8;
4840 if (area_top < STACK_POINTER_OFFSET)
4841 area_top = STACK_POINTER_OFFSET;
4845 /* Check whether we can access the register save area.
4846 If not, increment the frame pointer as required. */
4848 if (area_top <= area_bottom)
4850 /* Nothing to restore. */
/* Short-displacement addressing reaches 0..4095 bytes; if the whole
   save area lies in that window relative to the frame pointer we can
   address it directly.  */
4852 else if (frame.frame_size + area_bottom >= 0
4853 && frame.frame_size + area_top <= 4096)
4855 /* Area is in range. */
4856 offset = frame.frame_size;
4860 rtx insn, frame_off;
4862 offset = area_bottom < 0 ? -area_bottom : 0;
4863 frame_off = GEN_INT (frame.frame_size - offset);
4865 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
4866 frame_off = force_const_mem (Pmode, frame_off);
4868 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
4871 /* Restore call saved fprs. */
/* NOTE(review): the 64-bit vs 31-bit #if gating around these two
   fpr-restore variants is not visible in this sampled excerpt.  */
4877 if (frame.save_fprs_p)
4878 for (i = 24; i < 32; i++)
4879 if (regs_ever_live[i] && !global_regs[i])
4880 restore_fpr (frame_pointer,
4881 offset - 64 + (i-24) * 8, i);
4885 if (regs_ever_live[18] && !global_regs[18])
4886 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
4887 if (regs_ever_live[19] && !global_regs[19])
4888 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
4891 /* Return register. */
4893 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4895 /* Restore call saved gprs. */
4897 if (frame.first_restore_gpr != -1)
4902 /* Check for global register and save them
4903 to stack location from where they get restored. */
4905 for (i = frame.first_restore_gpr;
4906 i <= frame.last_save_gpr;
4909 /* These registers are special and need to be
4910 restored in any case. */
4911 if (i == STACK_POINTER_REGNUM
4912 || i == RETURN_REGNUM
4913 || i == BASE_REGISTER
4914 || (flag_pic && i == PIC_OFFSET_TABLE_REGNUM))
/* Global registers must keep their current value: store them into
   the save slot so the following load-multiple reloads the same
   value it would otherwise clobber.  */
4919 addr = plus_constant (frame_pointer,
4920 offset + i * UNITS_PER_WORD);
4921 addr = gen_rtx_MEM (Pmode, addr);
4922 set_mem_alias_set (addr, s390_sr_alias_set);
4923 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
4927 /* Fetch return address from stack before load multiple,
4928 this is beneficial for scheduling. */
4930 if (!current_function_is_leaf)
4932 int return_regnum = find_unused_clobbered_reg();
4935 return_reg = gen_rtx_REG (Pmode, return_regnum);
4937 addr = plus_constant (frame_pointer,
4938 offset + RETURN_REGNUM * UNITS_PER_WORD);
4939 addr = gen_rtx_MEM (Pmode, addr);
4940 set_mem_alias_set (addr, s390_sr_alias_set);
4941 emit_move_insn (return_reg, addr);
4944 /* ??? As references to the base register are not made
4945 explicit in insn RTX code, we have to add a barrier here
4946 to prevent incorrect scheduling. */
4948 emit_insn (gen_blockage());
4950 insn = restore_gprs (frame_pointer, offset,
4951 frame.first_restore_gpr, frame.last_save_gpr);
4955 /* Return to caller. */
/* PARALLEL of (return) plus a USE of the register holding the return
   address, so dataflow keeps that register live to the jump.  */
4957 p = rtvec_alloc (2);
4959 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
4960 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
4961 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
4965 /* Return the size in bytes of a function argument of
4966 type TYPE and/or mode MODE. At least one of TYPE or
4967 MODE must be specified. */
4970 s390_function_arg_size (mode, type)
4971 enum machine_mode mode;
/* Prefer the tree type's size when TYPE is given (the guarding `if
   (type)` line is not visible in this sampled excerpt).  */
4975 return int_size_in_bytes (type);
4977 /* No type info available for some library calls ... */
4978 if (mode != BLKmode)
4979 return GET_MODE_SIZE (mode);
4981 /* If we have neither type nor mode, abort */
4985 /* Return 1 if a function argument of type TYPE and mode MODE
4986 is to be passed by reference. The ABI specifies that only
4987 structures of size 1, 2, 4, or 8 bytes are passed by value,
4988 all other structures (and complex numbers) are passed by
4992 s390_function_arg_pass_by_reference (mode, type)
4993 enum machine_mode mode;
4996 int size = s390_function_arg_size (mode, type);
/* Aggregates pass by value only at the power-of-two sizes 1/2/4/8;
   everything else, and all complex types, go by reference.  */
5000 if (AGGREGATE_TYPE_P (type) &&
5001 size != 1 && size != 2 && size != 4 && size != 8)
5004 if (TREE_CODE (type) == COMPLEX_TYPE)
5011 /* Update the data in CUM to advance over an argument of mode MODE and
5012 data type TYPE. (TYPE is null for libcalls where that information
5013 may not be available.). The boolean NAMED specifies whether the
5014 argument is a named argument (as opposed to an unnamed argument
5015 matching an ellipsis). */
5018 s390_function_arg_advance (cum, mode, type, named)
5019 CUMULATIVE_ARGS *cum;
5020 enum machine_mode mode;
5022 int named ATTRIBUTE_UNUSED;
/* Hard-float scalar FP args consume an fpr; by-reference args consume
   a single gpr (for the pointer); everything else consumes as many
   gprs as its rounded-up word count.  */
5024 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5028 else if (s390_function_arg_pass_by_reference (mode, type))
5034 int size = s390_function_arg_size (mode, type);
5035 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5039 /* Define where to put the arguments to a function.
5040 Value is zero to push the argument on the stack,
5041 or a hard register in which to store the argument.
5043 MODE is the argument's machine mode.
5044 TYPE is the data type of the argument (as a tree).
5045 This is null for libcalls where that information may
5047 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5048 the preceding args and about the function being called.
5049 NAMED is nonzero if this argument is a named parameter
5050 (otherwise it is an extra parameter matching an ellipsis).
5052 On S/390, we use general purpose registers 2 through 6 to
5053 pass integer, pointer, and certain structure arguments, and
5054 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5055 to pass floating point arguments. All remaining arguments
5056 are pushed to the stack. */
5059 s390_function_arg (cum, mode, type, named)
5060 CUMULATIVE_ARGS *cum;
5061 enum machine_mode mode;
5063 int named ATTRIBUTE_UNUSED;
5065 if (s390_function_arg_pass_by_reference (mode, type))
/* FP register file starts at hard regno 16; at most 2 fpr args on
   31-bit, 4 on 64-bit.  */
5068 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5070 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5073 return gen_rtx (REG, mode, cum->fprs + 16);
/* Integer args start at gpr 2; an arg that would spill past gpr 6
   (i.e. gprs used + words needed > 5) goes on the stack instead.  */
5077 int size = s390_function_arg_size (mode, type);
5078 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
5080 if (cum->gprs + n_gprs > 5)
5083 return gen_rtx (REG, mode, cum->gprs + 2);
5088 /* Create and return the va_list datatype.
5090 On S/390, va_list is an array type equivalent to
5092 typedef struct __va_list_tag
5096 void *__overflow_arg_area;
5097 void *__reg_save_area;
5101 where __gpr and __fpr hold the number of general purpose
5102 or floating point arguments used up to now, respectively,
5103 __overflow_arg_area points to the stack location of the
5104 next argument passed on the stack, and __reg_save_area
5105 always points to the start of the register area in the
5106 call frame of the current function. The function prologue
5107 saves all registers used for argument passing into this
5108 area if the function uses variable arguments. */
5111 s390_build_va_list ()
5113 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5115 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5118 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5120 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5121 long_integer_type_node);
5122 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5123 long_integer_type_node);
5124 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5126 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
/* Parent every field in the record and chain them in declaration
   order before laying the type out.  */
5129 DECL_FIELD_CONTEXT (f_gpr) = record;
5130 DECL_FIELD_CONTEXT (f_fpr) = record;
5131 DECL_FIELD_CONTEXT (f_ovf) = record;
5132 DECL_FIELD_CONTEXT (f_sav) = record;
5134 TREE_CHAIN (record) = type_decl;
5135 TYPE_NAME (record) = type_decl;
5136 TYPE_FIELDS (record) = f_gpr;
5137 TREE_CHAIN (f_gpr) = f_fpr;
5138 TREE_CHAIN (f_fpr) = f_ovf;
5139 TREE_CHAIN (f_ovf) = f_sav;
5141 layout_type (record);
5143 /* The correct type is an array type of one element. */
5144 return build_array_type (record, build_index_type (size_zero_node));
5147 /* Implement va_start by filling the va_list structure VALIST.
5148 STDARG_P is always true, and ignored.
5149 NEXTARG points to the first anonymous stack argument.
5151 The following global variables are used to initialize
5152 the va_list structure:
5154 current_function_args_info:
5155 holds number of gprs and fprs used for named arguments.
5156 current_function_arg_offset_rtx:
5157 holds the offset of the first anonymous stack argument
5158 (relative to the virtual arg pointer). */
5161 s390_va_start (valist, nextarg)
5163 rtx nextarg ATTRIBUTE_UNUSED;
5165 HOST_WIDE_INT n_gpr, n_fpr;
5167 tree f_gpr, f_fpr, f_ovf, f_sav;
5168 tree gpr, fpr, ovf, sav, t;
/* Walk the field chain of __va_list_tag in declaration order.  */
5170 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5171 f_fpr = TREE_CHAIN (f_gpr);
5172 f_ovf = TREE_CHAIN (f_fpr);
5173 f_sav = TREE_CHAIN (f_ovf);
5175 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5176 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5177 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5178 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5179 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5181 /* Count number of gp and fp argument registers used. */
5183 n_gpr = current_function_args_info.gprs;
5184 n_fpr = current_function_args_info.fprs;
5186 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5187 TREE_SIDE_EFFECTS (t) = 1;
5188 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5190 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5191 TREE_SIDE_EFFECTS (t) = 1;
5192 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5194 /* Find the overflow area. */
5195 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Negative offsets mean all named args fit in registers; clamp to 0
   so the overflow area starts at the incoming args pointer.  */
5197 off = INTVAL (current_function_arg_offset_rtx);
5198 off = off < 0 ? 0 : off;
5199 if (TARGET_DEBUG_ARG)
5200 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5201 (int)n_gpr, (int)n_fpr, off);
5203 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5205 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5206 TREE_SIDE_EFFECTS (t) = 1;
5207 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5209 /* Find the register save area. */
/* The register save area sits STACK_POINTER_OFFSET bytes below the
   incoming arguments pointer.  */
5210 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5211 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5212 build_int_2 (-STACK_POINTER_OFFSET, -1));
5213 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5214 TREE_SIDE_EFFECTS (t) = 1;
5215 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5218 /* Implement va_arg by updating the va_list structure
5219 VALIST as required to retrieve an argument of type
5220 TYPE, and returning that argument.
5222 Generates code equivalent to:
5224 if (integral value) {
5225 if (size <= 4 && args.gpr < 5 ||
5226 size > 4 && args.gpr < 4 )
5227 ret = args.reg_save_area[args.gpr+8]
5229 ret = *args.overflow_arg_area++;
5230 } else if (float value) {
5232 ret = args.reg_save_area[args.fpr+64]
5234 ret = *args.overflow_arg_area++;
5235 } else if (aggregate value) {
5237 ret = *args.reg_save_area[args.gpr]
5239 ret = **args.overflow_arg_area++;
5243 s390_va_arg (valist, type)
5247 tree f_gpr, f_fpr, f_ovf, f_sav;
5248 tree gpr, fpr, ovf, sav, reg, t, u;
5249 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
5250 rtx lab_false, lab_over, addr_rtx, r;
/* Walk the __va_list_tag fields, as in s390_va_start.  */
5252 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5253 f_fpr = TREE_CHAIN (f_gpr);
5254 f_ovf = TREE_CHAIN (f_fpr);
5255 f_sav = TREE_CHAIN (f_ovf);
5257 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5258 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5259 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5260 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5261 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5263 size = int_size_in_bytes (type);
5265 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
5267 if (TARGET_DEBUG_ARG)
5269 fprintf (stderr, "va_arg: aggregate type");
5273 /* Aggregates are passed by reference. */
/* What we actually fetch from the save area is the pointer, so the
   effective element size is one word.  */
5277 sav_ofs = 2 * UNITS_PER_WORD;
5278 sav_scale = UNITS_PER_WORD;
5279 size = UNITS_PER_WORD;
5282 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
5284 if (TARGET_DEBUG_ARG)
5286 fprintf (stderr, "va_arg: float type");
5290 /* FP args go in FP registers, if present. */
5294 sav_ofs = 16 * UNITS_PER_WORD;
5296 /* TARGET_64BIT has up to 4 parameter in fprs */
5297 max_reg = TARGET_64BIT ? 3 : 1;
5301 if (TARGET_DEBUG_ARG)
5303 fprintf (stderr, "va_arg: other type");
5307 /* Otherwise into GP registers. */
5310 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
5311 sav_ofs = 2 * UNITS_PER_WORD;
/* Adjust for big-endian: sub-word scalars live in the high-address
   end of their save slot (offsets differ between 64- and 31-bit;
   the #if gating the two variants is missing from this excerpt).  */
5313 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
5314 TYPE_MODE (type) == HImode ? 6 :
5315 TYPE_MODE (type) == QImode ? 7 : 0;
5317 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
5318 TYPE_MODE (type) == QImode ? 3 : 0;
5320 sav_scale = UNITS_PER_WORD;
5327 /* Pull the value out of the saved registers ... */
5329 lab_false = gen_label_rtx ();
5330 lab_over = gen_label_rtx ();
5331 addr_rtx = gen_reg_rtx (Pmode);
/* If the register count already exceeds the limit, branch to the
   overflow-area path at lab_false.  */
5333 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
5335 GT, const1_rtx, Pmode, 0, lab_false);
5338 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5342 u = build (MULT_EXPR, long_integer_type_node,
5343 reg, build_int_2 (sav_scale, 0));
5344 TREE_SIDE_EFFECTS (u) = 1;
5346 t = build (PLUS_EXPR, ptr_type_node, t, u);
5347 TREE_SIDE_EFFECTS (t) = 1;
5349 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5351 emit_move_insn (addr_rtx, r);
5354 emit_jump_insn (gen_jump (lab_over));
5356 emit_label (lab_false);
5358 /* ... Otherwise out of the overflow area. */
5360 t = save_expr (ovf);
5363 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
5364 if (size < UNITS_PER_WORD)
5366 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
5367 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5368 TREE_SIDE_EFFECTS (t) = 1;
5369 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5371 t = save_expr (ovf);
5374 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5376 emit_move_insn (addr_rtx, r);
/* Bump the overflow pointer past the consumed argument.  */
5378 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5379 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5380 TREE_SIDE_EFFECTS (t) = 1;
5381 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5383 emit_label (lab_over);
5385 /* If less than max_regs registers are retrieved out
5386 of register save area, increment. */
5388 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
5389 build_int_2 (n_reg, 0));
5390 TREE_SIDE_EFFECTS (u) = 1;
5391 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Indirect case: addr_rtx currently holds the address of the pointer
   to the value; dereference one extra level.  */
5395 r = gen_rtx_MEM (Pmode, addr_rtx);
5396 set_mem_alias_set (r, get_varargs_alias_set ());
5397 emit_move_insn (addr_rtx, r);
5405 /* Output assembly code for the trampoline template to
5408 On S/390, we use gpr 1 internally in the trampoline code;
5409 gpr 0 is used to hold the static chain. */
5412 s390_trampoline_template (file)
/* 64-bit variant: larl gets the address of the two .quad slots;
   the first holds the static chain, the second the target address.
   (The #if separating the two variants is missing from this
   excerpt.)  */
5417 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
5418 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
5419 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
5420 fprintf (file, "br\t%s\n", reg_names[1]);
5421 fprintf (file, "0:\t.quad\t0\n");
5422 fprintf (file, ".quad\t0\n");
/* 31-bit variant: basr establishes addressability, then two .long
   slots are loaded at fixed byte offsets 10 and 14.  */
5426 fprintf (file, "basr\t%s,0\n", reg_names[1]);
5427 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
5428 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
5429 fprintf (file, "br\t%s\n", reg_names[1]);
5430 fprintf (file, ".long\t0\n");
5431 fprintf (file, ".long\t0\n");
5435 /* Emit RTL insns to initialize the variable parts of a trampoline.
5436 FNADDR is an RTX for the address of the function's pure code.
5437 CXT is an RTX for the static chain value for the function. */
5440 s390_initialize_trampoline (addr, fnaddr, cxt)
/* The byte offsets match the data slots emitted by
   s390_trampoline_template: 20/28 on 64-bit, 12/16 on 31-bit.  */
5445 emit_move_insn (gen_rtx
5447 memory_address (Pmode,
5448 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
5449 emit_move_insn (gen_rtx
5451 memory_address (Pmode,
5452 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
5455 /* Return rtx for 64-bit constant formed from the 32-bit subwords
5456 LOW and HIGH, independent of the host word size. */
5459 s390_gen_rtx_const_DI (high, low)
/* Wide host: combine into one HOST_WIDE_INT (the shift/mask lines
   between these statements are missing from this excerpt).  */
5463 #if HOST_BITS_PER_WIDE_INT >= 64
5465 val = (HOST_WIDE_INT)high;
5467 val |= (HOST_WIDE_INT)low;
5469 return GEN_INT (val);
/* Narrow host: fall back to a CONST_DOUBLE via immed_double_const.  */
5471 #if HOST_BITS_PER_WIDE_INT >= 32
5472 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
5479 /* Output assembler code to FILE to increment profiler label # LABELNO
5480 for profiling a function entry. */
5483 s390_function_profiler (file, labelno)
5490 sprintf (label, "%sP%d", LPREFIX, labelno);
5492 fprintf (file, "# function profiler \n");
/* op[0] = return register, op[1] = its save slot on the stack,
   op[2] = scratch gpr 1, op[3] = counter label, op[4] = _mcount.  */
5494 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
5495 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
5496 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
5498 op[2] = gen_rtx_REG (Pmode, 1);
5499 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
5500 SYMBOL_REF_FLAG (op[3]) = 1;
5502 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
/* PIC: wrap _mcount in UNSPEC 113 so it is referenced via the GOT.  */
5505 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
5506 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit sequence: save return reg, load label address pc-relative,
   call _mcount, restore return reg.  (The #if/else selecting among
   the three sequences below is missing from this excerpt.)  */
5511 output_asm_insn ("stg\t%0,%1", op);
5512 output_asm_insn ("larl\t%2,%3", op);
5513 output_asm_insn ("brasl\t%0,%4", op);
5514 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit non-PIC sequence: materialize addresses via inline .long
   literals reached through bras.  */
5518 op[6] = gen_label_rtx ();
5520 output_asm_insn ("st\t%0,%1", op);
5521 output_asm_insn ("bras\t%2,%l6", op);
5522 output_asm_insn (".long\t%4", op);
5523 output_asm_insn (".long\t%3", op);
5524 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5525 output_asm_insn ("l\t%0,0(%2)", op);
5526 output_asm_insn ("l\t%2,4(%2)", op);
5527 output_asm_insn ("basr\t%0,%0", op);
5528 output_asm_insn ("l\t%0,%1", op);
/* 31-bit PIC sequence: literals are label-relative offsets, added to
   the anchor label's address at run time.  */
5532 op[5] = gen_label_rtx ();
5533 op[6] = gen_label_rtx ();
5535 output_asm_insn ("st\t%0,%1", op);
5536 output_asm_insn ("bras\t%2,%l6", op);
5537 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5538 output_asm_insn (".long\t%4-%l5", op);
5539 output_asm_insn (".long\t%3-%l5", op);
5540 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5541 output_asm_insn ("lr\t%0,%2", op);
5542 output_asm_insn ("a\t%0,0(%2)", op);
5543 output_asm_insn ("a\t%2,4(%2)", op);
5544 output_asm_insn ("basr\t%0,%0", op);
5545 output_asm_insn ("l\t%0,%1", op);
5549 /* Select section for constant in constant pool. In 32-bit mode,
5550 constants go in the function section; in 64-bit mode in .rodata. */
5553 s390_select_rtx_section (mode, x, align)
5554 enum machine_mode mode ATTRIBUTE_UNUSED;
5555 rtx x ATTRIBUTE_UNUSED;
5556 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* NOTE(review): the TARGET_64BIT conditional between these two calls
   is missing from this sampled excerpt.  */
5559 readonly_data_section ();
5561 function_section (current_function_decl);
5564 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
5565 may access it directly in the GOT. */
5568 s390_encode_section_info (decl, first)
5570 int first ATTRIBUTE_UNUSED;
5574 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5575 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
5577 if (GET_CODE (rtl) == MEM)
5579 SYMBOL_REF_FLAG (XEXP (rtl, 0))
5580 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5581 || ! TREE_PUBLIC (decl));