1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
42 #include "basic-block.h"
43 #include "integrate.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* NOTE(review): this is an elided numbered listing — some original source
   lines are missing between the visible ones.  Code kept byte-identical.  */

/* Forward declarations for the target-hook implementations installed
   via the TARGET_* macros below.  PARAMS is the pre-C89-cleanup GCC
   prototype macro.  */
51 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
52 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
53 static int s390_adjust_priority PARAMS ((rtx, int));
54 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
55 unsigned HOST_WIDE_INT));
56 static void s390_encode_section_info PARAMS ((tree, int));
57 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
58 HOST_WIDE_INT, tree));

/* Assembler directives for aligned 16-bit and 64-bit integers.  */
60 #undef TARGET_ASM_ALIGNED_HI_OP
61 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
62 #undef TARGET_ASM_ALIGNED_DI_OP
63 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
64 #undef TARGET_ASM_INTEGER
65 #define TARGET_ASM_INTEGER s390_assemble_integer

/* The S/390 assembler does not use parentheses around operands of
   expressions, hence the empty strings.  */
67 #undef TARGET_ASM_OPEN_PAREN
68 #define TARGET_ASM_OPEN_PAREN ""
70 #undef TARGET_ASM_CLOSE_PAREN
71 #define TARGET_ASM_CLOSE_PAREN ""
73 #undef TARGET_ASM_SELECT_RTX_SECTION
74 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section

/* Scheduler hooks.  */
76 #undef TARGET_SCHED_ADJUST_COST
77 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
79 #undef TARGET_SCHED_ADJUST_PRIORITY
80 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
82 #undef TARGET_ENCODE_SECTION_INFO
83 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info

/* MI thunk emission: this port can always output a thunk directly.  */
85 #undef TARGET_ASM_OUTPUT_MI_THUNK
86 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
87 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
88 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
/* NOTE(review): elided listing — the bodies of struct s390_address and
   struct s390_frame below are only partially visible.  */

/* Instantiate the target hook vector with the macros defined above.  */
90 struct gcc_target targetm = TARGET_INITIALIZER;

92 extern int reload_completed;

94 /* The alias set for prologue/epilogue register save/restore. */
95 static int s390_sr_alias_set = 0;

97 /* Save information from a "cmpxx" operation until the branch or scc is
99 rtx s390_compare_op0, s390_compare_op1;

101 /* Structure used to hold the components of a S/390 memory
102 address. A legitimate address on S/390 is of the general
104 base + index + displacement
105 where any of the components is optional.
107 base and index are registers of the class ADDR_REGS,
108 displacement is an unsigned 12-bit immediate constant. */

118 /* Structure containing information for prologue and epilogue. */
/* Visible members of struct s390_frame (declaration itself is elided).  */
125 int first_restore_gpr;
127 int arg_frame_offset;
129 HOST_WIDE_INT frame_size;

/* Forward declarations for the static helpers defined later in the file.  */
132 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
133 static int s390_branch_condition_mask PARAMS ((rtx));
134 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
135 static int check_mode PARAMS ((rtx, enum machine_mode *));
136 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
137 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
138 static int reg_used_in_mem_p PARAMS ((int, rtx));
139 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
140 static int s390_split_branches PARAMS ((rtx, bool *));
141 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
142 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
143 static int find_base_register_in_addr PARAMS ((struct s390_address *));
144 static bool find_base_register_ref PARAMS ((rtx));
145 static void replace_base_register_ref PARAMS ((rtx *, rtx));
146 static void s390_optimize_prolog PARAMS ((int));
147 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
148 static int find_unused_clobbered_reg PARAMS ((void));
149 static void s390_frame_info PARAMS ((struct s390_frame *));
150 static rtx save_fpr PARAMS ((rtx, int, int));
151 static rtx restore_fpr PARAMS ((rtx, int, int));
152 static rtx save_gprs PARAMS ((rtx, int, int, int));
153 static rtx restore_gprs PARAMS ((rtx, int, int, int));
154 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
157 /* Return true if SET either doesn't set the CC register, or else
158 the source and destination have matching CC modes and that
159 CC mode is at least as constrained as REQ_MODE. */
/* NOTE(review): elided listing — return statements, braces and some
   mode-comparison branches of this function are not visible here.  */
162 s390_match_ccmode_set (set, req_mode)
164 enum machine_mode req_mode;
166 enum machine_mode set_mode;
/* Anything that is not a SET trivially satisfies the predicate
   (elided early return presumably follows — confirm in full source).  */
168 if (GET_CODE (set) != SET)
171 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
174 set_mode = GET_MODE (SET_DEST (set));
187 if (req_mode != set_mode)
/* The CCS/CCU/CCT (+register-swapped) family requires an exact match.  */
192 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
193 && req_mode != CCSRmode && req_mode != CCURmode)
199 if (req_mode != CCAmode)
/* Finally, source and destination CC modes must agree.  */
207 return (GET_MODE (SET_SRC (set)) == set_mode);

210 /* Return true if every SET in INSN that sets the CC register
211 has source and destination with matching CC modes and that
212 CC mode is at least as constrained as REQ_MODE.
213 If REQ_MODE is VOIDmode, always return false. */
216 s390_match_ccmode (insn, req_mode)
218 enum machine_mode req_mode;
222 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
223 if (req_mode == VOIDmode)
/* Single SET: delegate directly.  */
226 if (GET_CODE (PATTERN (insn)) == SET)
227 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL: every member SET must pass the check.  */
229 if (GET_CODE (PATTERN (insn)) == PARALLEL)
230 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
232 rtx set = XVECEXP (PATTERN (insn), 0, i);
233 if (GET_CODE (set) == SET)
234 if (!s390_match_ccmode_set (set, req_mode))
241 /* If a test-under-mask instruction can be used to implement
242 (compare (and ... OP1) OP2), return the CC mode required
243 to do that. Otherwise, return VOIDmode.
244 MIXED is true if the instruction can distinguish between
245 CC1 and CC2 for mixed selected bits (TMxx), it is false
246 if the instruction cannot (TM). */
/* NOTE(review): elided listing — parameter declarations, braces and
   several return statements are missing from view.  */
249 s390_tm_ccmode (op1, op2, mixed)
256 /* ??? Fixme: should work on CONST_DOUBLE as well. */
257 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
260 /* Selected bits all zero: CC0. */
261 if (INTVAL (op2) == 0)
264 /* Selected bits all one: CC3. */
265 if (INTVAL (op2) == INTVAL (op1))
268 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
/* exact_log2 returns -1 unless its argument is a power of two, so both
   checks below require a single-bit value.  */
271 bit1 = exact_log2 (INTVAL (op2));
272 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
273 if (bit0 != -1 && bit1 != -1)
274 return bit0 > bit1 ? CCT1mode : CCT2mode;

280 /* Given a comparison code OP (EQ, NE, etc.) and the operands
281 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* Chooses the CC mode that preserves exactly the information the
   eventual branch needs; the enclosing switch on CODE is elided.  */
285 s390_select_ccmode (code, op0, op1)
294 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
295 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
297 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
298 || GET_CODE (op1) == NEG)
301 if (GET_CODE (op0) == AND)
303 /* Check whether we can potentially do it via TM. */
304 enum machine_mode ccmode;
305 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
306 if (ccmode != VOIDmode)
308 /* Relax CCTmode to CCZmode to allow fall-back to AND
309 if that turns out to be beneficial. */
310 return ccmode == CCTmode ? CCZmode : ccmode;
/* -1/65535 (HImode) and -1/255 (QImode) are all-ones values for the
   narrow mode, presumably enabling a cheaper test — confirm in full
   source which CC mode these branches select.  */
314 if (register_operand (op0, HImode)
315 && GET_CODE (op1) == CONST_INT
316 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
318 if (register_operand (op0, QImode)
319 && GET_CODE (op1) == CONST_INT
320 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
329 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
330 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
332 if (INTVAL (XEXP((op0), 1)) < 0)
/* Sign/zero extensions compared against a register cannot use the
   immediate-comparison CC modes.  */
345 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
346 && GET_CODE (op1) != CONST_INT)
352 if (GET_CODE (op0) == PLUS)
355 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
356 && GET_CODE (op1) != CONST_INT)
362 if (GET_CODE (op0) == MINUS)
365 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
366 && GET_CODE (op1) != CONST_INT)
375 /* Return branch condition mask to implement a branch
376 specified by CODE. */
/* NOTE(review): elided listing — the enclosing switch labels (the CC
   modes) and several case lines are missing; each visible inner switch
   corresponds to one CC mode of the comparison register.  */
379 s390_branch_condition_mask (code)
/* The four condition-code values map to a 4-bit mask, CC0 being the
   most significant bit (matches the BC instruction mask encoding).  */
382 const int CC0 = 1 << 3;
383 const int CC1 = 1 << 2;
384 const int CC2 = 1 << 1;
385 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero.  */
387 if (GET_CODE (XEXP (code, 0)) != REG
388 || REGNO (XEXP (code, 0)) != CC_REGNUM
389 || XEXP (code, 1) != const0_rtx)
392 switch (GET_MODE (XEXP (code, 0)))
395 switch (GET_CODE (code))
398 case NE: return CC1 | CC2 | CC3;
405 switch (GET_CODE (code))
408 case NE: return CC0 | CC2 | CC3;
415 switch (GET_CODE (code))
418 case NE: return CC0 | CC1 | CC3;
425 switch (GET_CODE (code))
428 case NE: return CC0 | CC1 | CC2;
435 switch (GET_CODE (code))
437 case EQ: return CC0 | CC2;
438 case NE: return CC1 | CC3;
/* Add-with-carry style mode: CC2/CC3 indicate carry.  */
445 switch (GET_CODE (code))
447 case LTU: return CC2 | CC3; /* carry */
448 case GEU: return CC0 | CC1; /* no carry */
/* Subtract-with-borrow style mode.  */
455 switch (GET_CODE (code))
457 case GTU: return CC0 | CC1; /* borrow */
458 case LEU: return CC2 | CC3; /* no borrow */
/* Unsigned comparison mode (CCU): CC1 = less, CC2 = greater.  */
465 switch (GET_CODE (code))
468 case NE: return CC1 | CC2 | CC3;
469 case LTU: return CC1;
470 case GTU: return CC2;
471 case LEU: return CC0 | CC1;
472 case GEU: return CC0 | CC2;
/* Register-swapped unsigned mode (CCUR): CC1/CC2 roles exchanged.  */
479 switch (GET_CODE (code))
482 case NE: return CC2 | CC1 | CC3;
483 case LTU: return CC2;
484 case GTU: return CC1;
485 case LEU: return CC0 | CC2;
486 case GEU: return CC0 | CC1;
493 switch (GET_CODE (code))
496 case NE: return CC1 | CC2 | CC3;
497 case LT: return CC1 | CC3;
499 case LE: return CC0 | CC1 | CC3;
500 case GE: return CC0 | CC2;
507 switch (GET_CODE (code))
510 case NE: return CC1 | CC2 | CC3;
512 case GT: return CC2 | CC3;
513 case LE: return CC0 | CC1;
514 case GE: return CC0 | CC2 | CC3;
/* Signed/FP mode with unordered support: CC3 signals unordered.  */
521 switch (GET_CODE (code))
524 case NE: return CC1 | CC2 | CC3;
527 case LE: return CC0 | CC1;
528 case GE: return CC0 | CC2;
529 case UNORDERED: return CC3;
530 case ORDERED: return CC0 | CC1 | CC2;
531 case UNEQ: return CC0 | CC3;
532 case UNLT: return CC1 | CC3;
533 case UNGT: return CC2 | CC3;
534 case UNLE: return CC0 | CC1 | CC3;
535 case UNGE: return CC0 | CC2 | CC3;
536 case LTGT: return CC1 | CC2;
/* Register-swapped variant of the previous mode.  */
543 switch (GET_CODE (code))
546 case NE: return CC2 | CC1 | CC3;
549 case LE: return CC0 | CC2;
550 case GE: return CC0 | CC1;
551 case UNORDERED: return CC3;
552 case ORDERED: return CC0 | CC2 | CC1;
553 case UNEQ: return CC0 | CC3;
554 case UNLT: return CC2 | CC3;
555 case UNGT: return CC1 | CC3;
556 case UNLE: return CC0 | CC2 | CC3;
557 case UNGE: return CC0 | CC1 | CC3;
558 case LTGT: return CC2 | CC1;

569 /* If INV is false, return assembler mnemonic string to implement
570 a branch specified by CODE. If INV is true, return mnemonic
571 for the corresponding inverted branch. */
574 s390_branch_condition_mnemonic (code, inv)
/* Mnemonic table indexed by the 4-bit condition mask computed above;
   entries 0 (never) and 15 (always) are NULL and rejected below.  */
578 static const char *const mnemonic[16] =
580 NULL, "o", "h", "nle",
581 "l", "nhe", "lh", "ne",
582 "e", "nlh", "he", "nl",
583 "le", "nh", "no", NULL
586 int mask = s390_branch_condition_mask (code);
/* Only masks 1..14 correspond to real conditional mnemonics.  */
591 if (mask < 1 || mask > 14)
594 return mnemonic[mask];
597 /* If OP is an integer constant of mode MODE with exactly one
598 HImode subpart unequal to DEF, return the number of that
599 subpart. As a special case, all HImode subparts of OP are
600 equal to DEF, return zero. Otherwise, return -1. */
/* NOTE(review): elided listing — loop bodies and the part-tracking
   logic are only partially visible in these four helpers.  */
603 s390_single_hi (op, mode, def)
605 enum machine_mode mode;
608 if (GET_CODE (op) == CONST_INT)
610 unsigned HOST_WIDE_INT value = 0;
611 int n_parts = GET_MODE_SIZE (mode) / 2;
/* Scan the constant 16 bits at a time, comparing each part to DEF.  */
614 for (i = 0; i < n_parts; i++)
617 value = (unsigned HOST_WIDE_INT) INTVAL (op);
621 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* Part numbering is big-endian: part 0 is the most significant.  */
630 return part == -1 ? 0 : (n_parts - 1 - part);
/* Wide constants arrive as VOIDmode CONST_DOUBLEs (low/high words).  */
633 else if (GET_CODE (op) == CONST_DOUBLE
634 && GET_MODE (op) == VOIDmode)
636 unsigned HOST_WIDE_INT value = 0;
637 int n_parts = GET_MODE_SIZE (mode) / 2;
640 for (i = 0; i < n_parts; i++)
643 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
644 else if (i == HOST_BITS_PER_WIDE_INT / 16)
645 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
649 if ((value & 0xffff) != (unsigned)(def & 0xffff))
658 return part == -1 ? 0 : (n_parts - 1 - part);

664 /* Extract the HImode part number PART from integer
665 constant OP of mode MODE. */
668 s390_extract_hi (op, mode, part)
670 enum machine_mode mode;
673 int n_parts = GET_MODE_SIZE (mode) / 2;
674 if (part < 0 || part >= n_parts)
/* Convert from big-endian part numbering to a shift count.  */
677 part = n_parts - 1 - part;
679 if (GET_CODE (op) == CONST_INT)
681 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
682 return ((value >> (16 * part)) & 0xffff);
684 else if (GET_CODE (op) == CONST_DOUBLE
685 && GET_MODE (op) == VOIDmode)
687 unsigned HOST_WIDE_INT value;
688 if (part < HOST_BITS_PER_WIDE_INT / 16)
689 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
691 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
692 part -= HOST_BITS_PER_WIDE_INT / 16;
694 return ((value >> (16 * part)) & 0xffff);

700 /* If OP is an integer constant of mode MODE with exactly one
701 QImode subpart unequal to DEF, return the number of that
702 subpart. As a special case, all QImode subparts of OP are
703 equal to DEF, return zero. Otherwise, return -1. */
/* Byte-granular twin of s390_single_hi above.  */
706 s390_single_qi (op, mode, def)
708 enum machine_mode mode;
711 if (GET_CODE (op) == CONST_INT)
713 unsigned HOST_WIDE_INT value = 0;
714 int n_parts = GET_MODE_SIZE (mode);
717 for (i = 0; i < n_parts; i++)
720 value = (unsigned HOST_WIDE_INT) INTVAL (op);
724 if ((value & 0xff) != (unsigned)(def & 0xff))
733 return part == -1 ? 0 : (n_parts - 1 - part);
736 else if (GET_CODE (op) == CONST_DOUBLE
737 && GET_MODE (op) == VOIDmode)
739 unsigned HOST_WIDE_INT value = 0;
740 int n_parts = GET_MODE_SIZE (mode);
743 for (i = 0; i < n_parts; i++)
746 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
747 else if (i == HOST_BITS_PER_WIDE_INT / 8)
748 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
752 if ((value & 0xff) != (unsigned)(def & 0xff))
761 return part == -1 ? 0 : (n_parts - 1 - part);

767 /* Extract the QImode part number PART from integer
768 constant OP of mode MODE. */
/* Byte-granular twin of s390_extract_hi above.  */
771 s390_extract_qi (op, mode, part)
773 enum machine_mode mode;
776 int n_parts = GET_MODE_SIZE (mode);
777 if (part < 0 || part >= n_parts)
780 part = n_parts - 1 - part;
782 if (GET_CODE (op) == CONST_INT)
784 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
785 return ((value >> (8 * part)) & 0xff);
787 else if (GET_CODE (op) == CONST_DOUBLE
788 && GET_MODE (op) == VOIDmode)
790 unsigned HOST_WIDE_INT value;
791 if (part < HOST_BITS_PER_WIDE_INT / 8)
792 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
794 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
795 part -= HOST_BITS_PER_WIDE_INT / 8;
797 return ((value >> (8 * part)) & 0xff);
804 /* Change optimizations to be performed, depending on the
807 LEVEL is the optimization level specified; 2 if `-O2' is
808 specified, 1 if `-O' is specified, and 0 if neither is specified.
810 SIZE is nonzero if `-Os' is specified and zero otherwise. */
/* NOTE(review): elided listing — the function boundary between
   optimization_options and the code that initializes the alias set
   below is not visible; the alias-set line likely belongs to a
   separate options/init function — confirm in full source.  */
813 optimization_options (level, size)
814 int level ATTRIBUTE_UNUSED;
815 int size ATTRIBUTE_UNUSED;
817 /* ??? There are apparently still problems with -fcaller-saves. */
818 flag_caller_saves = 0;

824 /* Acquire a unique set number for our register saves and restores. */
825 s390_sr_alias_set = new_alias_set ();

829 /* Map for smallest class containing reg regno. */
/* GPRs 0-15 (reg 0 is GENERAL_REGS, the rest ADDR_REGS since r0 cannot
   be used as an address base), FPRs 16-31, then special registers.  */
831 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
832 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
833 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
834 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
835 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
836 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
837 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
838 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
839 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
840 ADDR_REGS, NO_REGS, ADDR_REGS
844 /* Return true if OP a (const_int 0) operand.
845 OP is the current operation.
846 MODE is the current operation mode. */
/* NOTE(review): elided listing — parameter declarations and braces of
   these predicates are partially missing.  */
849 const0_operand (op, mode)
851 enum machine_mode mode;
853 return op == CONST0_RTX (mode);

856 /* Return true if OP is constant.
857 OP is the current operation.
858 MODE is the current operation mode. */
861 consttable_operand (op, mode)
863 enum machine_mode mode ATTRIBUTE_UNUSED;
865 return CONSTANT_P (op);

868 /* Return true if the mode of operand OP matches MODE.
869 If MODE is set to VOIDmode, set it to the mode of OP. */
872 check_mode (op, mode)
874 enum machine_mode *mode;
/* First call with VOIDmode latches OP's mode for later checks.  */
876 if (*mode == VOIDmode)
877 *mode = GET_MODE (op);
880 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)

886 /* Return true if OP a valid operand for the LARL instruction.
887 OP is the current operation.
888 MODE is the current operation mode. */
891 larl_operand (op, mode)
893 enum machine_mode mode;
895 if (! check_mode (op, &mode))
898 /* Allow labels and local symbols. */
899 if (GET_CODE (op) == LABEL_REF)
/* Under PIC, only local or constant-pool symbols are LARL-addressable.  */
901 if (GET_CODE (op) == SYMBOL_REF
902 && (!flag_pic || SYMBOL_REF_FLAG (op)
903 || CONSTANT_POOL_ADDRESS_P (op)))
906 /* Everything else must have a CONST, so strip it. */
907 if (GET_CODE (op) != CONST)
911 /* Allow adding *even* constants. */
/* LARL targets are halfword-aligned, hence the odd-offset rejection.  */
912 if (GET_CODE (op) == PLUS)
914 if (GET_CODE (XEXP (op, 1)) != CONST_INT
915 || (INTVAL (XEXP (op, 1)) & 1) != 0)
920 /* Labels and local symbols allowed here as well. */
921 if (GET_CODE (op) == LABEL_REF)
923 if (GET_CODE (op) == SYMBOL_REF
924 && (!flag_pic || SYMBOL_REF_FLAG (op)
925 || CONSTANT_POOL_ADDRESS_P (op)))
928 /* Now we must have a @GOTENT offset or @PLT stub. */
/* NOTE(review): 111/113 are magic UNSPEC codes for @GOTENT/@PLT in
   this port — defined elsewhere in the backend.  */
929 if (GET_CODE (op) == UNSPEC
930 && XINT (op, 1) == 111)
932 if (GET_CODE (op) == UNSPEC
933 && XINT (op, 1) == 113)
939 /* Helper routine to implement s_operand and s_imm_operand.
940 OP is the current operation.
941 MODE is the current operation mode.
942 ALLOW_IMMEDIATE specifies whether immediate operands should
943 be accepted or not. */
/* NOTE(review): elided listing — switch labels, braces and return
   statements are partially missing below.  */
946 general_s_operand (op, mode, allow_immediate)
948 enum machine_mode mode;
951 struct s390_address addr;
953 /* Call general_operand first, so that we don't have to
954 check for many special cases. */
955 if (!general_operand (op, mode))
958 /* Just like memory_operand, allow (subreg (mem ...))
961 && GET_CODE (op) == SUBREG
962 && GET_CODE (SUBREG_REG (op)) == MEM)
963 op = SUBREG_REG (op);
965 switch (GET_CODE (op))
967 /* Constants that we are sure will be forced to the
968 literal pool in reload are OK as s-operand. Note
969 that we cannot call s390_preferred_reload_class here
970 because it might not be known yet at this point
971 whether the current function is a leaf or not. */
974 if (!allow_immediate || reload_completed)
976 if (!legitimate_reload_constant_p (op))
982 /* Memory operands are OK unless they already use an
985 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
987 if (s390_decompose_address (XEXP (op, 0), &addr)

999 /* Return true if OP is a valid S-type operand.
1000 OP is the current operation.
1001 MODE is the current operation mode. */
1004 s_operand (op, mode)
1006 enum machine_mode mode;
1008 return general_s_operand (op, mode, 0);

1011 /* Return true if OP is a valid S-type operand or an immediate
1012 operand that can be addressed as S-type operand by forcing
1013 it into the literal pool.
1014 OP is the current operation.
1015 MODE is the current operation mode. */
1018 s_imm_operand (op, mode)
1020 enum machine_mode mode;
1022 return general_s_operand (op, mode, 1);

1025 /* Return true if OP is a valid operand for a 'Q' constraint.
1026 This differs from s_operand in that only memory operands
1027 without index register are accepted, nothing else. */
/* Function header for the 'Q' predicate is elided above this point.  */
1033 struct s390_address addr;
1035 if (GET_CODE (op) != MEM)
1038 if (!s390_decompose_address (XEXP (op, 0), &addr))

1047 /* Return the cost of an address rtx ADDR. */
1050 s390_address_cost (addr)
1053 struct s390_address ad;
1054 if (!s390_decompose_address (addr, &ad))
/* Base+index addresses cost one extra unit over plain base+disp.  */
1057 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1060 /* Return true if OP is a valid operand for the BRAS instruction.
1061 OP is the current operation.
1062 MODE is the current operation mode. */
/* NOTE(review): elided listing — braces and returns are partially
   missing in the three predicates below.  */
1065 bras_sym_operand (op, mode)
1067 enum machine_mode mode ATTRIBUTE_UNUSED;
1069 register enum rtx_code code = GET_CODE (op);
1071 /* Allow SYMBOL_REFs. */
1072 if (code == SYMBOL_REF)
1075 /* Allow @PLT stubs. */
/* 113 is this port's UNSPEC code for @PLT (see larl_operand).  */
1077 && GET_CODE (XEXP (op, 0)) == UNSPEC
1078 && XINT (XEXP (op, 0), 1) == 113)

1084 /* Return true if OP is a load multiple operation. It is known to be a
1085 PARALLEL and the first section will be tested.
1086 OP is the current operation.
1087 MODE is the current operation mode. */
1090 load_multiple_operation (op, mode)
1092 enum machine_mode mode ATTRIBUTE_UNUSED;
1094 int count = XVECLEN (op, 0);
1095 unsigned int dest_regno;
1100 /* Perform a quick check so we don't blow up below. */
1102 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1103 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1104 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1107 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1108 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1110 /* Check, is base, or base + displacement. */
1112 if (GET_CODE (src_addr) == REG)
1114 else if (GET_CODE (src_addr) == PLUS
1115 && GET_CODE (XEXP (src_addr, 0)) == REG
1116 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1118 off = INTVAL (XEXP (src_addr, 1));
1119 src_addr = XEXP (src_addr, 0);
/* Eliminable registers would change the offsets later, so reject.  */
1124 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Remaining SETs must load consecutive registers from consecutive
   word-sized memory slots off the same base.  */
1127 for (i = 1; i < count; i++)
1129 rtx elt = XVECEXP (op, 0, i);
1131 if (GET_CODE (elt) != SET
1132 || GET_CODE (SET_DEST (elt)) != REG
1133 || GET_MODE (SET_DEST (elt)) != Pmode
1134 || REGNO (SET_DEST (elt)) != dest_regno + i
1135 || GET_CODE (SET_SRC (elt)) != MEM
1136 || GET_MODE (SET_SRC (elt)) != Pmode
1137 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1138 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1139 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1140 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1141 != off + i * UNITS_PER_WORD)

1148 /* Return true if OP is a store multiple operation. It is known to be a
1149 PARALLEL and the first section will be tested.
1150 OP is the current operation.
1151 MODE is the current operation mode. */
/* Mirror image of load_multiple_operation with SRC/DEST swapped.  */
1154 store_multiple_operation (op, mode)
1156 enum machine_mode mode ATTRIBUTE_UNUSED;
1158 int count = XVECLEN (op, 0);
1159 unsigned int src_regno;
1163 /* Perform a quick check so we don't blow up below. */
1165 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1166 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1167 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1170 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1171 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1173 /* Check, is base, or base + displacement. */
1175 if (GET_CODE (dest_addr) == REG)
1177 else if (GET_CODE (dest_addr) == PLUS
1178 && GET_CODE (XEXP (dest_addr, 0)) == REG
1179 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1181 off = INTVAL (XEXP (dest_addr, 1));
1182 dest_addr = XEXP (dest_addr, 0);
1187 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
1190 for (i = 1; i < count; i++)
1192 rtx elt = XVECEXP (op, 0, i);
1194 if (GET_CODE (elt) != SET
1195 || GET_CODE (SET_SRC (elt)) != REG
1196 || GET_MODE (SET_SRC (elt)) != Pmode
1197 || REGNO (SET_SRC (elt)) != src_regno + i
1198 || GET_CODE (SET_DEST (elt)) != MEM
1199 || GET_MODE (SET_DEST (elt)) != Pmode
1200 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1201 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1202 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1203 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1204 != off + i * UNITS_PER_WORD)
1211 /* Return true if OP contains a symbol reference */
/* NOTE(review): elided listing — braces and some returns missing.  */
1214 symbolic_reference_mentioned_p (op)
1217 register const char *fmt;
1220 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Recurse over the rtx format string: 'E' = vector, 'e' = subexpr.  */
1223 fmt = GET_RTX_FORMAT (GET_CODE (op));
1224 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1230 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1231 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1235 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))

1243 /* Return true if OP is a legitimate general operand when
1244 generating PIC code. It is given that flag_pic is on
1245 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1248 legitimate_pic_operand_p (op)
1251 /* Accept all non-symbolic constants. */
1252 if (!SYMBOLIC_CONST (op))
1255 /* Reject everything else; must be handled
1256 via emit_pic_move. */

1260 /* Returns true if the constant value OP is a legitimate general operand.
1261 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1264 legitimate_constant_p (op)
1267 /* Accept all non-symbolic constants. */
1268 if (!SYMBOLIC_CONST (op))
1271 /* In the PIC case, symbolic constants must *not* be
1272 forced into the literal pool. We accept them here,
1273 so that they will be handled by emit_pic_move. */
1277 /* Even in the non-PIC case, we can accept immediate
1278 LARL operands here. */
1280 return larl_operand (op, VOIDmode);
1282 /* All remaining non-PIC symbolic constants are
1283 forced into the literal pool. */

1287 /* Returns true if the constant value OP is a legitimate general
1288 operand during and after reload. The difference to
1289 legitimate_constant_p is that this function will not accept
1290 a constant that would need to be forced to the literal pool
1291 before it can be used as operand. */
1294 legitimate_reload_constant_p (op)
1297 /* Accept l(g)hi operands. */
/* 'K' is the signed-16-bit immediate constraint on this port.  */
1298 if (GET_CODE (op) == CONST_INT
1299 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1302 /* Accept lliXX operands. */
1304 && s390_single_hi (op, DImode, 0) >= 0)
1307 /* Accept larl operands. */
1309 && larl_operand (op, VOIDmode))
1312 /* Everything else cannot be handled without reload. */
1316 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1317 return the class of reg to actually use. */
/* NOTE(review): elided listing — switch case labels, braces and some
   return statements are missing from view.  */
1320 s390_preferred_reload_class (op, class)
1322 enum reg_class class;
1324 /* This can happen if a floating point constant is being
1325 reloaded into an integer register. Leave well alone. */
1326 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1327 && class != FP_REGS)
1330 switch (GET_CODE (op))
1332 /* Constants we cannot reload must be forced into the
1333 literal pool. For constants we *could* handle directly,
1334 it might still be preferable to put them in the pool and
1335 use a memory-to-memory instruction.
1337 However, try to avoid needlessly allocating a literal
1338 pool in a routine that wouldn't otherwise need any.
1339 Heuristically, we assume that 64-bit leaf functions
1340 typically don't need a literal pool, all others do. */
1343 if (!legitimate_reload_constant_p (op))
1346 if (TARGET_64BIT && current_function_is_leaf)
1351 /* If a symbolic constant or a PLUS is reloaded,
1352 it is most likely being used as an address, so
1353 prefer ADDR_REGS. If 'class' is not a superset
1354 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1359 if (reg_class_subset_p (ADDR_REGS, class))

1371 /* Return the register class of a scratch register needed to
1372 load IN into a register of class CLASS in MODE.
1374 We need a temporary when loading a PLUS expression which
1375 is not a legitimate operand of the LOAD ADDRESS instruction. */
1378 s390_secondary_input_reload_class (class, mode, in)
1379 enum reg_class class ATTRIBUTE_UNUSED;
1380 enum machine_mode mode;
1383 if (s390_plus_operand (in, mode))

1389 /* Return true if OP is a PLUS that is not a legitimate
1390 operand for the LA instruction.
1391 OP is the current operation.
1392 MODE is the current operation mode. */
1395 s390_plus_operand (op, mode)
1397 enum machine_mode mode;
/* Only Pmode PLUS expressions can need the LA workaround.  */
1399 if (!check_mode (op, &mode) || mode != Pmode)
1402 if (GET_CODE (op) != PLUS)
1405 if (legitimate_la_operand_p (op))

1411 /* Generate code to load SRC, which is PLUS that is not a
1412 legitimate operand for the LA instruction, into TARGET.
1413 SCRATCH may be used as scratch register. */
1416 s390_expand_plus_operand (target, src, scratch)
1417 register rtx target;
1419 register rtx scratch;
1422 struct s390_address ad;
1424 /* src must be a PLUS; get its two operands. */
1425 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1428 /* Check if any of the two operands is already scheduled
1429 for replacement by reload. This can happen e.g. when
1430 float registers occur in an address. */
1431 sum1 = find_replacement (&XEXP (src, 0));
1432 sum2 = find_replacement (&XEXP (src, 1));
1433 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1435 /* If the address is already strictly valid, there's nothing to do. */
1436 if (!s390_decompose_address (src, &ad)
1437 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1438 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1440 /* Otherwise, one of the operands cannot be an address register;
1441 we reload its value into the scratch register. */
/* Hard regs 1..15 are the usable address registers (r0 excluded).  */
1442 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1444 emit_move_insn (scratch, sum1);
1447 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1449 emit_move_insn (scratch, sum2);
1453 /* According to the way these invalid addresses are generated
1454 in reload.c, it should never happen (at least on s390) that
1455 *neither* of the PLUS components, after find_replacements
1456 was applied, is an address register. */
1457 if (sum1 == scratch && sum2 == scratch)
1463 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1466 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1467 is only ever performed on addresses, so we can mark the
1468 sum as legitimate for LA in any case. */
1469 s390_load_address (target, src);
1473 /* Decompose a RTL expression ADDR for a memory address into
1474 its components, returned in OUT.
1476 Returns 0 if ADDR is not a valid memory address, nonzero
1477 otherwise. If OUT is NULL, don't return the components,
1478 but check for validity only.
1480 Note: Only addresses in canonical form are recognized.
1481 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1482 canonical form so that they will be recognized. */
/* Split the RTL address ADDR into base register, index register and
   displacement, storing the parts in *OUT when OUT is non-NULL.
   Returns zero when ADDR is not a valid S/390 address.
   NOTE(review): this excerpt is elided — statements between the
   numbered lines are not visible here.  */
1485 s390_decompose_address (addr, out)
1487 struct s390_address *out;
1489 rtx base = NULL_RTX;
1490 rtx indx = NULL_RTX;
1491 rtx disp = NULL_RTX;
1492 int pointer = FALSE;
1494 /* Decompose address into base + index + displacement. */
1496 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1499 else if (GET_CODE (addr) == PLUS)
1501 rtx op0 = XEXP (addr, 0);
1502 rtx op1 = XEXP (addr, 1);
1503 enum rtx_code code0 = GET_CODE (op0);
1504 enum rtx_code code1 = GET_CODE (op1);
1506 if (code0 == REG || code0 == UNSPEC)
1508 if (code1 == REG || code1 == UNSPEC)
1510 indx = op0; /* index + base */
1516 base = op0; /* base + displacement */
1521 else if (code0 == PLUS)
1523 indx = XEXP (op0, 0); /* index + base + disp */
1524 base = XEXP (op0, 1);
1535 disp = addr; /* displacement */
1538 /* Validate base register. */
/* An UNSPEC 101 wraps the literal-pool base register; unwrap it
   before the REG/mode checks below.  */
1541 if (GET_CODE (base) == UNSPEC)
1543 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1545 base = XVECEXP (base, 0, 0);
1549 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* NOTE(review): the elided branch body presumably sets `pointer'
   for these frame-related registers — confirm against full source.  */
1552 if (REGNO (base) == BASE_REGISTER
1553 || REGNO (base) == STACK_POINTER_REGNUM
1554 || REGNO (base) == FRAME_POINTER_REGNUM
1555 || ((reload_completed || reload_in_progress)
1556 && frame_pointer_needed
1557 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1558 || REGNO (base) == ARG_POINTER_REGNUM
1559 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1560 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1562 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1566 /* Validate index register. */
/* Same unwrapping and checks as for the base register above.  */
1569 if (GET_CODE (indx) == UNSPEC)
1571 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1573 indx = XVECEXP (indx, 0, 0)
1577 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1580 if (REGNO (indx) == BASE_REGISTER
1581 || REGNO (indx) == STACK_POINTER_REGNUM
1582 || REGNO (indx) == FRAME_POINTER_REGNUM
1583 || ((reload_completed || reload_in_progress)
1584 && frame_pointer_needed
1585 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1586 || REGNO (indx) == ARG_POINTER_REGNUM
1587 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1588 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1590 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1594 /* Validate displacement. */
1597 /* Allow integer constant in range. */
1598 if (GET_CODE (disp) == CONST_INT)
1600 /* If the argument pointer is involved, the displacement will change
1601 later anyway as the argument pointer gets eliminated. This could
1602 make a valid displacement invalid, but it is more likely to make
1603 an invalid displacement valid, because we sometimes access the
1604 register save area via negative offsets to the arg pointer.
1605 Thus we don't check the displacement for validity here. If after
1606 elimination the displacement turns out to be invalid after all,
1607 this is fixed up by reload in any case. */
1608 if ((base && REGNO (base) == ARG_POINTER_REGNUM)
1609 || (indx && REGNO (indx) == ARG_POINTER_REGNUM))
/* D-field of an S/390 address is an unsigned 12-bit value: 0..4095.  */
1612 else if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1616 /* In the small-PIC case, the linker converts @GOT12
1617 offsets to possible displacements. */
1618 else if (GET_CODE (disp) == CONST
1619 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1620 && XINT (XEXP (disp, 0), 1) == 110)
1628 /* Accept chunkfied literal pool symbol references. */
1629 else if (GET_CODE (disp) == CONST
1630 && GET_CODE (XEXP (disp, 0)) == MINUS
1631 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1632 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1637 /* Likewise if a constant offset is present. */
1638 else if (GET_CODE (disp) == CONST
1639 && GET_CODE (XEXP (disp, 0)) == PLUS
1640 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
1641 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
1642 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1643 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1648 /* We can convert literal pool addresses to
1649 displacements by basing them off the base register. */
1652 /* In some cases, we can accept an additional
1653 small constant offset. Split these off here. */
1655 unsigned int offset = 0;
1657 if (GET_CODE (disp) == CONST
1658 && GET_CODE (XEXP (disp, 0)) == PLUS
1659 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1661 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1662 disp = XEXP (XEXP (disp, 0), 0);
1665 /* Now we must have a literal pool address. */
1666 if (GET_CODE (disp) != SYMBOL_REF
1667 || !CONSTANT_POOL_ADDRESS_P (disp))
1670 /* In 64-bit PIC mode we cannot accept symbolic
1671 constants in the constant pool. */
1672 if (TARGET_64BIT && flag_pic
1673 && SYMBOLIC_CONST (get_pool_constant (disp)))
1676 /* If we have an offset, make sure it does not
1677 exceed the size of the constant pool entry. */
1678 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1681 /* Either base or index must be free to
1682 hold the base register. */
1686 /* Convert the address. */
1688 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1690 base = gen_rtx_REG (Pmode, BASE_REGISTER);
/* Re-express the pool symbol as UNSPEC 100 (literal-pool-relative),
   plus the split-off OFFSET if any.  */
1692 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1693 disp = gen_rtx_CONST (Pmode, disp);
1696 disp = plus_constant (disp, offset);
1710 out->pointer = pointer;
1716 /* Return nonzero if ADDR is a valid memory address.
1717 STRICT specifies whether strict register checking applies. */
/* Return nonzero if ADDR is a valid memory address for MODE.
   Uses strict register checks when STRICT is set, nonstrict
   checks otherwise (elided control flow selects between them).  */
1720 legitimate_address_p (mode, addr, strict)
1721 enum machine_mode mode ATTRIBUTE_UNUSED;
1725 struct s390_address ad;
1726 if (!s390_decompose_address (addr, &ad))
1731 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1733 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
1738 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1740 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1747 /* Return 1 if OP is a valid operand for the LA instruction.
1748 In 31-bit, we need to prove that the result is used as an
1749 address, as LA performs only a 31-bit addition. */
/* Return 1 if OP may be computed with LOAD ADDRESS: always on 64-bit,
   otherwise only when the decomposed address is known to be a pointer
   (LA performs a 31-bit addition only).  */
1752 legitimate_la_operand_p (op)
1755 struct s390_address addr;
1756 if (!s390_decompose_address (op, &addr))
1759 if (TARGET_64BIT || addr.pointer)
1765 /* Return 1 if OP is a valid operand for the LA instruction,
1766 and we prefer to use LA over addition to compute it.
1767 If STRICT is true, only accept operands that will never
1768 change to something we cannot recognize as preferred. */
/* Return 1 if OP is valid for LA and LA is the preferred way to
   compute it.  Beyond validity, prefers LA when either component
   register is marked REG_POINTER.  */
1771 preferred_la_operand_p (op, strict)
1775 struct s390_address addr;
1776 if (!s390_decompose_address (op, &addr))
1779 if (!TARGET_64BIT && !addr.pointer)
1786 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
1787 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
1793 /* Emit a forced load-address operation to load SRC into DST.
1794 This will use the LOAD ADDRESS instruction even in situations
1795 where legitimate_la_operand_p (SRC) returns false. */
/* Load address SRC into DST, forcing use of LOAD ADDRESS even where
   legitimate_la_operand_p (SRC) is false: a plain move where legal,
   otherwise the force_la_31 pattern (31-bit case).  */
1798 s390_load_address (dst, src)
1803 emit_move_insn (dst, src);
1805 emit_insn (gen_force_la_31 (dst, src));
1808 /* Return a legitimate reference for ORIG (an address) using the
1809 register REG. If REG is 0, a new pseudo is generated.
1811 There are two types of references that must be handled:
1813 1. Global data references must load the address from the GOT, via
1814 the PIC reg. An insn is emitted to do this load, and the reg is
1817 2. Static data references, constant pool addresses, and code labels
1818 compute the address as an offset from the GOT, whose base is in
1819 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1820 differentiate them from global data objects. The returned
1821 address is the PIC reg + an unspec constant.
1823 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1824 reg also appears in the address. */
/* Return a PIC-legitimate reference for address ORIG, using REG as a
   scratch/result register (a fresh pseudo is created when REG is 0).
   Local symbols go via LARL or the literal pool; global symbols are
   loaded from the GOT (@GOT12 / @GOTENT / @GOT variants).
   NOTE(review): elided excerpt — surrounding declarations (addr, new,
   base) and several branches are not visible here.  */
1827 legitimize_pic_address (orig, reg)
1835 if (GET_CODE (addr) == LABEL_REF
1836 || (GET_CODE (addr) == SYMBOL_REF
1837 && (SYMBOL_REF_FLAG (addr)
1838 || CONSTANT_POOL_ADDRESS_P (addr))))
1840 /* This is a local symbol. */
1843 /* Access local symbols PC-relative via LARL.
1844 This is the same as in the non-PIC case, so it is
1845 handled automatically ... */
1849 /* Access local symbols relative to the literal pool. */
1851 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* UNSPEC 100 = literal-pool-relative reference; force it into the
   constant pool and load it, then add the pool base (UNSPEC 101).  */
1853 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1854 addr = gen_rtx_CONST (SImode, addr);
1855 addr = force_const_mem (SImode, addr);
1856 emit_move_insn (temp, addr);
1858 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1859 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1860 new = gen_rtx_PLUS (Pmode, base, temp);
1864 emit_move_insn (reg, new);
1869 else if (GET_CODE (addr) == SYMBOL_REF)
1872 reg = gen_reg_rtx (Pmode);
1876 /* Assume GOT offset < 4k. This is handled the same way
1877 in both 31- and 64-bit code (@GOT12). */
1879 if (reload_in_progress || reload_completed)
1880 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 110 = @GOT12: small GOT offset used directly as a
   displacement off the PIC register.  */
1882 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1883 new = gen_rtx_CONST (Pmode, new);
1884 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1885 new = gen_rtx_MEM (Pmode, new);
1886 RTX_UNCHANGING_P (new) = 1;
1887 emit_move_insn (reg, new);
1890 else if (TARGET_64BIT)
1892 /* If the GOT offset might be >= 4k, we determine the position
1893 of the GOT entry via a PC-relative LARL (@GOTENT). */
1895 rtx temp = gen_reg_rtx (Pmode);
1897 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1898 new = gen_rtx_CONST (Pmode, new);
1899 emit_move_insn (temp, new);
1901 new = gen_rtx_MEM (Pmode, temp);
1902 RTX_UNCHANGING_P (new) = 1;
1903 emit_move_insn (reg, new);
1908 /* If the GOT offset might be >= 4k, we have to load it
1909 from the literal pool (@GOT). */
1911 rtx temp = gen_reg_rtx (Pmode);
1913 if (reload_in_progress || reload_completed)
1914 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 112 = @GOT: full GOT offset kept in the literal pool.  */
1916 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
1917 addr = gen_rtx_CONST (SImode, addr);
1918 addr = force_const_mem (SImode, addr);
1919 emit_move_insn (temp, addr);
1921 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
1922 new = gen_rtx_MEM (Pmode, new);
1923 RTX_UNCHANGING_P (new) = 1;
1924 emit_move_insn (reg, new);
/* Handle CONST wrappers: UNSPECs that escaped the literal pool, and
   symbol-plus-offset forms.  */
1930 if (GET_CODE (addr) == CONST)
1932 addr = XEXP (addr, 0);
1933 if (GET_CODE (addr) == UNSPEC)
1935 if (XVECLEN (addr, 0) != 1)
1937 switch (XINT (addr, 1))
1939 /* If someone moved an @GOT or lt-relative UNSPEC
1940 out of the literal pool, force them back in. */
1944 new = force_const_mem (SImode, orig);
1947 /* @GOTENT is OK as is. */
1951 /* @PLT is OK as is on 64-bit, must be converted to
1952 lt-relative PLT on 31-bit. */
1956 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* UNSPEC 114 = literal-pool-relative @PLT reference.  */
1958 addr = XVECEXP (addr, 0, 0);
1959 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
1960 addr = gen_rtx_CONST (SImode, addr);
1961 addr = force_const_mem (SImode, addr);
1962 emit_move_insn (temp, addr);
1964 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1965 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1966 new = gen_rtx_PLUS (Pmode, base, temp);
1970 emit_move_insn (reg, new);
1976 /* Everything else cannot happen. */
1981 else if (GET_CODE (addr) != PLUS)
1984 if (GET_CODE (addr) == PLUS)
1986 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1987 /* Check first to see if this is a constant offset
1988 from a local symbol reference. */
1989 if ((GET_CODE (op0) == LABEL_REF
1990 || (GET_CODE (op0) == SYMBOL_REF
1991 && (SYMBOL_REF_FLAG (op0)
1992 || CONSTANT_POOL_ADDRESS_P (op0))))
1993 && GET_CODE (op1) == CONST_INT)
1997 if (INTVAL (op1) & 1)
1999 /* LARL can't handle odd offsets, so emit a
2000 pair of LARL and LA. */
2001 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Offsets outside the LA displacement range are rounded down to an
   even value for LARL; the residue is added afterwards.  */
2003 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
2005 int even = INTVAL (op1) - 1;
2006 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2007 op0 = gen_rtx_CONST (Pmode, op0);
2011 emit_move_insn (temp, op0);
2012 new = gen_rtx_PLUS (Pmode, temp, op1);
2016 emit_move_insn (reg, new);
2022 /* If the offset is even, we can just use LARL.
2023 This will happen automatically. */
2028 /* Access local symbols relative to the literal pool. */
2030 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2032 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
2033 addr = gen_rtx_PLUS (SImode, addr, op1);
2034 addr = gen_rtx_CONST (SImode, addr);
2035 addr = force_const_mem (SImode, addr);
2036 emit_move_insn (temp, addr);
2038 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2039 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2040 new = gen_rtx_PLUS (Pmode, base, temp);
2044 emit_move_insn (reg, new);
2050 /* Now, check whether it is an LT-relative symbol plus offset
2051 that was pulled out of the literal pool. Force it back in. */
2053 else if (GET_CODE (op0) == UNSPEC
2054 && GET_CODE (op1) == CONST_INT)
2056 if (XVECLEN (op0, 0) != 1)
2058 if (XINT (op0, 1) != 100)
2061 new = force_const_mem (SImode, orig)
2064 /* Otherwise, compute the sum. */
/* Recursive case: legitimize each operand, then re-associate so any
   remaining constant ends up as the outer addend.  */
2067 base = legitimize_pic_address (XEXP (addr, 0), reg);
2068 new = legitimize_pic_address (XEXP (addr, 1),
2069 base == reg ? NULL_RTX : reg);
2070 if (GET_CODE (new) == CONST_INT)
2071 new = plus_constant (base, INTVAL (new));
2074 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2076 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2077 new = XEXP (new, 1);
2079 new = gen_rtx_PLUS (Pmode, base, new);
2082 if (GET_CODE (new) == CONST)
2083 new = XEXP (new, 0);
2084 new = force_operand (new, 0);
2091 /* Emit insns to move operands[1] into operands[0]. */
/* Emit insns to move operands[1] into operands[0] under PIC,
   legitimizing the source address first.  A memory destination forces
   the symbolic source into a register.  */
2094 emit_pic_move (operands, mode)
2096 enum machine_mode mode ATTRIBUTE_UNUSED;
2098 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2100 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2101 operands[1] = force_reg (Pmode, operands[1]);
2103 operands[1] = legitimize_pic_address (operands[1], temp);
2106 /* Try machine-dependent ways of modifying an illegitimate address X
2107 to be legitimate. If we find one, return the new, valid address.
2109 OLDX is the address as it was before break_out_memory_refs was called.
2110 In some cases it is useful to look at this to decide what needs to be done.
2112 MODE is the mode of the operand pointed to by X.
2114 When -fpic is used, special handling is needed for symbolic references.
2115 See comments by legitimize_pic_address for details. */
/* Machine-dependent legitimization of address X for MODE; returns a
   valid replacement address.  PIC symbolic references are routed
   through legitimize_pic_address; large displacements are split into
   a CSE-able multiple of 4K plus a small remainder.  */
2118 legitimize_address (x, oldx, mode)
2120 register rtx oldx ATTRIBUTE_UNUSED;
2121 enum machine_mode mode ATTRIBUTE_UNUSED;
2123 rtx constant_term = const0_rtx;
2127 if (SYMBOLIC_CONST (x)
2128 || (GET_CODE (x) == PLUS
2129 && (SYMBOLIC_CONST (XEXP (x, 0))
2130 || SYMBOLIC_CONST (XEXP (x, 1)))))
2131 x = legitimize_pic_address (x, 0);
2133 if (legitimate_address_p (mode, x, FALSE))
2137 x = eliminate_constant_term (x, &constant_term);
2139 /* Optimize loading of large displacements by splitting them
2140 into the multiple of 4K and the rest; this allows the
2141 former to be CSE'd if possible.
2143 Don't do this if the displacement is added to a register
2144 pointing into the stack frame, as the offsets will
2145 change later anyway. */
2147 if (GET_CODE (constant_term) == CONST_INT
2148 && (INTVAL (constant_term) < 0
2149 || INTVAL (constant_term) >= 4096)
2150 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower = low 12 bits (a valid displacement); upper = remainder,
   recovered via XOR so the split is exact for negatives too.  */
2152 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2153 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2155 rtx temp = gen_reg_rtx (Pmode);
2156 rtx val = force_operand (GEN_INT (upper), temp);
2158 emit_move_insn (temp, val);
2160 x = gen_rtx_PLUS (Pmode, x, temp);
2161 constant_term = GEN_INT (lower);
/* Force the non-register side of a PLUS into a register so the
   result is a base+index form.  */
2164 if (GET_CODE (x) == PLUS)
2166 if (GET_CODE (XEXP (x, 0)) == REG)
2168 register rtx temp = gen_reg_rtx (Pmode);
2169 register rtx val = force_operand (XEXP (x, 1), temp);
2171 emit_move_insn (temp, val);
2173 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2176 else if (GET_CODE (XEXP (x, 1)) == REG)
2178 register rtx temp = gen_reg_rtx (Pmode);
2179 register rtx val = force_operand (XEXP (x, 0), temp);
2181 emit_move_insn (temp, val);
2183 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2187 if (constant_term != const0_rtx)
2188 x = gen_rtx_PLUS (Pmode, x, constant_term);
2193 /* Emit code to move LEN bytes from DST to SRC. */
/* Emit code to copy LEN bytes from SRC to DST.  Three strategies:
   a single MVC for constant LEN <= 256, MVCLE when available,
   otherwise a 256-byte-block loop with a remainder MVC.
   NOTE(review): comment at the original declaration says "from DST
   to SRC" — the code below copies src -> dst.  */
2196 s390_expand_movstr (dst, src, len)
2201 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2202 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2203 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2204 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Case 1: small constant length — one MVC with length-1 encoding.  */
2207 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2209 if (INTVAL (len) > 0)
2210 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE — address in the high part, length in the low part
   of each even/odd register pair.  */
2213 else if (TARGET_MVCLE)
2215 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2216 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2217 rtx reg0 = gen_reg_rtx (double_mode);
2218 rtx reg1 = gen_reg_rtx (double_mode);
2220 emit_move_insn (gen_highpart (single_mode, reg0),
2221 force_operand (XEXP (dst, 0), NULL_RTX));
2222 emit_move_insn (gen_highpart (single_mode, reg1),
2223 force_operand (XEXP (src, 0), NULL_RTX));
2225 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2226 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2228 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* Case 3: runtime length — loop over 256-byte blocks, then copy the
   remaining (count mod 256) bytes.  */
2233 rtx dst_addr, src_addr, count, blocks, temp;
2234 rtx end_label = gen_label_rtx ();
2235 enum machine_mode mode;
2238 mode = GET_MODE (len);
2239 if (mode == VOIDmode)
2242 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2246 dst_addr = gen_reg_rtx (Pmode);
2247 src_addr = gen_reg_rtx (Pmode);
2248 count = gen_reg_rtx (mode);
2249 blocks = gen_reg_rtx (mode);
2251 convert_move (count, len, 1);
2252 emit_cmp_and_jump_insns (count, const0_rtx,
2253 EQ, NULL_RTX, mode, 1, end_label);
2255 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2256 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2257 dst = change_address (dst, VOIDmode, dst_addr);
2258 src = change_address (src, VOIDmode, src_addr);
/* blocks = (count - 1) >> 8, i.e. number of full 256-byte chunks.  */
2260 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2262 emit_move_insn (count, temp);
2264 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2266 emit_move_insn (blocks, temp);
2268 expand_start_loop (1);
2269 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2270 make_tree (type, blocks),
2271 make_tree (type, const0_rtx)));
2273 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2274 s390_load_address (dst_addr,
2275 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2276 s390_load_address (src_addr,
2277 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2279 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2281 emit_move_insn (blocks, temp);
2285 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2286 emit_label (end_label);
2290 /* Emit code to clear LEN bytes at DST. */
/* Emit code to clear LEN bytes at DST.  Mirrors s390_expand_movstr:
   XC for small constant lengths, MVCLE with a zero-length source,
   otherwise a 256-byte-block loop plus remainder.  */
2293 s390_expand_clrstr (dst, len)
2297 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2298 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2299 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2300 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2303 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2305 if (INTVAL (len) > 0)
2306 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2309 else if (TARGET_MVCLE)
2311 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2312 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2313 rtx reg0 = gen_reg_rtx (double_mode);
2314 rtx reg1 = gen_reg_rtx (double_mode);
2316 emit_move_insn (gen_highpart (single_mode, reg0),
2317 force_operand (XEXP (dst, 0), NULL_RTX));
2318 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* Source pair is all-zero: address 0, length 0 — MVCLE pads the
   destination with the (zero) pad byte.  */
2320 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2321 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2323 emit_insn ((*gen_long) (reg0, reg1, reg0));
2328 rtx dst_addr, src_addr, count, blocks, temp;
2329 rtx end_label = gen_label_rtx ();
2330 enum machine_mode mode;
2333 mode = GET_MODE (len);
2334 if (mode == VOIDmode)
2337 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2341 dst_addr = gen_reg_rtx (Pmode);
2342 src_addr = gen_reg_rtx (Pmode);
2343 count = gen_reg_rtx (mode);
2344 blocks = gen_reg_rtx (mode);
2346 convert_move (count, len, 1);
2347 emit_cmp_and_jump_insns (count, const0_rtx,
2348 EQ, NULL_RTX, mode, 1, end_label);
2350 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2351 dst = change_address (dst, VOIDmode, dst_addr);
/* blocks = (count - 1) >> 8: number of full 256-byte chunks.  */
2353 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2355 emit_move_insn (count, temp);
2357 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2359 emit_move_insn (blocks, temp);
2361 expand_start_loop (1);
2362 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2363 make_tree (type, blocks),
2364 make_tree (type, const0_rtx)));
2366 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2367 s390_load_address (dst_addr,
2368 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2370 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2372 emit_move_insn (blocks, temp);
2376 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2377 emit_label (end_label);
2381 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2382 and return the result in TARGET. */
/* Emit code comparing LEN bytes at OP0 and OP1, leaving the result in
   TARGET.  Like the movstr/clrstr expanders, but the block loop also
   exits early (via a CC-register test) as soon as a 256-byte CLC
   finds a difference.  */
2385 s390_expand_cmpstr (target, op0, op1, len)
2391 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2392 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2393 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2394 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
2395 rtx (*gen_result) PARAMS ((rtx)) =
2396 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
2398 op0 = protect_from_queue (op0, 0);
2399 op1 = protect_from_queue (op1, 0);
2400 len = protect_from_queue (len, 0);
2402 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2404 if (INTVAL (len) > 0)
2406 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2407 emit_insn ((*gen_result) (target));
/* Zero-length compare: equal by definition.  */
2410 emit_move_insn (target, const0_rtx);
2413 else if (TARGET_MVCLE)
2415 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2416 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2417 rtx reg0 = gen_reg_rtx (double_mode);
2418 rtx reg1 = gen_reg_rtx (double_mode);
2420 emit_move_insn (gen_highpart (single_mode, reg0),
2421 force_operand (XEXP (op0, 0), NULL_RTX));
2422 emit_move_insn (gen_highpart (single_mode, reg1),
2423 force_operand (XEXP (op1, 0), NULL_RTX));
2425 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2426 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2428 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2429 emit_insn ((*gen_result) (target));
2434 rtx addr0, addr1, count, blocks, temp;
2435 rtx end_label = gen_label_rtx ();
2436 enum machine_mode mode;
2439 mode = GET_MODE (len);
2440 if (mode == VOIDmode)
2443 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2447 addr0 = gen_reg_rtx (Pmode);
2448 addr1 = gen_reg_rtx (Pmode);
2449 count = gen_reg_rtx (mode);
2450 blocks = gen_reg_rtx (mode);
2452 convert_move (count, len, 1);
2453 emit_cmp_and_jump_insns (count, const0_rtx,
2454 EQ, NULL_RTX, mode, 1, end_label);
2456 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
2457 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
2458 op0 = change_address (op0, VOIDmode, addr0);
2459 op1 = change_address (op1, VOIDmode, addr1);
2461 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2463 emit_move_insn (count, temp);
2465 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2467 emit_move_insn (blocks, temp);
2469 expand_start_loop (1);
2470 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2471 make_tree (type, blocks),
2472 make_tree (type, const0_rtx)));
2474 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump on CC (register 33, CCSmode): leave
   the loop as soon as a block compares unequal.  */
2475 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
2476 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
2477 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
2478 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
2479 emit_jump_insn (temp);
2481 s390_load_address (addr0,
2482 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
2483 s390_load_address (addr1,
2484 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
2486 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2488 emit_move_insn (blocks, temp);
2492 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
2493 emit_label (end_label);
2495 emit_insn ((*gen_result) (target));
2499 /* In the name of slightly smaller debug output, and to cater to
2500 general assembler lossage, recognize various UNSPEC sequences
2501 and turn them back into a direct symbol reference. */
/* Recognize GOT-access patterns in ORIG_X and reduce them back to the
   plain symbol they reference (for debug output): PIC-reg + @GOT12
   (UNSPEC 110) and @GOTENT (UNSPEC 111) loads.  */
2504 s390_simplify_dwarf_addr (orig_x)
2509 if (GET_CODE (x) != MEM)
2513 if (GET_CODE (x) == PLUS
2514 && GET_CODE (XEXP (x, 1)) == CONST
2515 && GET_CODE (XEXP (x, 0)) == REG
2516 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2518 y = XEXP (XEXP (x, 1), 0);
2519 if (GET_CODE (y) == UNSPEC
2520 && XINT (y, 1) == 110)
2521 return XVECEXP (y, 0, 0);
2525 if (GET_CODE (x) == CONST)
2528 if (GET_CODE (y) == UNSPEC
2529 && XINT (y, 1) == 111)
2530 return XVECEXP (y, 0, 0);
2537 /* Output symbolic constant X in assembler syntax to
2538 stdio stream FILE. */
/* Write symbolic constant X to FILE in assembler syntax, recursing
   through CONST/PLUS/MINUS and translating the target UNSPEC codes
   into relocation suffixes (@GOT12, @GOTENT, @GOT, @PLT, and
   literal-pool-relative .LTn forms).  */
2541 s390_output_symbolic_const (file, x)
2545 switch (GET_CODE (x))
2550 s390_output_symbolic_const (file, XEXP (x, 0));
2554 s390_output_symbolic_const (file, XEXP (x, 0));
2555 fprintf (file, "+");
2556 s390_output_symbolic_const (file, XEXP (x, 1));
2560 s390_output_symbolic_const (file, XEXP (x, 0));
2561 fprintf (file, "-");
2562 s390_output_symbolic_const (file, XEXP (x, 1));
2569 output_addr_const (file, x);
2573 if (XVECLEN (x, 0) != 1)
2574 output_operand_lossage ("invalid UNSPEC as operand (1)");
2575 switch (XINT (x, 1))
/* NOTE(review): case labels between the UNSPEC branches are elided;
   codes correspond to those built in legitimize_pic_address.  */
2579 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2580 fprintf (file, "-.LT%d", current_function_funcdef_no);
2583 fprintf (file, ".LT%d-", current_function_funcdef_no);
2584 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2587 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2588 fprintf (file, "@GOT12");
2591 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2592 fprintf (file, "@GOTENT");
2595 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2596 fprintf (file, "@GOT");
2599 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2600 fprintf (file, "@PLT");
2603 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2604 fprintf (file, "@PLT-.LT%d", current_function_funcdef_no);
2607 output_operand_lossage ("invalid UNSPEC as operand (2)");
2613 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2618 /* Output address operand ADDR in assembler syntax to
2619 stdio stream FILE. */
/* Write address operand ADDR to FILE in assembler syntax as
   d(x,b) / d(b) / d, after strict decomposition.  */
2622 print_operand_address (file, addr)
2626 struct s390_address ad;
2628 if (!s390_decompose_address (addr, &ad)
2629 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2630 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2631 output_operand_lossage ("Cannot decompose address.");
2634 s390_output_symbolic_const (file, ad.disp);
/* No displacement part: print an explicit 0.  */
2636 fprintf (file, "0");
2638 if (ad.base && ad.indx)
2639 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2640 reg_names[REGNO (ad.base)]);
2642 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2645 /* Output operand X in assembler syntax to stdio stream FILE.
2646 CODE specified the format flag. The following format flags
2649 'C': print opcode suffix for branch condition.
2650 'D': print opcode suffix for inverse branch condition.
2651 'O': print only the displacement of a memory reference.
2652 'R': print only the base register of a memory reference.
2653 'N': print the second word of a DImode operand.
2654 'M': print the second word of a TImode operand.
2656 'b': print integer X as if it's an unsigned byte.
2657 'x': print integer X as if it's an unsigned word.
2658 'h': print integer X as if it's a signed word. */
/* Write operand X to FILE under format flag CODE (see the flag table
   in the preceding comment: C/D branch suffixes, O/R memory parts,
   N/M second words, b/x/h integer truncations).  */
2661 print_operand (file, x, code)
2669 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2673 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
2678 struct s390_address ad;
/* 'O': displacement only; operand must decompose without index.  */
2680 if (GET_CODE (x) != MEM
2681 || !s390_decompose_address (XEXP (x, 0), &ad)
2682 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2687 s390_output_symbolic_const (file, ad.disp);
2689 fprintf (file, "0");
2695 struct s390_address ad;
/* 'R': base register only.  */
2697 if (GET_CODE (x) != MEM
2698 || !s390_decompose_address (XEXP (x, 0), &ad)
2699 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2704 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2706 fprintf (file, "0");
/* 'N': second word of a DImode operand (reg+1 or mem+4).  */
2711 if (GET_CODE (x) == REG)
2712 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2713 else if (GET_CODE (x) == MEM)
2714 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand (reg+1 or mem+8).  */
2720 if (GET_CODE (x) == REG)
2721 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2722 else if (GET_CODE (x) == MEM)
2723 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
2729 switch (GET_CODE (x))
2732 fprintf (file, "%s", reg_names[REGNO (x)]);
2736 output_address (XEXP (x, 0));
2743 s390_output_symbolic_const (file, x);
2748 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2749 else if (code == 'x')
2750 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
/* 'h': low 16 bits sign-extended via the xor/sub trick.  */
2751 else if (code == 'h')
2752 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2754 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2758 if (GET_MODE (x) != VOIDmode)
2761 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2762 else if (code == 'x')
2763 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2764 else if (code == 'h')
2765 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2771 fatal_insn ("UNKNOWN in print_operand !?", x);
2776 /* Target hook for assembling integer objects. We need to define it
2777 here to work a round a bug in some versions of GAS, which couldn't
2778 handle values smaller than INT_MIN when printed in decimal. */
/* TARGET_ASM_INTEGER hook.  Emits aligned 8-byte constants below
   INT_MIN in hex to dodge a GAS bug with very negative decimals;
   everything else goes through the default handler.  */
2781 s390_assemble_integer (x, size, aligned_p)
2786 if (size == 8 && aligned_p
2787 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2789 fputs ("\t.quad\t", asm_out_file);
2790 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2791 putc ('\n', asm_out_file);
2794 return default_assemble_integer (x, size, aligned_p);
2798 #define DEBUG_SCHED 0
2800 /* Returns true if register REGNO is used for forming
2801 a memory address in expression X. */
/* Return true if hard register REGNO participates in forming a memory
   address anywhere inside expression X; jump targets (SET with PC
   destination) count as address uses too.  Recurses over the RTL
   via the format string.  */
2804 reg_used_in_mem_p (regno, x)
2808 enum rtx_code code = GET_CODE (x);
2814 if (refers_to_regno_p (regno, regno+1,
2818 else if (code == SET
2819 && GET_CODE (SET_DEST (x)) == PC)
2821 if (refers_to_regno_p (regno, regno+1,
2826 fmt = GET_RTX_FORMAT (code);
2827 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2830 && reg_used_in_mem_p (regno, XEXP (x, i)))
2833 else if (fmt[i] == 'E')
2834 for (j = 0; j < XVECLEN (x, i); j++)
2835 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2841 /* Returns true if expression DEP_RTX sets an address register
2842 used by instruction INSN to address memory. */
/* Return true if DEP_RTX sets a register that INSN uses to address
   memory.  Strips STRICT_LOW_PART/SUBREG wrappers from the SET
   destination; LA-type consumers are checked against the SET source,
   memory-type consumers against the whole pattern.  */
2845 addr_generation_dependency_p (dep_rtx, insn)
2851 if (GET_CODE (dep_rtx) == SET)
2853 target = SET_DEST (dep_rtx);
2854 if (GET_CODE (target) == STRICT_LOW_PART)
2855 target = XEXP (target, 0);
2856 while (GET_CODE (target) == SUBREG)
2857 target = SUBREG_REG (target);
2859 if (GET_CODE (target) == REG)
2861 int regno = REGNO (target);
2863 if (get_attr_type (insn) == TYPE_LA)
2865 pat = PATTERN (insn);
/* An LA may be a two-element PARALLEL; look at its first SET.  */
2866 if (GET_CODE (pat) == PARALLEL)
2868 if (XVECLEN (pat, 0) != 2)
2870 pat = XVECEXP (pat, 0, 0);
2872 if (GET_CODE (pat) == SET)
2873 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
2877 else if (get_attr_atype (insn) == ATYPE_MEM)
2878 return reg_used_in_mem_p (regno, PATTERN (insn));
2885 /* Return the modified cost of the dependency of instruction INSN
2886 on instruction DEP_INSN through the link LINK. COST is the
2887 default cost of that dependency.
2889 Data dependencies are all handled without delay. However, if a
2890 register is modified and subsequently used as base or index
2891 register of a memory reference, at least 4 cycles need to pass
2892 between setting and using the register to avoid pipeline stalls.
2893 An exception is the LA instruction. An address generated by LA can
2894 be used by introducing only a one cycle stall on the pipeline. */
/* TARGET_SCHED_ADJUST_COST hook: raise COST for address-generation
   dependencies — +1 cycle when the producer is an LA, +4 otherwise
   (see the comment block above the original definition).  */
2897 s390_adjust_cost (insn, link, dep_insn, cost)
2906 /* If the dependence is an anti-dependence, there is no cost. For an
2907 output dependence, there is sometimes a cost, but it doesn't seem
2908 worth handling those few cases. */
2910 if (REG_NOTE_KIND (link) != 0)
2913 /* If we can't recognize the insns, we can't really do anything. */
2914 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2917 dep_rtx = PATTERN (dep_insn);
2919 if (GET_CODE (dep_rtx) == SET)
2921 if (addr_generation_dependency_p (dep_rtx, insn))
2923 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
/* Diagnostic output, active only when DEBUG_SCHED is set.  */
2926 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2928 debug_rtx (dep_insn);
2933 else if (GET_CODE (dep_rtx) == PARALLEL)
2935 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2937 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2940 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2943 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2945 debug_rtx (dep_insn);
2956 /* A C statement (sans semicolon) to update the integer scheduling priority
2957 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2958 increase the priority to execute INSN later. Do not define this macro if
2959 you do not need to adjust the scheduling priorities of insns.
2961 A LA instruction may be scheduled later, since the pipeline bypasses the
2962 calculated value. */
2965 s390_adjust_priority (insn, priority)
2966 rtx insn ATTRIBUTE_UNUSED;
/* Non-insns and bare USE/CLOBBER patterns keep their priority
   unchanged.  */
2969 if (! INSN_P (insn))
2972 if (GET_CODE (PATTERN (insn)) == USE
2973 || GET_CODE (PATTERN (insn)) == CLOBBER)
/* NOTE(review): the switch's case labels are missing from this
   listing; the visible clamp presumably applies to LA-type insns
   (deferring them), the 0x7fffffff assignment to epilogue LM —
   confirm against the full source.  */
2976 switch (get_attr_type (insn))
2982 if (priority >= 0 && priority < 0x01000000)
2986 /* LM in epilogue should never be scheduled. This
2987 is due to literal access done in function body.
2988 The usage of register 13 is not mentioned explicitly,
2989 leading to scheduling 'LM' across these instructions.
2991 priority = 0x7fffffff;
2999 /* Split all branches that exceed the maximum distance.
3000 Returns true if this created a new literal pool entry.
3002 Code generated by this routine is allowed to use
3003 TEMP_REG as temporary scratch register. If this is
3004 done, TEMP_USED is set to true. */
3007 s390_split_branches (temp_reg, temp_used)
3011 int new_literal = 0;
3012 rtx insn, pat, tmp, target;
3015 /* We need correct insn addresses. */
3017 shorten_branches (get_insns ());
3019 /* Find all branches that exceed 64KB, and split them. */
3021 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3023 if (GET_CODE (insn) != JUMP_INSN)
3026 pat = PATTERN (insn);
3027 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3028 pat = XVECEXP (pat, 0, 0);
3029 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the LABEL_REF inside the branch: either a direct jump,
   or one arm of a conditional IF_THEN_ELSE.  */
3032 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3034 label = &SET_SRC (pat);
3036 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3038 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3039 label = &XEXP (SET_SRC (pat), 1);
3040 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3041 label = &XEXP (SET_SRC (pat), 2);
/* Short branches (within PC-relative range) need no rewriting.  */
3048 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
/* NOTE(review): the branch selecting among the three strategies
   below is elided in this listing.  Strategy 1: load the target
   address directly into TEMP_REG.  */
3055 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3056 INSN_ADDRESSES_NEW (tmp, -1);
/* Strategy 2: load the target address from the literal pool.  */
3063 tmp = force_const_mem (Pmode, *label);
3064 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3065 INSN_ADDRESSES_NEW (tmp, -1);
/* Strategy 3: load a pool-relative offset (UNSPEC 104) and add the
   pool base register to form the target address.  */
3072 tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3073 tmp = gen_rtx_CONST (SImode, tmp);
3074 tmp = force_const_mem (SImode, tmp);
3075 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3076 INSN_ADDRESSES_NEW (tmp, -1);
3078 target = gen_rtx_REG (Pmode, BASE_REGISTER);
3079 target = gen_rtx_PLUS (Pmode, target, temp_reg);
/* Retarget the branch at the register-indirect address.  */
3082 if (!validate_change (insn, label, target, 0))
3090 /* Find a literal pool symbol referenced in RTX X, and store
3091 it at REF. Will abort if X contains references to more than
3092 one such pool symbol; multiple references to the same symbol
3093 are allowed, however.
3095 The rtx pointed to by REF must be initialized to NULL_RTX
3096 by the caller before calling this routine. */
3099 find_constant_pool_ref (x, ref)
/* A pool symbol is a SYMBOL_REF flagged by CONSTANT_POOL_ADDRESS_P;
   record the first one seen in *REF.  */
3106 if (GET_CODE (x) == SYMBOL_REF
3107 && CONSTANT_POOL_ADDRESS_P (x))
3109 if (*ref == NULL_RTX)
/* Recurse over every sub-rtx ('e') and rtx vector ('E') of X.  */
3115 fmt = GET_RTX_FORMAT (GET_CODE (x));
3116 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3120 find_constant_pool_ref (XEXP (x, i), ref);
3122 else if (fmt[i] == 'E')
3124 for (j = 0; j < XVECLEN (x, i); j++)
3125 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3130 /* Replace every reference to the literal pool symbol REF
3131 in X by the address ADDR. Fix up MEMs as required. */
3134 replace_constant_pool_ref (x, ref, addr)
3145 /* Literal pool references can only occur inside a MEM ... */
3146 if (GET_CODE (*x) == MEM)
3148 rtx memref = XEXP (*x, 0);
/* NOTE(review): the test for the plain "memref == ref" case is
   elided in this listing; this replacement handles that case.  */
3152 *x = replace_equiv_address (*x, addr);
/* (symbol + const_int) form: carry the constant offset over onto
   the replacement address.  */
3156 if (GET_CODE (memref) == CONST
3157 && GET_CODE (XEXP (memref, 0)) == PLUS
3158 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3159 && XEXP (XEXP (memref, 0), 0) == ref)
3161 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3162 *x = replace_equiv_address (*x, plus_constant (addr, off));
3167 /* ... or a load-address type pattern. */
3168 if (GET_CODE (*x) == SET)
3170 rtx addrref = SET_SRC (*x);
3174 SET_SRC (*x) = addr;
/* Same offset fix-up for a load-address of (symbol + const_int).  */
3178 if (GET_CODE (addrref) == CONST
3179 && GET_CODE (XEXP (addrref, 0)) == PLUS
3180 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3181 && XEXP (XEXP (addrref, 0), 0) == ref)
3183 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3184 SET_SRC (*x) = plus_constant (addr, off);
/* Recurse into all sub-rtxs and rtx vectors.  */
3189 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3190 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3194 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3196 else if (fmt[i] == 'E')
3198 for (j = 0; j < XVECLEN (*x, i); j++)
3199 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3204 /* Check whether ADDR is an address that uses the base register,
3205 without actually constituting a literal pool access. (This happens
3206 in 31-bit PIC mode, where the base register is used as anchor for
3207 relative addressing of local symbols.)
3209 Returns 1 if the base register occupies the base slot,
3210 returns 2 if the base register occupies the index slot,
3211 returns 0 if the address is not of this form. */
3214 find_base_register_in_addr (addr)
3215 struct s390_address *addr;
3217 /* If DISP is complex, we might have a literal pool reference. */
3218 if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
/* Base register may appear either as base or as index.  */
3221 if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3224 if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3230 /* Return true if X contains an address that uses the base register,
3231 without actually constituting a literal pool access. */
3234 find_base_register_ref (x)
3238 struct s390_address addr;
3242 /* Addresses can only occur inside a MEM ... */
3243 if (GET_CODE (x) == MEM)
3245 if (s390_decompose_address (XEXP (x, 0), &addr)
3246 && find_base_register_in_addr (&addr))
3250 /* ... or a load-address type pattern. */
3251 if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
3253 if (s390_decompose_address (SET_SRC (x), &addr)
3254 && find_base_register_in_addr (&addr))
/* Otherwise accumulate results from all sub-rtxs / vectors.  */
3258 fmt = GET_RTX_FORMAT (GET_CODE (x));
3259 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3263 retv |= find_base_register_ref (XEXP (x, i));
3265 else if (fmt[i] == 'E')
3267 for (j = 0; j < XVECLEN (x, i); j++)
3268 retv |= find_base_register_ref (XVECEXP (x, i, j));
3275 /* If X contains an address that uses the base register,
3276 without actually constituting a literal pool access,
3277 replace the base register with REPL in all such cases.
3279 Handles both MEMs and load address patterns. */
3282 replace_base_register_ref (x, repl)
3286 struct s390_address addr;
3291 /* Addresses can only occur inside a MEM ... */
3292 if (GET_CODE (*x) == MEM)
3294 if (s390_decompose_address (XEXP (*x, 0), &addr)
3295 && (pos = find_base_register_in_addr (&addr)))
/* NOTE(review): the lines substituting REPL into addr.base/addr.indx
   (depending on POS) are dropped from this listing; the code below
   rebuilds the address from the patched components.  */
3302 new_addr = addr.base;
3304 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3306 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3308 *x = replace_equiv_address (*x, new_addr);
3313 /* ... or a load-address type pattern. */
3314 if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
3316 if (s390_decompose_address (SET_SRC (*x), &addr)
3317 && (pos = find_base_register_in_addr (&addr)))
3324 new_addr = addr.base;
3326 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3328 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3330 SET_SRC (*x) = new_addr;
/* Recurse into all sub-rtxs and rtx vectors.  */
3335 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3336 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3340 replace_base_register_ref (&XEXP (*x, i), repl);
3342 else if (fmt[i] == 'E')
3344 for (j = 0; j < XVECLEN (*x, i); j++)
3345 replace_base_register_ref (&XVECEXP (*x, i, j), repl);
3351 /* We keep a list of constants which we have to add to internal
3352 constant tables in the middle of large functions. */
/* Modes supported in pool chunks, paired one-to-one with the
   consttable generator below.  */
3354 #define NR_C_MODES 6
3355 enum machine_mode constant_modes[NR_C_MODES] =
3363 rtx (*gen_consttable[NR_C_MODES])(rtx) =
3365 gen_consttable_df, gen_consttable_di,
3366 gen_consttable_sf, gen_consttable_si,
/* One pool entry: value, emission label, chain link.  */
3373 struct constant *next;
/* One pool chunk: constants bucketed by mode, the label marking the
   chunk start, the insn range it covers, and a bitmap of covered
   insn UIDs.  */
3378 struct constant_pool
3380 struct constant_pool *next;
3385 struct constant *constants[NR_C_MODES];
/* Forward declarations for the chunkification machinery below.  */
3391 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
3392 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
3393 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
3395 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3396 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3397 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
3398 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3399 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3400 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3401 static void s390_add_anchor PARAMS ((struct constant_pool *));
3402 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3403 static void s390_free_pool PARAMS ((struct constant_pool *));
3405 /* Create new constant pool covering instructions starting at INSN
3406 and chain it to the end of POOL_LIST. */
3408 static struct constant_pool *
3409 s390_start_pool (pool_list, insn)
3410 struct constant_pool **pool_list;
3413 struct constant_pool *pool, **prev;
3416 pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* Start with empty per-mode constant buckets.  */
3418 for (i = 0; i < NR_C_MODES; i++)
3419 pool->constants[i] = NULL;
3421 pool->label = gen_label_rtx ();
3422 pool->first_insn = insn;
3423 pool->pool_insn = NULL_RTX;
3424 pool->insns = BITMAP_XMALLOC ();
3426 pool->anchor = FALSE;
/* Append the new pool to the tail of POOL_LIST.  */
3428 for (prev = pool_list; *prev; prev = &(*prev)->next)
3435 /* End range of instructions covered by POOL at INSN and emit
3436 placeholder insn representing the pool. */
3439 s390_end_pool (pool, insn)
3440 struct constant_pool *pool;
3443 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
/* A NULL_RTX INSN means "end at the last insn of the function".  */
3446 insn = get_last_insn ();
3448 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
3449 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
3452 /* Add INSN to the list of insns covered by POOL. */
3455 s390_add_pool_insn (pool, insn)
3456 struct constant_pool *pool;
3459 bitmap_set_bit (pool->insns, INSN_UID (insn));
3462 /* Return pool out of POOL_LIST that covers INSN. */
3464 static struct constant_pool *
3465 s390_find_pool (pool_list, insn)
3466 struct constant_pool *pool_list;
3469 struct constant_pool *pool;
/* Linear scan: the pool whose insn bitmap contains INSN's UID.  */
3471 for (pool = pool_list; pool; pool = pool->next)
3472 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
3478 /* Add constant VAL of mode MODE to the constant pool POOL. */
3481 s390_add_constant (pool, val, mode)
3482 struct constant_pool *pool;
3484 enum machine_mode mode;
/* Map MODE onto its bucket index; an unsupported mode is fatal.  */
3489 for (i = 0; i < NR_C_MODES; i++)
3490 if (constant_modes[i] == mode)
3492 if (i == NR_C_MODES)
/* Deduplicate: an equal constant already in the bucket is reused.  */
3495 for (c = pool->constants[i]; c != NULL; c = c->next)
3496 if (rtx_equal_p (val, c->value))
/* Prepend a new entry and account for its size in the pool.  */
3501 c = (struct constant *) xmalloc (sizeof *c);
3503 c->label = gen_label_rtx ();
3504 c->next = pool->constants[i];
3505 pool->constants[i] = c;
3506 pool->size += GET_MODE_SIZE (mode);
3510 /* Find constant VAL of mode MODE in the constant pool POOL.
3511 Return an RTX describing the distance from the start of
3512 the pool to the location of the new constant. */
3515 s390_find_constant (pool, val, mode)
3516 struct constant_pool *pool;
3518 enum machine_mode mode;
3524 for (i = 0; i < NR_C_MODES; i++)
3525 if (constant_modes[i] == mode)
3527 if (i == NR_C_MODES)
3530 for (c = pool->constants[i]; c != NULL; c = c->next)
3531 if (rtx_equal_p (val, c->value))
/* Express the location as (const (minus constant-label pool-label)),
   i.e. a pool-start-relative offset resolved by the assembler.  */
3537 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
3538 gen_rtx_LABEL_REF (Pmode, pool->label));
3539 offset = gen_rtx_CONST (Pmode, offset);
3543 /* Set 'anchor' flag in POOL. */
3546 s390_add_anchor (pool)
3547 struct constant_pool *pool;
3551 pool->anchor = TRUE;
3556 /* Dump out the constants in POOL. */
3559 s390_dump_pool (pool)
3560 struct constant_pool *pool;
3566 /* Pool start insn switches to proper section
3567 and guarantees necessary alignment. */
3569 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
3571 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
3572 INSN_ADDRESSES_NEW (insn, -1);
/* The pool label is what base-register reloads point at.  */
3574 insn = emit_label_after (pool->label, insn);
3575 INSN_ADDRESSES_NEW (insn, -1);
3577 /* Emit anchor if we need one. */
3580 rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
3581 anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
3582 anchor = gen_rtx_CONST (VOIDmode, anchor);
3583 insn = emit_insn_after (gen_consttable_si (anchor), insn);
3584 INSN_ADDRESSES_NEW (insn, -1);
3587 /* Dump constants in descending alignment requirement order,
3588 ensuring proper alignment for every constant. */
3589 for (i = 0; i < NR_C_MODES; i++)
3590 for (c = pool->constants[i]; c; c = c->next)
3592 /* Convert 104 unspecs to pool-relative references. */
3593 rtx value = c->value;
3594 if (GET_CODE (value) == CONST
3595 && GET_CODE (XEXP (value, 0)) == UNSPEC
3596 && XINT (XEXP (value, 0), 1) == 104
3597 && XVECLEN (XEXP (value, 0), 0) == 1)
3599 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
3600 gen_rtx_LABEL_REF (VOIDmode, pool->label))
3601 value = gen_rtx_CONST (VOIDmode, value);
3604 insn = emit_label_after (c->label, insn);
3605 INSN_ADDRESSES_NEW (insn, -1);
3606 insn = emit_insn_after (gen_consttable[i] (value), insn);
3607 INSN_ADDRESSES_NEW (insn, -1);
3610 /* Pool end insn switches back to previous section
3611 and guarantees necessary alignment. */
3613 insn = emit_insn_after (gen_pool_end_64 (), insn);
3615 insn = emit_insn_after (gen_pool_end_31 (), insn);
3616 INSN_ADDRESSES_NEW (insn, -1);
/* A barrier keeps the pool data out of the instruction stream.  */
3618 insn = emit_barrier_after (insn);
3619 INSN_ADDRESSES_NEW (insn, -1);
3621 /* Remove placeholder insn. */
3622 remove_insn (pool->pool_insn);
3627 /* Free all memory used by POOL. */
3630 s390_free_pool (pool)
3631 struct constant_pool *pool;
/* Walk every per-mode bucket, freeing each chained entry.  */
3635 for (i = 0; i < NR_C_MODES; i++)
3637 struct constant *c = pool->constants[i];
3640 struct constant *next = c->next;
3646 BITMAP_XFREE (pool->insns);
3651 /* Chunkify the literal pool if required.
3653 Code generated by this routine is allowed to use
3654 TEMP_REG as temporary scratch register. If this is
3655 done, TEMP_USED is set to true. */
/* Chunk-size thresholds (bytes): start looking for a split point once
   a chunk exceeds MIN; force one once it exceeds MAX.  MAX stays below
   the 4 KB displacement limit to leave headroom.  */
3657 #define S390_POOL_CHUNK_MIN 0xc00
3658 #define S390_POOL_CHUNK_MAX 0xe00
3660 static struct constant_pool *
3661 s390_chunkify_start (temp_reg, temp_used)
3665 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3667 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
3672 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
3673 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
3676 /* Do we need to chunkify the literal pool? */
3678 if (get_pool_size () < S390_POOL_CHUNK_MAX)
3681 /* We need correct insn addresses. */
3683 shorten_branches (get_insns ());
3685 /* Scan all insns and move literals to pool chunks.
3686 Also, emit anchor reload insns before every insn that uses
3687 the literal pool base register as anchor pointer. */
3689 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3691 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3693 rtx pool_ref = NULL_RTX;
3694 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Insn references a pool constant: record it in the current chunk
   (opening one if needed).  */
3698 curr_pool = s390_start_pool (&pool_list, insn);
3700 s390_add_constant (curr_pool, get_pool_constant (pool_ref),
3701 get_pool_mode (pool_ref));
3702 s390_add_pool_insn (curr_pool, insn);
/* 31-bit PIC anchor use: reload the anchor into TEMP_REG right
   before the insn, and make the chunk carry an anchor slot.  */
3705 else if (!TARGET_64BIT && flag_pic
3706 && find_base_register_ref (PATTERN (insn)))
3708 rtx new = gen_reload_anchor (temp_reg, base_reg);
3709 new = emit_insn_before (new, insn);
3710 INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
3715 curr_pool = s390_start_pool (&pool_list, new);
3717 s390_add_anchor (curr_pool);
3718 s390_add_pool_insn (curr_pool, insn);
/* Jumps and labels are tracked so branch targets can later be
   matched against the chunk that covers them.  */
3722 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
3724 s390_add_pool_insn (curr_pool, insn);
3727 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3728 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3733 if (curr_pool->size < S390_POOL_CHUNK_MAX)
3736 s390_end_pool (curr_pool, NULL_RTX);
3741 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3742 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3745 /* We will later have to insert base register reload insns.
3746 Those will have an effect on code size, which we need to
3747 consider here. This calculation makes rather pessimistic
3748 worst-case assumptions. */
3749 if (GET_CODE (insn) == CODE_LABEL)
3752 if (chunk_size < S390_POOL_CHUNK_MIN
3753 && curr_pool->size < S390_POOL_CHUNK_MIN)
3756 /* Pool chunks can only be inserted after BARRIERs ... */
3757 if (GET_CODE (insn) == BARRIER)
3759 s390_end_pool (curr_pool, insn)
3764 /* ... so if we don't find one in time, create one. */
3765 else if ((chunk_size > S390_POOL_CHUNK_MAX
3766 || curr_pool->size > S390_POOL_CHUNK_MAX))
3768 rtx label, jump, barrier;
3770 /* We can insert the barrier only after a 'real' insn. */
3771 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
3773 if (get_attr_length (insn) == 0)
3776 /* Don't separate insns created by s390_split_branches. */
3777 if (GET_CODE (insn) == INSN
3778 && GET_CODE (PATTERN (insn)) == SET
3779 && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
/* Synthesize a jump-around so the pool sits behind a barrier:
   jump label; barrier; (pool goes here); label:  */
3782 label = gen_label_rtx ();
3783 jump = emit_jump_insn_after (gen_jump (label), insn);
3784 barrier = emit_barrier_after (jump);
3785 insn = emit_label_after (label, barrier);
3786 JUMP_LABEL (jump) = label;
3787 LABEL_NUSES (label) = 1;
3789 INSN_ADDRESSES_NEW (jump, -1);
3790 INSN_ADDRESSES_NEW (barrier, -1);
3791 INSN_ADDRESSES_NEW (insn, -1);
3793 s390_end_pool (curr_pool, barrier);
/* Close the final chunk at the end of the function, if any.  */
3801 s390_end_pool (curr_pool, NULL_RTX);
3804 /* Find all labels that are branched into
3805 from an insn belonging to a different chunk. */
3807 far_labels = BITMAP_XMALLOC ();
3809 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3811 /* Labels marked with LABEL_PRESERVE_P can be target
3812 of non-local jumps, so we have to mark them.
3813 The same holds for named labels.
3815 Don't do that, however, if it is the label before
3818 if (GET_CODE (insn) == CODE_LABEL
3819 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3821 rtx vec_insn = next_real_insn (insn);
3822 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3823 PATTERN (vec_insn) : NULL_RTX;
3825 || !(GET_CODE (vec_pat) == ADDR_VEC
3826 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3827 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3830 /* If we have a direct jump (conditional or unconditional)
3831 or a casesi jump, check all potential targets. */
3832 else if (GET_CODE (insn) == JUMP_INSN)
3834 rtx pat = PATTERN (insn);
3835 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3836 pat = XVECEXP (pat, 0, 0);
/* Direct jump: the target is "far" if it lives in a different
   chunk than the jump itself.  */
3838 if (GET_CODE (pat) == SET)
3840 rtx label = JUMP_LABEL (insn);
3843 if (s390_find_pool (pool_list, label)
3844 != s390_find_pool (pool_list, insn))
3845 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3848 else if (GET_CODE (pat) == PARALLEL
3849 && XVECLEN (pat, 0) == 2
3850 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3851 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3852 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3854 /* Find the jump table used by this casesi jump. */
3855 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3856 rtx vec_insn = next_real_insn (vec_label);
3857 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3858 PATTERN (vec_insn) : NULL_RTX;
3860 && (GET_CODE (vec_pat) == ADDR_VEC
3861 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3863 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
/* Every table entry is a potential target; mark the cross-chunk
   ones.  */
3865 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3867 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3869 if (s390_find_pool (pool_list, label)
3870 != s390_find_pool (pool_list, insn))
3871 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3878 /* Insert base register reload insns before every pool. */
3880 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3882 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3883 rtx insn = curr_pool->first_insn;
3884 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3887 /* Insert base register reload insns at every far label. */
3889 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3890 if (GET_CODE (insn) == CODE_LABEL
3891 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3893 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3896 rtx new_insn = gen_reload_base (base_reg, pool->label);
3897 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3902 BITMAP_XFREE (far_labels);
3905 /* Recompute insn addresses. */
3907 init_insn_lengths ();
3908 shorten_branches (get_insns ());
3913 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3914 After we have decided to use this list, finish implementing
3915 all changes to the current function as required.
3917 Code generated by this routine is allowed to use
3918 TEMP_REG as temporary scratch register. */
3921 s390_chunkify_finish (pool_list, temp_reg)
3922 struct constant_pool *pool_list;
3925 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3926 struct constant_pool *curr_pool = NULL;
3930 /* Replace all literal pool references. */
3932 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3934 curr_pool = s390_find_pool (pool_list, insn);
3938 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3940 rtx addr, pool_ref = NULL_RTX;
3941 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Rewrite the pool reference as base_reg + chunk-relative offset;
   reset INSN_CODE so the insn is re-recognized.  */
3944 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
3945 get_pool_mode (pool_ref));
3946 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
3947 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
3948 INSN_CODE (insn) = -1;
/* 31-bit PIC anchor uses go through the reloaded TEMP_REG.  */
3951 else if (!TARGET_64BIT && flag_pic
3952 && find_base_register_ref (PATTERN (insn)))
3954 replace_base_register_ref (&PATTERN (insn), temp_reg);
3959 /* Dump out all literal pools. */
3961 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3962 s390_dump_pool (curr_pool);
3964 /* Free pool list. */
3968 struct constant_pool *next = pool_list->next;
3969 s390_free_pool (pool_list);
3974 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
3975 We have decided we cannot use this list, so revert all changes
3976 to the current function that were done by s390_chunkify_start. */
3979 s390_chunkify_cancel (pool_list)
3980 struct constant_pool *pool_list;
3982 struct constant_pool *curr_pool = NULL;
3985 /* Remove all pool placeholder insns. */
3987 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3989 /* Did we insert an extra barrier? Remove it. */
3990 rtx barrier = PREV_INSN (curr_pool->pool_insn);
3991 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
3992 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Only undo the exact jump/barrier/label triple that chunkify_start
   synthesized: an unconditional jump straight to the label right
   after the pool placeholder.  */
3994 if (jump && GET_CODE (jump) == JUMP_INSN
3995 && barrier && GET_CODE (barrier) == BARRIER
3996 && label && GET_CODE (label) == CODE_LABEL
3997 && GET_CODE (PATTERN (jump)) == SET
3998 && SET_DEST (PATTERN (jump)) == pc_rtx
3999 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4000 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4003 remove_insn (barrier);
4004 remove_insn (label);
4007 remove_insn (curr_pool->pool_insn);
4010 /* Remove all base/anchor register reload insns. */
/* UNSPECs 210/211 are the reload_base / reload_anchor patterns.  */
4012 for (insn = get_insns (); insn; )
4014 rtx next_insn = NEXT_INSN (insn);
4016 if (GET_CODE (insn) == INSN
4017 && GET_CODE (PATTERN (insn)) == SET
4018 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4019 && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4020 || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4026 /* Free pool list. */
4030 struct constant_pool *next = pool_list->next;
4031 s390_free_pool (pool_list);
4037 /* Index of constant pool chunk that is currently being processed.
4038 Set to -1 before function output has started. */
4039 int s390_pool_count = -1;
4041 /* Number of elements of current constant pool. */
4042 int s390_nr_constants;
4044 /* Output main constant pool to stdio stream FILE. */
4047 s390_output_constant_pool (file)
4050 /* Output constant pool. */
4051 if (s390_nr_constants)
/* 64-bit: address the pool via LARL into the base register and place
   it in the read-only data section, 8-byte aligned.  */
4055 fprintf (file, "\tlarl\t%s,.LT%d\n", reg_names[BASE_REGISTER],
4056 current_function_funcdef_no);
4057 readonly_data_section ();
4058 ASM_OUTPUT_ALIGN (file, 3);
/* 31-bit: BRAS over the inline pool, leaving its address in the base
   register.  */
4062 fprintf (file, "\tbras\t%s,.LTN%d\n", reg_names[BASE_REGISTER],
4063 current_function_funcdef_no);
4065 fprintf (file, ".LT%d:\n", current_function_funcdef_no);
/* s390_pool_count != -1 tells the output machinery a pool is being
   emitted.  */
4067 s390_pool_count = 0;
4068 output_constant_pool (current_function_name, current_function_decl);
4069 s390_pool_count = -1;
4072 function_section (current_function_decl);
4074 fprintf (file, ".LTN%d:\n", current_function_funcdef_no);
4077 /* If no pool required, at least output the anchor label. */
4078 else if (!TARGET_64BIT && flag_pic)
4079 fprintf (file, ".LT%d:\n", current_function_funcdef_no);
4083 /* Rework the prolog/epilog to avoid saving/restoring
4084 registers unnecessarily. If TEMP_REGNO is nonnegative,
4085 it specifies the number of a caller-saved register used
4086 as temporary scratch register by code emitted during
4087 machine dependent reorg. */
4090 s390_optimize_prolog (temp_regno)
4093 int save_first, save_last, restore_first, restore_last;
4095 rtx insn, new_insn, next_insn;
4097 struct s390_frame frame;
4098 s390_frame_info (&frame);
4100 /* Recompute regs_ever_live data for special registers. */
4101 regs_ever_live[BASE_REGISTER] = 0;
4102 regs_ever_live[RETURN_REGNUM] = 0;
4103 regs_ever_live[STACK_POINTER_REGNUM] = frame.frame_size > 0;
4105 /* If there is (possibly) any pool entry, we need to
4106 load the base register.
4107 ??? FIXME: this should be more precise. */
4108 if (get_pool_size ())
4109 regs_ever_live[BASE_REGISTER] = 1;
4111 /* In non-leaf functions, the prolog/epilog code relies
4112 on RETURN_REGNUM being saved in any case. */
4113 if (!current_function_is_leaf)
4114 regs_ever_live[RETURN_REGNUM] = 1;
4116 /* We need to save/restore the temporary register. */
4117 if (temp_regno >= 0)
4118 regs_ever_live[temp_regno] = 1;
4121 /* Find first and last gpr to be saved. */
/* Call-saved GPRs are r6..r15; i/j bracket the live range.  */
4123 for (i = 6; i < 16; i++)
4124 if (regs_ever_live[i])
4127 for (j = 15; j > i; j--)
4128 if (regs_ever_live[j])
4133 /* Nothing to save/restore. */
4134 save_first = restore_first = -1;
4135 save_last = restore_last = -1;
4139 /* Save/restore from i to j. */
4140 save_first = restore_first = i;
4141 save_last = restore_last = j;
4144 /* Varargs functions need to save gprs 2 to 6. */
4145 if (current_function_stdarg)
4153 /* If all special registers are in fact used, there's nothing we
4154 can do, so no point in walking the insn list. */
4155 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4156 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4160 /* Search for prolog/epilog insns and replace them. */
4162 for (insn = get_insns (); insn; insn = next_insn)
4164 int first, last, off;
4165 rtx set, base, offset;
4167 next_insn = NEXT_INSN (insn);
4169 if (GET_CODE (insn) != INSN)
4171 if (GET_CODE (PATTERN (insn)) != PARALLEL)
/* Prologue STM: recover the register range and stack slot it
   writes, then re-emit it narrowed to [save_first, save_last].  */
4174 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4176 set = XVECEXP (PATTERN (insn), 0, 0);
4177 first = REGNO (SET_SRC (set));
4178 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4179 offset = const0_rtx;
4180 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4181 off = INTVAL (offset) - first * UNITS_PER_WORD;
4183 if (GET_CODE (base) != REG || off < 0)
4185 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4187 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4190 if (save_first != -1)
4192 new_insn = save_gprs (base, off, save_first, save_last);
4193 new_insn = emit_insn_before (new_insn, insn);
4194 INSN_ADDRESSES_NEW (new_insn, -1);
/* Epilogue LM: same narrowing, using [restore_first, restore_last].  */
4200 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4202 set = XVECEXP (PATTERN (insn), 0, 0);
4203 first = REGNO (SET_DEST (set));
4204 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4205 offset = const0_rtx;
4206 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4207 off = INTVAL (offset) - first * UNITS_PER_WORD;
4209 if (GET_CODE (base) != REG || off < 0)
4211 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4213 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4216 if (restore_first != -1)
4218 new_insn = restore_gprs (base, off, restore_first, restore_last);
4219 new_insn = emit_insn_before (new_insn, insn);
4220 INSN_ADDRESSES_NEW (new_insn, -1);
4228 /* Check whether any insn in the function makes use of the original
4229 value of RETURN_REG (e.g. for __builtin_return_address).
4230 If so, insert an insn reloading that value.
4232 Return true if any such insn was found. */
4235 s390_fixup_clobbered_return_reg (return_reg)
4238 bool replacement_done = 0;
4241 struct s390_frame frame;
4242 s390_frame_info (&frame);
4244 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4246 rtx reg, off, new_insn;
4248 if (GET_CODE (insn) != INSN)
4250 if (!reg_referenced_p (return_reg, PATTERN (insn)))
/* Skip the prologue store-multiple itself — it saves the register
   rather than consuming its value.  */
4252 if (GET_CODE (PATTERN (insn)) == PARALLEL
4253 && store_multiple_operation (PATTERN (insn), VOIDmode))
/* Reload from the register's stack save slot, addressed off the
   frame or stack pointer.  */
4256 if (frame.frame_pointer_p)
4257 reg = hard_frame_pointer_rtx;
4259 reg = stack_pointer_rtx;
4261 off = GEN_INT (frame.frame_size + REGNO (return_reg) * UNITS_PER_WORD);
/* Displacements of 4096 or more don't fit the D-field; go through
   the literal pool.  */
4262 if (INTVAL (off) >= 4096)
4264 off = force_const_mem (Pmode, off);
4265 new_insn = gen_rtx_SET (Pmode, return_reg, off);
4266 new_insn = emit_insn_before (new_insn, insn);
4267 INSN_ADDRESSES_NEW (new_insn, -1);
4271 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4272 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4273 new_insn = emit_insn_before (new_insn, insn);
4274 INSN_ADDRESSES_NEW (new_insn, -1);
4276 replacement_done = 1;
4279 return replacement_done;
4282 /* Perform machine-dependent processing. */
4285 s390_machine_dependent_reorg (first)
4286 rtx first ATTRIBUTE_UNUSED;
4288 bool fixed_up_clobbered_return_reg = 0;
4289 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4292 /* Make sure all splits have been performed; splits after
4293 machine_dependent_reorg might confuse insn length counts. */
4294 split_all_insns_noflow ();
4297 /* There are two problematic situations we need to correct:
4299 - the literal pool might be > 4096 bytes in size, so that
4300 some of its elements cannot be directly accessed
4302 - a branch target might be > 64K away from the branch, so that
4303 it is not possible to use a PC-relative instruction.
4305 To fix those, we split the single literal pool into multiple
4306 pool chunks, reloading the pool base register at various
4307 points throughout the function to ensure it always points to
4308 the pool chunk the following code expects, and / or replace
4309 PC-relative branches by absolute branches.
4311 However, the two problems are interdependent: splitting the
4312 literal pool can move a branch further away from its target,
4313 causing the 64K limit to overflow, and on the other hand,
4314 replacing a PC-relative branch by an absolute branch means
4315 we need to put the branch target address into the literal
4316 pool, possibly causing it to overflow.
4318 So, we loop trying to fix up both problems until we manage
4319 to satisfy both conditions at the same time. Note that the
4320 loop is guaranteed to terminate as every pass of the loop
4321 strictly decreases the total number of PC-relative branches
4322 in the function. (This is not completely true as there
4323 might be branch-over-pool insns introduced by chunkify_start.
4324 Those never need to be split however.) */
4328 struct constant_pool *pool_list;
4330 /* Try to chunkify the literal pool. */
4331 pool_list = s390_chunkify_start (temp_reg, &temp_used);
4333 /* Split out-of-range branches. If this has created new
4334 literal pool entries, cancel current chunk list and
4336 if (s390_split_branches (temp_reg, &temp_used))
4339 s390_chunkify_cancel (pool_list);
/* NOTE(review): the "continue"/retry of the fixup loop is on lines
   dropped from this listing.  */
4344 /* Check whether we have clobbered a use of the return
4345 register (e.g. for __builtin_return_address). If so,
4346 add insns reloading the register where necessary. */
4347 if (temp_used && !fixed_up_clobbered_return_reg
4348 && s390_fixup_clobbered_return_reg (temp_reg))
4350 fixed_up_clobbered_return_reg = 1;
4352 /* The fixup insns might have caused a jump to overflow. */
4354 s390_chunkify_cancel (pool_list);
4359 /* If we made it up to here, both conditions are satisfied.
4360 Finish up pool chunkification if required. */
4362 s390_chunkify_finish (pool_list, temp_reg);
/* Finally shrink the prologue/epilogue save ranges, accounting for
   the scratch register if the fixups used one.  */
4367 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
4371 /* Return an RTL expression representing the value of the return address
4372 for the frame COUNT steps up from the current frame. FRAME is the
4373 frame pointer of that frame. */
/* Return an RTX for the return address COUNT frames up.  COUNT == 0
   (current frame) uses the pseudo holding the initial value of
   RETURN_REGNUM; deeper frames read the register's stack save slot
   at RETURN_REGNUM * UNITS_PER_WORD off the given frame pointer.  */
4376 s390_return_addr_rtx (count, frame)
4382 /* For the current frame, we use the initial value of RETURN_REGNUM.
4383 This works both in leaf and non-leaf functions. */
4386 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
4388 /* For frames farther back, we read the stack slot where the
4389 corresponding RETURN_REGNUM value was saved. */
4391 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
4392 addr = memory_address (Pmode, addr);
4393 return gen_rtx_MEM (Pmode, addr);
4396 /* Find first call clobbered register unused in a function.
4397 This could be used as base register in a leaf function
4398 or for holding the return address before epilogue. */
/* Scan call-clobbered gprs 0..5 and return the first one that is not
   live anywhere in the function (per regs_ever_live).
   NOTE(review): the return statements are not visible in this
   extraction; presumably returns the register number, or a sentinel
   when all six are live -- confirm against the full source.  */
4401 find_unused_clobbered_reg ()
4404 for (i = 0; i < 6; i++)
4405 if (!regs_ever_live[i])
4410 /* Fill FRAME with info about frame of current function. */
/* Fill *FRAME with layout info for the current function: total frame
   size, whether fprs 8-15 need saving (64-bit ABI), whether a frame
   pointer is used, and the first/last gprs to save and restore.
   Aborts compilation if locals exceed the architectural limit.  */
4413 s390_frame_info (frame)
4414 struct s390_frame *frame;
4416 char gprs_ever_live[16];
4418 HOST_WIDE_INT fsize = get_frame_size ();
4420 if (fsize > 0x7fff0000)
4421 fatal_error ("Total size of local variables exceeds architecture limit.");
4423 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
4424 frame->save_fprs_p = 0;
4426 for (i = 24; i < 32; i++)
4427 if (regs_ever_live[i])
4429 frame->save_fprs_p = 1;
/* 8 fprs x 8 bytes = 64 bytes of extra save area when needed.  */
4433 frame->frame_size = fsize + frame->save_fprs_p * 64;
4435 /* Does function need to setup frame and save area. */
4437 if (! current_function_is_leaf
4438 || frame->frame_size > 0
4439 || current_function_calls_alloca
4440 || current_function_stdarg)
4441 frame->frame_size += STARTING_FRAME_OFFSET;
4443 /* Frame pointer needed. */
4445 frame->frame_pointer_p = frame_pointer_needed;
4447 /* Find first and last gpr to be saved. Note that at this point,
4448 we assume the return register and the base register always
4449 need to be saved. This is done because the usage of these
4450 register might change even after the prolog was emitted.
4451 If it turns out later that we really don't need them, the
4452 prolog/epilog code is modified again. */
4454 for (i = 0; i < 16; i++)
4455 gprs_ever_live[i] = regs_ever_live[i];
4457 gprs_ever_live[BASE_REGISTER] = 1;
4458 gprs_ever_live[RETURN_REGNUM] = 1;
/* Stack pointer only needs saving when a frame is actually set up.  */
4459 gprs_ever_live[STACK_POINTER_REGNUM] = frame->frame_size > 0;
4461 for (i = 6; i < 16; i++)
4462 if (gprs_ever_live[i])
4465 for (j = 15; j > i; j--)
4466 if (gprs_ever_live[j])
4470 /* Save / Restore from gpr i to j. */
4471 frame->first_save_gpr = i;
4472 frame->first_restore_gpr = i;
4473 frame->last_save_gpr = j;
4475 /* Varargs functions need to save gprs 2 to 6. */
4476 if (current_function_stdarg)
4477 frame->first_save_gpr = 2;
4480 /* Return offset between argument pointer and frame pointer
4481 initially after prologue. */
/* Return the offset between the argument pointer and the stack
   pointer immediately after the prologue: the full frame size plus
   STACK_POINTER_OFFSET.  */
4484 s390_arg_frame_offset ()
4486 struct s390_frame frame;
4488 /* Compute frame_info. */
4490 s390_frame_info (&frame);
4492 return frame.frame_size + STACK_POINTER_OFFSET;
4495 /* Emit insn to save fpr REGNUM at offset OFFSET relative
4496 to register BASE. Return generated insn. */
/* Emit a move storing fpr REGNUM (as DFmode) to memory at
   BASE + OFFSET, tagged with the save-area alias set.
   Returns the emitted insn.  */
4499 save_fpr (base, offset, regnum)
4505 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4506 set_mem_alias_set (addr, s390_sr_alias_set);
4508 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
4511 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
4512 to register BASE. Return generated insn. */
/* Emit a move loading fpr REGNUM (as DFmode) from memory at
   BASE + OFFSET, tagged with the save-area alias set.
   Returns the emitted insn.  Mirror of save_fpr above.  */
4515 restore_fpr (base, offset, regnum)
4521 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4522 set_mem_alias_set (addr, s390_sr_alias_set);
4524 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4527 /* Generate insn to save registers FIRST to LAST into
4528 the register save area located at offset OFFSET
4529 relative to register BASE. */
/* Emit insns saving gprs FIRST..LAST into the save area at
   BASE + OFFSET.  A single register uses a plain move; a range uses
   store-multiple.  All SETs are marked frame-related, except that a
   substitute REG_FRAME_RELATED_EXPR note is attached when gprs 2..5
   are stored only for varargs (no DWARF records wanted for those).  */
4532 save_gprs (base, offset, first, last)
4538 rtx addr, insn, note;
4541 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4542 addr = gen_rtx_MEM (Pmode, addr);
4543 set_mem_alias_set (addr, s390_sr_alias_set);
4545 /* Special-case single register. */
4549 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
4551 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
4553 RTX_FRAME_RELATED_P (insn) = 1;
4558 insn = gen_store_multiple (addr,
4559 gen_rtx_REG (Pmode, first),
4560 GEN_INT (last - first + 1));
4563 /* We need to set the FRAME_RELATED flag on all SETs
4564 inside the store-multiple pattern.
4566 However, we must not emit DWARF records for registers 2..5
4567 if they are stored for use by variable arguments ...
4569 ??? Unfortunately, it is not enough to simply not set the
4570 FRAME_RELATED flags for those SETs, because the first SET
4571 of the PARALLEL is always treated as if it had the flag
4572 set, even if it does not. Therefore we emit a new pattern
4573 without those registers as REG_FRAME_RELATED_EXPR note. */
4577 rtx pat = PATTERN (insn);
4579 for (i = 0; i < XVECLEN (pat, 0); i++)
4580 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
4581 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
4583 RTX_FRAME_RELATED_P (insn) = 1;
/* Build the note pattern starting at gpr 6 -- the first call-saved
   gpr -- so the varargs registers 2..5 are excluded from DWARF.  */
4587 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
4588 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
4589 gen_rtx_REG (Pmode, 6),
4590 GEN_INT (last - 6 + 1));
4591 note = PATTERN (note);
4594 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4595 note, REG_NOTES (insn));
4597 for (i = 0; i < XVECLEN (note, 0); i++)
4598 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
4599 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
4601 RTX_FRAME_RELATED_P (insn) = 1;
4607 /* Generate insn to restore registers FIRST to LAST from
4608 the register save area located at offset OFFSET
4609 relative to register BASE. */
/* Emit insns restoring gprs FIRST..LAST from the save area at
   BASE + OFFSET: a plain move for a single register, load-multiple
   for a range.  Mirror of save_gprs, without frame-related notes.  */
4612 restore_gprs (base, offset, first, last)
4620 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4621 addr = gen_rtx_MEM (Pmode, addr);
4622 set_mem_alias_set (addr, s390_sr_alias_set);
4624 /* Special-case single register. */
4628 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
4630 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
4635 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
4637 GEN_INT (last - first + 1));
4641 /* Expand the prologue into a bunch of separate insns. */
/* Expand the function prologue as individual insns: save call-saved
   gprs, emit the literal pool marker, save varargs/call-saved fprs,
   decrement the stack pointer (setting the backchain if enabled),
   save fprs 8-15 for the 64-bit ABI, and set up the frame pointer
   and GOT pointer when required.  */
4644 s390_emit_prologue ()
4646 struct s390_frame frame;
4651 /* Compute frame_info. */
4653 s390_frame_info (&frame);
4655 /* Choose best register to use for temp use within prologue. */
/* RETURN_REGNUM can serve as scratch only when its incoming value is
   not needed (non-leaf, no __builtin_return_address use) and the
   literal pool is small; otherwise fall back to gpr 1.  */
4657 if (!current_function_is_leaf
4658 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
4659 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
4660 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM)
4662 temp_reg = gen_rtx_REG (Pmode, 1);
4664 /* Save call saved gprs. */
4666 insn = save_gprs (stack_pointer_rtx, 0,
4667 frame.first_save_gpr, frame.last_save_gpr);
4670 /* Dump constant pool and set constant pool register (13). */
4672 insn = emit_insn (gen_lit ());
4674 /* Save fprs for variable args. */
4676 if (current_function_stdarg)
4678 /* Save fpr 0 and 2. */
4680 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
4681 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
4685 /* Save fpr 4 and 6. */
4687 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4688 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4692 /* Save fprs 4 and 6 if used (31 bit ABI). */
4696 /* Save fpr 4 and 6. */
4697 if (regs_ever_live[18])
4699 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4700 RTX_FRAME_RELATED_P (insn) = 1;
4702 if (regs_ever_live[19])
4704 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4705 RTX_FRAME_RELATED_P (insn) = 1;
4709 /* Decrement stack pointer. */
4711 if (frame.frame_size > 0)
4713 rtx frame_off = GEN_INT (-frame.frame_size);
4715 /* Save incoming stack pointer into temp reg. */
4717 if (TARGET_BACKCHAIN || frame.save_fprs_p)
4719 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
4722 /* Subtract frame size from stack pointer. */
/* Large frame sizes do not fit an immediate ('K' constraint) and are
   loaded from the literal pool instead.  */
4724 frame_off = GEN_INT (-frame.frame_size);
4725 if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
4726 frame_off = force_const_mem (Pmode, frame_off);
4728 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
4729 RTX_FRAME_RELATED_P (insn) = 1;
4731 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4732 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
4733 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4734 GEN_INT (-frame.frame_size))),
4737 /* Set backchain. */
4739 if (TARGET_BACKCHAIN)
4741 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
4742 set_mem_alias_set (addr, s390_sr_alias_set);
4743 insn = emit_insn (gen_move_insn (addr, temp_reg));
4747 /* Save fprs 8 - 15 (64 bit ABI). */
4749 if (frame.save_fprs_p)
/* temp_reg still holds the incoming stack pointer; step it back to
   the start of the 64-byte fpr save area.  */
4751 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
4753 for (i = 24; i < 32; i++)
4754 if (regs_ever_live[i])
4756 rtx addr = plus_constant (stack_pointer_rtx,
4757 frame.frame_size - 64 + (i-24)*8);
4759 insn = save_fpr (temp_reg, (i-24)*8, i);
4760 RTX_FRAME_RELATED_P (insn) = 1;
4762 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4763 gen_rtx_SET (VOIDmode,
4764 gen_rtx_MEM (DFmode, addr),
4765 gen_rtx_REG (DFmode, i)),
4770 /* Set frame pointer, if needed. */
4772 if (frame.frame_pointer_p)
4774 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
4775 RTX_FRAME_RELATED_P (insn) = 1;
4778 /* Set up got pointer, if needed. */
4780 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4782 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4783 SYMBOL_REF_FLAG (got_symbol) = 1;
4787 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
4790 /* It can happen that the GOT pointer isn't really needed ... */
4791 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit path: load the pc-relative GOT offset from the literal pool
   (unspec 100) and add the base register.  */
4796 got_symbol = gen_rtx_UNSPEC (VOIDmode,
4797 gen_rtvec (1, got_symbol), 100);
4798 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
4799 got_symbol = force_const_mem (Pmode, got_symbol);
4800 insn = emit_move_insn (pic_offset_table_rtx,
4802 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4805 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
4806 gen_rtx_REG (Pmode, BASE_REGISTER)));
4807 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4813 /* Expand the epilogue into a bunch of separate insns. */
/* Expand the function epilogue as individual insns: compute which
   part of the register save area is accessed, adjust the restore
   base pointer if the area is out of the 0..4096 displacement range,
   restore fprs and gprs, re-save any global registers that the
   load-multiple would clobber, fetch the return address early for
   better scheduling, and emit the return jump.  */
4816 s390_emit_epilogue ()
4818 struct s390_frame frame;
4819 rtx frame_pointer, return_reg;
4820 int area_bottom, area_top, offset = 0;
4823 /* Compute frame_info. */
4825 s390_frame_info (&frame);
4827 /* Check whether to use frame or stack pointer for restore. */
4829 frame_pointer = frame.frame_pointer_p ?
4830 hard_frame_pointer_rtx : stack_pointer_rtx;
4832 /* Compute which parts of the save area we need to access. */
4834 if (frame.first_restore_gpr != -1)
4836 area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
4837 area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
/* No gprs to restore: start with an empty (inverted) range.  */
4841 area_bottom = INT_MAX;
4847 if (frame.save_fprs_p)
4849 if (area_bottom > -64)
4857 if (regs_ever_live[18])
4859 if (area_bottom > STACK_POINTER_OFFSET - 16)
4860 area_bottom = STACK_POINTER_OFFSET - 16;
4861 if (area_top < STACK_POINTER_OFFSET - 8)
4862 area_top = STACK_POINTER_OFFSET - 8;
4864 if (regs_ever_live[19])
4866 if (area_bottom > STACK_POINTER_OFFSET - 8)
4867 area_bottom = STACK_POINTER_OFFSET - 8;
4868 if (area_top < STACK_POINTER_OFFSET)
4869 area_top = STACK_POINTER_OFFSET;
4873 /* Check whether we can access the register save area.
4874 If not, increment the frame pointer as required. */
4876 if (area_top <= area_bottom)
4878 /* Nothing to restore. */
4880 else if (frame.frame_size + area_bottom >= 0
4881 && frame.frame_size + area_top <= 4096)
4883 /* Area is in range. */
4884 offset = frame.frame_size;
4888 rtx insn, frame_off;
/* Move the base pointer up so the whole area falls inside the
   addressable displacement range.  */
4890 offset = area_bottom < 0 ? -area_bottom : 0;
4891 frame_off = GEN_INT (frame.frame_size - offset);
4893 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
4894 frame_off = force_const_mem (Pmode, frame_off);
4896 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
4899 /* Restore call saved fprs. */
4905 if (frame.save_fprs_p)
4906 for (i = 24; i < 32; i++)
4907 if (regs_ever_live[i] && !global_regs[i])
4908 restore_fpr (frame_pointer,
4909 offset - 64 + (i-24) * 8, i);
4913 if (regs_ever_live[18] && !global_regs[18])
4914 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
4915 if (regs_ever_live[19] && !global_regs[19])
4916 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
4919 /* Return register. */
4921 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4923 /* Restore call saved gprs. */
4925 if (frame.first_restore_gpr != -1)
4930 /* Check for global register and save them
4931 to stack location from where they get restored. */
4933 for (i = frame.first_restore_gpr;
4934 i <= frame.last_save_gpr;
4937 /* These registers are special and need to be
4938 restored in any case. */
4939 if (i == STACK_POINTER_REGNUM
4940 || i == RETURN_REGNUM
4941 || i == BASE_REGISTER
4942 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
/* Global register: write its current value into the save slot so
   the blanket load-multiple below restores the same value.  */
4947 addr = plus_constant (frame_pointer,
4948 offset + i * UNITS_PER_WORD);
4949 addr = gen_rtx_MEM (Pmode, addr);
4950 set_mem_alias_set (addr, s390_sr_alias_set);
4951 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
4955 /* Fetch return address from stack before load multiple,
4956 this will do good for scheduling. */
4958 if (!current_function_is_leaf)
4960 int return_regnum = find_unused_clobbered_reg();
4963 return_reg = gen_rtx_REG (Pmode, return_regnum);
4965 addr = plus_constant (frame_pointer,
4966 offset + RETURN_REGNUM * UNITS_PER_WORD);
4967 addr = gen_rtx_MEM (Pmode, addr);
4968 set_mem_alias_set (addr, s390_sr_alias_set);
4969 emit_move_insn (return_reg, addr);
4972 /* ??? As references to the base register are not made
4973 explicit in insn RTX code, we have to add a barrier here
4974 to prevent incorrect scheduling. */
4976 emit_insn (gen_blockage());
4978 insn = restore_gprs (frame_pointer, offset,
4979 frame.first_restore_gpr, frame.last_save_gpr);
4983 /* Return to caller. */
/* PARALLEL of (return) + (use return_reg) keeps the branch register
   live across the jump.  */
4985 p = rtvec_alloc (2);
4987 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
4988 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
4989 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
4993 /* Return the size in bytes of a function argument of
4994 type TYPE and/or mode MODE. At least one of TYPE or
4995 MODE must be specified. */
/* Return the size in bytes of an argument of TYPE and/or MODE.
   Prefers the type's size; falls back to the mode's size for
   typeless library calls; aborts if neither is available.  */
4998 s390_function_arg_size (mode, type)
4999 enum machine_mode mode;
5003 return int_size_in_bytes (type);
5005 /* No type info available for some library calls ... */
5006 if (mode != BLKmode)
5007 return GET_MODE_SIZE (mode);
5009 /* If we have neither type nor mode, abort */
5013 /* Return 1 if a function argument of type TYPE and mode MODE
5014 is to be passed by reference. The ABI specifies that only
5015 structures of size 1, 2, 4, or 8 bytes are passed by value,
5016 all other structures (and complex numbers) are passed by
/* Return nonzero if an argument of TYPE/MODE is passed by reference.
   Per the ABI, aggregates of size other than 1, 2, 4, or 8 bytes and
   complex numbers are passed by reference; everything else by value.  */
5020 s390_function_arg_pass_by_reference (mode, type)
5021 enum machine_mode mode;
5024 int size = s390_function_arg_size (mode, type);
5028 if (AGGREGATE_TYPE_P (type) &&
5029 size != 1 && size != 2 && size != 4 && size != 8)
5032 if (TREE_CODE (type) == COMPLEX_TYPE)
5039 /* Update the data in CUM to advance over an argument of mode MODE and
5040 data type TYPE. (TYPE is null for libcalls where that information
5041 may not be available.). The boolean NAMED specifies whether the
5042 argument is a named argument (as opposed to an unnamed argument
5043 matching an ellipsis). */
/* Advance *CUM past an argument of MODE/TYPE: floats consume an fpr,
   by-reference arguments consume one gpr (for the pointer), and other
   arguments consume as many gprs as their size requires.  */
5046 s390_function_arg_advance (cum, mode, type, named)
5047 CUMULATIVE_ARGS *cum;
5048 enum machine_mode mode;
5050 int named ATTRIBUTE_UNUSED;
5052 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5056 else if (s390_function_arg_pass_by_reference (mode, type))
5062 int size = s390_function_arg_size (mode, type);
5063 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5067 /* Define where to put the arguments to a function.
5068 Value is zero to push the argument on the stack,
5069 or a hard register in which to store the argument.
5071 MODE is the argument's machine mode.
5072 TYPE is the data type of the argument (as a tree).
5073 This is null for libcalls where that information may
5075 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5076 the preceding args and about the function being called.
5077 NAMED is nonzero if this argument is a named parameter
5078 (otherwise it is an extra parameter matching an ellipsis).
5080 On S/390, we use general purpose registers 2 through 6 to
5081 pass integer, pointer, and certain structure arguments, and
5082 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5083 to pass floating point arguments. All remaining arguments
5084 are pushed to the stack. */
/* Decide where an argument of MODE/TYPE goes given *CUM: by-reference
   arguments and overflow cases go to the stack (zero return); floats
   use fprs 16+ (2 regs on 31-bit, 4 on 64-bit); others use gprs 2..6.  */
5087 s390_function_arg (cum, mode, type, named)
5088 CUMULATIVE_ARGS *cum;
5089 enum machine_mode mode;
5091 int named ATTRIBUTE_UNUSED;
5093 if (s390_function_arg_pass_by_reference (mode, type))
5096 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5098 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5101 return gen_rtx (REG, mode, cum->fprs + 16);
5105 int size = s390_function_arg_size (mode, type);
5106 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
/* Arguments must fit entirely in gprs 2..6; partial fits spill
   to the stack.  */
5108 if (cum->gprs + n_gprs > 5)
5111 return gen_rtx (REG, mode, cum->gprs + 2);
5116 /* Create and return the va_list datatype.
5118 On S/390, va_list is an array type equivalent to
5120 typedef struct __va_list_tag
5124 void *__overflow_arg_area;
5125 void *__reg_save_area;
5129 where __gpr and __fpr hold the number of general purpose
5130 or floating point arguments used up to now, respectively,
5131 __overflow_arg_area points to the stack location of the
5132 next argument passed on the stack, and __reg_save_area
5133 always points to the start of the register area in the
5134 call frame of the current function. The function prologue
5135 saves all registers used for argument passing into this
5136 area if the function uses variable arguments. */
/* Build the va_list type: a one-element array of a record with fields
   __gpr, __fpr (long counters), __overflow_arg_area, and
   __reg_save_area (pointers).  See the block comment above.  */
5139 s390_build_va_list ()
5141 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5143 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5146 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5148 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5149 long_integer_type_node);
5150 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5151 long_integer_type_node);
5152 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5154 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
5157 DECL_FIELD_CONTEXT (f_gpr) = record;
5158 DECL_FIELD_CONTEXT (f_fpr) = record;
5159 DECL_FIELD_CONTEXT (f_ovf) = record;
5160 DECL_FIELD_CONTEXT (f_sav) = record;
5162 TREE_CHAIN (record) = type_decl;
5163 TYPE_NAME (record) = type_decl;
5164 TYPE_FIELDS (record) = f_gpr;
5165 TREE_CHAIN (f_gpr) = f_fpr;
5166 TREE_CHAIN (f_fpr) = f_ovf;
5167 TREE_CHAIN (f_ovf) = f_sav;
5169 layout_type (record);
5171 /* The correct type is an array type of one element. */
5172 return build_array_type (record, build_index_type (size_zero_node));
5175 /* Implement va_start by filling the va_list structure VALIST.
5176 STDARG_P is always true, and ignored.
5177 NEXTARG points to the first anonymous stack argument.
5179 The following global variables are used to initialize
5180 the va_list structure:
5182 current_function_args_info:
5183 holds number of gprs and fprs used for named arguments.
5184 current_function_arg_offset_rtx:
5185 holds the offset of the first anonymous stack argument
5186 (relative to the virtual arg pointer). */
/* Expand va_start: fill VALIST's four fields with the counts of gprs
   and fprs used for named arguments, the address of the first stack
   argument, and the address of the register save area.  */
5189 s390_va_start (valist, nextarg)
5191 rtx nextarg ATTRIBUTE_UNUSED;
5193 HOST_WIDE_INT n_gpr, n_fpr;
5195 tree f_gpr, f_fpr, f_ovf, f_sav;
5196 tree gpr, fpr, ovf, sav, t;
/* Walk the field chain built by s390_build_va_list, in order.  */
5198 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5199 f_fpr = TREE_CHAIN (f_gpr);
5200 f_ovf = TREE_CHAIN (f_fpr);
5201 f_sav = TREE_CHAIN (f_ovf);
5203 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5204 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5205 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5206 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5207 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5209 /* Count number of gp and fp argument registers used. */
5211 n_gpr = current_function_args_info.gprs;
5212 n_fpr = current_function_args_info.fprs;
5214 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5215 TREE_SIDE_EFFECTS (t) = 1;
5216 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5218 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5219 TREE_SIDE_EFFECTS (t) = 1;
5220 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5222 /* Find the overflow area. */
5223 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Negative offsets mean all named args were in registers; clamp.  */
5225 off = INTVAL (current_function_arg_offset_rtx);
5226 off = off < 0 ? 0 : off;
5227 if (TARGET_DEBUG_ARG)
5228 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5229 (int)n_gpr, (int)n_fpr, off);
5231 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5233 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5234 TREE_SIDE_EFFECTS (t) = 1;
5235 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5237 /* Find the register save area. */
5238 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5239 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5240 build_int_2 (-STACK_POINTER_OFFSET, -1));
5241 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5242 TREE_SIDE_EFFECTS (t) = 1;
5243 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5246 /* Implement va_arg by updating the va_list structure
5247 VALIST as required to retrieve an argument of type
5248 TYPE, and returning that argument.
5250 Generates code equivalent to:
5252 if (integral value) {
5253 if (size <= 4 && args.gpr < 5 ||
5254 size > 4 && args.gpr < 4 )
5255 ret = args.reg_save_area[args.gpr+8]
5257 ret = *args.overflow_arg_area++;
5258 } else if (float value) {
5260 ret = args.reg_save_area[args.fpr+64]
5262 ret = *args.overflow_arg_area++;
5263 } else if (aggregate value) {
5265 ret = *args.reg_save_area[args.gpr]
5267 ret = **args.overflow_arg_area++;
/* Expand va_arg for TYPE: compute register-save-area offsets and
   scaling for the three argument classes (by-reference aggregate,
   float, integer/other), then emit a runtime branch that fetches the
   value either from the save area or from the overflow area.  See
   the pseudo-code in the comment above.  */
5271 s390_va_arg (valist, type)
5275 tree f_gpr, f_fpr, f_ovf, f_sav;
5276 tree gpr, fpr, ovf, sav, reg, t, u;
5277 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
5278 rtx lab_false, lab_over, addr_rtx, r;
5280 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5281 f_fpr = TREE_CHAIN (f_gpr);
5282 f_ovf = TREE_CHAIN (f_fpr);
5283 f_sav = TREE_CHAIN (f_ovf);
5285 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5286 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5287 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5288 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5289 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5291 size = int_size_in_bytes (type);
5293 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
5295 if (TARGET_DEBUG_ARG)
5297 fprintf (stderr, "va_arg: aggregate type");
5301 /* Aggregates are passed by reference. */
/* One gpr holds the pointer; save slots start after gprs 0-1.  */
5305 sav_ofs = 2 * UNITS_PER_WORD;
5306 sav_scale = UNITS_PER_WORD;
5307 size = UNITS_PER_WORD;
5310 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
5312 if (TARGET_DEBUG_ARG)
5314 fprintf (stderr, "va_arg: float type");
5318 /* FP args go in FP registers, if present. */
5322 sav_ofs = 16 * UNITS_PER_WORD;
5324 /* TARGET_64BIT has up to 4 parameter in fprs */
5325 max_reg = TARGET_64BIT ? 3 : 1;
5329 if (TARGET_DEBUG_ARG)
5331 fprintf (stderr, "va_arg: other type");
5335 /* Otherwise into GP registers. */
5338 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
5339 sav_ofs = 2 * UNITS_PER_WORD;
/* Small scalars are right-justified within their word-sized slot;
   bias the offset so the load starts at the value itself.  */
5341 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
5342 TYPE_MODE (type) == HImode ? 6 :
5343 TYPE_MODE (type) == QImode ? 7 : 0;
5345 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
5346 TYPE_MODE (type) == QImode ? 3 : 0;
5348 sav_scale = UNITS_PER_WORD;
5355 /* Pull the value out of the saved registers ... */
5357 lab_false = gen_label_rtx ();
5358 lab_over = gen_label_rtx ();
5359 addr_rtx = gen_reg_rtx (Pmode);
/* If the register counter already exceeds max_reg, fall through to
   the overflow-area path at lab_false.  */
5361 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
5363 GT, const1_rtx, Pmode, 0, lab_false);
5366 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5370 u = build (MULT_EXPR, long_integer_type_node,
5371 reg, build_int_2 (sav_scale, 0));
5372 TREE_SIDE_EFFECTS (u) = 1;
5374 t = build (PLUS_EXPR, ptr_type_node, t, u);
5375 TREE_SIDE_EFFECTS (t) = 1;
5377 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5379 emit_move_insn (addr_rtx, r);
5382 emit_jump_insn (gen_jump (lab_over));
5384 emit_label (lab_false);
5386 /* ... Otherwise out of the overflow area. */
5388 t = save_expr (ovf);
5391 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
5392 if (size < UNITS_PER_WORD)
5394 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
5395 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5396 TREE_SIDE_EFFECTS (t) = 1;
5397 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5399 t = save_expr (ovf);
5402 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5404 emit_move_insn (addr_rtx, r);
/* Advance the overflow pointer past the argument just consumed.  */
5406 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5407 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5408 TREE_SIDE_EFFECTS (t) = 1;
5409 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5411 emit_label (lab_over);
5413 /* If fewer than max_reg registers were retrieved out
5414 of the register save area, increment the counter. */
5416 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
5417 build_int_2 (n_reg, 0));
5418 TREE_SIDE_EFFECTS (u) = 1;
5419 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* For by-reference arguments, dereference once more to get the
   actual value address.  */
5423 r = gen_rtx_MEM (Pmode, addr_rtx);
5424 set_mem_alias_set (r, get_varargs_alias_set ());
5425 emit_move_insn (addr_rtx, r);
5433 /* Output assembly code for the trampoline template to
5436 On S/390, we use gpr 1 internally in the trampoline code;
5437 gpr 0 is used to hold the static chain. */
/* Write the trampoline template to FILE.  Gpr 1 is the internal
   scratch register; gpr 0 receives the static chain.  The 64-bit
   variant uses larl/lg with two 8-byte literal slots; the 31-bit
   variant uses basr/l with two 4-byte slots.  The slots are filled
   in later by s390_initialize_trampoline.  */
5440 s390_trampoline_template (file)
5445 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
5446 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
5447 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
5448 fprintf (file, "br\t%s\n", reg_names[1]);
5449 fprintf (file, "0:\t.quad\t0\n");
5450 fprintf (file, ".quad\t0\n");
5454 fprintf (file, "basr\t%s,0\n", reg_names[1]);
5455 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
5456 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
5457 fprintf (file, "br\t%s\n", reg_names[1]);
5458 fprintf (file, ".long\t0\n");
5459 fprintf (file, ".long\t0\n");
5463 /* Emit RTL insns to initialize the variable parts of a trampoline.
5464 FNADDR is an RTX for the address of the function's pure code.
5465 CXT is an RTX for the static chain value for the function. */
/* Fill in the variable slots of a trampoline at ADDR: the static
   chain CXT at offset 20 (64-bit) / 12 (31-bit) and the function
   address FNADDR at offset 28 / 16, matching the literal slots
   emitted by s390_trampoline_template.  */
5468 s390_initialize_trampoline (addr, fnaddr, cxt)
5473 emit_move_insn (gen_rtx
5475 memory_address (Pmode,
5476 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
5477 emit_move_insn (gen_rtx
5479 memory_address (Pmode,
5480 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
5483 /* Return rtx for 64-bit constant formed from the 32-bit subwords
5484 LOW and HIGH, independent of the host word size. */
/* Return an RTX for the 64-bit constant whose upper 32 bits are HIGH
   and lower 32 bits are LOW, independent of the host word size:
   a single CONST_INT when HOST_WIDE_INT is 64-bit, otherwise a
   CONST_DOUBLE via immed_double_const.  */
5487 s390_gen_rtx_const_DI (high, low)
5491 #if HOST_BITS_PER_WIDE_INT >= 64
5493 val = (HOST_WIDE_INT)high;
5495 val |= (HOST_WIDE_INT)low;
5497 return GEN_INT (val);
5499 #if HOST_BITS_PER_WIDE_INT >= 32
5500 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
5507 /* Output assembler code to FILE to increment profiler label # LABELNO
5508 for profiling a function entry. */
/* Output assembly to FILE that calls _mcount for profiling, using
   label number LABELNO.  The return register is spilled to the stack
   around the call.  Three sequences: 64-bit (larl/brasl), 31-bit
   non-PIC (literal words after bras), and 31-bit PIC (pc-relative
   offsets added via a/basr).  */
5511 s390_function_profiler (file, labelno)
5518 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
5520 fprintf (file, "# function profiler \n");
/* op[0] = return reg, op[1] = its stack save slot, op[2] = scratch,
   op[3] = the profile label, op[4] = _mcount (possibly PLT unspec).  */
5522 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
5523 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
5524 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
5526 op[2] = gen_rtx_REG (Pmode, 1);
5527 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
5528 SYMBOL_REF_FLAG (op[3]) = 1;
5530 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
5533 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
5534 op[4] = gen_rtx_CONST (Pmode, op[4]);
5539 output_asm_insn ("stg\t%0,%1", op);
5540 output_asm_insn ("larl\t%2,%3", op);
5541 output_asm_insn ("brasl\t%0,%4", op);
5542 output_asm_insn ("lg\t%0,%1", op);
5546 op[6] = gen_label_rtx ();
5548 output_asm_insn ("st\t%0,%1", op);
/* bras over the two literal words leaves their address in %2.  */
5549 output_asm_insn ("bras\t%2,%l6", op);
5550 output_asm_insn (".long\t%4", op);
5551 output_asm_insn (".long\t%3", op);
5552 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5553 output_asm_insn ("l\t%0,0(%2)", op);
5554 output_asm_insn ("l\t%2,4(%2)", op);
5555 output_asm_insn ("basr\t%0,%0", op);
5556 output_asm_insn ("l\t%0,%1", op);
5560 op[5] = gen_label_rtx ();
5561 op[6] = gen_label_rtx ();
5563 output_asm_insn ("st\t%0,%1", op);
5564 output_asm_insn ("bras\t%2,%l6", op);
5565 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
/* PIC: store label-relative offsets and reconstruct addresses.  */
5566 output_asm_insn (".long\t%4-%l5", op);
5567 output_asm_insn (".long\t%3-%l5", op);
5568 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5569 output_asm_insn ("lr\t%0,%2", op);
5570 output_asm_insn ("a\t%0,0(%2)", op);
5571 output_asm_insn ("a\t%2,4(%2)", op);
5572 output_asm_insn ("basr\t%0,%0", op);
5573 output_asm_insn ("l\t%0,%1", op);
5577 /* Select section for constant in constant pool. In 32-bit mode,
5578 constants go in the function section; in 64-bit mode in .rodata. */
/* TARGET_ASM_SELECT_RTX_SECTION hook: constant-pool constants go to
   .rodata in 64-bit mode and into the function section otherwise
   (the condition line is not visible in this extraction).  */
5581 s390_select_rtx_section (mode, x, align)
5582 enum machine_mode mode ATTRIBUTE_UNUSED;
5583 rtx x ATTRIBUTE_UNUSED;
5584 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
5587 readonly_data_section ();
5589 function_section (current_function_decl);
5592 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
5593 may access it directly in the GOT. */
/* TARGET_ENCODE_SECTION_INFO hook: under PIC, set SYMBOL_REF_FLAG on
   DECL's symbol when it is a constant or a non-public declaration,
   marking it as directly accessible (no GOT indirection needed).  */
5596 s390_encode_section_info (decl, first)
5598 int first ATTRIBUTE_UNUSED;
/* Constants ('d' = declaration class check) keep their RTL in
   TREE_CST_RTL rather than DECL_RTL.  */
5602 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5603 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
5605 if (GET_CODE (rtl) == MEM)
5607 SYMBOL_REF_FLAG (XEXP (rtl, 0))
5608 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5609 || ! TREE_PUBLIC (decl));
5614 /* Output thunk to FILE that implements a C++ virtual function call (with
5615 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
5616 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
5617 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
5618 relative to the resulting this pointer. */
5621 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
5623 tree thunk ATTRIBUTE_UNUSED;
5624 HOST_WIDE_INT delta;
5625 HOST_WIDE_INT vcall_offset;
5630 /* Operand 0 is the target function. */
5631 op[0] = XEXP (DECL_RTL (function), 0);
/* Under PIC a symbol not known to bind locally (SYMBOL_REF_FLAG clear,
   see s390_encode_section_info) must be reached through its GOT entry;
   UNSPEC 113 marks the symbol for the @GOT relocation.  */
5632 if (flag_pic && !SYMBOL_REF_FLAG (op[0]))
5634 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]), 113);
5635 op[0] = gen_rtx_CONST (Pmode, op[0]);
5638 /* Operand 1 is the 'this' pointer. */
/* When the callee returns an aggregate in memory, r2 carries the hidden
   return-value pointer, shifting 'this' to r3.  */
5639 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
5640 op[1] = gen_rtx_REG (Pmode, 3);
5642 op[1] = gen_rtx_REG (Pmode, 2);
5644 /* Operand 2 is the delta. */
5645 op[2] = GEN_INT (delta);
5647 /* Operand 3 is the vcall_offset. */
5648 op[3] = GEN_INT (vcall_offset);
5650 /* Operand 4 is the temporary register. */
5651 op[4] = gen_rtx_REG (Pmode, 1);
5653 /* Operands 5 to 8 can be used as labels. */
5659 /* Generate code. */
/* 64-bit (z/Architecture) path: larl is available, so a literal pool
   is needed only when a constant exceeds the signed 16-bit immediate
   range (constraint letter 'K').  */
5662 /* Setup literal pool pointer if required. */
5663 if (!CONST_OK_FOR_LETTER_P (delta, 'K')
5664 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5666 op[5] = gen_label_rtx ();
5667 output_asm_insn ("larl\t%4,%5", op);
5670 /* Add DELTA to this pointer. */
/* 'J' = unsigned 12-bit displacement (la); 'K' = signed 16-bit (aghi);
   otherwise add the pool entry at label %6.  */
5673 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
5674 output_asm_insn ("la\t%1,%2(%1)", op);
5675 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
5676 output_asm_insn ("aghi\t%1,%2", op);
5679 op[6] = gen_label_rtx ();
5680 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
5684 /* Perform vcall adjustment. */
/* this += *(*this + vcall_offset): load the vtable pointer from
   0(this), then add the slot contents.  The three variants differ only
   in how the offset constant is materialized.  */
5687 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
5689 output_asm_insn ("lg\t%4,0(%1)", op);
5690 output_asm_insn ("ag\t%1,%3(%4)", op);
5692 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5694 output_asm_insn ("lghi\t%4,%3", op);
5695 output_asm_insn ("ag\t%4,0(%1)", op);
5696 output_asm_insn ("ag\t%1,0(%4)", op);
5700 op[7] = gen_label_rtx ();
5701 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
5702 output_asm_insn ("ag\t%4,0(%1)", op);
5703 output_asm_insn ("ag\t%1,0(%4)", op);
5707 /* Jump to target. */
5708 output_asm_insn ("jg\t%0", op);
5710 /* Output literal pool if required. */
5713 output_asm_insn (".align\t4", op);
5714 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5718 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5719 output_asm_insn (".long\t%2", op);
5723 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[7]));
5724 output_asm_insn (".long\t%3", op);
/* 31-bit (ESA/390) path: no larl, so establish a base register with
   basr whenever a literal pool (or PIC target address) is needed.  */
5729 /* Setup base pointer if required. */
5731 || !CONST_OK_FOR_LETTER_P (delta, 'K')
5732 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5734 op[5] = gen_label_rtx ();
5735 output_asm_insn ("basr\t%4,0", op);
5736 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5739 /* Add DELTA to this pointer. */
5742 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
5743 output_asm_insn ("la\t%1,%2(%1)", op);
5744 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
5745 output_asm_insn ("ahi\t%1,%2", op);
5748 op[6] = gen_label_rtx ();
5749 output_asm_insn ("a\t%1,%6-%5(%4)", op);
5753 /* Perform vcall adjustment. */
5756 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
/* Fixed: was "lg\t%4,0(%1)".  lg is a z/Architecture-only 64-bit load
   and is invalid in this 31-bit path; use the 32-bit l, matching the
   other 31-bit variants below.  */
5758 output_asm_insn ("l\t%4,0(%1)", op);
5759 output_asm_insn ("a\t%1,%3(%4)", op);
5761 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5763 output_asm_insn ("lhi\t%4,%3", op);
5764 output_asm_insn ("a\t%4,0(%1)", op);
5765 output_asm_insn ("a\t%1,0(%4)", op);
5769 op[7] = gen_label_rtx ();
5770 output_asm_insn ("l\t%4,%7-%5(%4)", op);
5771 output_asm_insn ("a\t%4,0(%1)", op);
5772 output_asm_insn ("a\t%1,0(%4)", op);
5775 /* We had to clobber the base pointer register.
5776 Re-setup the base pointer (with a different base). */
5777 op[5] = gen_label_rtx ();
5778 output_asm_insn ("basr\t%4,0", op);
5779 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5782 /* Jump to target. */
5783 op[8] = gen_label_rtx ();
/* Non-PIC: load the absolute target address from the pool.
   PIC: the pool holds a base-relative offset; add the base in %4.  */
5785 output_asm_insn ("l\t%4,%8-%5(%4)", op);
5787 output_asm_insn ("a\t%4,%8-%5(%4)", op);
5788 output_asm_insn ("br\t%4", op);
5790 /* Output literal pool. */
5791 output_asm_insn (".align\t4", op);
5792 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[8]));
5794 output_asm_insn (".long\t%0", op);
5796 output_asm_insn (".long\t%0-%5", op);
5800 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5801 output_asm_insn (".long\t%2", op);
5805 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[7]));
5806 output_asm_insn (".long\t%3", op);