1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
34 #include "insn-attr.h"
42 #include "basic-block.h"
43 #include "integrate.h"
46 #include "target-def.h"
48 #include "langhooks.h"
/* Prototypes for the target-hook implementations installed below.
   PARAMS is the pre-ISO-C prototype macro used by old GCC sources.  */
51 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
52 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
53 static int s390_adjust_priority PARAMS ((rtx, int));
54 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
55 unsigned HOST_WIDE_INT));
56 static void s390_encode_section_info PARAMS ((tree, int));
/* Emit 16-bit integers with .word and 64-bit integers with .quad.  */
58 #undef TARGET_ASM_ALIGNED_HI_OP
59 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
60 #undef TARGET_ASM_ALIGNED_DI_OP
61 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
62 #undef TARGET_ASM_INTEGER
63 #define TARGET_ASM_INTEGER s390_assemble_integer
/* Empty strings: the assembler needs no grouping parentheses around
   constant expressions.  */
65 #undef TARGET_ASM_OPEN_PAREN
66 #define TARGET_ASM_OPEN_PAREN ""
68 #undef TARGET_ASM_CLOSE_PAREN
69 #define TARGET_ASM_CLOSE_PAREN ""
71 #undef TARGET_ASM_SELECT_RTX_SECTION
72 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
/* Scheduler cost/priority tuning hooks.  */
74 #undef TARGET_SCHED_ADJUST_COST
75 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
77 #undef TARGET_SCHED_ADJUST_PRIORITY
78 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
80 #undef TARGET_ENCODE_SECTION_INFO
81 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
/* Instantiate the target hook vector using the overrides above.  */
83 struct gcc_target targetm = TARGET_INITIALIZER;
/* Defined in reload; nonzero once register allocation / reload has
   finished.  Used below to tighten operand predicates after reload.  */
85 extern int reload_completed;
87 /* The alias set for prologue/epilogue register save/restore. */
88 static int s390_sr_alias_set = 0;
90 /* Save information from a "cmpxx" operation until the branch or scc is
92 rtx s390_compare_op0, s390_compare_op1;
94 /* Structure used to hold the components of a S/390 memory
95 address. A legitimate address on S/390 is of the general
97 base + index + displacement
98 where any of the components is optional.
100 base and index are registers of the class ADDR_REGS,
101 displacement is an unsigned 12-bit immediate constant. */
111 /* Structure containing information for prologue and epilogue. */
/* NOTE(review): remaining struct members elided in this excerpt;
   the fields below presumably describe GPR restore range, argument
   frame offset, and total frame size -- confirm against full source.  */
118 int first_restore_gpr;
120 int arg_frame_offset;
122 HOST_WIDE_INT frame_size;
/* Forward declarations for file-local helper functions defined below.  */
125 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
126 static int s390_branch_condition_mask PARAMS ((rtx));
127 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
128 static int check_mode PARAMS ((rtx, enum machine_mode *));
129 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
130 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
131 static int reg_used_in_mem_p PARAMS ((int, rtx));
132 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
133 static void s390_split_branches PARAMS ((void));
134 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
135 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
136 static void s390_chunkify_pool PARAMS ((void));
137 static void s390_optimize_prolog PARAMS ((void));
138 static int find_unused_clobbered_reg PARAMS ((void));
139 static void s390_frame_info PARAMS ((struct s390_frame *));
/* Prologue/epilogue emit helpers for saving/restoring FPRs and GPRs.  */
140 static rtx save_fpr PARAMS ((rtx, int, int));
141 static rtx restore_fpr PARAMS ((rtx, int, int));
142 static rtx save_gprs PARAMS ((rtx, int, int, int));
143 static rtx restore_gprs PARAMS ((rtx, int, int, int));
144 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
147 /* Return true if SET either doesn't set the CC register, or else
148 the source and destination have matching CC modes and that
149 CC mode is at least as constrained as REQ_MODE. */
152 s390_match_ccmode_set (set, req_mode)
154 enum machine_mode req_mode;
156 enum machine_mode set_mode;
/* Anything that is not a SET cannot clobber CC here.  */
158 if (GET_CODE (set) != SET)
/* A SET whose destination is not the CC register is irrelevant.  */
161 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
164 set_mode = GET_MODE (SET_DEST (set));
/* NOTE(review): intermediate switch/cases elided in this excerpt;
   the checks below compare the actually-set CC mode against the
   requested one, treating some mode groups as interchangeable.  */
177 if (req_mode != set_mode)
182 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
183 && req_mode != CCSRmode && req_mode != CCURmode)
189 if (req_mode != CCAmode)
/* Finally the source must carry the same CC mode as the destination.  */
197 return (GET_MODE (SET_SRC (set)) == set_mode);
200 /* Return true if every SET in INSN that sets the CC register
201 has source and destination with matching CC modes and that
202 CC mode is at least as constrained as REQ_MODE.
203 If REQ_MODE is VOIDmode, always return false. */
206 s390_match_ccmode (insn, req_mode)
208 enum machine_mode req_mode;
212 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
213 if (req_mode == VOIDmode)
/* Single-SET pattern: delegate directly.  */
216 if (GET_CODE (PATTERN (insn)) == SET)
217 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL pattern: every contained SET must match.  */
219 if (GET_CODE (PATTERN (insn)) == PARALLEL)
220 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
222 rtx set = XVECEXP (PATTERN (insn), 0, i);
223 if (GET_CODE (set) == SET)
224 if (!s390_match_ccmode_set (set, req_mode))
231 /* If a test-under-mask instruction can be used to implement
232 (compare (and ... OP1) OP2), return the CC mode required
233 to do that. Otherwise, return VOIDmode.
234 MIXED is true if the instruction can distinguish between
235 CC1 and CC2 for mixed selected bits (TMxx), it is false
236 if the instruction cannot (TM). */
239 s390_tm_ccmode (op1, op2, mixed)
246 /* ??? Fixme: should work on CONST_DOUBLE as well. */
247 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
250 /* Selected bits all zero: CC0. */
251 if (INTVAL (op2) == 0)
254 /* Selected bits all one: CC3. */
255 if (INTVAL (op2) == INTVAL (op1))
258 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
/* bit1 = position of the single one-bit; bit0 = position of the
   single zero-bit within the mask (via XOR).  Both must be powers
   of two for this special case to apply.  */
261 bit1 = exact_log2 (INTVAL (op2));
262 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
263 if (bit0 != -1 && bit1 != -1)
264 return bit0 > bit1 ? CCT1mode : CCT2mode;
270 /* Given a comparison code OP (EQ, NE, etc.) and the operands
271 OP0 and OP1 of a COMPARE, return the mode to be used for the
275 s390_select_ccmode (code, op0, op1)
/* NOTE(review): the switch on CODE is elided in this excerpt; the
   tests below belong to its individual case arms.  */
/* 'K' here accepts constants valid as signed 16-bit immediates
   (per CONST_OK_FOR_LETTER_P).  */
284 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
285 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
287 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
288 || GET_CODE (op1) == NEG)
291 if (GET_CODE (op0) == AND)
293 /* Check whether we can potentially do it via TM. */
294 enum machine_mode ccmode;
295 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
296 if (ccmode != VOIDmode)
298 /* Relax CCTmode to CCZmode to allow fall-back to AND
299 if that turns out to be beneficial. */
300 return ccmode == CCTmode ? CCZmode : ccmode;
/* Comparisons of a HImode/QImode register against all-ones
   (sign- or zero-extended form) get special treatment.  */
304 if (register_operand (op0, HImode)
305 && GET_CODE (op1) == CONST_INT
306 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
308 if (register_operand (op0, QImode)
309 && GET_CODE (op1) == CONST_INT
310 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
319 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
320 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
322 if (INTVAL (XEXP((op0), 1)) < 0)
/* Extended operands compared against a non-constant use the
   sign/zero-extend specific CC modes.  */
335 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
336 && GET_CODE (op1) != CONST_INT)
342 if (GET_CODE (op0) == PLUS)
345 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
346 && GET_CODE (op1) != CONST_INT)
352 if (GET_CODE (op0) == MINUS)
355 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
356 && GET_CODE (op1) != CONST_INT)
365 /* Return branch condition mask to implement a branch
366 specified by CODE. */
369 s390_branch_condition_mask (code)
/* The hardware branch mask is 4 bits wide; CC0 is the most
   significant bit, CC3 the least significant.  */
372 const int CC0 = 1 << 3;
373 const int CC1 = 1 << 2;
374 const int CC2 = 1 << 1;
375 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero.  */
377 if (GET_CODE (XEXP (code, 0)) != REG
378 || REGNO (XEXP (code, 0)) != CC_REGNUM
379 || XEXP (code, 1) != const0_rtx)
/* Dispatch on the CC mode of the compared register; the mode case
   labels themselves are elided in this excerpt.  Each inner switch
   maps an RTL comparison code to the set of condition codes under
   which the branch is taken.  */
382 switch (GET_MODE (XEXP (code, 0)))
385 switch (GET_CODE (code))
388 case NE: return CC1 | CC2 | CC3;
395 switch (GET_CODE (code))
398 case NE: return CC0 | CC2 | CC3;
405 switch (GET_CODE (code))
408 case NE: return CC0 | CC1 | CC3;
415 switch (GET_CODE (code))
418 case NE: return CC0 | CC1 | CC2;
425 switch (GET_CODE (code))
427 case EQ: return CC0 | CC2;
428 case NE: return CC1 | CC3;
435 switch (GET_CODE (code))
437 case LTU: return CC2 | CC3; /* carry */
438 case GEU: return CC0 | CC1; /* no carry */
445 switch (GET_CODE (code))
447 case GTU: return CC0 | CC1; /* borrow */
448 case LEU: return CC2 | CC3; /* no borrow */
/* Unsigned comparison modes (direct and reversed variants).  */
455 switch (GET_CODE (code))
458 case NE: return CC1 | CC2 | CC3;
459 case LTU: return CC1;
460 case GTU: return CC2;
461 case LEU: return CC0 | CC1;
462 case GEU: return CC0 | CC2;
469 switch (GET_CODE (code))
472 case NE: return CC2 | CC1 | CC3;
473 case LTU: return CC2;
474 case GTU: return CC1;
475 case LEU: return CC0 | CC2;
476 case GEU: return CC0 | CC1;
/* Signed comparison modes.  */
483 switch (GET_CODE (code))
486 case NE: return CC1 | CC2 | CC3;
487 case LT: return CC1 | CC3;
489 case LE: return CC0 | CC1 | CC3;
490 case GE: return CC0 | CC2;
497 switch (GET_CODE (code))
500 case NE: return CC1 | CC2 | CC3;
502 case GT: return CC2 | CC3;
503 case LE: return CC0 | CC1;
504 case GE: return CC0 | CC2 | CC3;
/* Floating-point comparison modes: CC3 indicates "unordered"
   (at least one NaN operand), hence the UN* and ORDERED rows.  */
511 switch (GET_CODE (code))
514 case NE: return CC1 | CC2 | CC3;
517 case LE: return CC0 | CC1;
518 case GE: return CC0 | CC2;
519 case UNORDERED: return CC3;
520 case ORDERED: return CC0 | CC1 | CC2;
521 case UNEQ: return CC0 | CC3;
522 case UNLT: return CC1 | CC3;
523 case UNGT: return CC2 | CC3;
524 case UNLE: return CC0 | CC1 | CC3;
525 case UNGE: return CC0 | CC2 | CC3;
526 case LTGT: return CC1 | CC2;
/* Reversed-operand floating-point comparison mode.  */
533 switch (GET_CODE (code))
536 case NE: return CC2 | CC1 | CC3;
539 case LE: return CC0 | CC2;
540 case GE: return CC0 | CC1;
541 case UNORDERED: return CC3;
542 case ORDERED: return CC0 | CC2 | CC1;
543 case UNEQ: return CC0 | CC3;
544 case UNLT: return CC2 | CC3;
545 case UNGT: return CC1 | CC3;
546 case UNLE: return CC0 | CC2 | CC3;
547 case UNGE: return CC0 | CC1 | CC3;
548 case LTGT: return CC2 | CC1;
559 /* If INV is false, return assembler mnemonic string to implement
560 a branch specified by CODE. If INV is true, return mnemonic
561 for the corresponding inverted branch. */
564 s390_branch_condition_mnemonic (code, inv)
/* Table indexed by 4-bit condition mask; entries 0 (never) and 15
   (always) are invalid for a conditional branch, hence NULL.  */
568 static const char *const mnemonic[16] =
570 NULL, "o", "h", "nle",
571 "l", "nhe", "lh", "ne",
572 "e", "nlh", "he", "nl",
573 "le", "nh", "no", NULL
576 int mask = s390_branch_condition_mask (code);
/* Reject the always/never masks; only 1..14 have mnemonics.  */
581 if (mask < 1 || mask > 14)
584 return mnemonic[mask];
587 /* If OP is an integer constant of mode MODE with exactly one
588 HImode subpart unequal to DEF, return the number of that
589 subpart. As a special case, all HImode subparts of OP are
590 equal to DEF, return zero. Otherwise, return -1. */
593 s390_single_hi (op, mode, def)
595 enum machine_mode mode;
598 if (GET_CODE (op) == CONST_INT)
600 unsigned HOST_WIDE_INT value;
/* Each HImode subpart is 2 bytes wide.  */
601 int n_parts = GET_MODE_SIZE (mode) / 2;
604 for (i = 0; i < n_parts; i++)
607 value = (unsigned HOST_WIDE_INT) INTVAL (op);
/* Record any 16-bit chunk that differs from DEF (shift/record
   lines elided in this excerpt).  */
611 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* Subparts are numbered from the most significant end.  */
620 return part == -1 ? 0 : (n_parts - 1 - part);
/* CONST_DOUBLE with VOIDmode carries a wide integer in its
   low/high halves.  */
623 else if (GET_CODE (op) == CONST_DOUBLE
624 && GET_MODE (op) == VOIDmode)
626 unsigned HOST_WIDE_INT value;
627 int n_parts = GET_MODE_SIZE (mode) / 2;
630 for (i = 0; i < n_parts; i++)
633 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
634 else if (i == HOST_BITS_PER_WIDE_INT / 16)
635 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
639 if ((value & 0xffff) != (unsigned)(def & 0xffff))
648 return part == -1 ? 0 : (n_parts - 1 - part);
654 /* Extract the HImode part number PART from integer
655 constant OP of mode MODE. */
658 s390_extract_hi (op, mode, part)
660 enum machine_mode mode;
663 int n_parts = GET_MODE_SIZE (mode) / 2;
/* Bounds-check the requested subpart index.  */
664 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to shift count.  */
667 part = n_parts - 1 - part;
669 if (GET_CODE (op) == CONST_INT)
671 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
672 return ((value >> (16 * part)) & 0xffff);
/* Wide constants: pick low or high word of the CONST_DOUBLE.  */
674 else if (GET_CODE (op) == CONST_DOUBLE
675 && GET_MODE (op) == VOIDmode)
677 unsigned HOST_WIDE_INT value;
678 if (part < HOST_BITS_PER_WIDE_INT / 16)
679 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
681 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
682 part -= HOST_BITS_PER_WIDE_INT / 16;
684 return ((value >> (16 * part)) & 0xffff);
690 /* If OP is an integer constant of mode MODE with exactly one
691 QImode subpart unequal to DEF, return the number of that
692 subpart. As a special case, all QImode subparts of OP are
693 equal to DEF, return zero. Otherwise, return -1. */
696 s390_single_qi (op, mode, def)
698 enum machine_mode mode;
701 if (GET_CODE (op) == CONST_INT)
703 unsigned HOST_WIDE_INT value;
/* QImode subparts are 1 byte wide.  */
704 int n_parts = GET_MODE_SIZE (mode);
707 for (i = 0; i < n_parts; i++)
710 value = (unsigned HOST_WIDE_INT) INTVAL (op);
/* Record any byte that differs from DEF (shift/record lines
   elided in this excerpt).  */
714 if ((value & 0xff) != (unsigned)(def & 0xff))
/* Subparts are numbered from the most significant end.  */
723 return part == -1 ? 0 : (n_parts - 1 - part);
/* Same scan for wide constants held in a VOIDmode CONST_DOUBLE.  */
726 else if (GET_CODE (op) == CONST_DOUBLE
727 && GET_MODE (op) == VOIDmode)
729 unsigned HOST_WIDE_INT value;
730 int n_parts = GET_MODE_SIZE (mode);
733 for (i = 0; i < n_parts; i++)
736 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
737 else if (i == HOST_BITS_PER_WIDE_INT / 8)
738 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
742 if ((value & 0xff) != (unsigned)(def & 0xff))
751 return part == -1 ? 0 : (n_parts - 1 - part);
757 /* Extract the QImode part number PART from integer
758 constant OP of mode MODE. */
761 s390_extract_qi (op, mode, part)
763 enum machine_mode mode;
766 int n_parts = GET_MODE_SIZE (mode);
/* Bounds-check the requested byte index.  */
767 if (part < 0 || part >= n_parts)
/* Convert from most-significant-first numbering to shift count.  */
770 part = n_parts - 1 - part;
772 if (GET_CODE (op) == CONST_INT)
774 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
775 return ((value >> (8 * part)) & 0xff);
/* Wide constants: pick low or high word of the CONST_DOUBLE.  */
777 else if (GET_CODE (op) == CONST_DOUBLE
778 && GET_MODE (op) == VOIDmode)
780 unsigned HOST_WIDE_INT value;
781 if (part < HOST_BITS_PER_WIDE_INT / 8)
782 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
784 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
785 part -= HOST_BITS_PER_WIDE_INT / 8;
787 return ((value >> (8 * part)) & 0xff);
794 /* Change optimizations to be performed, depending on the
797 LEVEL is the optimization level specified; 2 if `-O2' is
798 specified, 1 if `-O' is specified, and 0 if neither is specified.
800 SIZE is nonzero if `-Os' is specified and zero otherwise. */
803 optimization_options (level, size)
804 int level ATTRIBUTE_UNUSED;
805 int size ATTRIBUTE_UNUSED;
812 /* Acquire a unique set number for our register saves and restores. */
813 s390_sr_alias_set = new_alias_set ();
817 /* Map for smallest class containing reg regno. */
/* Indexed by hard register number: GPRs 0-15 (r0 is GENERAL_REGS
   since it cannot serve as an address base), FPRs 16-31, then the
   remaining special registers.  */
819 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
820 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
821 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
822 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
823 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
824 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
825 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
826 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
827 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
828 ADDR_REGS, NO_REGS, ADDR_REGS
832 /* Return true if OP a (const_int 0) operand.
833 OP is the current operation.
834 MODE is the current operation mode. */
837 const0_operand (op, mode)
839 enum machine_mode mode;
/* Compare against the canonical zero rtx for MODE.  */
841 return op == CONST0_RTX (mode);
844 /* Return true if OP is constant.
845 OP is the current operation.
846 MODE is the current operation mode. */
849 consttable_operand (op, mode)
851 enum machine_mode mode ATTRIBUTE_UNUSED;
853 return CONSTANT_P (op);
856 /* Return true if the mode of operand OP matches MODE.
857 If MODE is set to VOIDmode, set it to the mode of OP. */
860 check_mode (op, mode)
862 enum machine_mode *mode;
/* VOIDmode means "any": adopt OP's mode for the caller.  */
864 if (*mode == VOIDmode)
865 *mode = GET_MODE (op);
/* Mode-less operands (e.g. constants) are accepted for any mode.  */
868 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
874 /* Return true if OP a valid operand for the LARL instruction.
875 OP is the current operation.
876 MODE is the current operation mode. */
879 larl_operand (op, mode)
881 enum machine_mode mode;
883 if (! check_mode (op, &mode))
886 /* Allow labels and local symbols. */
887 if (GET_CODE (op) == LABEL_REF)
/* SYMBOL_REF_FLAG marks symbols known to be local to this module;
   constant-pool symbols are always local.  */
889 if (GET_CODE (op) == SYMBOL_REF
890 && (!flag_pic || SYMBOL_REF_FLAG (op)
891 || CONSTANT_POOL_ADDRESS_P (op)))
894 /* Everything else must have a CONST, so strip it. */
895 if (GET_CODE (op) != CONST)
899 /* Allow adding *even* constants. */
/* LARL addresses are halfword-aligned, hence odd offsets are
   rejected.  */
900 if (GET_CODE (op) == PLUS)
902 if (GET_CODE (XEXP (op, 1)) != CONST_INT
903 || (INTVAL (XEXP (op, 1)) & 1) != 0)
908 /* Labels and local symbols allowed here as well. */
909 if (GET_CODE (op) == LABEL_REF)
911 if (GET_CODE (op) == SYMBOL_REF
912 && (!flag_pic || SYMBOL_REF_FLAG (op)
913 || CONSTANT_POOL_ADDRESS_P (op)))
916 /* Now we must have a @GOTENT offset or @PLT stub. */
/* NOTE(review): 111 and 113 are the magic UNSPEC numbers this file
   uses for @GOTENT and @PLT respectively -- confirm against the
   matching machine-description definitions.  */
917 if (GET_CODE (op) == UNSPEC
918 && XINT (op, 1) == 111)
920 if (GET_CODE (op) == UNSPEC
921 && XINT (op, 1) == 113)
927 /* Return true if OP is a valid FP-Register.
928 OP is the current operation.
929 MODE is the current operation mode. */
932 fp_operand (op, mode)
934 enum machine_mode mode;
936 register enum rtx_code code = GET_CODE (op);
937 if (! check_mode (op, &mode))
/* Accept only hard registers in the floating-point register range.  */
939 if (code == REG && REGNO_OK_FOR_FP_P (REGNO (op)))
945 /* Helper routine to implement s_operand and s_imm_operand.
946 OP is the current operation.
947 MODE is the current operation mode.
948 ALLOW_IMMEDIATE specifies whether immediate operands should
949 be accepted or not. */
952 general_s_operand (op, mode, allow_immediate)
954 enum machine_mode mode;
957 struct s390_address addr;
959 /* Call general_operand first, so that we don't have to
960 check for many special cases. */
961 if (!general_operand (op, mode))
964 /* Just like memory_operand, allow (subreg (mem ...))
967 && GET_CODE (op) == SUBREG
968 && GET_CODE (SUBREG_REG (op)) == MEM)
969 op = SUBREG_REG (op);
971 switch (GET_CODE (op))
973 /* Constants that we are sure will be forced to the
974 literal pool in reload are OK as s-operand. Note
975 that we cannot call s390_preferred_reload_class here
976 because it might not be known yet at this point
977 whether the current function is a leaf or not. */
/* After reload no constant can be forced to the pool anymore.  */
980 if (!allow_immediate || reload_completed)
982 if (!legitimate_reload_constant_p (op))
988 /* Memory operands are OK unless they already use an
/* S-type operands take base+displacement only, no index reg.  */
991 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
993 if (s390_decompose_address (XEXP (op, 0), &addr)
1005 /* Return true if OP is a valid S-type operand.
1006 OP is the current operation.
1007 MODE is the current operation mode. */
1010 s_operand (op, mode)
1012 enum machine_mode mode;
1014 return general_s_operand (op, mode, 0);
1017 /* Return true if OP is a valid S-type operand or an immediate
1018 operand that can be addressed as S-type operand by forcing
1019 it into the literal pool.
1020 OP is the current operation.
1021 MODE is the current operation mode. */
1024 s_imm_operand (op, mode)
1026 enum machine_mode mode;
1028 return general_s_operand (op, mode, 1);
1031 /* Return true if OP is a valid operand for a 'Q' constraint.
1032 This differs from s_operand in that only memory operands
1033 without index register are accepted, nothing else. */
1039 struct s390_address addr;
1041 if (GET_CODE (op) != MEM)
/* Must decompose into base + displacement (no index).  */
1044 if (!s390_decompose_address (XEXP (op, 0), &addr))
1053 /* Return true if OP is a valid operand for the BRAS instruction.
1054 OP is the current operation.
1055 MODE is the current operation mode. */
1058 bras_sym_operand (op, mode)
1060 enum machine_mode mode ATTRIBUTE_UNUSED;
1062 register enum rtx_code code = GET_CODE (op);
1064 /* Allow SYMBOL_REFs. */
1065 if (code == SYMBOL_REF)
1068 /* Allow @PLT stubs. */
/* 113 is this file's UNSPEC number for @PLT (see larl_operand).  */
1070 && GET_CODE (XEXP (op, 0)) == UNSPEC
1071 && XINT (XEXP (op, 0), 1) == 113)
1077 /* Return true if OP is a load multiple operation. It is known to be a
1078 PARALLEL and the first section will be tested.
1079 OP is the current operation.
1080 MODE is the current operation mode. */
1083 load_multiple_operation (op, mode)
1085 enum machine_mode mode ATTRIBUTE_UNUSED;
1087 int count = XVECLEN (op, 0);
1088 unsigned int dest_regno;
1093 /* Perform a quick check so we don't blow up below. */
1095 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1096 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1097 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1100 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1101 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1103 /* Check, is base, or base + displacement. */
1105 if (GET_CODE (src_addr) == REG)
1107 else if (GET_CODE (src_addr) == PLUS
1108 && GET_CODE (XEXP (src_addr, 0)) == REG
1109 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1111 off = INTVAL (XEXP (src_addr, 1));
1112 src_addr = XEXP (src_addr, 0);
/* Reject eliminable base registers; their final offsets are not
   known yet, so the consecutive-address check below would lie.  */
1117 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Each subsequent SET must load register dest_regno+i from the
   address off + i words past the base.  */
1120 for (i = 1; i < count; i++)
1122 rtx elt = XVECEXP (op, 0, i);
1124 if (GET_CODE (elt) != SET
1125 || GET_CODE (SET_DEST (elt)) != REG
1126 || GET_MODE (SET_DEST (elt)) != Pmode
1127 || REGNO (SET_DEST (elt)) != dest_regno + i
1128 || GET_CODE (SET_SRC (elt)) != MEM
1129 || GET_MODE (SET_SRC (elt)) != Pmode
1130 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1131 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1132 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1133 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1134 != off + i * UNITS_PER_WORD)
1141 /* Return true if OP is a store multiple operation. It is known to be a
1142 PARALLEL and the first section will be tested.
1143 OP is the current operation.
1144 MODE is the current operation mode. */
1147 store_multiple_operation (op, mode)
1149 enum machine_mode mode ATTRIBUTE_UNUSED;
1151 int count = XVECLEN (op, 0);
1152 unsigned int src_regno;
1156 /* Perform a quick check so we don't blow up below. */
1158 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1159 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1160 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1163 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1164 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1166 /* Check, is base, or base + displacement. */
1168 if (GET_CODE (dest_addr) == REG)
1170 else if (GET_CODE (dest_addr) == PLUS
1171 && GET_CODE (XEXP (dest_addr, 0)) == REG
1172 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1174 off = INTVAL (XEXP (dest_addr, 1));
1175 dest_addr = XEXP (dest_addr, 0);
/* Reject eliminable base registers, as in load_multiple_operation.  */
1180 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
/* Each subsequent SET must store register src_regno+i to the
   address off + i words past the base.  */
1183 for (i = 1; i < count; i++)
1185 rtx elt = XVECEXP (op, 0, i);
1187 if (GET_CODE (elt) != SET
1188 || GET_CODE (SET_SRC (elt)) != REG
1189 || GET_MODE (SET_SRC (elt)) != Pmode
1190 || REGNO (SET_SRC (elt)) != src_regno + i
1191 || GET_CODE (SET_DEST (elt)) != MEM
1192 || GET_MODE (SET_DEST (elt)) != Pmode
1193 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1194 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1195 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1196 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1197 != off + i * UNITS_PER_WORD)
1204 /* Return true if OP contains a symbol reference */
1207 symbolic_reference_mentioned_p (op)
1210 register const char *fmt;
1213 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
/* Recursively scan all sub-expressions ('e') and vectors ('E')
   of OP using its rtx format string.  */
1216 fmt = GET_RTX_FORMAT (GET_CODE (op));
1217 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1223 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1224 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1228 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1236 /* Return true if OP is a legitimate general operand when
1237 generating PIC code. It is given that flag_pic is on
1238 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1241 legitimate_pic_operand_p (op)
1244 /* Accept all non-symbolic constants. */
1245 if (!SYMBOLIC_CONST (op))
1248 /* Reject everything else; must be handled
1249 via emit_pic_move. */
1253 /* Returns true if the constant value OP is a legitimate general operand.
1254 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1257 legitimate_constant_p (op)
1260 /* Accept all non-symbolic constants. */
1261 if (!SYMBOLIC_CONST (op))
1264 /* In the PIC case, symbolic constants must *not* be
1265 forced into the literal pool. We accept them here,
1266 so that they will be handled by emit_pic_move. */
1270 /* Even in the non-PIC case, we can accept immediate
1271 LARL operands here. */
1273 return larl_operand (op, VOIDmode);
1275 /* All remaining non-PIC symbolic constants are
1276 forced into the literal pool. */
1280 /* Returns true if the constant value OP is a legitimate general
1281 operand during and after reload. The difference to
1282 legitimate_constant_p is that this function will not accept
1283 a constant that would need to be forced to the literal pool
1284 before it can be used as operand. */
1287 legitimate_reload_constant_p (op)
1290 /* Accept l(g)hi operands. */
/* 'K' accepts constants loadable by a single load-halfword-
   immediate instruction.  */
1291 if (GET_CODE (op) == CONST_INT
1292 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1295 /* Accept lliXX operands. */
/* Constants with a single nonzero 16-bit chunk (load-logical-
   immediate); the TARGET_64BIT guard is elided in this excerpt.  */
1297 && s390_single_hi (op, DImode, 0) >= 0)
1300 /* Accept larl operands. */
1302 && larl_operand (op, VOIDmode))
1305 /* Everything else cannot be handled without reload. */
1309 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1310 return the class of reg to actually use. */
1313 s390_preferred_reload_class (op, class)
1315 enum reg_class class;
1317 /* This can happen if a floating point constant is being
1318 reloaded into an integer register. Leave well alone. */
1319 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1320 && class != FP_REGS)
1323 switch (GET_CODE (op))
1325 /* Constants we cannot reload must be forced into the
1326 literal pool. For constants we *could* handle directly,
1327 it might still be preferable to put them in the pool and
1328 use a memory-to-memory instruction.
1330 However, try to avoid needlessly allocating a literal
1331 pool in a routine that wouldn't otherwise need any.
1332 Heuristically, we assume that 64-bit leaf functions
1333 typically don't need a literal pool, all others do. */
1336 if (!legitimate_reload_constant_p (op))
1339 if (TARGET_64BIT && current_function_is_leaf)
1344 /* If a symbolic constant or a PLUS is reloaded,
1345 it is most likely being used as an address, so
1346 prefer ADDR_REGS. If 'class' is not a superset
1347 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1352 if (reg_class_subset_p (ADDR_REGS, class))
1364 /* Return the register class of a scratch register needed to
1365 load IN into a register of class CLASS in MODE.
1367 We need a temporary when loading a PLUS expression which
1368 is not a legitimate operand of the LOAD ADDRESS instruction. */
1371 s390_secondary_input_reload_class (class, mode, in)
1372 enum reg_class class ATTRIBUTE_UNUSED;
1373 enum machine_mode mode;
1376 if (s390_plus_operand (in, mode))
1382 /* Return true if OP is a PLUS that is not a legitimate
1383 operand for the LA instruction.
1384 OP is the current operation.
1385 MODE is the current operation mode. */
1388 s390_plus_operand (op, mode)
1390 enum machine_mode mode;
/* Only pointer-mode sums can be addresses at all.  */
1392 if (!check_mode (op, &mode) || mode != Pmode)
1395 if (GET_CODE (op) != PLUS)
/* A sum LA can already handle needs no fix-up.  */
1398 if (legitimate_la_operand_p (op))
1404 /* Generate code to load SRC, which is PLUS that is not a
1405 legitimate operand for the LA instruction, into TARGET.
1406 SCRATCH may be used as scratch register. */
1409 s390_expand_plus_operand (target, src, scratch)
1410 register rtx target;
1412 register rtx scratch;
1415 struct s390_address ad;
1417 /* src must be a PLUS; get its two operands. */
1418 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1421 /* Check if any of the two operands is already scheduled
1422 for replacement by reload. This can happen e.g. when
1423 float registers occur in an address. */
1424 sum1 = find_replacement (&XEXP (src, 0));
1425 sum2 = find_replacement (&XEXP (src, 1));
1426 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1428 /* If the address is already strictly valid, there's nothing to do. */
1429 if (!s390_decompose_address (src, &ad)
1430 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1431 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1433 /* Otherwise, one of the operands cannot be an address register;
1434 we reload its value into the scratch register. */
/* Hard regs 1..15 are the valid address registers (r0 cannot be
   used as a base), hence the range test on the true regno.  */
1435 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1437 emit_move_insn (scratch, sum1)
1440 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1442 emit_move_insn (scratch, sum2);
1446 /* According to the way these invalid addresses are generated
1447 in reload.c, it should never happen (at least on s390) that
1448 *neither* of the PLUS components, after find_replacements
1449 was applied, is an address register. */
1450 if (sum1 == scratch && sum2 == scratch)
1456 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1459 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1460 is only ever performed on addresses, so we can mark the
1461 sum as legitimate for LA in any case. */
1462 s390_load_address (target, src);
1466 /* Decompose a RTL expression ADDR for a memory address into
1467 its components, returned in OUT.
1469 Returns 0 if ADDR is not a valid memory address, nonzero
1470 otherwise. If OUT is NULL, don't return the components,
1471 but check for validity only.
1473 Note: Only addresses in canonical form are recognized.
1474 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1475 canonical form so that they will be recognized. */
/* Split ADDR into base register, index register and displacement and
   validate each part, storing the result through OUT.  Only addresses
   already in canonical form are accepted (see comment above).
   NOTE(review): the surrounding control flow (returns on failure) is
   elided in this view of the file.  */
1478 s390_decompose_address (addr, out)
1480 struct s390_address *out;
/* The three address components; any may remain NULL_RTX.  */
1482 rtx base = NULL_RTX;
1483 rtx indx = NULL_RTX;
1484 rtx disp = NULL_RTX;
/* Nonzero when the address is recognized as a pointer value
   (relevant for 31-bit LA; see legitimate_la_operand_p).  */
1485 int pointer = FALSE;
1487 /* Decompose address into base + index + displacement. */
1489 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1492 else if (GET_CODE (addr) == PLUS)
1494 rtx op0 = XEXP (addr, 0);
1495 rtx op1 = XEXP (addr, 1);
1496 enum rtx_code code0 = GET_CODE (op0);
1497 enum rtx_code code1 = GET_CODE (op1);
1499 if (code0 == REG || code0 == UNSPEC)
1501 if (code1 == REG || code1 == UNSPEC)
1503 indx = op0; /* index + base */
1509 base = op0; /* base + displacement */
1514 else if (code0 == PLUS)
1516 indx = XEXP (op0, 0); /* index + base + disp */
1517 base = XEXP (op0, 1);
1528 disp = addr; /* displacement */
1531 /* Validate base register. */
/* UNSPEC 101 wraps the literal pool base register; unwrap it, but
   only the single-operand form is acceptable.  */
1534 if (GET_CODE (base) == UNSPEC)
1536 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1538 base = XVECEXP (base, 0, 0)
1542 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* Check for registers known to hold pointer values: the reserved
   base register, stack/frame/arg pointers (including the hard frame
   pointer once reload uses it), virtual registers, and the PIC
   register in some configuration (its guard is partly elided).  */
1545 if (REGNO (base) == BASE_REGISTER
1546 || REGNO (base) == STACK_POINTER_REGNUM
1547 || REGNO (base) == FRAME_POINTER_REGNUM
1548 || ((reload_completed || reload_in_progress)
1549 && frame_pointer_needed
1550 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1551 || REGNO (base) == ARG_POINTER_REGNUM
1552 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1553 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1555 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1559 /* Validate index register. */
/* Same unwrapping and checks as for the base register above.  */
1562 if (GET_CODE (indx) == UNSPEC)
1564 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1566 indx = XVECEXP (indx, 0, 0)
1570 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1573 if (REGNO (indx) == BASE_REGISTER
1574 || REGNO (indx) == STACK_POINTER_REGNUM
1575 || REGNO (indx) == FRAME_POINTER_REGNUM
1576 || ((reload_completed || reload_in_progress)
1577 && frame_pointer_needed
1578 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1579 || REGNO (indx) == ARG_POINTER_REGNUM
1580 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1581 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1583 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1587 /* Validate displacement. */
1590 /* Allow integer constant in range. */
1591 if (GET_CODE (disp) == CONST_INT)
/* Valid displacements are unsigned 12-bit values: 0 .. 4095.  */
1593 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1597 /* In the small-PIC case, the linker converts @GOT12
1598 offsets to possible displacements. */
1599 else if (GET_CODE (disp) == CONST
1600 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1601 && XINT (XEXP (disp, 0), 1) == 110)
1609 /* Accept chunkified literal pool symbol references
   (label minus label). */
1610 else if (GET_CODE (disp) == CONST
1611 && GET_CODE (XEXP (disp, 0)) == MINUS
1612 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1613 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1618 /* Likewise if a constant offset is present. */
1619 else if (GET_CODE (disp) == CONST
1620 && GET_CODE (XEXP (disp, 0)) == PLUS
1621 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
1622 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
1623 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1624 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1629 /* We can convert literal pool addresses to
1630 displacements by basing them off the base register. */
1633 /* In some cases, we can accept an additional
1634 small constant offset. Split these off here. */
1636 unsigned int offset = 0;
1638 if (GET_CODE (disp) == CONST
1639 && GET_CODE (XEXP (disp, 0)) == PLUS
1640 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1642 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1643 disp = XEXP (XEXP (disp, 0), 0);
1646 /* Now we must have a literal pool address. */
1647 if (GET_CODE (disp) != SYMBOL_REF
1648 || !CONSTANT_POOL_ADDRESS_P (disp))
1651 /* In 64-bit PIC mode we cannot accept symbolic
1652 constants in the constant pool. */
1653 if (TARGET_64BIT && flag_pic
1654 && SYMBOLIC_CONST (get_pool_constant (disp)))
1657 /* If we have an offset, make sure it does not
1658 exceed the size of the constant pool entry. */
1659 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1662 /* Either base or index must be free to
1663 hold the base register. */
1667 /* Convert the address. */
/* Rebase the literal pool reference on BASE_REGISTER, wrapping the
   pool symbol in UNSPEC 100 (literal-pool-relative; printed as
   "sym-.LTn" by s390_output_symbolic_const).  */
1669 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1671 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1673 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1674 disp = gen_rtx_CONST (Pmode, disp);
1677 disp = plus_constant (disp, offset);
1691 out->pointer = pointer;
1697 /* Return nonzero if ADDR is a valid memory address.
1698 STRICT specifies whether strict register checking applies. */
1701 legitimate_address_p (mode, addr, strict)
1702 enum machine_mode mode ATTRIBUTE_UNUSED;
/* An address that cannot be decomposed is never legitimate.  */
1706 struct s390_address ad;
1707 if (!s390_decompose_address (addr, &ad))
/* Strict checking: base and index must pass the strict REG_OK
   predicates.  */
1712 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1714 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
/* Non-strict checking: the relaxed predicates apply instead.  */
1719 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1721 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1728 /* Return 1 if OP is a valid operand for the LA instruction.
1729 In 31-bit, we need to prove that the result is used as an
1730 address, as LA performs only a 31-bit addition. */
1733 legitimate_la_operand_p (op)
/* OP must decompose as a valid address at all.  */
1736 struct s390_address addr;
1737 if (!s390_decompose_address (op, &addr))
/* LA is always fine on 64-bit; on 31-bit the operand must be known
   to be a pointer, since LA performs only a 31-bit addition.  */
1740 if (TARGET_64BIT || addr.pointer)
1746 /* Emit a forced load-address operation to load SRC into DST.
1747 This will use the LOAD ADDRESS instruction even in situations
1748 where legitimate_la_operand_p (SRC) returns false. */
1751 s390_load_address (dst, src)
/* Ordinary move when LA is safe; otherwise use the forced 31-bit LA
   pattern.  NOTE(review): the condition selecting between the two
   branches is elided here -- presumably a TARGET_64BIT /
   legitimate_la_operand_p test; confirm against full source.  */
1756 emit_move_insn (dst, src);
1758 emit_insn (gen_force_la_31 (dst, src));
1761 /* Return a legitimate reference for ORIG (an address) using the
1762 register REG. If REG is 0, a new pseudo is generated.
1764 There are two types of references that must be handled:
1766 1. Global data references must load the address from the GOT, via
1767 the PIC reg. An insn is emitted to do this load, and the reg is
1770 2. Static data references, constant pool addresses, and code labels
1771 compute the address as an offset from the GOT, whose base is in
1772 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1773 differentiate them from global data objects. The returned
1774 address is the PIC reg + an unspec constant.
1776 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1777 reg also appears in the address. */
/* Return a PIC-legitimate address for ORIG, using scratch register
   REG where one is needed (a new pseudo is created if REG is 0).
   See the comment above for the two reference classes handled.  */
1780 legitimize_pic_address (orig, reg)
/* Case 1: local symbols (labels, statics, constant pool refs).  */
1788 if (GET_CODE (addr) == LABEL_REF
1789 || (GET_CODE (addr) == SYMBOL_REF
1790 && (SYMBOL_REF_FLAG (addr)
1791 || CONSTANT_POOL_ADDRESS_P (addr))))
1793 /* This is a local symbol. */
1796 /* Access local symbols PC-relative via LARL.
1797 This is the same as in the non-PIC case, so it is
1798 handled automatically ... */
1802 /* Access local symbols relative to the literal pool. */
1804 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Wrap the symbol in UNSPEC 100 (literal-pool-relative), force it
   into the pool, and add the pool base register.  */
1806 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1807 addr = gen_rtx_CONST (SImode, addr);
1808 addr = force_const_mem (SImode, addr);
1809 emit_move_insn (temp, addr);
1811 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1812 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1813 new = gen_rtx_PLUS (Pmode, base, temp);
1817 emit_move_insn (reg, new);
/* Case 2: global symbols -- load the address from the GOT.  */
1822 else if (GET_CODE (addr) == SYMBOL_REF)
1825 reg = gen_reg_rtx (Pmode)
1829 /* Assume GOT offset < 4k. This is handled the same way
1830 in both 31- and 64-bit code (@GOT12). */
1832 if (reload_in_progress || reload_completed)
1833 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 110 is the @GOT12 relocation: a small GOT offset usable
   directly as a displacement off the PIC register.  */
1835 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1836 new = gen_rtx_CONST (Pmode, new);
1837 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1838 new = gen_rtx_MEM (Pmode, new);
1839 RTX_UNCHANGING_P (new) = 1;
1840 emit_move_insn (reg, new);
1843 else if (TARGET_64BIT)
1845 /* If the GOT offset might be >= 4k, we determine the position
1846 of the GOT entry via a PC-relative LARL (@GOTENT). */
1848 rtx temp = gen_reg_rtx (Pmode);
/* UNSPEC 111 is the @GOTENT relocation.  */
1850 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1851 new = gen_rtx_CONST (Pmode, new);
1852 emit_move_insn (temp, new);
1854 new = gen_rtx_MEM (Pmode, temp);
1855 RTX_UNCHANGING_P (new) = 1;
1856 emit_move_insn (reg, new);
1861 /* If the GOT offset might be >= 4k, we have to load it
1862 from the literal pool (@GOT). */
1864 rtx temp = gen_reg_rtx (Pmode);
1866 if (reload_in_progress || reload_completed)
1867 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 112 is the @GOT relocation kept in the literal pool.  */
1869 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
1870 addr = gen_rtx_CONST (SImode, addr);
1871 addr = force_const_mem (SImode, addr);
1872 emit_move_insn (temp, addr);
1874 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
1875 new = gen_rtx_MEM (Pmode, new);
1876 RTX_UNCHANGING_P (new) = 1;
1877 emit_move_insn (reg, new);
/* Constants that are already wrapped in an UNSPEC.  */
1883 if (GET_CODE (addr) == CONST)
1885 addr = XEXP (addr, 0);
1886 if (GET_CODE (addr) == UNSPEC)
1888 if (XVECLEN (addr, 0) != 1)
1890 switch (XINT (addr, 1))
1892 /* If someone moved an @GOT or lt-relative UNSPEC
1893 out of the literal pool, force them back in. */
1897 new = force_const_mem (SImode, orig);
1900 /* @GOTENT is OK as is. */
1904 /* @PLT is OK as is on 64-bit, must be converted to
1905 lt-relative PLT on 31-bit. */
1909 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* UNSPEC 114 is the lt-relative @PLT form; re-wrap and load it
   from the literal pool, then add the pool base register.  */
1911 addr = XVECEXP (addr, 0, 0);
1912 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
1913 addr = gen_rtx_CONST (SImode, addr);
1914 addr = force_const_mem (SImode, addr);
1915 emit_move_insn (temp, addr);
1917 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1918 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1919 new = gen_rtx_PLUS (Pmode, base, temp);
1923 emit_move_insn (reg, new);
1929 /* Everything else cannot happen. */
1934 else if (GET_CODE (addr) != PLUS)
/* Sums: symbol + offset and related composite forms.  */
1937 if (GET_CODE (addr) == PLUS)
1939 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
1940 /* Check first to see if this is a constant offset
1941 from a local symbol reference. */
1942 if ((GET_CODE (op0) == LABEL_REF
1943 || (GET_CODE (op0) == SYMBOL_REF
1944 && (SYMBOL_REF_FLAG (op0)
1945 || CONSTANT_POOL_ADDRESS_P (op0))))
1946 && GET_CODE (op1) == CONST_INT)
1950 if (INTVAL (op1) & 1)
1952 /* LARL can't handle odd offsets, so emit a
1953 pair of LARL and LA. */
1954 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Offsets outside the LA displacement range are split into an
   even LARL part plus a small remainder.  */
1956 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
1958 int even = INTVAL (op1) - 1;
1959 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
1960 op0 = gen_rtx_CONST (Pmode, op0);
1964 emit_move_insn (temp, op0);
1965 new = gen_rtx_PLUS (Pmode, temp, op1);
1969 emit_move_insn (reg, new);
1975 /* If the offset is even, we can just use LARL.
1976 This will happen automatically. */
1981 /* Access local symbols relative to the literal pool. */
1983 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Same literal-pool scheme as above, with the constant offset
   folded into the pool entry.  */
1985 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
1986 addr = gen_rtx_PLUS (SImode, addr, op1);
1987 addr = gen_rtx_CONST (SImode, addr);
1988 addr = force_const_mem (SImode, addr);
1989 emit_move_insn (temp, addr);
1991 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1992 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1993 new = gen_rtx_PLUS (Pmode, base, temp);
1997 emit_move_insn (reg, new);
2003 /* Now, check whether it is an LT-relative symbol plus offset
2004 that was pulled out of the literal pool. Force it back in. */
2006 else if (GET_CODE (op0) == UNSPEC
2007 && GET_CODE (op1) == CONST_INT)
2009 if (XVECLEN (op0, 0) != 1)
2011 if (XINT (op0, 1) != 100)
2014 new = force_const_mem (SImode, orig);
2017 /* Otherwise, compute the sum. */
/* Legitimize both halves recursively, then recombine, keeping any
   constant term outermost so it can fold into a displacement.  */
2020 base = legitimize_pic_address (XEXP (addr, 0), reg);
2021 new = legitimize_pic_address (XEXP (addr, 1),
2022 base == reg ? NULL_RTX : reg);
2023 if (GET_CODE (new) == CONST_INT)
2024 new = plus_constant (base, INTVAL (new));
2027 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2029 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2030 new = XEXP (new, 1);
2032 new = gen_rtx_PLUS (Pmode, base, new);
2035 if (GET_CODE (new) == CONST)
2036 new = XEXP (new, 0);
2037 new = force_operand (new, 0);
2044 /* Emit insns to move operands[1] into operands[0]. */
2047 emit_pic_move (operands, mode)
2049 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Scratch register: reuse the destination once new pseudos are
   forbidden, otherwise allocate a fresh one.  */
2051 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic constant stored to memory must go through a register
   first.  */
2053 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2054 operands[1] = force_reg (Pmode, operands[1]);
2056 operands[1] = legitimize_pic_address (operands[1], temp);
2059 /* Try machine-dependent ways of modifying an illegitimate address X
2060 to be legitimate. If we find one, return the new, valid address.
2062 OLDX is the address as it was before break_out_memory_refs was called.
2063 In some cases it is useful to look at this to decide what needs to be done.
2065 MODE is the mode of the operand pointed to by X.
2067 When -fpic is used, special handling is needed for symbolic references.
2068 See comments by legitimize_pic_address for details. */
2071 legitimize_address (x, oldx, mode)
2073 register rtx oldx ATTRIBUTE_UNUSED;
2074 enum machine_mode mode ATTRIBUTE_UNUSED;
/* Constant part split off from X; const0_rtx when none.  */
2076 rtx constant_term = const0_rtx;
/* Symbolic references need PIC legitimization first (when PIC is in
   effect; the flag_pic guard is elided in this view).  */
2080 if (SYMBOLIC_CONST (x)
2081 || (GET_CODE (x) == PLUS
2082 && (SYMBOLIC_CONST (XEXP (x, 0))
2083 || SYMBOLIC_CONST (XEXP (x, 1)))))
2084 x = legitimize_pic_address (x, 0);
/* Already valid (non-strict check) -- nothing to do.  */
2086 if (legitimate_address_p (mode, x, FALSE))
2090 x = eliminate_constant_term (x, &constant_term);
2092 /* Optimize loading of large displacements by splitting them
2093 into the multiple of 4K and the rest; this allows the
2094 former to be CSE'd if possible.
2096 Don't do this if the displacement is added to a register
2097 pointing into the stack frame, as the offsets will
2098 change later anyway. */
2100 if (GET_CODE (constant_term) == CONST_INT
2101 && (INTVAL (constant_term) < 0
2102 || INTVAL (constant_term) >= 4096)
2103 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower = low 12 bits (valid displacement); upper = the rest.  */
2105 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2106 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2108 rtx temp = gen_reg_rtx (Pmode);
2109 rtx val = force_operand (GEN_INT (upper), temp);
2111 emit_move_insn (temp, val);
2113 x = gen_rtx_PLUS (Pmode, x, temp);
2114 constant_term = GEN_INT (lower);
/* Force the non-register operand of a sum into a register.  */
2117 if (GET_CODE (x) == PLUS)
2119 if (GET_CODE (XEXP (x, 0)) == REG)
2121 register rtx temp = gen_reg_rtx (Pmode);
2122 register rtx val = force_operand (XEXP (x, 1), temp);
2124 emit_move_insn (temp, val);
2126 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2129 else if (GET_CODE (XEXP (x, 1)) == REG)
2131 register rtx temp = gen_reg_rtx (Pmode);
2132 register rtx val = force_operand (XEXP (x, 0), temp);
2134 emit_move_insn (temp, val);
2136 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
/* Re-attach any remaining constant term as the displacement.  */
2140 if (constant_term != const0_rtx)
2141 x = gen_rtx_PLUS (Pmode, x, constant_term);
2146 /* Emit code to move LEN bytes from SRC to DST. */
/* Three strategies: (1) constant LEN <= 256 -> one short-move insn;
   (2) TARGET_MVCLE -> a single MVCLE; (3) otherwise a loop moving
   256-byte blocks followed by a final partial move.  */
2149 s390_expand_movstr (dst, src, len)
/* Pattern generators for the current word size.  */
2154 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2155 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2156 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2157 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Strategy 1: small compile-time constant length.  The insn takes
   LEN - 1 (hardware length encoding); LEN == 0 emits nothing.  */
2160 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2162 if (INTVAL (len) > 0)
2163 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Strategy 2: MVCLE, which takes address/length register pairs.  */
2166 else if (TARGET_MVCLE)
2168 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2169 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2170 rtx reg0 = gen_reg_rtx (double_mode);
2171 rtx reg1 = gen_reg_rtx (double_mode);
/* High part of each pair = address, low part = length.  */
2173 emit_move_insn (gen_highpart (single_mode, reg0),
2174 force_operand (XEXP (dst, 0), NULL_RTX));
2175 emit_move_insn (gen_highpart (single_mode, reg1),
2176 force_operand (XEXP (src, 0), NULL_RTX));
2178 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2179 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2181 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* Strategy 3: runtime loop over 256-byte blocks.  */
2186 rtx dst_addr, src_addr, count, blocks, temp;
2187 rtx end_label = gen_label_rtx ();
2188 enum machine_mode mode;
2191 mode = GET_MODE (len);
2192 if (mode == VOIDmode)
2195 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2199 dst_addr = gen_reg_rtx (Pmode);
2200 src_addr = gen_reg_rtx (Pmode);
2201 count = gen_reg_rtx (mode);
2202 blocks = gen_reg_rtx (mode);
/* Nothing to do for LEN == 0.  */
2204 convert_move (count, len, 1);
2205 emit_cmp_and_jump_insns (count, const0_rtx,
2206 EQ, NULL_RTX, mode, 1, end_label);
2208 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2209 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2210 dst = change_address (dst, VOIDmode, dst_addr);
2211 src = change_address (src, VOIDmode, src_addr);
/* count = len - 1 (hardware encoding); blocks = count >> 8 is the
   number of full 256-byte blocks.  */
2213 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2215 emit_move_insn (count, temp);
2217 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2219 emit_move_insn (blocks, temp);
2221 expand_start_loop (1);
2222 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2223 make_tree (type, blocks),
2224 make_tree (type, const0_rtx)));
/* Move one full 256-byte block, then advance both addresses.  */
2226 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2227 s390_load_address (dst_addr,
2228 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2229 s390_load_address (src_addr,
2230 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2232 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2234 emit_move_insn (blocks, temp);
/* Final partial block of (count mod 256) + 1 bytes.  */
2238 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2239 emit_label (end_label);
2243 /* Emit code to clear LEN bytes at DST. */
/* Same three-way strategy as s390_expand_movstr, but clearing:
   constant short clear, MVCLE with a zero-length source, or a loop
   of 256-byte blocks.  */
2246 s390_expand_clrstr (dst, len)
2250 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2251 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2252 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2253 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
/* Constant LEN <= 256: single insn, length encoded as LEN - 1.  */
2256 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2258 if (INTVAL (len) > 0)
2259 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2262 else if (TARGET_MVCLE)
2264 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2265 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2266 rtx reg0 = gen_reg_rtx (double_mode);
2267 rtx reg1 = gen_reg_rtx (double_mode);
/* Pair 0 = destination address/length; pair 1 = zero source, so
   MVCLE pads the destination with zeros.  */
2269 emit_move_insn (gen_highpart (single_mode, reg0),
2270 force_operand (XEXP (dst, 0), NULL_RTX));
2271 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2273 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2274 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2276 emit_insn ((*gen_long) (reg0, reg1, reg0));
/* Runtime loop over 256-byte blocks.  */
2281 rtx dst_addr, src_addr, count, blocks, temp;
2282 rtx end_label = gen_label_rtx ();
2283 enum machine_mode mode;
2286 mode = GET_MODE (len);
2287 if (mode == VOIDmode)
2290 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2294 dst_addr = gen_reg_rtx (Pmode);
2295 src_addr = gen_reg_rtx (Pmode);
2296 count = gen_reg_rtx (mode);
2297 blocks = gen_reg_rtx (mode);
2299 convert_move (count, len, 1);
2300 emit_cmp_and_jump_insns (count, const0_rtx,
2301 EQ, NULL_RTX, mode, 1, end_label);
2303 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2304 dst = change_address (dst, VOIDmode, dst_addr);
/* count = len - 1; blocks = number of full 256-byte blocks.  */
2306 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2308 emit_move_insn (count, temp);
2310 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2312 emit_move_insn (blocks, temp);
2314 expand_start_loop (1);
2315 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2316 make_tree (type, blocks),
2317 make_tree (type, const0_rtx)));
2319 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2320 s390_load_address (dst_addr,
2321 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2323 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2325 emit_move_insn (blocks, temp);
/* Final partial block.  */
2329 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2330 emit_label (end_label);
2334 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2335 and return the result in TARGET. */
/* Same three-way strategy as s390_expand_movstr, but comparing.
   The condition code from the compare is converted into an integer
   in TARGET by the cmpint pattern.  */
2338 s390_expand_cmpstr (target, op0, op1, len)
2344 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2345 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2346 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2347 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
/* Converts the CC result into an integer of TARGET's mode.  */
2348 rtx (*gen_result) PARAMS ((rtx)) =
2349 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
2351 op0 = protect_from_queue (op0, 0);
2352 op1 = protect_from_queue (op1, 0);
2353 len = protect_from_queue (len, 0);
/* Constant LEN <= 256: one compare insn; LEN == 0 compares equal.  */
2355 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2357 if (INTVAL (len) > 0)
2359 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2360 emit_insn ((*gen_result) (target));
2363 emit_move_insn (target, const0_rtx);
2366 else if (TARGET_MVCLE)
2368 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2369 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2370 rtx reg0 = gen_reg_rtx (double_mode);
2371 rtx reg1 = gen_reg_rtx (double_mode);
/* High part of each pair = address, low part = length.  */
2373 emit_move_insn (gen_highpart (single_mode, reg0),
2374 force_operand (XEXP (op0, 0), NULL_RTX));
2375 emit_move_insn (gen_highpart (single_mode, reg1),
2376 force_operand (XEXP (op1, 0), NULL_RTX));
2378 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2379 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2381 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2382 emit_insn ((*gen_result) (target));
/* Runtime loop over 256-byte blocks.  */
2387 rtx addr0, addr1, count, blocks, temp;
2388 rtx end_label = gen_label_rtx ();
2389 enum machine_mode mode;
2392 mode = GET_MODE (len);
2393 if (mode == VOIDmode)
2396 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2400 addr0 = gen_reg_rtx (Pmode);
2401 addr1 = gen_reg_rtx (Pmode);
2402 count = gen_reg_rtx (mode);
2403 blocks = gen_reg_rtx (mode);
2405 convert_move (count, len, 1);
2406 emit_cmp_and_jump_insns (count, const0_rtx,
2407 EQ, NULL_RTX, mode, 1, end_label);
2409 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
2410 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
2411 op0 = change_address (op0, VOIDmode, addr0);
2412 op1 = change_address (op1, VOIDmode, addr1);
/* count = len - 1; blocks = number of full 256-byte blocks.  */
2414 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2416 emit_move_insn (count, temp);
2418 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2420 emit_move_insn (blocks, temp);
2422 expand_start_loop (1);
2423 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2424 make_tree (type, blocks),
2425 make_tree (type, const0_rtx)));
2427 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Exit the loop early as soon as a block compares unequal: branch
   on CC (register 33) != 0.  */
2428 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
2429 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
2430 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
2431 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
2432 emit_jump_insn (temp);
2434 s390_load_address (addr0,
2435 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
2436 s390_load_address (addr1,
2437 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
2439 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2441 emit_move_insn (blocks, temp);
/* Final partial block, then materialize the CC as an integer.  */
2445 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
2446 emit_label (end_label);
2448 emit_insn ((*gen_result) (target));
2452 /* In the name of slightly smaller debug output, and to cater to
2453 general assembler lossage, recognize various UNSPEC sequences
2454 and turn them back into a direct symbol reference. */
2457 s390_simplify_dwarf_addr (orig_x)
/* Only MEMs can carry the GOT-style sequences we simplify.  */
2462 if (GET_CODE (x) != MEM)
/* PIC-register + @GOT12 (UNSPEC 110): strip down to the symbol.  */
2466 if (GET_CODE (x) == PLUS
2467 && GET_CODE (XEXP (x, 1)) == CONST
2468 && GET_CODE (XEXP (x, 0)) == REG
2469 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2471 y = XEXP (XEXP (x, 1), 0);
2472 if (GET_CODE (y) == UNSPEC
2473 && XINT (y, 1) == 110)
2474 return XVECEXP (y, 0, 0);
/* @GOTENT (UNSPEC 111): likewise return the bare symbol.  */
2478 if (GET_CODE (x) == CONST)
2481 if (GET_CODE (y) == UNSPEC
2482 && XINT (y, 1) == 111)
2483 return XVECEXP (y, 0, 0);
2490 /* Output symbolic constant X in assembler syntax to
2491 stdio stream FILE. */
2494 s390_output_symbolic_const (file, x)
/* Recurse over the constant's structure, printing operators in
   assembler syntax.  */
2498 switch (GET_CODE (x))
2503 s390_output_symbolic_const (file, XEXP (x, 0));
2507 s390_output_symbolic_const (file, XEXP (x, 0));
2508 fprintf (file, "+");
2509 s390_output_symbolic_const (file, XEXP (x, 1));
2513 s390_output_symbolic_const (file, XEXP (x, 0));
2514 fprintf (file, "-");
2515 s390_output_symbolic_const (file, XEXP (x, 1));
/* Leaf constants are handled by the generic printer.  */
2522 output_addr_const (file, x);
/* UNSPECs encode relocation flavors; each prints as the wrapped
   symbol plus its relocation suffix.  */
2526 if (XVECLEN (x, 0) != 1)
2527 output_operand_lossage ("invalid UNSPEC as operand (1)");
2528 switch (XINT (x, 1))
2531 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2532 fprintf (file, "-.LT%d", current_function_funcdef_no);
2535 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2536 fprintf (file, "@GOT12");
2539 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2540 fprintf (file, "@GOTENT");
2543 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2544 fprintf (file, "@GOT");
2547 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2548 fprintf (file, "@PLT");
2551 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2552 fprintf (file, "@PLT-.LT%d", current_function_funcdef_no);
2555 output_operand_lossage ("invalid UNSPEC as operand (2)");
2561 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2566 /* Output address operand ADDR in assembler syntax to
2567 stdio stream FILE. */
2570 print_operand_address (file, addr)
/* The address must decompose and pass strict register checks before
   it can be printed.  */
2574 struct s390_address ad;
2576 if (!s390_decompose_address (addr, &ad)
2577 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2578 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2579 output_operand_lossage ("Cannot decompose address.");
/* Displacement first ("0" when absent), then "(index,base)" or
   "(base)" as registers are present.  */
2582 s390_output_symbolic_const (file, ad.disp);
2584 fprintf (file, "0");
2586 if (ad.base && ad.indx)
2587 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2588 reg_names[REGNO (ad.base)]);
2590 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2593 /* Output operand X in assembler syntax to stdio stream FILE.
2594 CODE specified the format flag. The following format flags
2597 'C': print opcode suffix for branch condition.
2598 'D': print opcode suffix for inverse branch condition.
2599 'O': print only the displacement of a memory reference.
2600 'R': print only the base register of a memory reference.
2601 'N': print the second word of a DImode operand.
2602 'M': print the second word of a TImode operand.
2604 'b': print integer X as if it's an unsigned byte.
2605 'x': print integer X as if it's an unsigned word.
2606 'h': print integer X as if it's a signed word. */
2609 print_operand (file, x, code)
/* 'C' / 'D': branch condition mnemonic, normal or inverted.  */
2617 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2621 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'O': displacement part of a memory operand (index not allowed;
   the check on ad.indx is elided in this view).  */
2626 struct s390_address ad;
2628 if (GET_CODE (x) != MEM
2629 || !s390_decompose_address (XEXP (x, 0), &ad)
2630 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2635 s390_output_symbolic_const (file, ad.disp);
2637 fprintf (file, "0");
/* 'R': base register of a memory operand ("0" when absent).  */
2643 struct s390_address ad;
2645 if (GET_CODE (x) != MEM
2646 || !s390_decompose_address (XEXP (x, 0), &ad)
2647 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2652 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2654 fprintf (file, "0");
/* 'N': second word of a DImode operand (next register or mem + 4).  */
2659 if (GET_CODE (x) == REG)
2660 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2661 else if (GET_CODE (x) == MEM)
2662 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand (next register or mem + 8).  */
2668 if (GET_CODE (x) == REG)
2669 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2670 else if (GET_CODE (x) == MEM)
2671 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
/* No (remaining) code letter: print the operand itself.  */
2677 switch (GET_CODE (x))
2680 fprintf (file, "%s", reg_names[REGNO (x)]);
2684 output_address (XEXP (x, 0));
2691 s390_output_symbolic_const (file, x);
/* 'b'/'x' mask to unsigned byte/halfword; 'h' sign-extends the low
   halfword via the (v & 0xffff) ^ 0x8000 - 0x8000 trick.  */
2696 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2697 else if (code == 'x')
2698 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
2699 else if (code == 'h')
2700 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2702 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE: same formatting applied to the low word.  */
2706 if (GET_MODE (x) != VOIDmode)
2709 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2710 else if (code == 'x')
2711 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2712 else if (code == 'h')
2713 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2719 fatal_insn ("UNKNOWN in print_operand !?", x);
2724 /* Target hook for assembling integer objects. We need to define it
2725 here to work around a bug in some versions of GAS, which couldn't
2726 handle values smaller than INT_MIN when printed in decimal. */
2729 s390_assemble_integer (x, size, aligned_p)
/* Emit very negative 8-byte constants in hex ourselves, since some
   GAS versions mis-handle decimal values below INT_MIN (see comment
   above).  Everything else goes through the default hook.  */
2734 if (size == 8 && aligned_p
2735 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2737 fputs ("\t.quad\t", asm_out_file);
2738 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2739 putc ('\n', asm_out_file);
2742 return default_assemble_integer (x, size, aligned_p);
2746 #define DEBUG_SCHED 0
2748 /* Returns true if register REGNO is used for forming
2749 a memory address in expression X. */
2752 reg_used_in_mem_p (regno, x)
2756 enum rtx_code code = GET_CODE (x);
/* A MEM whose address mentions REGNO, or a jump (SET with PC
   destination) whose source does, counts as an address use.  */
2762 if (refers_to_regno_p (regno, regno+1,
2766 else if (code == SET
2767 && GET_CODE (SET_DEST (x)) == PC)
2769 if (refers_to_regno_p (regno, regno+1,
/* Otherwise recurse into all sub-rtxes and vectors.  */
2774 fmt = GET_RTX_FORMAT (code);
2775 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2778 && reg_used_in_mem_p (regno, XEXP (x, i)))
2781 else if (fmt[i] == 'E')
2782 for (j = 0; j < XVECLEN (x, i); j++)
2783 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2789 /* Returns true if expression DEP_RTX sets an address register
2790 used by instruction INSN to address memory. */
2793 addr_generation_dependency_p (dep_rtx, insn)
/* Only SETs of a register can create an address dependency.  */
2799 if (GET_CODE (dep_rtx) == SET)
2801 target = SET_DEST (dep_rtx);
2803 if (GET_CODE (target) == REG)
2805 int regno = REGNO (target);
/* For LA, look at the address expression in its SET source (the
   PARALLEL form must have exactly two elements).  */
2807 if (get_attr_type (insn) == TYPE_LA)
2809 pat = PATTERN (insn);
2810 if (GET_CODE (pat) == PARALLEL)
2812 if (XVECLEN (pat, 0) != 2)
2814 pat = XVECEXP (pat, 0, 0);
2816 if (GET_CODE (pat) == SET)
2817 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* For memory-type insns, check whether the register forms part of
   any memory address in the pattern.  */
2821 else if (get_attr_atype (insn) == ATYPE_MEM)
2822 return reg_used_in_mem_p (regno, PATTERN (insn));
2829 /* Return the modified cost of the dependency of instruction INSN
2830 on instruction DEP_INSN through the link LINK. COST is the
2831 default cost of that dependency.
2833 Data dependencies are all handled without delay. However, if a
2834 register is modified and subsequently used as base or index
2835 register of a memory reference, at least 4 cycles need to pass
2836 between setting and using the register to avoid pipeline stalls.
2837 An exception is the LA instruction. An address generated by LA can
2838 be used by introducing only a one cycle stall on the pipeline. */
2841 s390_adjust_cost (insn, link, dep_insn, cost)
2850 /* If the dependence is an anti-dependence, there is no cost. For an
2851 output dependence, there is sometimes a cost, but it doesn't seem
2852 worth handling those few cases. */
2854 if (REG_NOTE_KIND (link) != 0)
2857 /* If we can't recognize the insns, we can't really do anything. */
2858 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
2861 dep_rtx = PATTERN (dep_insn);
/* Single SET: one address-generation check.  A register set by LA
   costs 1 extra cycle when used in an address; anything else 4.  */
2863 if (GET_CODE (dep_rtx) == SET)
2865 if (addr_generation_dependency_p (dep_rtx, insn))
2867 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2870 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
2872 debug_rtx (dep_insn);
/* PARALLEL: check every element the same way.  */
2877 else if (GET_CODE (dep_rtx) == PARALLEL)
2879 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
2881 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
2884 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
2887 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
2889 debug_rtx (dep_insn);
2900 /* A C statement (sans semicolon) to update the integer scheduling priority
2901 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
2902 increase the priority to execute INSN later. Do not define this macro if
2903 you do not need to adjust the scheduling priorities of insns.
2905 An LA instruction may be scheduled later, since the pipeline bypasses the
2906 calculated value. */
2909 s390_adjust_priority (insn, priority)
2910 rtx insn ATTRIBUTE_UNUSED;
/* Only real insns (not USE/CLOBBER markers) are adjusted.  */
2913 if (! INSN_P (insn))
2916 if (GET_CODE (PATTERN (insn)) == USE
2917 || GET_CODE (PATTERN (insn)) == CLOBBER)
2920 switch (get_attr_type (insn))
/* Keep the adjusted priority within range before scaling.  */
2926 if (priority >= 0 && priority < 0x01000000)
2930 /* LM in epilogue should never be scheduled. This
2931 is due to literal access done in function body.
2932 The usage of register 13 is not mentioned explicitly,
2933 leading to scheduling 'LM' across this instructions.
2935 priority = 0x7fffffff;
2943 /* Split all branches that exceed the maximum distance. */
2946 s390_split_branches ()
/* The return register serves as scratch for the branch target.  */
2948 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
2949 rtx insn, pat, tmp, target;
2952 /* We need correct insn addresses. */
2954 shorten_branches (get_insns ());
2956 /* Find all branches that exceed 64KB, and split them. */
2958 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2960 if (GET_CODE (insn) != JUMP_INSN)
/* Unwrap a PARALLEL to the jump SET; only PC-destination SETs are
   branches we can split.  */
2963 pat = PATTERN (insn);
2964 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
2965 pat = XVECEXP (pat, 0, 0);
2966 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the LABEL_REF: plain jump, or either arm of a
   conditional IF_THEN_ELSE.  */
2969 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
2971 label = &SET_SRC (pat);
2973 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
2975 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
2976 label = &XEXP (SET_SRC (pat), 1);
2977 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
2978 label = &XEXP (SET_SRC (pat), 2);
/* Short branches need no splitting.  */
2985 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
2988 regs_ever_live[RETURN_REGNUM] = 1;
/* Load the target address into TEMP_REG -- directly, from the
   literal pool, or pool-relative (UNSPEC 100) plus BASE_REGISTER,
   depending on the (elided) target configuration.  */
2992 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
2993 INSN_ADDRESSES_NEW (tmp, -1);
2999 tmp = force_const_mem (Pmode, *label);
3000 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3001 INSN_ADDRESSES_NEW (tmp, -1);
3007 tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 100);
3008 tmp = gen_rtx_CONST (SImode, tmp);
3009 tmp = force_const_mem (SImode, tmp);
3010 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3011 INSN_ADDRESSES_NEW (tmp, -1);
3013 target = gen_rtx_REG (Pmode, BASE_REGISTER);
3014 target = gen_rtx_PLUS (Pmode, target, temp_reg);
/* Retarget the branch at the register-held address.  */
3017 if (!validate_change (insn, label, target, 0))
3023 /* Find a literal pool symbol referenced in RTX X, and store
3024 it at REF. Will abort if X contains references to more than
3025 one such pool symbol; multiple references to the same symbol
3026 are allowed, however.
3028 The rtx pointed to by REF must be initialized to NULL_RTX
3029 by the caller before calling this routine. */
3032 find_constant_pool_ref (x, ref)
/* A pool reference is a SYMBOL_REF with CONSTANT_POOL_ADDRESS_P.  */
3039 if (GET_CODE (x) == SYMBOL_REF
3040 && CONSTANT_POOL_ADDRESS_P (x))
/* First reference found: record it (duplicates of the same
   symbol are tolerated; a different symbol is an error).  */
3042 if (*ref == NULL_RTX)
/* Recurse over all sub-rtxes using the rtx format string:
   'e' = single expression, 'E' = vector of expressions.  */
3048 fmt = GET_RTX_FORMAT (GET_CODE (x));
3049 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3053 find_constant_pool_ref (XEXP (x, i), ref);
3055 else if (fmt[i] == 'E')
3057 for (j = 0; j < XVECLEN (x, i); j++)
3058 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3063 /* Replace every reference to the literal pool symbol REF
3064 in X by the address ADDR. Fix up MEMs as required. */
3067 replace_constant_pool_ref (x, ref, addr)
3078 /* Literal pool references can only occur inside a MEM ... */
3079 if (GET_CODE (*x) == MEM)
3081 rtx memref = XEXP (*x, 0);
/* Plain reference: substitute ADDR, keeping MEM attributes.  */
3085 *x = replace_equiv_address (*x, addr);
/* Reference with constant displacement (symbol + offset):
   substitute ADDR + offset.  */
3089 if (GET_CODE (memref) == CONST
3090 && GET_CODE (XEXP (memref, 0)) == PLUS
3091 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3092 && XEXP (XEXP (memref, 0), 0) == ref)
3094 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3095 *x = replace_equiv_address (*x, plus_constant (addr, off));
3100 /* ... or a load-address type pattern. */
3101 if (GET_CODE (*x) == SET)
3103 rtx addrref = SET_SRC (*x);
/* Plain address reference: substitute ADDR directly.  */
3107 SET_SRC (*x) = addr;
/* Address reference with constant displacement.  */
3111 if (GET_CODE (addrref) == CONST
3112 && GET_CODE (XEXP (addrref, 0)) == PLUS
3113 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3114 && XEXP (XEXP (addrref, 0), 0) == ref)
3116 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3117 SET_SRC (*x) = plus_constant (addr, off);
/* Recurse over all sub-rtxes via the rtx format string.  */
3122 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3123 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3127 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3129 else if (fmt[i] == 'E')
3131 for (j = 0; j < XVECLEN (*x, i); j++)
3132 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3137 /* We keep a list of constants which we have to add to internal
3138 constant tables in the middle of large functions. */
3140 #define NR_C_MODES 6
3141 enum machine_mode constant_modes[NR_C_MODES] =
/* Per-mode consttable generator functions, indexed in parallel
   with constant_modes[] above.  */
3149 rtx (*gen_consttable[NR_C_MODES])(rtx) =
3151 gen_consttable_df, gen_consttable_di,
3152 gen_consttable_sf, gen_consttable_si,
/* One literal pool entry: the constant's value plus the label
   placed on it, chained per mode.  */
3159 struct constant *next;
/* One chunk of the literal pool, covering a contiguous range of
   insns [first_insn, last_insn] and holding per-mode entry lists.  */
3164 struct constant_pool
3166 struct constant_pool *next;
3170 struct constant *constants[NR_C_MODES];
/* Forward declarations for the pool management helpers below.  */
3175 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3176 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3177 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3178 static rtx s390_add_pool PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3179 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3180 static void s390_free_pool PARAMS ((struct constant_pool *));
3182 /* Create new constant pool covering instructions starting at INSN
3183 and chain it to the end of POOL_LIST. */
3185 static struct constant_pool *
3186 s390_start_pool (pool_list, insn)
3187 struct constant_pool **pool_list;
3190 struct constant_pool *pool, **prev;
3193 pool = (struct constant_pool *) xmalloc (sizeof *pool)
3195 for (i = 0; i < NR_C_MODES; i++)
3196 pool->constants[i] = NULL;
/* last_insn stays NULL until s390_end_pool closes the range.  */
3198 pool->label = gen_label_rtx ();
3199 pool->first_insn = insn;
3200 pool->last_insn = NULL_RTX;
/* Append at the tail so pools stay in insn order.  */
3203 for (prev = pool_list; *prev; prev = &(*prev)->next)
3210 /* End range of instructions covered by POOL at INSN. */
3213 s390_end_pool (pool, insn)
3214 struct constant_pool *pool;
3217 pool->last_insn = insn;
3220 /* Return pool out of POOL_LIST that covers INSN. */
3222 static struct constant_pool *
3223 s390_find_pool (pool_list, insn)
3224 struct constant_pool *pool_list;
/* Compare by insn address; a pool covers the half-open address
   range [first_insn, last_insn), or to the end of the function
   when last_insn is still NULL.  */
3227 int addr = INSN_ADDRESSES (INSN_UID (insn));
3228 struct constant_pool *pool;
3233 for (pool = pool_list; pool; pool = pool->next)
3234 if (INSN_ADDRESSES (INSN_UID (pool->first_insn)) <= addr
3235 && (pool->last_insn == NULL_RTX
3236 || INSN_ADDRESSES (INSN_UID (pool->last_insn)) > addr))
3242 /* Add constant VAL of mode MODE to the constant pool POOL.
3243 Return an RTX describing the distance from the start of
3244 the pool to the location of the new constant. */
3247 s390_add_pool (pool, val, mode)
3248 struct constant_pool *pool;
3250 enum machine_mode mode;
/* Map MODE to its slot in constant_modes[].  */
3256 for (i = 0; i < NR_C_MODES; i++)
3257 if (constant_modes[i] == mode)
3259 if (i == NR_C_MODES)
/* Reuse an existing entry for an equal constant.  */
3262 for (c = pool->constants[i]; c != NULL; c = c->next)
3263 if (rtx_equal_p (val, c->value))
/* Otherwise prepend a new entry with a fresh label and grow
   the pool's recorded size.  */
3268 c = (struct constant *) xmalloc (sizeof *c);
3270 c->label = gen_label_rtx ();
3271 c->next = pool->constants[i];
3272 pool->constants[i] = c;
3273 pool->size += GET_MODE_SIZE (mode);
/* The result is the label difference c->label - pool->label,
   i.e. the entry's offset from the pool base.  */
3276 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
3277 gen_rtx_LABEL_REF (Pmode, pool->label));
3278 offset = gen_rtx_CONST (Pmode, offset);
3282 /* Dump out the constants in POOL. */
3285 s390_dump_pool (pool)
3286 struct constant_pool *pool;
3292 /* Select location to put literal pool. */
3294 insn = get_last_insn ();
3296 insn = pool->last_insn? pool->last_insn : get_last_insn ();
3298 /* Pool start insn switches to proper section
3299 and guarantees necessary alignment. */
3301 insn = emit_insn_after (gen_pool_start_64 (), insn);
3303 insn = emit_insn_after (gen_pool_start_31 (), insn);
3304 INSN_ADDRESSES_NEW (insn, -1);
/* The pool base label; pool entries are addressed relative to it.  */
3306 insn = emit_label_after (pool->label, insn);
3307 INSN_ADDRESSES_NEW (insn, -1);
3309 /* Dump constants in descending alignment requirement order,
3310 ensuring proper alignment for every constant. */
3311 for (i = 0; i < NR_C_MODES; i++)
3312 for (c = pool->constants[i]; c; c = c->next)
/* Emit each entry's label followed by its consttable insn.  */
3314 insn = emit_label_after (c->label, insn);
3315 INSN_ADDRESSES_NEW (insn, -1);
3316 insn = emit_insn_after (gen_consttable[i] (c->value), insn);
3317 INSN_ADDRESSES_NEW (insn, -1);
3320 /* Pool end insn switches back to previous section
3321 and guarantees necessary alignment. */
3323 insn = emit_insn_after (gen_pool_end_64 (), insn);
3325 insn = emit_insn_after (gen_pool_end_31 (), insn);
3326 INSN_ADDRESSES_NEW (insn, -1);
/* Barrier keeps the pool out of the normal instruction stream.  */
3328 insn = emit_barrier_after (insn);
3329 INSN_ADDRESSES_NEW (insn, -1);
3334 /* Free all memory used by POOL. */
3337 s390_free_pool (pool)
3338 struct constant_pool *pool;
3342 for (i = 0; i < NR_C_MODES; i++)
/* Walk each per-mode entry list, saving the next pointer
   before the current node is freed.  */
3344 struct constant *c = pool->constants[i];
3347 struct constant *next = c->next;
3356 /* Used in s390.md for branch length calculation. */
3357 int s390_pool_overflow = 0;
3359 /* Chunkify the literal pool if required. */
/* Lower and upper bounds (in bytes) on the size of one pool chunk.  */
3361 #define S390_POOL_CHUNK_MIN 0xc00
3362 #define S390_POOL_CHUNK_MAX 0xe00
/* Split an oversized literal pool into multiple chunks placed in
   the middle of the function, rewriting all pool references and
   inserting base register reloads where control flow crosses
   chunk boundaries.
   NOTE(review): this extract elides intermediate source lines;
   the statements below are not contiguous in the original file.  */
3365 s390_chunkify_pool ()
/* On 31-bit, RETURN_REGNUM doubles as the pool base register here.  */
3367 rtx base_reg = gen_rtx_REG (Pmode,
3368 TARGET_64BIT? BASE_REGISTER : RETURN_REGNUM);
3370 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
3375 /* Do we need to chunkify the literal pool? */
3377 if (get_pool_size () < S390_POOL_CHUNK_MAX)
3381 regs_ever_live[RETURN_REGNUM] = 1;
3383 /* We need correct insn addresses. */
3385 shorten_branches (get_insns ());
3388 /* Scan all insns and move literals to pool chunks.
3389 Replace all occurrences of literal pool references
3390 by explicit references to pool chunk entries. */
3392 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3394 if (GET_CODE (insn) == INSN)
3396 rtx addr, pool_ref = NULL_RTX;
3397 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Open a new chunk at the first pool reference if none is active.  */
3401 curr_pool = s390_start_pool (&pool_list, insn);
3403 addr = s390_add_pool (curr_pool, get_pool_constant (pool_ref),
3404 get_pool_mode (pool_ref));
/* Rewrite the reference as base_reg + offset-within-chunk.  */
3406 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
3407 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
3408 INSN_CODE (insn) = -1;
/* Skip insns without a valid recorded address.  */
3413 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3414 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3419 if (curr_pool->size < S390_POOL_CHUNK_MAX)
3422 s390_end_pool (curr_pool, insn);
/* Estimate the code size spanned by the current chunk ... */
3427 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3428 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3431 /* We will later have to insert base register reload insns.
3432 Those will have an effect on code size, which we need to
3433 consider here. This calculation makes rather pessimistic
3434 worst-case assumptions. */
3435 if (GET_CODE (insn) == CODE_LABEL
3436 || GET_CODE (insn) == JUMP_INSN)
3438 else if (GET_CODE (insn) == CALL_INSN)
3441 if (chunk_size < S390_POOL_CHUNK_MIN
3442 && curr_pool->size < S390_POOL_CHUNK_MIN)
3445 /* Pool chunks can only be inserted after BARRIERs ... */
3446 if (GET_CODE (insn) == BARRIER)
3448 s390_end_pool (curr_pool, insn)
3453 /* ... so if we don't find one in time, create one. */
3454 else if ((chunk_size > S390_POOL_CHUNK_MAX
3455 || curr_pool->size > S390_POOL_CHUNK_MAX)
3456 && (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN))
3458 int addr = INSN_ADDRESSES (INSN_UID (insn));
3459 rtx label, jump, barrier;
/* Emit jump-over-pool: jump / barrier / label, so fallthrough
   code skips the pool that will be dumped after the barrier.  */
3461 label = gen_label_rtx ();
3462 jump = emit_jump_insn_after (gen_jump (label), insn);
3463 barrier = emit_barrier_after (jump);
3464 insn = emit_label_after (label, barrier);
3465 JUMP_LABEL (jump) = label;
3466 LABEL_NUSES (label) = 1;
3468 INSN_ADDRESSES_NEW (jump, addr+1);
3469 INSN_ADDRESSES_NEW (barrier, addr+1);
3470 INSN_ADDRESSES_NEW (insn, -1);
3472 s390_end_pool (curr_pool, barrier);
3479 /* Dump out all literal pools. */
3481 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3482 s390_dump_pool (curr_pool);
3485 /* Find all labels that are branched into
3486 from an insn belonging to a different chunk. */
3488 far_labels = BITMAP_XMALLOC ();
3490 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3492 /* Labels marked with LABEL_PRESERVE_P can be target
3493 of non-local jumps, so we have to mark them.
3494 The same holds for named labels.
3496 Don't do that, however, if it is the label before
3499 if (GET_CODE (insn) == CODE_LABEL
3500 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3502 rtx vec_insn = next_real_insn (insn);
3503 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3504 PATTERN (vec_insn) : NULL_RTX;
3506 || !(GET_CODE (vec_pat) == ADDR_VEC
3507 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3508 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3511 /* If we have a direct jump (conditional or unconditional)
3512 or a casesi jump, check all potential targets. */
3513 else if (GET_CODE (insn) == JUMP_INSN)
3515 rtx pat = PATTERN (insn);
3516 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3517 pat = XVECEXP (pat, 0, 0);
3519 if (GET_CODE (pat) == SET)
/* Direct jump: extract the label from SET_SRC or from one
   arm of an IF_THEN_ELSE.  */
3523 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3525 label = XEXP (SET_SRC (pat), 0);
3527 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3529 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3530 label = XEXP (XEXP (SET_SRC (pat), 1), 0);
3531 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3532 label = XEXP (XEXP (SET_SRC (pat), 2), 0);
/* A target in a different chunk needs a base reg reload.  */
3537 if (s390_find_pool (pool_list, label)
3538 != s390_find_pool (pool_list, insn))
3539 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
/* casesi pattern: SET plus USE of the jump-table label.  */
3542 else if (GET_CODE (pat) == PARALLEL
3543 && XVECLEN (pat, 0) == 2
3544 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3545 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3546 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3548 /* Find the jump table used by this casesi jump. */
3549 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3550 rtx vec_insn = next_real_insn (vec_label);
3551 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3552 PATTERN (vec_insn) : NULL_RTX;
3554 && (GET_CODE (vec_pat) == ADDR_VEC
3555 || GET_CODE (vec_pat) == ADDR_DIFF_VEC)
3557 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
/* Check every entry in the jump table.  */
3559 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3561 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3563 if (s390_find_pool (pool_list, label)
3564 != s390_find_pool (pool_list, insn))
3565 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3572 /* Insert base register reload insns before every pool. */
3574 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3577 rtx pool_ref = gen_rtx_LABEL_REF (Pmode, curr_pool->label);
3578 rtx new_insn = gen_rtx_SET (Pmode, base_reg, pool_ref);
3579 rtx insn = curr_pool->first_insn;
3580 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3584 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3585 rtx insn = curr_pool->first_insn;
3586 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3589 /* Insert base register reload insns at every far label. */
3591 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3592 if (GET_CODE (insn) == CODE_LABEL
3593 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3595 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3600 rtx pool_ref = gen_rtx_LABEL_REF (Pmode, pool->label);
3601 rtx new_insn = gen_rtx_SET (Pmode, base_reg, pool_ref);
3602 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3606 rtx new_insn = gen_reload_base (base_reg, pool->label);
3607 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3612 /* Insert base register reload insns after every call if necessary. */
/* Only needed when RETURN_REGNUM serves as the base register,
   since a call clobbers it.  */
3614 if (REGNO (base_reg) == RETURN_REGNUM)
3615 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3616 if (GET_CODE (insn) == CALL_INSN)
3618 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3621 rtx new_insn = gen_reload_base2 (base_reg, pool->label);
3622 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3627 /* Recompute insn addresses. */
/* s390_pool_overflow influences branch length computation in
   s390.md while set.  */
3629 s390_pool_overflow = 1;
3630 init_insn_lengths ();
3631 shorten_branches (get_insns ());
3632 s390_pool_overflow = 0;
3634 /* Insert base register reload insns after far branches. */
3637 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3638 if (GET_CODE (insn) == JUMP_INSN
3639 && GET_CODE (PATTERN (insn)) == SET
3640 && get_attr_length (insn) >= 12)
3642 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3645 rtx new_insn = gen_reload_base (base_reg, pool->label);
3646 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
3651 /* Free all memory. */
3655 struct constant_pool *next = pool_list->next;
3656 s390_free_pool (pool_list);
3660 BITMAP_XFREE (far_labels);
3664 /* Index of constant pool chunk that is currently being processed.
3665 Set to -1 before function output has started. */
3666 int s390_pool_count = -1;
3668 /* Number of elements of current constant pool. */
3669 int s390_nr_constants;
3671 /* Output main constant pool to stdio stream FILE. */
3674 s390_output_constant_pool (file)
3677 /* Output constant pool. */
3678 if (s390_nr_constants)
/* 64-bit: set up the base register with LARL and place the
   pool in the read-only data section, 8-byte aligned.  */
3682 fprintf (file, "\tlarl\t%s,.LT%d\n", reg_names[BASE_REGISTER],
3683 current_function_funcdef_no);
3684 readonly_data_section ();
3685 ASM_OUTPUT_ALIGN (file, 3);
/* 31-bit: BRAS over the inline pool, leaving its address in
   the base register.  */
3689 fprintf (file, "\tbras\t%s,.LTN%d\n", reg_names[BASE_REGISTER],
3690 current_function_funcdef_no);
3692 fprintf (file, ".LT%d:\n", current_function_funcdef_no);
/* s390_pool_count >= 0 signals that pool output is in progress.  */
3694 s390_pool_count = 0;
3695 output_constant_pool (current_function_name, current_function_decl);
3696 s390_pool_count = -1;
3699 function_section (current_function_decl);
3701 fprintf (file, ".LTN%d:\n", current_function_funcdef_no);
3706 /* Rework the prolog/epilog to avoid saving/restoring
3707 registers unnecessarily. */
3710 s390_optimize_prolog ()
3712 int save_first, save_last, restore_first, restore_last;
3714 rtx insn, new_insn, next_insn;
3716 /* Find first and last gpr to be saved. */
3718 for (i = 6; i < 16; i++)
3719 if (regs_ever_live[i])
3722 for (j = 15; j > i; j--)
3723 if (regs_ever_live[j])
3728 /* Nothing to save/restore. */
3729 save_first = restore_first = -1;
3730 save_last = restore_last = -1;
3734 /* Save/restore from i to j. */
3735 save_first = restore_first = i;
3736 save_last = restore_last = j;
3739 /* Varargs functions need to save gprs 2 to 6. */
3740 if (current_function_stdarg)
3748 /* If all special registers are in fact used, there's nothing we
3749 can do, so no point in walking the insn list. */
3750 if (i <= BASE_REGISTER && j >= BASE_REGISTER
3751 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
3755 /* Search for prolog/epilog insns and replace them. */
3757 for (insn = get_insns (); insn; insn = next_insn)
3759 int first, last, off;
3760 rtx set, base, offset;
3762 next_insn = NEXT_INSN (insn);
3764 if (GET_CODE (insn) != INSN)
3766 if (GET_CODE (PATTERN (insn)) != PARALLEL)
/* Recognize an STM (store multiple) and, when it matches the
   prologue save, replace it by a save of the narrowed range.  */
3769 if (store_multiple_operation (PATTERN (insn), VOIDmode))
3771 set = XVECEXP (PATTERN (insn), 0, 0);
3772 first = REGNO (SET_SRC (set));
3773 last = first + XVECLEN (PATTERN (insn), 0) - 1;
3774 offset = const0_rtx;
3775 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
/* off is the save-area offset of gpr 0.  */
3776 off = INTVAL (offset) - first * UNITS_PER_WORD;
3778 if (GET_CODE (base) != REG || off < 0)
3780 if (first > BASE_REGISTER && first > RETURN_REGNUM)
3782 if (last < BASE_REGISTER && last < RETURN_REGNUM)
3785 if (save_first != -1)
3787 new_insn = save_gprs (base, off, save_first, save_last);
3788 new_insn = emit_insn_before (new_insn, insn);
3789 INSN_ADDRESSES_NEW (new_insn, -1);
/* Same treatment for an LM (load multiple) in the epilogue.  */
3795 if (load_multiple_operation (PATTERN (insn), VOIDmode))
3797 set = XVECEXP (PATTERN (insn), 0, 0);
3798 first = REGNO (SET_DEST (set));
3799 last = first + XVECLEN (PATTERN (insn), 0) - 1;
3800 offset = const0_rtx;
3801 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
3802 off = INTVAL (offset) - first * UNITS_PER_WORD;
3804 if (GET_CODE (base) != REG || off < 0)
3806 if (first > BASE_REGISTER && first > RETURN_REGNUM)
3808 if (last < BASE_REGISTER && last < RETURN_REGNUM)
3811 if (restore_first != -1)
3813 new_insn = restore_gprs (base, off, restore_first, restore_last);
3814 new_insn = emit_insn_before (new_insn, insn);
3815 INSN_ADDRESSES_NEW (new_insn, -1);
3823 /* Perform machine-dependent processing. */
3826 s390_machine_dependent_reorg (first)
3827 rtx first ATTRIBUTE_UNUSED;
3829 struct s390_frame frame;
3830 s390_frame_info (&frame);
3832 /* Recompute regs_ever_live data for special registers. */
3833 regs_ever_live[BASE_REGISTER] = 0;
3834 regs_ever_live[RETURN_REGNUM] = 0;
3835 regs_ever_live[STACK_POINTER_REGNUM] = frame.frame_size > 0;
3837 /* If there is (possibly) any pool entry, we need to
3838 load the base register.
3839 ??? FIXME: this should be more precise. */
3840 if (get_pool_size ())
3841 regs_ever_live[BASE_REGISTER] = 1;
3843 /* In non-leaf functions, the prolog/epilog code relies
3844 on RETURN_REGNUM being saved in any case. */
3845 if (!current_function_is_leaf)
3846 regs_ever_live[RETURN_REGNUM] = 1;
/* Run the three machine-dependent passes in order: pool
   chunkification, branch splitting, prologue optimization.  */
3848 s390_chunkify_pool ();
3849 s390_split_branches ();
3850 s390_optimize_prolog ();
3854 /* Find first call clobbered register unused in a function.
3855 This could be used as base register in a leaf function
3856 or for holding the return address before epilogue. */
3859 find_unused_clobbered_reg ()
/* Call-clobbered gprs are 0..5 on s390.  */
3862 for (i = 0; i < 6; i++)
3863 if (!regs_ever_live[i])
3868 /* Fill FRAME with info about frame of current function. */
3871 s390_frame_info (frame)
3872 struct s390_frame *frame;
3874 char gprs_ever_live[16];
3876 HOST_WIDE_INT fsize = get_frame_size ();
3878 if (fsize > 0x7fff0000)
3879 fatal_error ("Total size of local variables exceeds architecture limit.");
3881 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
3882 frame->save_fprs_p = 0;
3884 for (i = 24; i < 32; i++)
3885 if (regs_ever_live[i])
3887 frame->save_fprs_p = 1;
/* Reserve an extra 64 bytes (8 fprs x 8 bytes) when fprs 8-15
   need saving.  */
3891 frame->frame_size = fsize + frame->save_fprs_p * 64;
3893 /* Does function need to setup frame and save area. */
3895 if (! current_function_is_leaf
3896 || frame->frame_size > 0
3897 || current_function_calls_alloca
3898 || current_function_stdarg)
3899 frame->frame_size += STARTING_FRAME_OFFSET;
3901 /* Frame pointer needed. */
3903 frame->frame_pointer_p = frame_pointer_needed;
3905 /* Find first and last gpr to be saved. Note that at this point,
3906 we assume the return register and the base register always
3907 need to be saved. This is done because the usage of these
3908 register might change even after the prolog was emitted.
3909 If it turns out later that we really don't need them, the
3910 prolog/epilog code is modified again. */
3912 for (i = 0; i < 16; i++)
3913 gprs_ever_live[i] = regs_ever_live[i];
3915 gprs_ever_live[BASE_REGISTER] = 1;
3916 gprs_ever_live[RETURN_REGNUM] = 1;
3917 gprs_ever_live[STACK_POINTER_REGNUM] = frame->frame_size > 0;
3919 for (i = 6; i < 16; i++)
3920 if (gprs_ever_live[i])
3923 for (j = 15; j > i; j--)
3924 if (gprs_ever_live[j])
3928 /* Save / Restore from gpr i to j. */
3929 frame->first_save_gpr = i;
3930 frame->first_restore_gpr = i;
3931 frame->last_save_gpr = j;
3933 /* Varargs functions need to save gprs 2 to 6. */
3934 if (current_function_stdarg)
3935 frame->first_save_gpr = 2;
3938 /* Return offset between argument pointer and frame pointer
3939 initially after prologue. */
3942 s390_arg_frame_offset ()
3944 struct s390_frame frame;
3946 /* Compute frame_info. */
3948 s390_frame_info (&frame);
3950 return frame.frame_size + STACK_POINTER_OFFSET;
3953 /* Emit insn to save fpr REGNUM at offset OFFSET relative
3954 to register BASE. Return generated insn. */
3957 save_fpr (base, offset, regnum)
/* Fprs are saved as full DFmode (8-byte) slots in the save area.  */
3963 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
3964 set_mem_alias_set (addr, s390_sr_alias_set);
3966 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
3969 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
3970 to register BASE. Return generated insn. */
3973 restore_fpr (base, offset, regnum)
/* Mirror image of save_fpr: load the DFmode slot back.  */
3979 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
3980 set_mem_alias_set (addr, s390_sr_alias_set);
3982 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
3985 /* Generate insn to save registers FIRST to LAST into
3986 the register save area located at offset OFFSET
3987 relative to register BASE. */
3990 save_gprs (base, offset, first, last)
3996 rtx addr, insn, note;
3999 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4000 addr = gen_rtx_MEM (Pmode, addr);
4001 set_mem_alias_set (addr, s390_sr_alias_set);
4003 /* Special-case single register. */
4007 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
4009 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
4011 RTX_FRAME_RELATED_P (insn) = 1;
/* General case: a store-multiple (STM/STMG) of FIRST..LAST.  */
4016 insn = gen_store_multiple (addr,
4017 gen_rtx_REG (Pmode, first),
4018 GEN_INT (last - first + 1));
4021 /* We need to set the FRAME_RELATED flag on all SETs
4022 inside the store-multiple pattern.
4024 However, we must not emit DWARF records for registers 2..5
4025 if they are stored for use by variable arguments ...
4027 ??? Unfortunately, it is not enough to simply not set the
4028 FRAME_RELATED flags for those SETs, because the first SET
4029 of the PARALLEL is always treated as if it had the flag
4030 set, even if it does not. Therefore we emit a new pattern
4031 without those registers as REG_FRAME_RELATED_EXPR note. */
4035 rtx pat = PATTERN (insn);
4037 for (i = 0; i < XVECLEN (pat, 0); i++)
4038 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
4039 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
4041 RTX_FRAME_RELATED_P (insn) = 1;
/* Build the DWARF-only pattern starting at gpr 6, skipping the
   varargs registers 2..5, and attach it as a note.  */
4045 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
4046 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
4047 gen_rtx_REG (Pmode, 6),
4048 GEN_INT (last - 6 + 1));
4049 note = PATTERN (note);
4052 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4053 note, REG_NOTES (insn));
4055 for (i = 0; i < XVECLEN (note, 0); i++)
4056 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
4057 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
4059 RTX_FRAME_RELATED_P (insn) = 1;
4065 /* Generate insn to restore registers FIRST to LAST from
4066 the register save area located at offset OFFSET
4067 relative to register BASE. */
4070 restore_gprs (base, offset, first, last)
4078 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4079 addr = gen_rtx_MEM (Pmode, addr);
4080 set_mem_alias_set (addr, s390_sr_alias_set);
4082 /* Special-case single register. */
4086 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
4088 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
/* General case: a load-multiple (LM/LMG) of FIRST..LAST.  */
4093 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
4095 GEN_INT (last - first + 1));
4099 /* Expand the prologue into a bunch of separate insns.
   NOTE(review): this extract elides intermediate source lines;
   the statements below are not contiguous in the original file.  */
4102 s390_emit_prologue ()
4104 struct s390_frame frame;
4109 /* Compute frame_info. */
4111 s390_frame_info (&frame);
4113 /* Choose best register to use for temp use within prologue. */
/* Prefer RETURN_REGNUM as scratch when it is not live across the
   prologue and the pool is small enough; otherwise fall back to
   gpr 1.  */
4115 if (!current_function_is_leaf
4116 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
4117 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
4118 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4120 temp_reg = gen_rtx_REG (Pmode, 1);
4122 /* Save call saved gprs. */
4124 insn = save_gprs (stack_pointer_rtx, 0,
4125 frame.first_save_gpr, frame.last_save_gpr);
4128 /* Dump constant pool and set constant pool register (13). */
4130 insn = emit_insn (gen_lit ());
4132 /* Save fprs for variable args. */
4134 if (current_function_stdarg)
4136 /* Save fpr 0 and 2. */
4138 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
4139 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
4143 /* Save fpr 4 and 6. */
4145 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4146 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4150 /* Save fprs 4 and 6 if used (31 bit ABI). */
4154 /* Save fpr 4 and 6. */
4155 if (regs_ever_live[18])
4157 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4158 RTX_FRAME_RELATED_P (insn) = 1;
4160 if (regs_ever_live[19])
4162 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4163 RTX_FRAME_RELATED_P (insn) = 1;
4167 /* Decrement stack pointer. */
4169 if (frame.frame_size > 0)
4171 rtx frame_off = GEN_INT (-frame.frame_size);
4173 /* Save incoming stack pointer into temp reg. */
/* Needed later for setting the backchain and/or addressing the
   fpr save area.  */
4175 if (TARGET_BACKCHAIN || frame.save_fprs_p)
4177 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
4180 /* Subtract frame size from stack pointer. */
/* Frame sizes not representable as an immediate ('K' constraint)
   come from the literal pool.  */
4182 frame_off = GEN_INT (-frame.frame_size);
4183 if (!CONST_OK_FOR_LETTER_P (-frame.frame_size, 'K'))
4184 frame_off = force_const_mem (Pmode, frame_off);
4186 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
4187 RTX_FRAME_RELATED_P (insn) = 1;
/* Record the stack adjustment for DWARF unwind info.  */
4189 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4190 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
4191 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4192 GEN_INT (-frame.frame_size))),
4195 /* Set backchain. */
4197 if (TARGET_BACKCHAIN)
4199 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
4200 set_mem_alias_set (addr, s390_sr_alias_set);
4201 insn = emit_insn (gen_move_insn (addr, temp_reg));
4205 /* Save fprs 8 - 15 (64 bit ABI). */
4207 if (frame.save_fprs_p)
/* Point temp_reg at the 64-byte fpr save area below the old SP.  */
4209 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
4211 for (i = 24; i < 32; i++)
4212 if (regs_ever_live[i])
4214 rtx addr = plus_constant (stack_pointer_rtx,
4215 frame.frame_size - 64 + (i-24)*8);
4217 insn = save_fpr (temp_reg, (i-24)*8, i);
4218 RTX_FRAME_RELATED_P (insn) = 1;
/* DWARF note expresses the save relative to the new SP.  */
4220 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4221 gen_rtx_SET (VOIDmode,
4222 gen_rtx_MEM (DFmode, addr),
4223 gen_rtx_REG (DFmode, i)),
4228 /* Set frame pointer, if needed. */
4230 if (frame.frame_pointer_p)
4232 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
4233 RTX_FRAME_RELATED_P (insn) = 1;
4236 /* Set up got pointer, if needed. */
4238 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4240 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4241 SYMBOL_REF_FLAG (got_symbol) = 1;
/* 64-bit: load the GOT address directly.  */
4245 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
4248 /* It can happen that the GOT pointer isn't really needed ... */
4249 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit: load the pool-relative GOT offset (UNSPEC 100) and add
   the literal pool base register.  */
4254 got_symbol = gen_rtx_UNSPEC (VOIDmode,
4255 gen_rtvec (1, got_symbol), 100);
4256 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
4257 got_symbol = force_const_mem (Pmode, got_symbol);
4258 insn = emit_move_insn (pic_offset_table_rtx,
4260 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4263 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
4264 gen_rtx_REG (Pmode, BASE_REGISTER)));
4265 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4271 /* Expand the epilogue into a bunch of separate insns.
   NOTE(review): this extract elides intermediate source lines;
   the statements below are not contiguous in the original file.  */
4274 s390_emit_epilogue ()
4276 struct s390_frame frame;
4277 rtx frame_pointer, return_reg;
4278 int area_bottom, area_top, offset;
4281 /* Compute frame_info. */
4283 s390_frame_info (&frame);
4285 /* Check whether to use frame or stack pointer for restore. */
4287 frame_pointer = frame.frame_pointer_p ?
4288 hard_frame_pointer_rtx : stack_pointer_rtx;
4290 /* Compute which parts of the save area we need to access. */
4292 if (frame.first_restore_gpr != -1)
4294 area_bottom = frame.first_restore_gpr * UNITS_PER_WORD;
4295 area_top = (frame.last_save_gpr + 1) * UNITS_PER_WORD;
/* No gprs to restore: start with an empty (inverted) range.  */
4299 area_bottom = INT_MAX;
/* Extend the range to include the 64-bit fpr save area below SP.  */
4305 if (frame.save_fprs_p)
4307 if (area_bottom > -64)
/* 31-bit ABI: extend the range for fprs 4 and 6 if saved.  */
4315 if (regs_ever_live[18])
4317 if (area_bottom > STACK_POINTER_OFFSET - 16)
4318 area_bottom = STACK_POINTER_OFFSET - 16;
4319 if (area_top < STACK_POINTER_OFFSET - 8)
4320 area_top = STACK_POINTER_OFFSET - 8;
4322 if (regs_ever_live[19])
4324 if (area_bottom > STACK_POINTER_OFFSET - 8)
4325 area_bottom = STACK_POINTER_OFFSET - 8;
4326 if (area_top < STACK_POINTER_OFFSET)
4327 area_top = STACK_POINTER_OFFSET;
4331 /* Check whether we can access the register save area.
4332 If not, increment the frame pointer as required. */
4334 if (area_top <= area_bottom)
4336 /* Nothing to restore. */
/* The displacement field of load/store insns covers 0..4095.  */
4338 else if (frame.frame_size + area_bottom >= 0
4339 && frame.frame_size + area_top <= 4096)
4341 /* Area is in range. */
4342 offset = frame.frame_size;
/* Out of range: advance frame_pointer so the save area falls
   within the addressable displacement.  */
4346 rtx insn, frame_off;
4348 offset = area_bottom < 0 ? -area_bottom : 0;
4349 frame_off = GEN_INT (frame.frame_size - offset);
4351 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
4352 frame_off = force_const_mem (Pmode, frame_off);
4354 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
4357 /* Restore call saved fprs. */
4363 if (frame.save_fprs_p)
4364 for (i = 24; i < 32; i++)
4365 if (regs_ever_live[i] && !global_regs[i])
4366 restore_fpr (frame_pointer,
4367 offset - 64 + (i-24) * 8, i);
4371 if (regs_ever_live[18] && !global_regs[18])
4372 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
4373 if (regs_ever_live[19] && !global_regs[19])
4374 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
4377 /* Return register. */
4379 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4381 /* Restore call saved gprs. */
4383 if (frame.first_restore_gpr != -1)
4388 /* Check for global register and save them
4389 to stack location from where they get restored. */
4391 for (i = frame.first_restore_gpr;
4392 i <= frame.last_save_gpr;
4395 /* These registers are special and need to be
4396 restored in any case. */
4397 if (i == STACK_POINTER_REGNUM
4398 || i == RETURN_REGNUM
4399 || i == BASE_REGISTER
4400 || (flag_pic && i == PIC_OFFSET_TABLE_REGNUM))
/* Write the global register's current value into its save slot,
   so the following load-multiple restores it to itself.  */
4405 addr = plus_constant (frame_pointer,
4406 offset + i * UNITS_PER_WORD);
4407 addr = gen_rtx_MEM (Pmode, addr);
4408 set_mem_alias_set (addr, s390_sr_alias_set);
4409 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
4413 /* Fetch return address from stack before load multiple,
4414 this will do good for scheduling. */
4416 if (!current_function_is_leaf)
4418 int return_regnum = find_unused_clobbered_reg();
4421 return_reg = gen_rtx_REG (Pmode, return_regnum);
4423 addr = plus_constant (frame_pointer,
4424 offset + RETURN_REGNUM * UNITS_PER_WORD);
4425 addr = gen_rtx_MEM (Pmode, addr);
4426 set_mem_alias_set (addr, s390_sr_alias_set);
4427 emit_move_insn (return_reg, addr);
4430 /* ??? As references to the base register are not made
4431 explicit in insn RTX code, we have to add a barrier here
4432 to prevent incorrect scheduling. */
4434 emit_insn (gen_blockage());
4436 insn = restore_gprs (frame_pointer, offset,
4437 frame.first_restore_gpr, frame.last_save_gpr);
4441 /* Return to caller. */
/* Branch on return_reg, with a USE to keep it live until the jump.  */
4443 p = rtvec_alloc (2);
4445 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
4446 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
4447 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
4451 /* Return the size in bytes of a function argument of
4452 type TYPE and/or mode MODE. At least one of TYPE or
4453 MODE must be specified. */
4456 s390_function_arg_size (mode, type)
4457 enum machine_mode mode;
/* NOTE(review): the listing omits original lines 4458-4460 — presumably
   the TYPE parameter declaration and an `if (type)` guard ahead of the
   next statement; confirm against the full source.  */
/* When front-end type information is available it is authoritative.  */
4461 return int_size_in_bytes (type);
4463 /* No type info available for some library calls ... */
4464 if (mode != BLKmode)
4465 return GET_MODE_SIZE (mode);
4467 /* If we have neither type nor mode, abort */
4471 /* Return 1 if a function argument of type TYPE and mode MODE
4472 is to be passed by reference. The ABI specifies that only
4473 structures of size 1, 2, 4, or 8 bytes are passed by value,
4474 all other structures (and complex numbers) are passed by
4478 s390_function_arg_pass_by_reference (mode, type)
4479 enum machine_mode mode;
/* Size is computed from TYPE when present, else from MODE.  */
4482 int size = s390_function_arg_size (mode, type);
/* Aggregates whose size is not exactly 1, 2, 4, or 8 bytes go by
   reference, as do all complex values.  NOTE(review): the `return`
   statements and a `type` null-check appear on lines absent from this
   listing (gaps at 4483-4485, 4488-4489, 4491+).  */
4486 if (AGGREGATE_TYPE_P (type) &&
4487 size != 1 && size != 2 && size != 4 && size != 8)
4490 if (TREE_CODE (type) == COMPLEX_TYPE)
4497 /* Update the data in CUM to advance over an argument of mode MODE and
4498 data type TYPE. (TYPE is null for libcalls where that information
4499 may not be available.). The boolean NAMED specifies whether the
4500 argument is a named argument (as opposed to an unnamed argument
4501 matching an ellipsis). */
4504 s390_function_arg_advance (cum, mode, type, named)
4505 CUMULATIVE_ARGS *cum;
4506 enum machine_mode mode;
4508 int named ATTRIBUTE_UNUSED;
/* Hard floats (SF/DF) consume a floating-point argument register;
   the increment of cum->fprs is on a line missing from this listing
   (gap after 4510).  */
4510 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
/* By-reference arguments occupy a single pointer slot; the gpr
   increment is likewise on an omitted line.  */
4514 else if (s390_function_arg_pass_by_reference (mode, type))
/* Everything else takes as many word-sized gprs as its size needs,
   rounded up.  */
4520 int size = s390_function_arg_size (mode, type);
4521 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
4525 /* Define where to put the arguments to a function.
4526 Value is zero to push the argument on the stack,
4527 or a hard register in which to store the argument.
4529 MODE is the argument's machine mode.
4530 TYPE is the data type of the argument (as a tree).
4531 This is null for libcalls where that information may
4533 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4534 the preceding args and about the function being called.
4535 NAMED is nonzero if this argument is a named parameter
4536 (otherwise it is an extra parameter matching an ellipsis).
4538 On S/390, we use general purpose registers 2 through 6 to
4539 pass integer, pointer, and certain structure arguments, and
4540 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
4541 to pass floating point arguments. All remaining arguments
4542 are pushed to the stack. */
4545 s390_function_arg (cum, mode, type, named)
4546 CUMULATIVE_ARGS *cum;
4547 enum machine_mode mode;
4549 int named ATTRIBUTE_UNUSED;
/* By-reference arguments: the action taken is on lines missing from
   this listing (gap after 4551) — presumably return 0 to pass the
   pointer on the stack or in a gpr; confirm against full source.  */
4551 if (s390_function_arg_pass_by_reference (mode, type))
/* Hard-float scalars go in fprs 0,2 (31-bit) or 0,2,4,6 (64-bit);
   hard fpr numbers start at 16 in this backend's numbering.  */
4554 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
4556 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
4559 return gen_rtx (REG, mode, cum->fprs + 16);
/* Integral/pointer arguments: gprs 2..6; an argument needing more
   word slots than remain (past gpr 6) goes to the stack.  */
4563 int size = s390_function_arg_size (mode, type);
4564 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
4566 if (cum->gprs + n_gprs > 5)
4569 return gen_rtx (REG, mode, cum->gprs + 2);
4574 /* Create and return the va_list datatype.
4576 On S/390, va_list is an array type equivalent to
4578 typedef struct __va_list_tag
4582 void *__overflow_arg_area;
4583 void *__reg_save_area;
4587 where __gpr and __fpr hold the number of general purpose
4588 or floating point arguments used up to now, respectively,
4589 __overflow_arg_area points to the stack location of the
4590 next argument passed on the stack, and __reg_save_area
4591 always points to the start of the register area in the
4592 call frame of the current function. The function prologue
4593 saves all registers used for argument passing into this
4594 area if the function uses variable arguments. */
4597 s390_build_va_list ()
4599 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
/* Build the RECORD_TYPE and its four fields; __gpr/__fpr are
   long ints, the two area pointers' types are on lines omitted
   from this listing (gaps at 4611, 4613-4614 — presumably
   ptr_type_node).  */
4601 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4604 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4606 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
4607 long_integer_type_node);
4608 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
4609 long_integer_type_node);
4610 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
4612 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
/* Attach the fields to the record and chain them in declaration
   order; field order matters — va_start/va_arg walk this chain.  */
4615 DECL_FIELD_CONTEXT (f_gpr) = record;
4616 DECL_FIELD_CONTEXT (f_fpr) = record;
4617 DECL_FIELD_CONTEXT (f_ovf) = record;
4618 DECL_FIELD_CONTEXT (f_sav) = record;
4620 TREE_CHAIN (record) = type_decl;
4621 TYPE_NAME (record) = type_decl;
4622 TYPE_FIELDS (record) = f_gpr;
4623 TREE_CHAIN (f_gpr) = f_fpr;
4624 TREE_CHAIN (f_fpr) = f_ovf;
4625 TREE_CHAIN (f_ovf) = f_sav;
4627 layout_type (record);
4629 /* The correct type is an array type of one element. */
4630 return build_array_type (record, build_index_type (size_zero_node));
4633 /* Implement va_start by filling the va_list structure VALIST.
4634 STDARG_P is always true, and ignored.
4635 NEXTARG points to the first anonymous stack argument.
4637 The following global variables are used to initialize
4638 the va_list structure:
4640 current_function_args_info:
4641 holds number of gprs and fprs used for named arguments.
4642 current_function_arg_offset_rtx:
4643 holds the offset of the first anonymous stack argument
4644 (relative to the virtual arg pointer). */
4647 s390_va_start (valist, nextarg)
4649 rtx nextarg ATTRIBUTE_UNUSED;
4651 HOST_WIDE_INT n_gpr, n_fpr;
4653 tree f_gpr, f_fpr, f_ovf, f_sav;
4654 tree gpr, fpr, ovf, sav, t;
/* Walk the field chain of __va_list_tag in the order laid down by
   s390_build_va_list: __gpr, __fpr, __overflow_arg_area,
   __reg_save_area.  */
4656 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4657 f_fpr = TREE_CHAIN (f_gpr);
4658 f_ovf = TREE_CHAIN (f_fpr);
4659 f_sav = TREE_CHAIN (f_ovf);
/* VALIST is an array type; dereference it to reach the record, then
   build COMPONENT_REFs for each member.  */
4661 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4662 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4663 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4664 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4665 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4667 /* Count number of gp and fp argument registers used. */
4669 n_gpr = current_function_args_info.gprs;
4670 n_fpr = current_function_args_info.fprs;
/* Store the counts into the __gpr / __fpr members.  */
4672 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4673 TREE_SIDE_EFFECTS (t) = 1;
4674 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4676 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4677 TREE_SIDE_EFFECTS (t) = 1;
4678 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4680 /* Find the overflow area. */
4681 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Clamp a negative first-anonymous-arg offset to zero.  */
4683 off = INTVAL (current_function_arg_offset_rtx);
4684 off = off < 0 ? 0 : off;
4685 if (TARGET_DEBUG_ARG)
4686 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
4687 (int)n_gpr, (int)n_fpr, off);
4689 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
4691 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4692 TREE_SIDE_EFFECTS (t) = 1;
4693 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4695 /* Find the register save area. */
/* The register save area lies STACK_POINTER_OFFSET bytes below the
   virtual incoming-args pointer; build_int_2's second arg of -1
   sign-extends the negative constant.  */
4696 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
4697 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4698 build_int_2 (-STACK_POINTER_OFFSET, -1));
4699 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4700 TREE_SIDE_EFFECTS (t) = 1;
4701 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4704 /* Implement va_arg by updating the va_list structure
4705 VALIST as required to retrieve an argument of type
4706 TYPE, and returning that argument.
4708 Generates code equivalent to:
4710 if (integral value) {
4711 if (size <= 4 && args.gpr < 5 ||
4712 size > 4 && args.gpr < 4 )
4713 ret = args.reg_save_area[args.gpr+8]
4715 ret = *args.overflow_arg_area++;
4716 } else if (float value) {
4718 ret = args.reg_save_area[args.fpr+64]
4720 ret = *args.overflow_arg_area++;
4721 } else if (aggregate value) {
4723 ret = *args.reg_save_area[args.gpr]
4725 ret = **args.overflow_arg_area++;
4729 s390_va_arg (valist, type)
4733 tree f_gpr, f_fpr, f_ovf, f_sav;
4734 tree gpr, fpr, ovf, sav, reg, t, u;
4735 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
4736 rtx lab_false, lab_over, addr_rtx, r;
/* Locate the four __va_list_tag members, same field order as in
   s390_build_va_list / s390_va_start.  */
4738 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4739 f_fpr = TREE_CHAIN (f_gpr);
4740 f_ovf = TREE_CHAIN (f_fpr);
4741 f_sav = TREE_CHAIN (f_ovf);
4743 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4744 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4745 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4746 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4747 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4749 size = int_size_in_bytes (type);
/* Classify the argument: by-reference aggregates, hard floats, or
   everything else in gprs.  Each branch sets reg/n_reg/sav_ofs/
   sav_scale/size/max_reg (some assignments fall on lines omitted
   from this listing).  */
4751 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
4753 if (TARGET_DEBUG_ARG)
4755 fprintf (stderr, "va_arg: aggregate type");
4759 /* Aggregates are passed by reference. */
/* Pointer lives in a gpr slot: slots start 2 words into the save
   area, are word-scaled, and the fetched object is a pointer.  */
4763 sav_ofs = 2 * UNITS_PER_WORD;
4764 sav_scale = UNITS_PER_WORD;
4765 size = UNITS_PER_WORD;
4768 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
4770 if (TARGET_DEBUG_ARG)
4772 fprintf (stderr, "va_arg: float type");
4776 /* FP args go in FP registers, if present. */
/* fpr save slots begin 16 words into the register save area.  */
4780 sav_ofs = 16 * UNITS_PER_WORD;
4782 /* TARGET_64BIT has up to 4 parameter in fprs */
4783 max_reg = TARGET_64BIT ? 3 : 1;
4787 if (TARGET_DEBUG_ARG)
4789 fprintf (stderr, "va_arg: other type");
4793 /* Otherwise into GP registers. */
4796 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4797 sav_ofs = 2 * UNITS_PER_WORD;
/* Sub-word scalars sit right-justified in their word slot; the two
   alternative adjustments below are presumably the 64-bit and 31-bit
   variants selected by a conditional on an omitted line — TODO
   confirm against full source.  */
4799 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
4800 TYPE_MODE (type) == HImode ? 6 :
4801 TYPE_MODE (type) == QImode ? 7 : 0;
4803 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
4804 TYPE_MODE (type) == QImode ? 3 : 0;
4806 sav_scale = UNITS_PER_WORD;
4813 /* Pull the value out of the saved registers ... */
4815 lab_false = gen_label_rtx ();
4816 lab_over = gen_label_rtx ();
4817 addr_rtx = gen_reg_rtx (Pmode);
/* Branch to the overflow path when the register count already
   exceeds the limit; the comparison's second operand is on an
   omitted line (gap at 4820) — presumably GEN_INT (max_reg).  */
4819 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
4821 GT, const1_rtx, Pmode, 0, lab_false);
/* addr = sav + sav_ofs + reg * sav_scale.  */
4824 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4828 u = build (MULT_EXPR, long_integer_type_node,
4829 reg, build_int_2 (sav_scale, 0));
4830 TREE_SIDE_EFFECTS (u) = 1;
4832 t = build (PLUS_EXPR, ptr_type_node, t, u);
4833 TREE_SIDE_EFFECTS (t) = 1;
4835 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4837 emit_move_insn (addr_rtx, r);
4840 emit_jump_insn (gen_jump (lab_over));
4842 emit_label (lab_false);
4844 /* ... Otherwise out of the overflow area. */
4846 t = save_expr (ovf);
4849 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
/* Right-justify sub-word values within their stack slot before
   taking the address.  */
4850 if (size < UNITS_PER_WORD)
4852 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
4853 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4854 TREE_SIDE_EFFECTS (t) = 1;
4855 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4857 t = save_expr (ovf);
4860 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4862 emit_move_insn (addr_rtx, r);
/* Bump the overflow pointer past the argument just consumed.  */
4864 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4865 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4866 TREE_SIDE_EFFECTS (t) = 1;
4867 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4869 emit_label (lab_over);
4871 /* If less than max_regs a registers are retrieved out
4872 of register save area, increment. */
4874 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
4875 build_int_2 (n_reg, 0));
4876 TREE_SIDE_EFFECTS (u) = 1;
4877 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* For by-reference arguments, one more dereference: load the real
   object's address out of the slot we just addressed.  The guard on
   indirect_p is presumably on an omitted line (gap before 4881).  */
4881 r = gen_rtx_MEM (Pmode, addr_rtx);
4882 set_mem_alias_set (r, get_varargs_alias_set ());
4883 emit_move_insn (addr_rtx, r);
4891 /* Output assembly code for the trampoline template to
4894 On S/390, we use gpr 1 internally in the trampoline code;
4895 gpr 0 is used to hold the static chain. */
4898 s390_trampoline_template (file)
/* 64-bit variant: larl gets the address of the two 8-byte literal
   slots at label 0; the first is loaded into gpr 0 (static chain),
   the second into gpr 1 (target address), then branch.  The
   TARGET_64BIT selection between the two variants is on an omitted
   line (gap at 4899-4902).  */
4903 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
4904 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
4905 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
4906 fprintf (file, "br\t%s\n", reg_names[1]);
4907 fprintf (file, "0:\t.quad\t0\n");
4908 fprintf (file, ".quad\t0\n");
/* 31-bit variant: basr establishes a base in gpr 1; the two 4-byte
   literal slots then sit at offsets 10 and 14 from it (past the
   2-byte basr and two 4-byte loads plus the 2-byte br).  */
4912 fprintf (file, "basr\t%s,0\n", reg_names[1]);
4913 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
4914 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
4915 fprintf (file, "br\t%s\n", reg_names[1]);
4916 fprintf (file, ".long\t0\n");
4917 fprintf (file, ".long\t0\n");
4921 /* Emit RTL insns to initialize the variable parts of a trampoline.
4922 FNADDR is an RTX for the address of the function's pure code.
4923 CXT is an RTX for the static chain value for the function. */
4926 s390_initialize_trampoline (addr, fnaddr, cxt)
/* Store the static chain and the target address into the template's
   literal slots: offsets 20/28 in the 64-bit template, 12/16 in the
   31-bit one — these must match the layout emitted by
   s390_trampoline_template above.  */
4931 emit_move_insn (gen_rtx
4933 memory_address (Pmode,
4934 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
4935 emit_move_insn (gen_rtx
4937 memory_address (Pmode,
4938 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
4941 /* Return rtx for 64-bit constant formed from the 32-bit subwords
4942 LOW and HIGH, independent of the host word size. */
4945 s390_gen_rtx_const_DI (high, low)
/* Wide hosts: assemble the value in a single HOST_WIDE_INT.  The
   shift of HIGH into the upper 32 bits is on an omitted line (gap
   at 4952) — presumably `val <<= 32;`; confirm against full source.  */
4949 #if HOST_BITS_PER_WIDE_INT >= 64
4951 val = (HOST_WIDE_INT)high;
4953 val |= (HOST_WIDE_INT)low;
4955 return GEN_INT (val);
/* Narrow hosts: let immed_double_const pair the two halves.  */
4957 #if HOST_BITS_PER_WIDE_INT >= 32
4958 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
4965 /* Output assembler code to FILE to increment profiler label # LABELNO
4966 for profiling a function entry. */
4969 s390_function_profiler (file, labelno)
4976 sprintf (label, "%sP%d", LPREFIX, labelno);
4978 fprintf (file, "# function profiler \n");
/* op[0] = return-address reg, op[1] = its save slot one word above
   the stack pointer, op[2] = scratch gpr 1, op[3] = the counter
   label, op[4] = _mcount (wrapped for PIC below).  */
4980 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
4981 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
4982 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
4984 op[2] = gen_rtx_REG (Pmode, 1);
4985 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
4986 SYMBOL_REF_FLAG (op[3]) = 1;
4988 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
/* PIC: reference _mcount through UNSPEC 113 (flag_pic guard and any
   companion wrapping of op[3] fall on lines omitted from this
   listing).  */
4991 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
4992 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit sequence: save the return register, load the counter label
   address with larl, call _mcount with brasl, restore.  */
4997 output_asm_insn ("stg\t%0,%1", op);
4998 output_asm_insn ("larl\t%2,%3", op);
4999 output_asm_insn ("brasl\t%0,%4", op);
5000 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit non-PIC (presumably — variant selection lines omitted):
   bras skips an inline literal pool holding &_mcount and &label,
   then both are loaded via the base left in op[2].  */
5004 op[6] = gen_label_rtx ();
5006 output_asm_insn ("st\t%0,%1", op);
5007 output_asm_insn ("bras\t%2,%l6", op);
5008 output_asm_insn (".long\t%4", op);
5009 output_asm_insn (".long\t%3", op);
5010 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5011 output_asm_insn ("l\t%0,0(%2)", op);
5012 output_asm_insn ("l\t%2,4(%2)", op);
5013 output_asm_insn ("basr\t%0,%0", op);
5014 output_asm_insn ("l\t%0,%1", op);
/* 31-bit PIC variant: the pool holds pc-relative differences from
   label %l5, turned back into absolute addresses with `a`.  */
5018 op[5] = gen_label_rtx ();
5019 op[6] = gen_label_rtx ();
5021 output_asm_insn ("st\t%0,%1", op);
5022 output_asm_insn ("bras\t%2,%l6", op);
5023 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[5]));
5024 output_asm_insn (".long\t%4-%l5", op);
5025 output_asm_insn (".long\t%3-%l5", op);
5026 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (op[6]));
5027 output_asm_insn ("lr\t%0,%2", op);
5028 output_asm_insn ("a\t%0,0(%2)", op);
5029 output_asm_insn ("a\t%2,4(%2)", op);
5030 output_asm_insn ("basr\t%0,%0", op);
5031 output_asm_insn ("l\t%0,%1", op);
5035 /* Select section for constant in constant pool. In 32-bit mode,
5036 constants go in the function section; in 64-bit mode in .rodata. */
5039 s390_select_rtx_section (mode, x, align)
5040 enum machine_mode mode ATTRIBUTE_UNUSED;
5041 rtx x ATTRIBUTE_UNUSED;
5042 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
/* The TARGET_64BIT test selecting between the two calls is on an
   omitted line (gap at 5043-5044).  */
5045 readonly_data_section ();
5047 function_section (current_function_decl);
5050 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
5051 may access it directly in the GOT. */
5054 s390_encode_section_info (decl, first)
5056 int first ATTRIBUTE_UNUSED;
5060 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5061 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
5063 if (GET_CODE (rtl) == MEM)
5065 SYMBOL_REF_FLAG (XEXP (rtl, 0))
5066 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5067 || ! TREE_PUBLIC (decl));