1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
/* Forward declarations for the static target-hook implementations
   defined later in this file (K&R-era PARAMS macro prototypes).  */
53 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
54 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
55 static int s390_adjust_priority PARAMS ((rtx, int));
56 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
57 unsigned HOST_WIDE_INT));
58 static void s390_encode_section_info PARAMS ((tree, int));
59 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
60 HOST_WIDE_INT, tree));
/* Override the generic target hooks with the S/390-specific
   implementations declared above, then instantiate the target
   hook vector.  */
62 #undef TARGET_ASM_ALIGNED_HI_OP
63 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
64 #undef TARGET_ASM_ALIGNED_DI_OP
65 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
66 #undef TARGET_ASM_INTEGER
67 #define TARGET_ASM_INTEGER s390_assemble_integer
/* The S/390 assembler syntax uses no grouping parentheses in
   expressions, hence the empty open/close strings.  */
69 #undef TARGET_ASM_OPEN_PAREN
70 #define TARGET_ASM_OPEN_PAREN ""
72 #undef TARGET_ASM_CLOSE_PAREN
73 #define TARGET_ASM_CLOSE_PAREN ""
75 #undef TARGET_ASM_SELECT_RTX_SECTION
76 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
78 #undef TARGET_SCHED_ADJUST_COST
79 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
81 #undef TARGET_SCHED_ADJUST_PRIORITY
82 #define TARGET_SCHED_ADJUST_PRIORITY s390_adjust_priority
84 #undef TARGET_ENCODE_SECTION_INFO
85 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
87 #undef TARGET_ASM_OUTPUT_MI_THUNK
88 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
89 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
90 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
/* The target hook vector, filled in from the overrides above.  */
92 struct gcc_target targetm = TARGET_INITIALIZER;
/* File-scope state for the S/390 back end.  NOTE(review): the embedded
   original-line numbers are non-contiguous here; the definition of
   struct s390_address (described by the comment below) appears to have
   been elided from this fragment.  */
94 extern int reload_completed;
96 /* The alias set for prologue/epilogue register save/restore. */
97 static int s390_sr_alias_set = 0;
99 /* Save information from a "cmpxx" operation until the branch or scc is
   emitted.  */
101 rtx s390_compare_op0, s390_compare_op1;
103 /* Structure used to hold the components of a S/390 memory
104 address. A legitimate address on S/390 is of the general
106 base + index + displacement
107 where any of the components is optional.
109 base and index are registers of the class ADDR_REGS,
110 displacement is an unsigned 12-bit immediate constant. */
120 /* Which cpu are we tuning for. */
121 enum processor_type s390_cpu;
122 /* Which instruction set architecture to use. */
123 enum processor_type s390_arch;
125 /* Strings to hold which cpu and instruction set architecture to use. */
126 const char *s390_tune_string; /* for -mtune=<xxx> */
127 const char *s390_arch_string; /* for -march=<xxx> */
129 /* Define the structure for the machine field in struct function. */
/* NOTE(review): fragment with elided lines — the opening brace and
   several field declarations (e.g. the fpr-save flag and the
   first/last save-gpr counterparts) are missing from this view.  */
131 struct machine_function GTY(())
133 /* Label of start of initial literal pool. */
134 rtx literal_pool_label;
136 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
139 /* Number of first and last gpr to be saved, restored. */
141 int first_restore_gpr;
144 /* Size of stack frame. */
145 HOST_WIDE_INT frame_size;
/* Prototypes for the remaining static helpers in this file
   (predicates, address decomposition, literal-pool and
   prologue/epilogue machinery).  */
148 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
149 static int s390_branch_condition_mask PARAMS ((rtx));
150 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
151 static int check_mode PARAMS ((rtx, enum machine_mode *));
152 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
153 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
154 static int reg_used_in_mem_p PARAMS ((int, rtx));
155 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
156 static int s390_split_branches PARAMS ((rtx, bool *));
157 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
158 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
159 static int find_base_register_in_addr PARAMS ((struct s390_address *));
160 static bool find_base_register_ref PARAMS ((rtx));
161 static void replace_base_register_ref PARAMS ((rtx *, rtx));
162 static void s390_optimize_prolog PARAMS ((int));
163 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
164 static int find_unused_clobbered_reg PARAMS ((void));
165 static void s390_frame_info PARAMS ((void));
166 static rtx save_fpr PARAMS ((rtx, int, int));
167 static rtx restore_fpr PARAMS ((rtx, int, int));
168 static rtx save_gprs PARAMS ((rtx, int, int, int));
169 static rtx restore_gprs PARAMS ((rtx, int, int, int));
170 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
171 static struct machine_function * s390_init_machine_status PARAMS ((void));
173 /* Return true if SET either doesn't set the CC register, or else
174 the source and destination have matching CC modes and that
175 CC mode is at least as constrained as REQ_MODE. */
/* NOTE(review): original-line numbers jump inside this fragment; the
   return type, braces, early returns, and parts of the mode-relaxation
   logic have been elided.  Do not assume it compiles as shown.  */
178 s390_match_ccmode_set (set, req_mode)
180 enum machine_mode req_mode;
182 enum machine_mode set_mode;
184 if (GET_CODE (set) != SET)
187 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
190 set_mode = GET_MODE (SET_DEST (set));
203 if (req_mode != set_mode)
208 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
209 && req_mode != CCSRmode && req_mode != CCURmode)
215 if (req_mode != CCAmode)
223 return (GET_MODE (SET_SRC (set)) == set_mode);
226 /* Return true if every SET in INSN that sets the CC register
227 has source and destination with matching CC modes and that
228 CC mode is at least as constrained as REQ_MODE.
229 If REQ_MODE is VOIDmode, always return false. */
/* NOTE(review): fragment with elided lines (return type, braces,
   final return).  Walks a single SET or every SET inside a PARALLEL,
   delegating each to s390_match_ccmode_set.  */
232 s390_match_ccmode (insn, req_mode)
234 enum machine_mode req_mode;
238 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
239 if (req_mode == VOIDmode)
242 if (GET_CODE (PATTERN (insn)) == SET)
243 return s390_match_ccmode_set (PATTERN (insn), req_mode);
245 if (GET_CODE (PATTERN (insn)) == PARALLEL)
246 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
248 rtx set = XVECEXP (PATTERN (insn), 0, i);
249 if (GET_CODE (set) == SET)
250 if (!s390_match_ccmode_set (set, req_mode))
257 /* If a test-under-mask instruction can be used to implement
258 (compare (and ... OP1) OP2), return the CC mode required
259 to do that. Otherwise, return VOIDmode.
260 MIXED is true if the instruction can distinguish between
261 CC1 and CC2 for mixed selected bits (TMxx), it is false
262 if the instruction cannot (TM). */
/* NOTE(review): fragment with elided lines (declarations of bit0/bit1,
   the VOIDmode/CC0mode/CC3mode returns, and the use of MIXED).  */
265 s390_tm_ccmode (op1, op2, mixed)
272 /* ??? Fixme: should work on CONST_DOUBLE as well. */
273 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
276 /* Selected bits all zero: CC0. */
277 if (INTVAL (op2) == 0)
280 /* Selected bits all one: CC3. */
281 if (INTVAL (op2) == INTVAL (op1))
284 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
287 bit1 = exact_log2 (INTVAL (op2));
288 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
289 if (bit0 != -1 && bit1 != -1)
290 return bit0 > bit1 ? CCT1mode : CCT2mode;
296 /* Given a comparison code OP (EQ, NE, etc.) and the operands
297 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* NOTE(review): heavily elided fragment — the switch over the
   comparison code, the case labels, and the returned CC modes are
   mostly missing; only scattered operand tests remain visible.  */
301 s390_select_ccmode (code, op0, op1)
310 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
311 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
313 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
314 || GET_CODE (op1) == NEG)
317 if (GET_CODE (op0) == AND)
319 /* Check whether we can potentially do it via TM. */
320 enum machine_mode ccmode;
321 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
322 if (ccmode != VOIDmode)
324 /* Relax CCTmode to CCZmode to allow fall-back to AND
325 if that turns out to be beneficial. */
326 return ccmode == CCTmode ? CCZmode : ccmode;
330 if (register_operand (op0, HImode)
331 && GET_CODE (op1) == CONST_INT
332 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
334 if (register_operand (op0, QImode)
335 && GET_CODE (op1) == CONST_INT
336 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
345 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
346 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
348 if (INTVAL (XEXP((op0), 1)) < 0)
361 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
362 && GET_CODE (op1) != CONST_INT)
368 if (GET_CODE (op0) == PLUS)
371 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
372 && GET_CODE (op1) != CONST_INT)
378 if (GET_CODE (op0) == MINUS)
381 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
382 && GET_CODE (op1) != CONST_INT)
391 /* Return branch condition mask to implement a branch
392 specified by CODE. */
/* The four S/390 condition-code values map onto a 4-bit branch mask,
   CC0 being the most significant bit.  NOTE(review): the `case
   CCxmode:` labels that separate the per-mode switch arms below have
   been elided from this fragment, as have the inner braces, default
   cases, and the final abort/return — only the case/return pairs
   remain visible.  */
395 s390_branch_condition_mask (code)
398 const int CC0 = 1 << 3;
399 const int CC1 = 1 << 2;
400 const int CC2 = 1 << 1;
401 const int CC3 = 1 << 0;
/* Only comparisons of the CC register against zero are handled.  */
403 if (GET_CODE (XEXP (code, 0)) != REG
404 || REGNO (XEXP (code, 0)) != CC_REGNUM
405 || XEXP (code, 1) != const0_rtx)
408 switch (GET_MODE (XEXP (code, 0)))
411 switch (GET_CODE (code))
414 case NE: return CC1 | CC2 | CC3;
421 switch (GET_CODE (code))
424 case NE: return CC0 | CC2 | CC3;
431 switch (GET_CODE (code))
434 case NE: return CC0 | CC1 | CC3;
441 switch (GET_CODE (code))
444 case NE: return CC0 | CC1 | CC2;
451 switch (GET_CODE (code))
453 case EQ: return CC0 | CC2;
454 case NE: return CC1 | CC3;
461 switch (GET_CODE (code))
463 case LTU: return CC2 | CC3; /* carry */
464 case GEU: return CC0 | CC1; /* no carry */
471 switch (GET_CODE (code))
473 case GTU: return CC0 | CC1; /* borrow */
474 case LEU: return CC2 | CC3; /* no borrow */
481 switch (GET_CODE (code))
484 case NE: return CC1 | CC2 | CC3;
485 case LTU: return CC1;
486 case GTU: return CC2;
487 case LEU: return CC0 | CC1;
488 case GEU: return CC0 | CC2;
495 switch (GET_CODE (code))
498 case NE: return CC2 | CC1 | CC3;
499 case LTU: return CC2;
500 case GTU: return CC1;
501 case LEU: return CC0 | CC2;
502 case GEU: return CC0 | CC1;
509 switch (GET_CODE (code))
512 case NE: return CC1 | CC2 | CC3;
513 case LT: return CC1 | CC3;
515 case LE: return CC0 | CC1 | CC3;
516 case GE: return CC0 | CC2;
523 switch (GET_CODE (code))
526 case NE: return CC1 | CC2 | CC3;
528 case GT: return CC2 | CC3;
529 case LE: return CC0 | CC1;
530 case GE: return CC0 | CC2 | CC3;
537 switch (GET_CODE (code))
540 case NE: return CC1 | CC2 | CC3;
543 case LE: return CC0 | CC1;
544 case GE: return CC0 | CC2;
545 case UNORDERED: return CC3;
546 case ORDERED: return CC0 | CC1 | CC2;
547 case UNEQ: return CC0 | CC3;
548 case UNLT: return CC1 | CC3;
549 case UNGT: return CC2 | CC3;
550 case UNLE: return CC0 | CC1 | CC3;
551 case UNGE: return CC0 | CC2 | CC3;
552 case LTGT: return CC1 | CC2;
559 switch (GET_CODE (code))
562 case NE: return CC2 | CC1 | CC3;
565 case LE: return CC0 | CC2;
566 case GE: return CC0 | CC1;
567 case UNORDERED: return CC3;
568 case ORDERED: return CC0 | CC2 | CC1;
569 case UNEQ: return CC0 | CC3;
570 case UNLT: return CC2 | CC3;
571 case UNGT: return CC1 | CC3;
572 case UNLE: return CC0 | CC2 | CC3;
573 case UNGE: return CC0 | CC1 | CC3;
574 case LTGT: return CC2 | CC1;
585 /* If INV is false, return assembler mnemonic string to implement
586 a branch specified by CODE. If INV is true, return mnemonic
587 for the corresponding inverted branch. */
590 s390_branch_condition_mnemonic (code, inv)
/* Table of condition mnemonics indexed by the 4-bit branch mask.
   Index 0 (branch never) and 15 (branch always) have no conditional
   mnemonic, hence the NULL entries and the 1..14 bounds check below.  */
594 static const char *const mnemonic[16] =
596 NULL, "o", "h", "nle",
597 "l", "nhe", "lh", "ne",
598 "e", "nlh", "he", "nl",
599 "le", "nh", "no", NULL
602 int mask = s390_branch_condition_mask (code);
/* NOTE(review): the mask inversion applied when INV is true appears
   to have been elided from this fragment.  */
607 if (mask < 1 || mask > 14)
610 return mnemonic[mask];
613 /* If OP is an integer constant of mode MODE with exactly one
614 HImode subpart unequal to DEF, return the number of that
615 subpart. As a special case, all HImode subparts of OP are
616 equal to DEF, return zero. Otherwise, return -1. */
/* NOTE(review): fragment with elided lines — the `part` bookkeeping
   (initialisation, duplicate detection returning -1) and the value
   shifting per iteration are missing from this view.  */
619 s390_single_hi (op, mode, def)
621 enum machine_mode mode;
624 if (GET_CODE (op) == CONST_INT)
626 unsigned HOST_WIDE_INT value = 0;
627 int n_parts = GET_MODE_SIZE (mode) / 2;
630 for (i = 0; i < n_parts; i++)
633 value = (unsigned HOST_WIDE_INT) INTVAL (op);
637 if ((value & 0xffff) != (unsigned)(def & 0xffff))
646 return part == -1 ? 0 : (n_parts - 1 - part);
/* CONST_DOUBLE with VOIDmode is a wide integer constant spread across
   CONST_DOUBLE_LOW / CONST_DOUBLE_HIGH.  */
649 else if (GET_CODE (op) == CONST_DOUBLE
650 && GET_MODE (op) == VOIDmode)
652 unsigned HOST_WIDE_INT value = 0;
653 int n_parts = GET_MODE_SIZE (mode) / 2;
656 for (i = 0; i < n_parts; i++)
659 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
660 else if (i == HOST_BITS_PER_WIDE_INT / 16)
661 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
665 if ((value & 0xffff) != (unsigned)(def & 0xffff))
674 return part == -1 ? 0 : (n_parts - 1 - part);
680 /* Extract the HImode part number PART from integer
681 constant OP of mode MODE. */
684 s390_extract_hi (op, mode, part)
686 enum machine_mode mode;
689 int n_parts = GET_MODE_SIZE (mode) / 2;
690 if (part < 0 || part >= n_parts)
/* Part numbers count from the most significant halfword, so convert
   to a little-endian shift count first.  */
693 part = n_parts - 1 - part;
695 if (GET_CODE (op) == CONST_INT)
697 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
698 return ((value >> (16 * part)) & 0xffff);
/* Wide constants: pick low or high word of the CONST_DOUBLE.  */
700 else if (GET_CODE (op) == CONST_DOUBLE
701 && GET_MODE (op) == VOIDmode)
703 unsigned HOST_WIDE_INT value;
704 if (part < HOST_BITS_PER_WIDE_INT / 16)
705 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
707 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
708 part -= HOST_BITS_PER_WIDE_INT / 16;
710 return ((value >> (16 * part)) & 0xffff);
716 /* If OP is an integer constant of mode MODE with exactly one
717 QImode subpart unequal to DEF, return the number of that
718 subpart. As a special case, all QImode subparts of OP are
719 equal to DEF, return zero. Otherwise, return -1. */
/* Byte-granular analogue of s390_single_hi.  NOTE(review): same
   elisions as there — the `part` bookkeeping and per-iteration value
   shifting are missing from this fragment.  */
722 s390_single_qi (op, mode, def)
724 enum machine_mode mode;
727 if (GET_CODE (op) == CONST_INT)
729 unsigned HOST_WIDE_INT value = 0;
730 int n_parts = GET_MODE_SIZE (mode);
733 for (i = 0; i < n_parts; i++)
736 value = (unsigned HOST_WIDE_INT) INTVAL (op);
740 if ((value & 0xff) != (unsigned)(def & 0xff))
749 return part == -1 ? 0 : (n_parts - 1 - part);
752 else if (GET_CODE (op) == CONST_DOUBLE
753 && GET_MODE (op) == VOIDmode)
755 unsigned HOST_WIDE_INT value = 0;
756 int n_parts = GET_MODE_SIZE (mode);
759 for (i = 0; i < n_parts; i++)
762 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
763 else if (i == HOST_BITS_PER_WIDE_INT / 8)
764 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
768 if ((value & 0xff) != (unsigned)(def & 0xff))
777 return part == -1 ? 0 : (n_parts - 1 - part);
783 /* Extract the QImode part number PART from integer
784 constant OP of mode MODE. */
/* Byte-granular analogue of s390_extract_hi.  */
787 s390_extract_qi (op, mode, part)
789 enum machine_mode mode;
792 int n_parts = GET_MODE_SIZE (mode);
793 if (part < 0 || part >= n_parts)
/* Part numbers count from the most significant byte.  */
796 part = n_parts - 1 - part;
798 if (GET_CODE (op) == CONST_INT)
800 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
801 return ((value >> (8 * part)) & 0xff);
803 else if (GET_CODE (op) == CONST_DOUBLE
804 && GET_MODE (op) == VOIDmode)
806 unsigned HOST_WIDE_INT value;
807 if (part < HOST_BITS_PER_WIDE_INT / 8)
808 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
810 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
811 part -= HOST_BITS_PER_WIDE_INT / 8;
813 return ((value >> (8 * part)) & 0xff);
820 /* Change optimizations to be performed, depending on the
823 LEVEL is the optimization level specified; 2 if `-O2' is
824 specified, 1 if `-O' is specified, and 0 if neither is specified.
826 SIZE is nonzero if `-Os' is specified and zero otherwise. */
829 optimization_options (level, size)
830 int level ATTRIBUTE_UNUSED;
831 int size ATTRIBUTE_UNUSED;
833 /* ??? There are apparently still problems with -fcaller-saves. */
834 flag_caller_saves = 0;
836 /* By default, always emit DWARF-2 unwind info. This allows debugging
837 without maintaining a stack frame back-chain. */
838 flag_asynchronous_unwind_tables = 1;
/* NOTE(review): this fragment is the body of the options-override
   routine (presumably `override_options`); its header line, the
   pta_flags enum, and several braces/else branches have been elided.
   It validates -march=/-mtune= strings against the alias table and
   derives s390_arch / s390_cpu.  */
845 static const char * const cpu_names[] = TARGET_CPU_DEFAULT_NAMES;
848 const char *const name; /* processor name or nickname. */
849 const enum processor_type processor;
856 const processor_alias_table[] =
858 {"g5", PROCESSOR_9672_G5, PTA_IEEE_FLOAT},
859 {"g6", PROCESSOR_9672_G6, PTA_IEEE_FLOAT},
860 {"z900", PROCESSOR_2064_Z900, PTA_IEEE_FLOAT | PTA_ZARCH},
863 int const pta_size = ARRAY_SIZE (processor_alias_table);
865 /* Acquire a unique set number for our register saves and restores. */
866 s390_sr_alias_set = new_alias_set ();
868 /* Set up function hooks. */
869 init_machine_status = s390_init_machine_status;
871 /* Set cpu and arch, if only partially given. */
872 if (!s390_tune_string && s390_arch_string)
873 s390_tune_string = s390_arch_string;
874 if (!s390_tune_string)
875 s390_tune_string = cpu_names [TARGET_64BIT ? TARGET_CPU_DEFAULT_2064
876 : TARGET_CPU_DEFAULT_9672];
877 if (!s390_arch_string)
878 #ifdef DEFAULT_TARGET_64BIT
879 s390_arch_string = "z900";
881 s390_arch_string = "g5";
884 for (i = 0; i < pta_size; i++)
885 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
887 s390_arch = processor_alias_table[i].processor;
888 /* Default cpu tuning to the architecture. */
889 s390_cpu = s390_arch;
/* Reject 64-bit / z/Architecture modes on pre-z900 processors.  */
891 if (!(processor_alias_table[i].flags & PTA_ZARCH)
893 error ("64-bit ABI not supported on %s", s390_arch_string);
895 if (!(processor_alias_table[i].flags & PTA_ZARCH)
897 error ("z/Architecture not supported on %s", s390_arch_string);
903 error ("bad value (%s) for -march= switch", s390_arch_string);
905 /* ESA implies 31 bit mode. */
906 if ((target_flags_explicit & MASK_ZARCH) && !TARGET_ZARCH)
908 if ((target_flags_explicit & MASK_64BIT) && TARGET_64BIT)
909 error ("64-bit ABI not possible in ESA/390 mode");
911 target_flags &= ~MASK_64BIT;
914 for (i = 0; i < pta_size; i++)
915 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
917 s390_cpu = processor_alias_table[i].processor;
922 error ("bad value (%s) for -mtune= switch", s390_tune_string);
925 /* Map for smallest class containing reg regno. */
927 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
/* GPRs: r0 cannot serve as a base/index register, so it is only
   GENERAL_REGS; r1-r15 are ADDR_REGS.  */
928 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
929 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
930 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
931 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
/* Floating-point registers f0-f15.  */
932 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
933 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
934 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
935 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
/* Trailing entries: presumably the fake argument pointer, the
   condition-code register (NO_REGS), and the return-address/virtual
   register — TODO confirm against the register macros in s390.h.  */
936 ADDR_REGS, NO_REGS, ADDR_REGS
940 /* Return true if OP a (const_int 0) operand.
941 OP is the current operation.
942 MODE is the current operation mode. */
945 const0_operand (op, mode)
947 enum machine_mode mode;
949 return op == CONST0_RTX (mode);
952 /* Return true if OP is constant.
953 OP is the current operation.
954 MODE is the current operation mode. */
957 consttable_operand (op, mode)
959 enum machine_mode mode ATTRIBUTE_UNUSED;
961 return CONSTANT_P (op);
964 /* Return true if the mode of operand OP matches MODE.
965 If MODE is set to VOIDmode, set it to the mode of OP. */
968 check_mode (op, mode)
970 enum machine_mode *mode;
/* A VOIDmode request means "accept OP's own mode" — record it.  */
972 if (*mode == VOIDmode)
973 *mode = GET_MODE (op);
976 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
982 /* Return true if OP a valid operand for the LARL instruction.
983 OP is the current operation.
984 MODE is the current operation mode. */
/* NOTE(review): fragment with elided return statements and the
   CONST unwrapping (XEXP) between the checks below.  */
987 larl_operand (op, mode)
989 enum machine_mode mode;
991 if (! check_mode (op, &mode))
994 /* Allow labels and local symbols. */
995 if (GET_CODE (op) == LABEL_REF)
997 if (GET_CODE (op) == SYMBOL_REF
998 && (!flag_pic || SYMBOL_REF_FLAG (op)
999 || CONSTANT_POOL_ADDRESS_P (op)))
1002 /* Everything else must have a CONST, so strip it. */
1003 if (GET_CODE (op) != CONST)
/* LARL addresses are halfword-aligned, hence only even offsets.  */
1007 /* Allow adding *even* constants. */
1008 if (GET_CODE (op) == PLUS)
1010 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1011 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1016 /* Labels and local symbols allowed here as well. */
1017 if (GET_CODE (op) == LABEL_REF)
1019 if (GET_CODE (op) == SYMBOL_REF
1020 && (!flag_pic || SYMBOL_REF_FLAG (op)
1021 || CONSTANT_POOL_ADDRESS_P (op)))
/* UNSPEC numbers 111/113 appear to encode @GOTENT and @PLT
   references respectively (per the comment below) — TODO confirm
   against the UNSPEC constants in s390.md.  */
1024 /* Now we must have a @GOTENT offset or @PLT stub. */
1025 if (GET_CODE (op) == UNSPEC
1026 && XINT (op, 1) == 111)
1028 if (GET_CODE (op) == UNSPEC
1029 && XINT (op, 1) == 113)
1035 /* Helper routine to implement s_operand and s_imm_operand.
1036 OP is the current operation.
1037 MODE is the current operation mode.
1038 ALLOW_IMMEDIATE specifies whether immediate operands should
1039 be accepted or not. */
/* NOTE(review): fragment with elided lines — the switch case labels
   (constants vs. MEM), returns, and the index-register rejection
   after s390_decompose_address are missing from this view.  */
1042 general_s_operand (op, mode, allow_immediate)
1044 enum machine_mode mode;
1045 int allow_immediate;
1047 struct s390_address addr;
1049 /* Call general_operand first, so that we don't have to
1050 check for many special cases. */
1051 if (!general_operand (op, mode))
1054 /* Just like memory_operand, allow (subreg (mem ...))
1056 if (reload_completed
1057 && GET_CODE (op) == SUBREG
1058 && GET_CODE (SUBREG_REG (op)) == MEM)
1059 op = SUBREG_REG (op);
1061 switch (GET_CODE (op))
1063 /* Constants that we are sure will be forced to the
1064 literal pool in reload are OK as s-operand. Note
1065 that we cannot call s390_preferred_reload_class here
1066 because it might not be known yet at this point
1067 whether the current function is a leaf or not. */
1070 if (!allow_immediate || reload_completed)
1072 if (!legitimate_reload_constant_p (op))
1078 /* Memory operands are OK unless they already use an
1081 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1083 if (s390_decompose_address (XEXP (op, 0), &addr)
1095 /* Return true if OP is a valid S-type operand.
1096 OP is the current operation.
1097 MODE is the current operation mode. */
1100 s_operand (op, mode)
1102 enum machine_mode mode;
1104 return general_s_operand (op, mode, 0);
1107 /* Return true if OP is a valid S-type operand or an immediate
1108 operand that can be addressed as S-type operand by forcing
1109 it into the literal pool.
1110 OP is the current operation.
1111 MODE is the current operation mode. */
1114 s_imm_operand (op, mode)
1116 enum machine_mode mode;
1118 return general_s_operand (op, mode, 1);
1121 /* Return true if OP is a valid operand for a 'Q' constraint.
1122 This differs from s_operand in that only memory operands
1123 without index register are accepted, nothing else. */
/* NOTE(review): the function header line is elided from this
   fragment (presumably `q_constraint (op)`), as is the final check
   that addr.indx is absent and the returns.  */
1129 struct s390_address addr;
1131 if (GET_CODE (op) != MEM)
1134 if (!s390_decompose_address (XEXP (op, 0), &addr))
1143 /* Return the cost of an address rtx ADDR. */
1146 s390_address_cost (addr)
1149 struct s390_address ad;
1150 if (!s390_decompose_address (addr, &ad))
/* Addresses using an index register are charged one extra unit.  */
1153 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1156 /* Return true if OP is a valid operand for the BRAS instruction.
1157 OP is the current operation.
1158 MODE is the current operation mode. */
/* NOTE(review): fragment — the CONST unwrapping before the @PLT
   check and the return statements are elided here.  UNSPEC 113
   appears to denote a @PLT stub (per the comment below).  */
1161 bras_sym_operand (op, mode)
1163 enum machine_mode mode ATTRIBUTE_UNUSED;
1165 register enum rtx_code code = GET_CODE (op);
1167 /* Allow SYMBOL_REFs. */
1168 if (code == SYMBOL_REF)
1171 /* Allow @PLT stubs. */
1173 && GET_CODE (XEXP (op, 0)) == UNSPEC
1174 && XINT (XEXP (op, 0), 1) == 113)
1180 /* Return true if OP is a load multiple operation. It is known to be a
1181 PARALLEL and the first section will be tested.
1182 OP is the current operation.
1183 MODE is the current operation mode. */
1186 load_multiple_operation (op, mode)
1188 enum machine_mode mode ATTRIBUTE_UNUSED;
1190 int count = XVECLEN (op, 0);
1191 unsigned int dest_regno;
1196 /* Perform a quick check so we don't blow up below. */
1198 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1199 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1200 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1203 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1204 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1206 /* Check, is base, or base + displacement. */
1208 if (GET_CODE (src_addr) == REG)
1210 else if (GET_CODE (src_addr) == PLUS
1211 && GET_CODE (XEXP (src_addr, 0)) == REG
1212 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1214 off = INTVAL (XEXP (src_addr, 1));
1215 src_addr = XEXP (src_addr, 0);
/* Eliminable frame/arg pointers may change the offsets later, so
   reject them as base.  */
1220 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Each subsequent element must load the next consecutive register
   from the next consecutive word.  */
1223 for (i = 1; i < count; i++)
1225 rtx elt = XVECEXP (op, 0, i);
1227 if (GET_CODE (elt) != SET
1228 || GET_CODE (SET_DEST (elt)) != REG
1229 || GET_MODE (SET_DEST (elt)) != Pmode
1230 || REGNO (SET_DEST (elt)) != dest_regno + i
1231 || GET_CODE (SET_SRC (elt)) != MEM
1232 || GET_MODE (SET_SRC (elt)) != Pmode
1233 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1234 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1235 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1236 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1237 != off + i * UNITS_PER_WORD)
1244 /* Return true if OP is a store multiple operation. It is known to be a
1245 PARALLEL and the first section will be tested.
1246 OP is the current operation.
1247 MODE is the current operation mode. */
/* Mirror image of load_multiple_operation: consecutive registers
   stored to consecutive words at base (+ displacement).  */
1250 store_multiple_operation (op, mode)
1252 enum machine_mode mode ATTRIBUTE_UNUSED;
1254 int count = XVECLEN (op, 0);
1255 unsigned int src_regno;
1259 /* Perform a quick check so we don't blow up below. */
1261 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1262 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1263 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1266 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1267 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1269 /* Check, is base, or base + displacement. */
1271 if (GET_CODE (dest_addr) == REG)
1273 else if (GET_CODE (dest_addr) == PLUS
1274 && GET_CODE (XEXP (dest_addr, 0)) == REG
1275 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1277 off = INTVAL (XEXP (dest_addr, 1));
1278 dest_addr = XEXP (dest_addr, 0);
/* Reject eliminable frame/arg pointers as base.  */
1283 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
1286 for (i = 1; i < count; i++)
1288 rtx elt = XVECEXP (op, 0, i);
1290 if (GET_CODE (elt) != SET
1291 || GET_CODE (SET_SRC (elt)) != REG
1292 || GET_MODE (SET_SRC (elt)) != Pmode
1293 || REGNO (SET_SRC (elt)) != src_regno + i
1294 || GET_CODE (SET_DEST (elt)) != MEM
1295 || GET_MODE (SET_DEST (elt)) != Pmode
1296 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1297 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1298 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1299 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1300 != off + i * UNITS_PER_WORD)
1307 /* Return true if OP contains a symbol reference */
/* Recursive walk over the rtx format string: 'E' entries are rtx
   vectors, 'e' entries are sub-expressions.  */
1310 symbolic_reference_mentioned_p (op)
1313 register const char *fmt;
1316 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1319 fmt = GET_RTX_FORMAT (GET_CODE (op));
1320 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1326 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1327 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1331 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1339 /* Return true if OP is a legitimate general operand when
1340 generating PIC code. It is given that flag_pic is on
1341 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1344 legitimate_pic_operand_p (op)
1347 /* Accept all non-symbolic constants. */
1348 if (!SYMBOLIC_CONST (op))
1351 /* Reject everything else; must be handled
1352 via emit_pic_move. */
1356 /* Returns true if the constant value OP is a legitimate general operand.
1357 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* NOTE(review): fragment — the flag_pic test guarding the PIC branch
   and the TARGET_64BIT guard before the LARL check appear elided.  */
1360 legitimate_constant_p (op)
1363 /* Accept all non-symbolic constants. */
1364 if (!SYMBOLIC_CONST (op))
1367 /* In the PIC case, symbolic constants must *not* be
1368 forced into the literal pool. We accept them here,
1369 so that they will be handled by emit_pic_move. */
1373 /* Even in the non-PIC case, we can accept immediate
1374 LARL operands here. */
1376 return larl_operand (op, VOIDmode);
1378 /* All remaining non-PIC symbolic constants are
1379 forced into the literal pool. */
1383 /* Returns true if the constant value OP is a legitimate general
1384 operand during and after reload. The difference to
1385 legitimate_constant_p is that this function will not accept
1386 a constant that would need to be forced to the literal pool
1387 before it can be used as operand. */
/* NOTE(review): fragment — the TARGET_64BIT/TARGET_ZARCH guards on
   the lliXX and larl branches appear elided before lines 1400/1405.  */
1390 legitimate_reload_constant_p (op)
1393 /* Accept l(g)hi operands. */
1394 if (GET_CODE (op) == CONST_INT
1395 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1398 /* Accept lliXX operands. */
1400 && s390_single_hi (op, DImode, 0) >= 0)
1403 /* Accept larl operands. */
1405 && larl_operand (op, VOIDmode))
1408 /* Everything else cannot be handled without reload. */
1412 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1413 return the class of reg to actually use. */
/* NOTE(review): fragment — the switch case labels (constant codes,
   SYMBOL_REF/LABEL_REF/PLUS) and several returns are elided.  */
1416 s390_preferred_reload_class (op, class)
1418 enum reg_class class;
1420 /* This can happen if a floating point constant is being
1421 reloaded into an integer register. Leave well alone. */
1422 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1423 && class != FP_REGS)
1426 switch (GET_CODE (op))
1428 /* Constants we cannot reload must be forced into the
1429 literal pool. For constants we *could* handle directly,
1430 it might still be preferable to put them in the pool and
1431 use a memory-to-memory instruction.
1433 However, try to avoid needlessly allocating a literal
1434 pool in a routine that wouldn't otherwise need any.
1435 Heuristically, we assume that 64-bit leaf functions
1436 typically don't need a literal pool, all others do. */
1439 if (!legitimate_reload_constant_p (op))
1442 if (TARGET_64BIT && current_function_is_leaf)
1447 /* If a symbolic constant or a PLUS is reloaded,
1448 it is most likely being used as an address, so
1449 prefer ADDR_REGS. If 'class' is not a superset
1450 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1455 if (reg_class_subset_p (ADDR_REGS, class))
1467 /* Return the register class of a scratch register needed to
1468 load IN into a register of class CLASS in MODE.
1470 We need a temporary when loading a PLUS expression which
1471 is not a legitimate operand of the LOAD ADDRESS instruction. */
1474 s390_secondary_input_reload_class (class, mode, in)
1475 enum reg_class class ATTRIBUTE_UNUSED;
1476 enum machine_mode mode;
1479 if (s390_plus_operand (in, mode))
1485 /* Return true if OP is a PLUS that is not a legitimate
1486 operand for the LA instruction.
1487 OP is the current operation.
1488 MODE is the current operation mode. */
1491 s390_plus_operand (op, mode)
1493 enum machine_mode mode;
1495 if (!check_mode (op, &mode) || mode != Pmode)
1498 if (GET_CODE (op) != PLUS)
/* If LA can already handle it, no fix-up is required.  */
1501 if (legitimate_la_operand_p (op))
1507 /* Generate code to load SRC, which is PLUS that is not a
1508 legitimate operand for the LA instruction, into TARGET.
1509 SCRATCH may be used as scratch register. */
1512 s390_expand_plus_operand (target, src, scratch)
1513 register rtx target;
1515 register rtx scratch;
1518 struct s390_address ad;
1520 /* src must be a PLUS; get its two operands. */
1521 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1524 /* Check if any of the two operands is already scheduled
1525 for replacement by reload. This can happen e.g. when
1526 float registers occur in an address. */
1527 sum1 = find_replacement (&XEXP (src, 0));
1528 sum2 = find_replacement (&XEXP (src, 1));
1529 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1531 /* If the address is already strictly valid, there's nothing to do. */
1532 if (!s390_decompose_address (src, &ad)
1533 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1534 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1536 /* Otherwise, one of the operands cannot be an address register;
1537 we reload its value into the scratch register. */
/* Hard regs 1-15 are the usable address registers; anything outside
   that range must go through the scratch.  */
1538 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1540 emit_move_insn (scratch, sum1);
1543 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1545 emit_move_insn (scratch, sum2);
1549 /* According to the way these invalid addresses are generated
1550 in reload.c, it should never happen (at least on s390) that
1551 *neither* of the PLUS components, after find_replacements
1552 was applied, is an address register. */
1553 if (sum1 == scratch && sum2 == scratch)
1559 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1562 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1563 is only ever performed on addresses, so we can mark the
1564 sum as legitimate for LA in any case. */
1565 s390_load_address (target, src);
1569 /* Decompose a RTL expression ADDR for a memory address into
1570 its components, returned in OUT.
1572 Returns 0 if ADDR is not a valid memory address, nonzero
1573 otherwise. If OUT is NULL, don't return the components,
1574 but check for validity only.
1576 Note: Only addresses in canonical form are recognized.
1577 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1578 canonical form so that they will be recognized. */
/* NOTE(review): many interior lines (returns, brace structure, some
   branch bodies, and the stores into OUT besides out->pointer) are
   elided in this excerpt; verify structure against the full file. */
1581 s390_decompose_address (addr, out)
1583 struct s390_address *out;
1585 rtx base = NULL_RTX;
1586 rtx indx = NULL_RTX;
1587 rtx disp = NULL_RTX;
1588 int pointer = FALSE;
1590 /* Decompose address into base + index + displacement. */
1592 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1595 else if (GET_CODE (addr) == PLUS)
1597 rtx op0 = XEXP (addr, 0);
1598 rtx op1 = XEXP (addr, 1);
1599 enum rtx_code code0 = GET_CODE (op0);
1600 enum rtx_code code1 = GET_CODE (op1);
1602 if (code0 == REG || code0 == UNSPEC)
1604 if (code1 == REG || code1 == UNSPEC)
1606 indx = op0; /* index + base */
1612 base = op0; /* base + displacement */
1617 else if (code0 == PLUS)
1619 indx = XEXP (op0, 0); /* index + base + disp */
1620 base = XEXP (op0, 1);
1631 disp = addr; /* displacement */
1634 /* Validate base register. */
/* UNSPEC 101 wraps the literal pool base register; unwrap it and
   reject any other UNSPEC here. */
1637 if (GET_CODE (base) == UNSPEC)
1639 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1641 base = XVECEXP (base, 0, 0);
1645 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* These registers are known to hold pointers, which lets the LA
   heuristics below treat the address as a real pointer. */
1648 if (REGNO (base) == BASE_REGISTER
1649 || REGNO (base) == STACK_POINTER_REGNUM
1650 || REGNO (base) == FRAME_POINTER_REGNUM
1651 || ((reload_completed || reload_in_progress)
1652 && frame_pointer_needed
1653 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1654 || REGNO (base) == ARG_POINTER_REGNUM
1655 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1656 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1658 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1662 /* Validate index register. */
/* Same unwrapping and pointer-detection logic as for the base. */
1665 if (GET_CODE (indx) == UNSPEC)
1667 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1669 indx = XVECEXP (indx, 0, 0)
1673 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1676 if (REGNO (indx) == BASE_REGISTER
1677 || REGNO (indx) == STACK_POINTER_REGNUM
1678 || REGNO (indx) == FRAME_POINTER_REGNUM
1679 || ((reload_completed || reload_in_progress)
1680 && frame_pointer_needed
1681 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1682 || REGNO (indx) == ARG_POINTER_REGNUM
1683 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1684 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1686 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1690 /* Validate displacement. */
1693 /* Allow integer constant in range. */
1694 if (GET_CODE (disp) == CONST_INT)
1696 /* If the argument pointer is involved, the displacement will change
1697 later anyway as the argument pointer gets eliminated. This could
1698 make a valid displacement invalid, but it is more likely to make
1699 an invalid displacement valid, because we sometimes access the
1700 register save area via negative offsets to the arg pointer.
1701 Thus we don't check the displacement for validity here. If after
1702 elimination the displacement turns out to be invalid after all,
1703 this is fixed up by reload in any case. */
1704 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
/* D-field of the base-displacement format is a 12-bit unsigned
   value: 0 <= disp < 4096. */
1706 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1711 /* In the small-PIC case, the linker converts @GOT12
1712 offsets to possible displacements. */
1713 else if (GET_CODE (disp) == CONST
1714 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1715 && XINT (XEXP (disp, 0), 1) == 110)
1723 /* Accept chunkfied literal pool symbol references. */
1724 else if (GET_CODE (disp) == CONST
1725 && GET_CODE (XEXP (disp, 0)) == MINUS
1726 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1727 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1732 /* Likewise if a constant offset is present. */
1733 else if (GET_CODE (disp) == CONST
1734 && GET_CODE (XEXP (disp, 0)) == PLUS
1735 && GET_CODE (XEXP (disp, 0), 1)) == CONST_INT
1736 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
1737 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1738 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1743 /* We can convert literal pool addresses to
1744 displacements by basing them off the base register. */
1747 /* In some cases, we can accept an additional
1748 small constant offset. Split these off here. */
1750 unsigned int offset = 0;
1752 if (GET_CODE (disp) == CONST
1753 && GET_CODE (XEXP (disp, 0)) == PLUS
1754 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
1756 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
1757 disp = XEXP (XEXP (disp, 0), 0);
1760 /* Now we must have a literal pool address. */
1761 if (GET_CODE (disp) != SYMBOL_REF
1762 || !CONSTANT_POOL_ADDRESS_P (disp))
1765 /* In 64-bit PIC mode we cannot accept symbolic
1766 constants in the constant pool. */
1767 if (TARGET_64BIT && flag_pic
1768 && SYMBOLIC_CONST (get_pool_constant (disp)))
1771 /* If we have an offset, make sure it does not
1772 exceed the size of the constant pool entry. */
1773 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
1776 /* Either base or index must be free to
1777 hold the base register. */
1781 /* Convert the address. */
/* Rewrite the pool reference as BASE_REGISTER + UNSPEC(100) so it
   matches the literal-pool addressing form used elsewhere. */
1783 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
1785 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1787 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
1788 disp = gen_rtx_CONST (Pmode, disp);
1791 disp = plus_constant (disp, offset);
1805 out->pointer = pointer;
1811 /* Return nonzero if ADDR is a valid memory address.
1812 STRICT specifies whether strict register checking applies. */
/* NOTE(review): the return statements and the strict/non-strict
   branch structure are elided in this excerpt. */
1815 legitimate_address_p (mode, addr, strict)
1816 enum machine_mode mode ATTRIBUTE_UNUSED;
1820 struct s390_address ad;
1821 if (!s390_decompose_address (addr, &ad))
/* Strict checking: base and index must be hard registers valid in
   that role (post-reload view). */
1826 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1828 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
/* Non-strict checking: pseudos are still acceptable. */
1833 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
1835 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
1842 /* Return 1 if OP is a valid operand for the LA instruction.
1843 In 31-bit, we need to prove that the result is used as an
1844 address, as LA performs only a 31-bit addition. */
/* NOTE(review): the return statements are elided in this excerpt. */
1847 legitimate_la_operand_p (op)
1850 struct s390_address addr;
1851 if (!s390_decompose_address (op, &addr))
/* On 64-bit LA is always safe; on 31-bit we rely on the decomposer
   having proven the operand is a genuine pointer (addr.pointer). */
1854 if (TARGET_64BIT || addr.pointer)
1860 /* Return 1 if OP is a valid operand for the LA instruction,
1861 and we prefer to use LA over addition to compute it.
1862 If STRICT is true, only accept operands that will never
1863 change to something we cannot recognize as preferred. */
/* NOTE(review): the return statements and the use of STRICT in the
   body are elided in this excerpt. */
1866 preferred_la_operand_p (op, strict)
1870 struct s390_address addr;
1871 if (!s390_decompose_address (op, &addr))
1874 if (!TARGET_64BIT && !addr.pointer)
/* REG_POINTER on the base or index register indicates the sum is
   address arithmetic, for which LA is the preferred instruction. */
1881 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
1882 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
1888 /* Emit a forced load-address operation to load SRC into DST.
1889 This will use the LOAD ADDRESS instruction even in situations
1890 where legitimate_la_operand_p (SRC) returns false. */
/* NOTE(review): the condition selecting between the two emits is
   elided -- presumably TARGET_64BIT picks the plain move and 31-bit
   uses the force_la_31 pattern; confirm against the full file. */
1893 s390_load_address (dst, src)
1898 emit_move_insn (dst, src);
1900 emit_insn (gen_force_la_31 (dst, src));
1903 /* Return a legitimate reference for ORIG (an address) using the
1904 register REG. If REG is 0, a new pseudo is generated.
1906 There are two types of references that must be handled:
1908 1. Global data references must load the address from the GOT, via
1909 the PIC reg. An insn is emitted to do this load, and the reg is
1912 2. Static data references, constant pool addresses, and code labels
1913 compute the address as an offset from the GOT, whose base is in
1914 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
1915 differentiate them from global data objects. The returned
1916 address is the PIC reg + an unspec constant.
1918 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
1919 reg also appears in the address. */
/* NOTE(review): many interior lines (declarations of addr/new/base,
   braces, returns, TARGET_64BIT guards, and switch case labels) are
   elided in this excerpt.  UNSPEC numbers seen here: 100 = literal
   pool offset, 101 = literal pool base reg, 110 = @GOT12,
   111 = @GOTENT, 112 = @GOT, 114 = lt-relative @PLT. */
1922 legitimize_pic_address (orig, reg)
/* Case 1: local symbols (labels, static data, pool entries). */
1930 if (GET_CODE (addr) == LABEL_REF
1931 || (GET_CODE (addr) == SYMBOL_REF
1932 && (SYMBOL_REF_FLAG (addr)
1933 || CONSTANT_POOL_ADDRESS_P (addr))))
1935 /* This is a local symbol. */
1938 /* Access local symbols PC-relative via LARL.
1939 This is the same as in the non-PIC case, so it is
1940 handled automatically ... */
1944 /* Access local symbols relative to the literal pool. */
1946 rtx temp = reg? reg : gen_reg_rtx (Pmode);
1948 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
1949 addr = gen_rtx_CONST (SImode, addr);
1950 addr = force_const_mem (SImode, addr);
1951 emit_move_insn (temp, addr);
1953 base = gen_rtx_REG (Pmode, BASE_REGISTER);
1954 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
1955 new = gen_rtx_PLUS (Pmode, base, temp);
1959 emit_move_insn (reg, new);
/* Case 2: global symbols -- address must come from the GOT. */
1964 else if (GET_CODE (addr) == SYMBOL_REF)
1967 reg = gen_reg_rtx (Pmode);
1971 /* Assume GOT offset < 4k. This is handled the same way
1972 in both 31- and 64-bit code (@GOT12). */
1974 if (reload_in_progress || reload_completed)
1975 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
1977 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
1978 new = gen_rtx_CONST (Pmode, new);
1979 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
1980 new = gen_rtx_MEM (Pmode, new);
1981 RTX_UNCHANGING_P (new) = 1;
1982 emit_move_insn (reg, new);
1985 else if (TARGET_64BIT)
1987 /* If the GOT offset might be >= 4k, we determine the position
1988 of the GOT entry via a PC-relative LARL (@GOTENT). */
1990 rtx temp = gen_reg_rtx (Pmode);
1992 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
1993 new = gen_rtx_CONST (Pmode, new);
1994 emit_move_insn (temp, new);
1996 new = gen_rtx_MEM (Pmode, temp);
1997 RTX_UNCHANGING_P (new) = 1;
1998 emit_move_insn (reg, new);
2003 /* If the GOT offset might be >= 4k, we have to load it
2004 from the literal pool (@GOT). */
2006 rtx temp = gen_reg_rtx (Pmode);
2008 if (reload_in_progress || reload_completed)
2009 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2011 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
2012 addr = gen_rtx_CONST (SImode, addr);
2013 addr = force_const_mem (SImode, addr);
2014 emit_move_insn (temp, addr);
2016 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2017 new = gen_rtx_MEM (Pmode, new);
2018 RTX_UNCHANGING_P (new) = 1;
2019 emit_move_insn (reg, new);
/* Remaining cases: CONST wrappers around UNSPECs or PLUS sums. */
2025 if (GET_CODE (addr) == CONST)
2027 addr = XEXP (addr, 0);
2028 if (GET_CODE (addr) == UNSPEC)
2030 if (XVECLEN (addr, 0) != 1)
2032 switch (XINT (addr, 1))
2034 /* If someone moved an @GOT or lt-relative UNSPEC
2035 out of the literal pool, force them back in. */
2039 new = force_const_mem (SImode, orig);
2042 /* @GOTENT is OK as is. */
2046 /* @PLT is OK as is on 64-bit, must be converted to
2047 lt-relative PLT on 31-bit. */
2051 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2053 addr = XVECEXP (addr, 0, 0);
2054 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
2055 addr = gen_rtx_CONST (SImode, addr);
2056 addr = force_const_mem (SImode, addr);
2057 emit_move_insn (temp, addr);
2059 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2060 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2061 new = gen_rtx_PLUS (Pmode, base, temp);
2065 emit_move_insn (reg, new);
2071 /* Everything else cannot happen. */
2076 else if (GET_CODE (addr) != PLUS)
2079 if (GET_CODE (addr) == PLUS)
2081 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2082 /* Check first to see if this is a constant offset
2083 from a local symbol reference. */
2084 if ((GET_CODE (op0) == LABEL_REF
2085 || (GET_CODE (op0) == SYMBOL_REF
2086 && (SYMBOL_REF_FLAG (op0)
2087 || CONSTANT_POOL_ADDRESS_P (op0))))
2088 && GET_CODE (op1) == CONST_INT)
2092 if (INTVAL (op1) & 1)
2094 /* LARL can't handle odd offsets, so emit a
2095 pair of LARL and LA. */
2096 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* Offsets outside the 12-bit LA range are rebased to an even
   anchor one below the requested offset. */
2098 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
2100 int even = INTVAL (op1) - 1;
2101 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2102 op0 = gen_rtx_CONST (Pmode, op0);
2106 emit_move_insn (temp, op0);
2107 new = gen_rtx_PLUS (Pmode, temp, op1);
2111 emit_move_insn (reg, new);
2117 /* If the offset is even, we can just use LARL.
2118 This will happen automatically. */
2123 /* Access local symbols relative to the literal pool. */
2125 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2127 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
2128 addr = gen_rtx_PLUS (SImode, addr, op1);
2129 addr = gen_rtx_CONST (SImode, addr);
2130 addr = force_const_mem (SImode, addr);
2131 emit_move_insn (temp, addr);
2133 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2134 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2135 new = gen_rtx_PLUS (Pmode, base, temp);
2139 emit_move_insn (reg, new);
2145 /* Now, check whether it is an LT-relative symbol plus offset
2146 that was pulled out of the literal pool. Force it back in. */
2148 else if (GET_CODE (op0) == UNSPEC
2149 && GET_CODE (op1) == CONST_INT)
2151 if (XVECLEN (op0, 0) != 1)
2153 if (XINT (op0, 1) != 100)
2156 new = force_const_mem (SImode, orig);
2159 /* Otherwise, compute the sum. */
2162 base = legitimize_pic_address (XEXP (addr, 0), reg);
2163 new = legitimize_pic_address (XEXP (addr, 1),
2164 base == reg ? NULL_RTX : reg);
2165 if (GET_CODE (new) == CONST_INT)
2166 new = plus_constant (base, INTVAL (new));
/* Re-associate so the constant part ends up outermost. */
2169 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2171 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2172 new = XEXP (new, 1);
2174 new = gen_rtx_PLUS (Pmode, base, new);
2177 if (GET_CODE (new) == CONST)
2178 new = XEXP (new, 0);
2179 new = force_operand (new, 0);
2186 /* Emit insns to move operands[1] into operands[0]. */
/* NOTE(review): declaration of OPERANDS and surrounding braces are
   elided in this excerpt. */
2189 emit_pic_move (operands, mode)
2191 enum machine_mode mode ATTRIBUTE_UNUSED;
/* During/after reload (no_new_pseudos) we must reuse the destination
   itself as the temporary instead of creating a pseudo. */
2193 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic constant stored to memory must go through a register
   first; the PIC legitimization below then rewrites it. */
2195 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2196 operands[1] = force_reg (Pmode, operands[1]);
2198 operands[1] = legitimize_pic_address (operands[1], temp);
2201 /* Try machine-dependent ways of modifying an illegitimate address X
2202 to be legitimate. If we find one, return the new, valid address.
2204 OLDX is the address as it was before break_out_memory_refs was called.
2205 In some cases it is useful to look at this to decide what needs to be done.
2207 MODE is the mode of the operand pointed to by X.
2209 When -fpic is used, special handling is needed for symbolic references.
2210 See comments by legitimize_pic_address for details. */
/* NOTE(review): several interior lines (flag_pic guard, returns,
   braces) are elided in this excerpt. */
2213 legitimize_address (x, oldx, mode)
2215 register rtx oldx ATTRIBUTE_UNUSED;
2216 enum machine_mode mode ATTRIBUTE_UNUSED;
2218 rtx constant_term = const0_rtx;
/* PIC: route any symbolic (sub)expression through the PIC machinery. */
2222 if (SYMBOLIC_CONST (x)
2223 || (GET_CODE (x) == PLUS
2224 && (SYMBOLIC_CONST (XEXP (x, 0))
2225 || SYMBOLIC_CONST (XEXP (x, 1)))))
2226 x = legitimize_pic_address (x, 0);
2228 if (legitimate_address_p (mode, x, FALSE))
2232 x = eliminate_constant_term (x, &constant_term);
2234 /* Optimize loading of large displacements by splitting them
2235 into the multiple of 4K and the rest; this allows the
2236 former to be CSE'd if possible.
2238 Don't do this if the displacement is added to a register
2239 pointing into the stack frame, as the offsets will
2240 change later anyway. */
2242 if (GET_CODE (constant_term) == CONST_INT
2243 && (INTVAL (constant_term) < 0
2244 || INTVAL (constant_term) >= 4096)
2245 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* lower keeps the 12 displacement bits; upper = value with those
   bits cleared (XOR with lower), i.e. the 4K-aligned part. */
2247 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2248 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2250 rtx temp = gen_reg_rtx (Pmode);
2251 rtx val = force_operand (GEN_INT (upper), temp);
2253 emit_move_insn (temp, val);
2255 x = gen_rtx_PLUS (Pmode, x, temp);
2256 constant_term = GEN_INT (lower);
/* Force the non-register side of a PLUS into a register. */
2259 if (GET_CODE (x) == PLUS)
2261 if (GET_CODE (XEXP (x, 0)) == REG)
2263 register rtx temp = gen_reg_rtx (Pmode);
2264 register rtx val = force_operand (XEXP (x, 1), temp);
2266 emit_move_insn (temp, val);
2268 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2271 else if (GET_CODE (XEXP (x, 1)) == REG)
2273 register rtx temp = gen_reg_rtx (Pmode);
2274 register rtx val = force_operand (XEXP (x, 0), temp);
2276 emit_move_insn (temp, val);
2278 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2282 if (constant_term != const0_rtx)
2283 x = gen_rtx_PLUS (Pmode, x, constant_term);
2288 /* Emit code to move LEN bytes from DST to SRC. */
/* NOTE(review): the comment above has source and destination swapped
   relative to the argument order (dst, src, len); the code moves from
   SRC to DST.  Several interior lines (parameter declarations,
   braces, returns, protect_from_queue calls) are elided here. */
2291 s390_expand_movstr (dst, src, len)
/* Select 31- vs 64-bit variants of the MVC/MVCLE patterns once. */
2296 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2297 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2298 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2299 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Small constant length: a single MVC-style insn (length operand is
   encoded as LEN - 1). */
2302 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2304 if (INTVAL (len) > 0)
2305 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Variable length with MVCLE available: register pairs hold
   address (high part) and length (low part). */
2308 else if (TARGET_MVCLE)
2310 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2311 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2312 rtx reg0 = gen_reg_rtx (double_mode);
2313 rtx reg1 = gen_reg_rtx (double_mode);
2315 emit_move_insn (gen_highpart (single_mode, reg0),
2316 force_operand (XEXP (dst, 0), NULL_RTX));
2317 emit_move_insn (gen_highpart (single_mode, reg1),
2318 force_operand (XEXP (src, 0), NULL_RTX));
2320 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2321 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2323 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* General fallback: loop moving 256-byte blocks, then the
   remaining count - 1 bytes via one short insn. */
2328 rtx dst_addr, src_addr, count, blocks, temp;
2329 rtx end_label = gen_label_rtx ();
2330 enum machine_mode mode;
2333 mode = GET_MODE (len);
2334 if (mode == VOIDmode)
2337 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2341 dst_addr = gen_reg_rtx (Pmode);
2342 src_addr = gen_reg_rtx (Pmode);
2343 count = gen_reg_rtx (mode);
2344 blocks = gen_reg_rtx (mode);
2346 convert_move (count, len, 1);
2347 emit_cmp_and_jump_insns (count, const0_rtx,
2348 EQ, NULL_RTX, mode, 1, end_label);
2350 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2351 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2352 dst = change_address (dst, VOIDmode, dst_addr);
2353 src = change_address (src, VOIDmode, src_addr);
/* count = len - 1; blocks = count >> 8 (number of full 256-byte
   chunks for the loop). */
2355 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2357 emit_move_insn (count, temp);
2359 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2361 emit_move_insn (blocks, temp);
2363 expand_start_loop (1);
2364 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2365 make_tree (type, blocks),
2366 make_tree (type, const0_rtx)));
2368 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2369 s390_load_address (dst_addr,
2370 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2371 s390_load_address (src_addr,
2372 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2374 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2376 emit_move_insn (blocks, temp);
2380 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2381 emit_label (end_label);
2385 /* Emit code to clear LEN bytes at DST. */
/* NOTE(review): parameter declarations, braces, and returns are
   elided in this excerpt.  Structure mirrors s390_expand_movstr:
   short XC-style insn, MVCLE with a zero-length source, or a
   256-byte-block loop. */
2388 s390_expand_clrstr (dst, len)
2392 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2393 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2394 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2395 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2398 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2400 if (INTVAL (len) > 0)
2401 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2404 else if (TARGET_MVCLE)
2406 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2407 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2408 rtx reg0 = gen_reg_rtx (double_mode);
2409 rtx reg1 = gen_reg_rtx (double_mode);
2411 emit_move_insn (gen_highpart (single_mode, reg0),
2412 force_operand (XEXP (dst, 0), NULL_RTX));
2413 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* Source operand pair is all zero: MVCLE then pads the destination
   with the (zero) pad byte, i.e. clears it. */
2415 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2416 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2418 emit_insn ((*gen_long) (reg0, reg1, reg0));
2423 rtx dst_addr, src_addr, count, blocks, temp;
2424 rtx end_label = gen_label_rtx ();
2425 enum machine_mode mode;
2428 mode = GET_MODE (len);
2429 if (mode == VOIDmode)
2432 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2436 dst_addr = gen_reg_rtx (Pmode);
2437 src_addr = gen_reg_rtx (Pmode);
2438 count = gen_reg_rtx (mode);
2439 blocks = gen_reg_rtx (mode);
2441 convert_move (count, len, 1);
2442 emit_cmp_and_jump_insns (count, const0_rtx,
2443 EQ, NULL_RTX, mode, 1, end_label);
2445 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2446 dst = change_address (dst, VOIDmode, dst_addr);
2448 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2450 emit_move_insn (count, temp);
2452 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2454 emit_move_insn (blocks, temp);
2456 expand_start_loop (1);
2457 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2458 make_tree (type, blocks),
2459 make_tree (type, const0_rtx)));
2461 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2462 s390_load_address (dst_addr,
2463 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2465 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2467 emit_move_insn (blocks, temp);
2471 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2472 emit_label (end_label);
2476 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2477 and return the result in TARGET. */
/* NOTE(review): parameter declarations, braces, and returns are
   elided in this excerpt.  Structure mirrors the other block
   operations, with an extra early-exit branch on inequality in the
   loop and a cmpint insn converting the CC into TARGET. */
2480 s390_expand_cmpstr (target, op0, op1, len)
2486 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2487 TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2488 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2489 TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
2490 rtx (*gen_result) PARAMS ((rtx)) =
2491 GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
2493 op0 = protect_from_queue (op0, 0);
2494 op1 = protect_from_queue (op1, 0);
2495 len = protect_from_queue (len, 0);
2497 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2499 if (INTVAL (len) > 0)
2501 emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2502 emit_insn ((*gen_result) (target));
/* Zero-length compare: memcmp of 0 bytes is always equal. */
2505 emit_move_insn (target, const0_rtx);
2508 else if (TARGET_MVCLE)
2510 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2511 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2512 rtx reg0 = gen_reg_rtx (double_mode);
2513 rtx reg1 = gen_reg_rtx (double_mode);
2515 emit_move_insn (gen_highpart (single_mode, reg0),
2516 force_operand (XEXP (op0, 0), NULL_RTX));
2517 emit_move_insn (gen_highpart (single_mode, reg1),
2518 force_operand (XEXP (op1, 0), NULL_RTX));
2520 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2521 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2523 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2524 emit_insn ((*gen_result) (target));
2529 rtx addr0, addr1, count, blocks, temp;
2530 rtx end_label = gen_label_rtx ();
2531 enum machine_mode mode;
2534 mode = GET_MODE (len);
2535 if (mode == VOIDmode)
2538 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2542 addr0 = gen_reg_rtx (Pmode);
2543 addr1 = gen_reg_rtx (Pmode);
2544 count = gen_reg_rtx (mode);
2545 blocks = gen_reg_rtx (mode);
2547 convert_move (count, len, 1);
2548 emit_cmp_and_jump_insns (count, const0_rtx,
2549 EQ, NULL_RTX, mode, 1, end_label);
2551 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
2552 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
2553 op0 = change_address (op0, VOIDmode, addr0);
2554 op1 = change_address (op1, VOIDmode, addr1);
2556 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2558 emit_move_insn (count, temp);
2560 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2562 emit_move_insn (blocks, temp);
2564 expand_start_loop (1);
2565 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2566 make_tree (type, blocks),
2567 make_tree (type, const0_rtx)));
2569 emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump on the CC register (reg 33, CCSmode):
   leave the loop as soon as a 256-byte chunk differs. */
2570 temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
2571 temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
2572 gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
2573 temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
2574 emit_jump_insn (temp);
2576 s390_load_address (addr0,
2577 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
2578 s390_load_address (addr1,
2579 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
2581 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2583 emit_move_insn (blocks, temp);
2587 emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
2588 emit_label (end_label);
2590 emit_insn ((*gen_result) (target));
2594 /* In the name of slightly smaller debug output, and to cater to
2595 general assembler losage, recognize various UNSPEC sequences
2596 and turn them back into a direct symbol reference. */
/* NOTE(review): declarations of x/y, the MEM unwrap, and the
   fall-through return are elided in this excerpt. */
2599 s390_simplify_dwarf_addr (orig_x)
2604 if (GET_CODE (x) != MEM)
/* pic_reg + CONST(UNSPEC 110) is an @GOT12 reference; recover the
   bare symbol. */
2608 if (GET_CODE (x) == PLUS
2609 && GET_CODE (XEXP (x, 1)) == CONST
2610 && GET_CODE (XEXP (x, 0)) == REG
2611 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
2613 y = XEXP (XEXP (x, 1), 0);
2614 if (GET_CODE (y) == UNSPEC
2615 && XINT (y, 1) == 110)
2616 return XVECEXP (y, 0, 0);
/* CONST(UNSPEC 111) is an @GOTENT reference; likewise unwrap. */
2620 if (GET_CODE (x) == CONST)
2623 if (GET_CODE (y) == UNSPEC
2624 && XINT (y, 1) == 111)
2625 return XVECEXP (y, 0, 0);
2632 /* Output symbolic constant X in assembler syntax to
2633 stdio stream FILE. */
/* NOTE(review): parameter declarations, case labels, and breaks are
   elided in this excerpt; the rtx-code and UNSPEC-number cases below
   must be matched up against the full file. */
2636 s390_output_symbolic_const (file, x)
2640 switch (GET_CODE (x))
2645 s390_output_symbolic_const (file, XEXP (x, 0));
/* PLUS: emit "op0+op1". */
2649 s390_output_symbolic_const (file, XEXP (x, 0));
2650 fprintf (file, "+");
2651 s390_output_symbolic_const (file, XEXP (x, 1));
/* MINUS: emit "op0-op1". */
2655 s390_output_symbolic_const (file, XEXP (x, 0));
2656 fprintf (file, "-");
2657 s390_output_symbolic_const (file, XEXP (x, 1));
2664 output_addr_const (file, x);
2668 if (XVECLEN (x, 0) != 1)
2669 output_operand_lossage ("invalid UNSPEC as operand (1)");
2670 switch (XINT (x, 1))
/* Literal-pool-relative offset: "sym - pool_label". */
2674 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2675 fprintf (file, "-");
2676 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
/* Reverse form: "pool_label - sym". */
2679 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
2680 fprintf (file, "-");
2681 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2684 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2685 fprintf (file, "@GOT12");
2688 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2689 fprintf (file, "@GOTENT");
2692 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2693 fprintf (file, "@GOT");
2696 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2697 fprintf (file, "@PLT");
2700 s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
2701 fprintf (file, "@PLT-");
2702 s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
2705 output_operand_lossage ("invalid UNSPEC as operand (2)");
2711 fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
2716 /* Output address operand ADDR in assembler syntax to
2717 stdio stream FILE. */
/* NOTE(review): parameter declarations and some else branches are
   elided in this excerpt. */
2720 print_operand_address (file, addr)
2724 struct s390_address ad;
2726 if (!s390_decompose_address (addr, &ad)
2727 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2728 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
2729 output_operand_lossage ("Cannot decompose address.");
/* Displacement first, "0" if absent ... */
2732 s390_output_symbolic_const (file, ad.disp);
2734 fprintf (file, "0");
/* ... then "(index,base)" or "(base)" in S/390 assembler syntax. */
2736 if (ad.base && ad.indx)
2737 fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
2738 reg_names[REGNO (ad.base)]);
2740 fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
2743 /* Output operand X in assembler syntax to stdio stream FILE.
2744 CODE specified the format flag. The following format flags
2747 'C': print opcode suffix for branch condition.
2748 'D': print opcode suffix for inverse branch condition.
2749 'O': print only the displacement of a memory reference.
2750 'R': print only the base register of a memory reference.
2751 'N': print the second word of a DImode operand.
2752 'M': print the second word of a TImode operand.
2754 'b': print integer X as if it's an unsigned byte.
2755 'x': print integer X as if it's an unsigned word.
2756 'h': print integer X as if it's a signed word. */
/* NOTE(review): parameter declarations, the outer switch on CODE,
   case labels/breaks, and some lossage branches are elided in this
   excerpt. */
2759 print_operand (file, x, code)
2767 fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
2771 fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'O': displacement only, "0" when the address has none. */
2776 struct s390_address ad;
2778 if (GET_CODE (x) != MEM
2779 || !s390_decompose_address (XEXP (x, 0), &ad)
2780 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2785 s390_output_symbolic_const (file, ad.disp);
2787 fprintf (file, "0");
/* 'R': base register only, "0" when the address has none. */
2793 struct s390_address ad;
2795 if (GET_CODE (x) != MEM
2796 || !s390_decompose_address (XEXP (x, 0), &ad)
2797 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2802 fprintf (file, "%s", reg_names[REGNO (ad.base)]);
2804 fprintf (file, "0");
/* 'N': second word -- next register, or memory at offset +4. */
2809 if (GET_CODE (x) == REG)
2810 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2811 else if (GET_CODE (x) == MEM)
2812 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second doubleword -- next register, or memory at offset +8. */
2818 if (GET_CODE (x) == REG)
2819 x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
2820 else if (GET_CODE (x) == MEM)
2821 x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
2827 switch (GET_CODE (x))
2830 fprintf (file, "%s", reg_names[REGNO (x)]);
2834 output_address (XEXP (x, 0));
2841 s390_output_symbolic_const (file, x);
2846 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
2847 else if (code == 'x')
2848 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
/* 'h': sign-extend the low 16 bits (the XOR/subtract idiom). */
2849 else if (code == 'h')
2850 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
2852 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2856 if (GET_MODE (x) != VOIDmode)
2859 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
2860 else if (code == 'x')
2861 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
2862 else if (code == 'h')
2863 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
2869 fatal_insn ("UNKNOWN in print_operand !?", x);
2874 /* Target hook for assembling integer objects. We need to define it
2875 here to work a round a bug in some versions of GAS, which couldn't
2876 handle values smaller than INT_MIN when printed in decimal. */
/* NOTE(review): parameter declarations and the `return true;` of the
   workaround branch are elided in this excerpt. */
2879 s390_assemble_integer (x, size, aligned_p)
/* Only the problematic case -- aligned 8-byte values below INT_MIN --
   is emitted by hand, in hex, to sidestep the GAS decimal bug. */
2884 if (size == 8 && aligned_p
2885 && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
2887 fputs ("\t.quad\t", asm_out_file);
2888 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2889 putc ('\n', asm_out_file);
2892 return default_assemble_integer (x, size, aligned_p);
2896 #define DEBUG_SCHED 0
2898 /* Returns true if register REGNO is used for forming
2899 a memory address in expression X. */
2902 reg_used_in_mem_p (regno, x)
2906 enum rtx_code code = GET_CODE (x);
 /* NOTE(review): elided here — the MEM case (and a SET whose destination
    is the PC) tests whether REGNO occurs inside the address expression.  */
2912 if (refers_to_regno_p (regno, regno+1,
2916 else if (code == SET
2917 && GET_CODE (SET_DEST (x)) == PC)
2919 if (refers_to_regno_p (regno, regno+1,
 /* Otherwise recurse into every sub-rtx, guided by the format string.  */
2924 fmt = GET_RTX_FORMAT (code);
2925 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2928 && reg_used_in_mem_p (regno, XEXP (x, i)))
2931 else if (fmt[i] == 'E')
2932 for (j = 0; j < XVECLEN (x, i); j++)
2933 if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
2939 /* Returns true if expression DEP_RTX sets an address register
2940 used by instruction INSN to address memory. */
2943 addr_generation_dependency_p (dep_rtx, insn)
2949 if (GET_CODE (dep_rtx) == SET)
2951 target = SET_DEST (dep_rtx);
 /* Strip STRICT_LOW_PART and SUBREG wrappers to reach the register.  */
2952 if (GET_CODE (target) == STRICT_LOW_PART)
2953 target = XEXP (target, 0);
2954 while (GET_CODE (target) == SUBREG)
2955 target = SUBREG_REG (target);
2957 if (GET_CODE (target) == REG)
2959 int regno = REGNO (target);
 /* For an LA-type insn, inspect the address computation itself
    (the first SET of a two-element PARALLEL, if any) ...  */
2961 if (get_attr_type (insn) == TYPE_LA)
2963 pat = PATTERN (insn);
2964 if (GET_CODE (pat) == PARALLEL)
2966 if (XVECLEN (pat, 0) != 2)
2968 pat = XVECEXP (pat, 0, 0);
2970 if (GET_CODE (pat) == SET)
2971 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
 /* ... for other memory-type insns, check all address uses of REGNO.  */
2975 else if (get_attr_atype (insn) == ATYPE_MEM)
2976 return reg_used_in_mem_p (regno, PATTERN (insn));
2983 /* Return the modified cost of the dependency of instruction INSN
2984 on instruction DEP_INSN through the link LINK. COST is the
2985 default cost of that dependency.
2987 Data dependencies are all handled without delay. However, if a
2988 register is modified and subsequently used as base or index
2989 register of a memory reference, at least 4 cycles need to pass
2990 between setting and using the register to avoid pipeline stalls.
2991 An exception is the LA instruction. An address generated by LA can
2992 be used by introducing only a one cycle stall on the pipeline. */
2995 s390_adjust_cost (insn, link, dep_insn, cost)
3004 /* If the dependence is an anti-dependence, there is no cost. For an
3005 output dependence, there is sometimes a cost, but it doesn't seem
3006 worth handling those few cases. */
3008 if (REG_NOTE_KIND (link) != 0)
3011 /* If we can't recognize the insns, we can't really do anything. */
3012 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3015 dep_rtx = PATTERN (dep_insn);
 /* Single SET: add the address-generation penalty (1 cycle after LA,
    4 cycles otherwise) if INSN uses the register set here as address.  */
3017 if (GET_CODE (dep_rtx) == SET)
3019 if (addr_generation_dependency_p (dep_rtx, insn))
3021 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3024 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n",
3026 debug_rtx (dep_insn);
 /* PARALLEL: apply the same check to each element.  */
3031 else if (GET_CODE (dep_rtx) == PARALLEL)
3033 for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3035 if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i),
3038 cost += (get_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3041 fprintf (stderr, "\n\nAddress dependency detected: cost %d\n"
3043 debug_rtx (dep_insn);
3054 /* A C statement (sans semicolon) to update the integer scheduling priority
3055 INSN_PRIORITY (INSN). Reduce the priority to execute the INSN earlier,
3056 increase the priority to execute INSN later. Do not define this macro if
3057 you do not need to adjust the scheduling priorities of insns.
3059 A LA instruction may be scheduled later, since the pipeline bypasses the
3060 calculated value. */
3063 s390_adjust_priority (insn, priority)
3064 rtx insn ATTRIBUTE_UNUSED;
 /* Only real insns with a meaningful pattern are adjusted.  */
3067 if (! INSN_P (insn))
3070 if (GET_CODE (PATTERN (insn)) == USE
3071 || GET_CODE (PATTERN (insn)) == CLOBBER)
3074 switch (get_attr_type (insn))
3080 if (priority >= 0 && priority < 0x01000000)
3084 /* LM in epilogue should never be scheduled. This
3085 is due to literal access done in function body.
3086 The usage of register 13 is not mentioned explicitly,
3087 leading to scheduling 'LM' across these instructions.
3089 priority = 0x7fffffff;
3097 /* Split all branches that exceed the maximum distance.
3098 Returns true if this created a new literal pool entry.
3100 Code generated by this routine is allowed to use
3101 TEMP_REG as temporary scratch register. If this is
3102 done, TEMP_USED is set to true. */
3105 s390_split_branches (temp_reg, temp_used)
3109 int new_literal = 0;
3110 rtx insn, pat, tmp, target;
3113 /* We need correct insn addresses. */
3115 shorten_branches (get_insns ());
3117 /* Find all branches that exceed 64KB, and split them. */
3119 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3121 if (GET_CODE (insn) != JUMP_INSN)
3124 pat = PATTERN (insn);
3125 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3126 pat = XVECEXP (pat, 0, 0);
3127 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
 /* Locate the label operand: either a direct jump, or one arm
    of an IF_THEN_ELSE (conditional jump).  */
3130 if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3132 label = &SET_SRC (pat);
3134 else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3136 if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3137 label = &XEXP (SET_SRC (pat), 1);
3138 else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3139 label = &XEXP (SET_SRC (pat), 2);
 /* Short branches (fit in the relative-branch range) need no change.  */
3146 if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
 /* Strategy 1: load the target address directly into TEMP_REG.  */
3153 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3154 INSN_ADDRESSES_NEW (tmp, -1);
 /* Strategy 2: load the target address from the literal pool.  */
3161 tmp = force_const_mem (Pmode, *label);
3162 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3163 INSN_ADDRESSES_NEW (tmp, -1);
 /* Strategy 3 (31-bit PIC): load a pool-relative offset (unspec 104)
    and add the pool base register to form the address.  */
3170 tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3171 tmp = gen_rtx_CONST (SImode, tmp);
3172 tmp = force_const_mem (SImode, tmp);
3173 tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3174 INSN_ADDRESSES_NEW (tmp, -1);
3176 target = gen_rtx_REG (Pmode, BASE_REGISTER);
3177 target = gen_rtx_PLUS (Pmode, target, temp_reg);
 /* Redirect the branch to the computed target.  */
3180 if (!validate_change (insn, label, target, 0))
3188 /* Find a literal pool symbol referenced in RTX X, and store
3189 it at REF. Will abort if X contains references to more than
3190 one such pool symbol; multiple references to the same symbol
3191 are allowed, however.
3193 The rtx pointed to by REF must be initialized to NULL_RTX
3194 by the caller before calling this routine. */
3197 find_constant_pool_ref (x, ref)
 /* A pool reference is a SYMBOL_REF flagged as a constant pool address.  */
3204 if (GET_CODE (x) == SYMBOL_REF
3205 && CONSTANT_POOL_ADDRESS_P (x))
3207 if (*ref == NULL_RTX)
 /* Recurse into all sub-rtxs.  */
3213 fmt = GET_RTX_FORMAT (GET_CODE (x));
3214 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3218 find_constant_pool_ref (XEXP (x, i), ref);
3220 else if (fmt[i] == 'E')
3222 for (j = 0; j < XVECLEN (x, i); j++)
3223 find_constant_pool_ref (XVECEXP (x, i, j), ref);
3228 /* Replace every reference to the literal pool symbol REF
3229 in X by the address ADDR. Fix up MEMs as required. */
3232 replace_constant_pool_ref (x, ref, addr)
3243 /* Literal pool references can only occur inside a MEM ... */
3244 if (GET_CODE (*x) == MEM)
3246 rtx memref = XEXP (*x, 0);
 /* Plain reference: substitute ADDR directly.  */
3250 *x = replace_equiv_address (*x, addr);
 /* Reference plus constant offset: carry the offset over to ADDR.  */
3254 if (GET_CODE (memref) == CONST
3255 && GET_CODE (XEXP (memref, 0)) == PLUS
3256 && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3257 && XEXP (XEXP (memref, 0), 0) == ref)
3259 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3260 *x = replace_equiv_address (*x, plus_constant (addr, off));
3265 /* ... or a load-address type pattern. */
3266 if (GET_CODE (*x) == SET)
3268 rtx addrref = SET_SRC (*x);
3272 SET_SRC (*x) = addr;
3276 if (GET_CODE (addrref) == CONST
3277 && GET_CODE (XEXP (addrref, 0)) == PLUS
3278 && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3279 && XEXP (XEXP (addrref, 0), 0) == ref)
3281 HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3282 SET_SRC (*x) = plus_constant (addr, off);
 /* Recurse into remaining sub-rtxs.  */
3287 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3288 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3292 replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3294 else if (fmt[i] == 'E')
3296 for (j = 0; j < XVECLEN (*x, i); j++)
3297 replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3302 /* Check whether ADDR is an address that uses the base register,
3303 without actually constituting a literal pool access. (This happens
3304 in 31-bit PIC mode, where the base register is used as anchor for
3305 relative addressing of local symbols.)
3307 Returns 1 if the base register occupies the base slot,
3308 returns 2 if the base register occupies the index slot,
3309 returns 0 if the address is not of this form. */
3312 find_base_register_in_addr (addr)
3313 struct s390_address *addr;
3315 /* If DISP is complex, we might have a literal pool reference. */
3316 if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
3319 if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3322 if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3328 /* Return true if X contains an address that uses the base register,
3329 without actually constituting a literal pool access. */
3332 find_base_register_ref (x)
3336 struct s390_address addr;
3340 /* Addresses can only occur inside a MEM ... */
3341 if (GET_CODE (x) == MEM)
3343 if (s390_decompose_address (XEXP (x, 0), &addr)
3344 && find_base_register_in_addr (&addr))
3348 /* ... or a load-address type pattern. */
3349 if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
3351 if (s390_decompose_address (SET_SRC (x), &addr)
3352 && find_base_register_in_addr (&addr))
 /* Accumulate results from all sub-rtxs.  */
3356 fmt = GET_RTX_FORMAT (GET_CODE (x));
3357 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3361 retv |= find_base_register_ref (XEXP (x, i));
3363 else if (fmt[i] == 'E')
3365 for (j = 0; j < XVECLEN (x, i); j++)
3366 retv |= find_base_register_ref (XVECEXP (x, i, j));
3373 /* If X contains an address that uses the base register,
3374 without actually constituting a literal pool access,
3375 replace the base register with REPL in all such cases.
3377 Handles both MEMs and load address patterns. */
3380 replace_base_register_ref (x, repl)
3384 struct s390_address addr;
3389 /* Addresses can only occur inside a MEM ... */
3390 if (GET_CODE (*x) == MEM)
3392 if (s390_decompose_address (XEXP (*x, 0), &addr)
3393 && (pos = find_base_register_in_addr (&addr)))
 /* NOTE(review): elided lines presumably substitute REPL into the
    base or index slot (per POS) before rebuilding — confirm.  */
3400 new_addr = addr.base;
3402 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3404 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3406 *x = replace_equiv_address (*x, new_addr);
3411 /* ... or a load-address type pattern. */
3412 if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
3414 if (s390_decompose_address (SET_SRC (*x), &addr)
3415 && (pos = find_base_register_in_addr (&addr)))
3422 new_addr = addr.base;
3424 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3426 new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3428 SET_SRC (*x) = new_addr;
 /* Recurse into remaining sub-rtxs.  */
3433 fmt = GET_RTX_FORMAT (GET_CODE (*x));
3434 for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3438 replace_base_register_ref (&XEXP (*x, i), repl);
3440 else if (fmt[i] == 'E')
3442 for (j = 0; j < XVECLEN (*x, i); j++)
3443 replace_base_register_ref (&XVECEXP (*x, i, j), repl);
3449 /* We keep a list of constants which we have to add to internal
3450 constant tables in the middle of large functions. */
3452 #define NR_C_MODES 6
3453 enum machine_mode constant_modes[NR_C_MODES] =
 /* Generator functions emitting a pool entry for each supported mode,
    indexed in parallel with constant_modes.  */
3461 rtx (*gen_consttable[NR_C_MODES])(rtx) =
3463 gen_consttable_df, gen_consttable_di,
3464 gen_consttable_sf, gen_consttable_si,
 /* Singly-linked list node for one pool constant.  */
3471 struct constant *next;
 /* One chunk of the (possibly split) literal pool.  */
3476 struct constant_pool
3478 struct constant_pool *next;
3483 struct constant *constants[NR_C_MODES];
3489 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
3490 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
3491 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
3493 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
3494 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
3495 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
3496 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
3497 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3498 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
3499 static void s390_add_anchor PARAMS ((struct constant_pool *));
3500 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
3501 static void s390_free_pool PARAMS ((struct constant_pool *));
3503 /* Create new constant pool covering instructions starting at INSN
3504 and chain it to the end of POOL_LIST. */
3506 static struct constant_pool *
3507 s390_start_pool (pool_list, insn)
3508 struct constant_pool **pool_list;
3511 struct constant_pool *pool, **prev;
3514 pool = (struct constant_pool *) xmalloc (sizeof *pool);
 /* Start with an empty constant list for every supported mode.  */
3516 for (i = 0; i < NR_C_MODES; i++)
3517 pool->constants[i] = NULL;
3519 pool->label = gen_label_rtx ();
3520 pool->first_insn = insn;
3521 pool->pool_insn = NULL_RTX;
3522 pool->insns = BITMAP_XMALLOC ();
3524 pool->anchor = FALSE;
 /* Append the new pool at the tail of POOL_LIST.  */
3526 for (prev = pool_list; *prev; prev = &(*prev)->next)
3533 /* End range of instructions covered by POOL at INSN and emit
3534 placeholder insn representing the pool. */
3537 s390_end_pool (pool, insn)
3538 struct constant_pool *pool;
3541 rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
 /* A NULL INSN means: end the pool after the very last insn.  */
3544 insn = get_last_insn ();
3546 pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
3547 INSN_ADDRESSES_NEW (pool->pool_insn, -1);
3550 /* Add INSN to the list of insns covered by POOL. */
3553 s390_add_pool_insn (pool, insn)
3554 struct constant_pool *pool;
3557 bitmap_set_bit (pool->insns, INSN_UID (insn));
3560 /* Return pool out of POOL_LIST that covers INSN. */
3562 static struct constant_pool *
3563 s390_find_pool (pool_list, insn)
3564 struct constant_pool *pool_list;
3567 struct constant_pool *pool;
 /* Linear scan; membership is recorded per-pool in the insns bitmap.  */
3569 for (pool = pool_list; pool; pool = pool->next)
3570 if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
3576 /* Add constant VAL of mode MODE to the constant pool POOL. */
3579 s390_add_constant (pool, val, mode)
3580 struct constant_pool *pool;
3582 enum machine_mode mode;
 /* Map MODE to its slot in the per-mode constant lists.  */
3587 for (i = 0; i < NR_C_MODES; i++)
3588 if (constant_modes[i] == mode)
3590 if (i == NR_C_MODES)
 /* Avoid duplicates: an equal constant is stored only once.  */
3593 for (c = pool->constants[i]; c != NULL; c = c->next)
3594 if (rtx_equal_p (val, c->value))
 /* Not present yet: prepend a new entry and grow the pool size.  */
3599 c = (struct constant *) xmalloc (sizeof *c);
3601 c->label = gen_label_rtx ();
3602 c->next = pool->constants[i];
3603 pool->constants[i] = c;
3604 pool->size += GET_MODE_SIZE (mode);
3608 /* Find constant VAL of mode MODE in the constant pool POOL.
3609 Return an RTX describing the distance from the start of
3610 the pool to the location of the new constant. */
3613 s390_find_constant (pool, val, mode)
3614 struct constant_pool *pool;
3616 enum machine_mode mode;
3622 for (i = 0; i < NR_C_MODES; i++)
3623 if (constant_modes[i] == mode)
3625 if (i == NR_C_MODES)
3628 for (c = pool->constants[i]; c != NULL; c = c->next)
3629 if (rtx_equal_p (val, c->value))
 /* Express the location as (const (minus constant-label pool-label)),
    i.e. the offset of the entry from the pool start.  */
3635 offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
3636 gen_rtx_LABEL_REF (Pmode, pool->label));
3637 offset = gen_rtx_CONST (Pmode, offset);
3641 /* Set 'anchor' flag in POOL. */
3644 s390_add_anchor (pool)
3645 struct constant_pool *pool;
3649 pool->anchor = TRUE;
3654 /* Dump out the constants in POOL. */
3657 s390_dump_pool (pool)
3658 struct constant_pool *pool;
3664 /* Pool start insn switches to proper section
3665 and guarantees necessary alignment. */
3667 insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
3669 insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
3670 INSN_ADDRESSES_NEW (insn, -1);
 /* The pool's own label marks the base that offsets are relative to.  */
3672 insn = emit_label_after (pool->label, insn);
3673 INSN_ADDRESSES_NEW (insn, -1);
3675 /* Emit anchor if we need one. */
3678 rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
3679 anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
3680 anchor = gen_rtx_CONST (VOIDmode, anchor);
3681 insn = emit_insn_after (gen_consttable_si (anchor), insn);
3682 INSN_ADDRESSES_NEW (insn, -1);
3685 /* Dump constants in descending alignment requirement order,
3686 ensuring proper alignment for every constant. */
3687 for (i = 0; i < NR_C_MODES; i++)
3688 for (c = pool->constants[i]; c; c = c->next)
3690 /* Convert 104 unspecs to pool-relative references. */
3691 rtx value = c->value;
3692 if (GET_CODE (value) == CONST
3693 && GET_CODE (XEXP (value, 0)) == UNSPEC
3694 && XINT (XEXP (value, 0), 1) == 104
3695 && XVECLEN (XEXP (value, 0), 0) == 1)
3697 value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
3698 gen_rtx_LABEL_REF (VOIDmode, pool->label));
3699 value = gen_rtx_CONST (VOIDmode, value);
 /* Emit each constant's label followed by its data directive.  */
3702 insn = emit_label_after (c->label, insn);
3703 INSN_ADDRESSES_NEW (insn, -1);
3704 insn = emit_insn_after (gen_consttable[i] (value), insn);
3705 INSN_ADDRESSES_NEW (insn, -1);
3708 /* Pool end insn switches back to previous section
3709 and guarantees necessary alignment. */
3711 insn = emit_insn_after (gen_pool_end_64 (), insn);
3713 insn = emit_insn_after (gen_pool_end_31 (), insn);
3714 INSN_ADDRESSES_NEW (insn, -1);
3716 insn = emit_barrier_after (insn);
3717 INSN_ADDRESSES_NEW (insn, -1);
3719 /* Remove placeholder insn. */
3720 remove_insn (pool->pool_insn);
3725 /* Free all memory used by POOL. */
3728 s390_free_pool (pool)
3729 struct constant_pool *pool;
 /* Walk each per-mode constant list and release every node.  */
3733 for (i = 0; i < NR_C_MODES; i++)
3735 struct constant *c = pool->constants[i];
3738 struct constant *next = c->next;
3744 BITMAP_XFREE (pool->insns);
3749 /* Chunkify the literal pool if required.
3751 Code generated by this routine is allowed to use
3752 TEMP_REG as temporary scratch register. If this is
3753 done, TEMP_USED is set to true. */
3755 #define S390_POOL_CHUNK_MIN 0xc00
3756 #define S390_POOL_CHUNK_MAX 0xe00
3758 static struct constant_pool *
3759 s390_chunkify_start (temp_reg, temp_used)
3763 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
3765 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
 /* Pick the base-reload pattern matching the target word size.  */
3770 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
3771 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
3774 /* Do we need to chunkify the literal pool? */
3776 if (get_pool_size () < S390_POOL_CHUNK_MAX)
3779 /* We need correct insn addresses. */
3781 shorten_branches (get_insns ());
3783 /* Scan all insns and move literals to pool chunks.
3784 Also, emit anchor reload insns before every insn that uses
3785 the literal pool base register as anchor pointer. */
3787 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3789 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
3791 rtx pool_ref = NULL_RTX;
3792 find_constant_pool_ref (PATTERN (insn), &pool_ref);
 /* The insn references a pool constant: record it in the current
    chunk (starting a new chunk if necessary).  */
3796 curr_pool = s390_start_pool (&pool_list, insn);
3798 s390_add_constant (curr_pool, get_pool_constant (pool_ref),
3799 get_pool_mode (pool_ref));
3800 s390_add_pool_insn (curr_pool, insn);
 /* 31-bit PIC anchor use: reload the anchor into TEMP_REG first.  */
3803 else if (!TARGET_64BIT && flag_pic
3804 && find_base_register_ref (PATTERN (insn)))
3806 rtx new = gen_reload_anchor (temp_reg, base_reg);
3807 new = emit_insn_before (new, insn);
3808 INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
3813 curr_pool = s390_start_pool (&pool_list, new);
3815 s390_add_anchor (curr_pool);
3816 s390_add_pool_insn (curr_pool, insn);
3820 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
3822 s390_add_pool_insn (curr_pool, insn);
 /* Skip insns without a valid recorded address.  */
3825 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
3826 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
3831 if (curr_pool->size < S390_POOL_CHUNK_MAX)
3834 s390_end_pool (curr_pool, NULL_RTX);
 /* Running size of the chunk in bytes of code covered so far.  */
3839 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
3840 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
3843 /* We will later have to insert base register reload insns.
3844 Those will have an effect on code size, which we need to
3845 consider here. This calculation makes rather pessimistic
3846 worst-case assumptions. */
3847 if (GET_CODE (insn) == CODE_LABEL)
3850 if (chunk_size < S390_POOL_CHUNK_MIN
3851 && curr_pool->size < S390_POOL_CHUNK_MIN)
3854 /* Pool chunks can only be inserted after BARRIERs ... */
3855 if (GET_CODE (insn) == BARRIER)
3857 s390_end_pool (curr_pool, insn)
3862 /* ... so if we don't find one in time, create one. */
3863 else if ((chunk_size > S390_POOL_CHUNK_MAX
3864 || curr_pool->size > S390_POOL_CHUNK_MAX))
3866 rtx label, jump, barrier;
3868 /* We can insert the barrier only after a 'real' insn. */
3869 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
3871 if (get_attr_length (insn) == 0)
3874 /* Don't separate insns created by s390_split_branches. */
3875 if (GET_CODE (insn) == INSN
3876 && GET_CODE (PATTERN (insn)) == SET
3877 && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
 /* Emit jump-around-label / barrier so the pool chunk can be
    placed here without falling through into its data.  */
3880 label = gen_label_rtx ();
3881 jump = emit_jump_insn_after (gen_jump (label), insn);
3882 barrier = emit_barrier_after (jump);
3883 insn = emit_label_after (label, barrier);
3884 JUMP_LABEL (jump) = label;
3885 LABEL_NUSES (label) = 1;
3887 INSN_ADDRESSES_NEW (jump, -1);
3888 INSN_ADDRESSES_NEW (barrier, -1);
3889 INSN_ADDRESSES_NEW (insn, -1);
3891 s390_end_pool (curr_pool, barrier);
 /* Close the last open chunk at end of function.  */
3899 s390_end_pool (curr_pool, NULL_RTX);
3902 /* Find all labels that are branched into
3903 from an insn belonging to a different chunk. */
3905 far_labels = BITMAP_XMALLOC ();
3907 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3909 /* Labels marked with LABEL_PRESERVE_P can be target
3910 of non-local jumps, so we have to mark them.
3911 The same holds for named labels.
3913 Don't do that, however, if it is the label before
3916 if (GET_CODE (insn) == CODE_LABEL
3917 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
3919 rtx vec_insn = next_real_insn (insn);
3920 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3921 PATTERN (vec_insn) : NULL_RTX;
3923 || !(GET_CODE (vec_pat) == ADDR_VEC
3924 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3925 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
3928 /* If we have a direct jump (conditional or unconditional)
3929 or a casesi jump, check all potential targets. */
3930 else if (GET_CODE (insn) == JUMP_INSN)
3932 rtx pat = PATTERN (insn);
3933 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3934 pat = XVECEXP (pat, 0, 0);
3936 if (GET_CODE (pat) == SET)
3938 rtx label = JUMP_LABEL (insn);
 /* A jump crossing a chunk boundary makes its target "far".  */
3941 if (s390_find_pool (pool_list, label)
3942 != s390_find_pool (pool_list, insn))
3943 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3946 else if (GET_CODE (pat) == PARALLEL
3947 && XVECLEN (pat, 0) == 2
3948 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
3949 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
3950 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
3952 /* Find the jump table used by this casesi jump. */
3953 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
3954 rtx vec_insn = next_real_insn (vec_label);
3955 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
3956 PATTERN (vec_insn) : NULL_RTX;
3958 && (GET_CODE (vec_pat) == ADDR_VEC
3959 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
3961 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
3963 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
3965 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
3967 if (s390_find_pool (pool_list, label)
3968 != s390_find_pool (pool_list, insn))
3969 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
3976 /* Insert base register reload insns before every pool. */
3978 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
3980 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
3981 rtx insn = curr_pool->first_insn;
3982 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
3985 /* Insert base register reload insns at every far label. */
3987 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3988 if (GET_CODE (insn) == CODE_LABEL
3989 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
3991 struct constant_pool *pool = s390_find_pool (pool_list, insn);
3994 rtx new_insn = gen_reload_base (base_reg, pool->label);
3995 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4000 BITMAP_XFREE (far_labels);
4003 /* Recompute insn addresses. */
4005 init_insn_lengths ();
4006 shorten_branches (get_insns ());
4011 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4012 After we have decided to use this list, finish implementing
4013 all changes to the current function as required.
4015 Code generated by this routine is allowed to use
4016 TEMP_REG as temporary scratch register. */
4019 s390_chunkify_finish (pool_list, temp_reg)
4020 struct constant_pool *pool_list;
4023 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4024 struct constant_pool *curr_pool = NULL;
4028 /* Replace all literal pool references. */
4030 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4032 curr_pool = s390_find_pool (pool_list, insn);
4036 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4038 rtx addr, pool_ref = NULL_RTX;
4039 find_constant_pool_ref (PATTERN (insn), &pool_ref);
 /* Rewrite the reference as base_reg + offset-within-chunk and
    force re-recognition of the modified insn.  */
4042 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4043 get_pool_mode (pool_ref));
4044 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4045 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4046 INSN_CODE (insn) = -1;
 /* 31-bit PIC anchor uses go through TEMP_REG instead.  */
4049 else if (!TARGET_64BIT && flag_pic
4050 && find_base_register_ref (PATTERN (insn)))
4052 replace_base_register_ref (&PATTERN (insn), temp_reg);
4057 /* Dump out all literal pools. */
4059 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4060 s390_dump_pool (curr_pool);
4062 /* Free pool list. */
4066 struct constant_pool *next = pool_list->next;
4067 s390_free_pool (pool_list);
4072 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4073 We have decided we cannot use this list, so revert all changes
4074 to the current function that were done by s390_chunkify_start. */
4077 s390_chunkify_cancel (pool_list)
4078 struct constant_pool *pool_list;
4080 struct constant_pool *curr_pool = NULL;
4083 /* Remove all pool placeholder insns. */
4085 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4087 /* Did we insert an extra barrier? Remove it. */
4088 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4089 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4090 rtx label = NEXT_INSN (curr_pool->pool_insn);
 /* Recognize the exact jump/barrier/label triple emitted by
    s390_chunkify_start before undoing it.  */
4092 if (jump && GET_CODE (jump) == JUMP_INSN
4093 && barrier && GET_CODE (barrier) == BARRIER
4094 && label && GET_CODE (label) == CODE_LABEL
4095 && GET_CODE (PATTERN (jump)) == SET
4096 && SET_DEST (PATTERN (jump)) == pc_rtx
4097 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4098 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4101 remove_insn (barrier);
4102 remove_insn (label);
4105 remove_insn (curr_pool->pool_insn);
4108 /* Remove all base/anchor register reload insns. */
4110 for (insn = get_insns (); insn; )
4112 rtx next_insn = NEXT_INSN (insn);
 /* Reload insns are SETs of unspec 210/211.  */
4114 if (GET_CODE (insn) == INSN
4115 && GET_CODE (PATTERN (insn)) == SET
4116 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4117 && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4118 || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4124 /* Free pool list. */
4128 struct constant_pool *next = pool_list->next;
4129 s390_free_pool (pool_list);
4135 /* Index of constant pool chunk that is currently being processed.
4136 Set to -1 before function output has started.
4137 Maintained by s390_output_constant_pool below. */
4137 int s390_pool_count = -1;
4139 /* Number of elements of current constant pool. */
4140 int s390_nr_constants;
4142 /* Output main constant pool to stdio stream FILE. */
4145 s390_output_constant_pool (start_label, end_label)
 /* Switch to the read-only data section, align, and emit the label
    that marks the start of the pool.  */
4151 readonly_data_section ();
4152 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4153 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4154 CODE_LABEL_NUMBER (start_label));
4158 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4159 CODE_LABEL_NUMBER (start_label));
4160 ASM_OUTPUT_ALIGN (asm_out_file, 2);
 /* Emit the pool itself, bracketing with s390_pool_count so other
    output routines know pool emission is in progress.  */
4163 s390_pool_count = 0;
4164 output_constant_pool (current_function_name, current_function_decl);
4165 s390_pool_count = -1;
 /* Return to the function's section and emit the end label.  */
4167 function_section (current_function_decl);
4170 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4171 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4172 CODE_LABEL_NUMBER (end_label));
4176 /* Rework the prolog/epilog to avoid saving/restoring
4177 registers unnecessarily. If TEMP_REGNO is nonnegative,
4178 it specifies the number of a caller-saved register used
4179 as temporary scratch register by code emitted during
4180 machine dependent reorg. */
4183 s390_optimize_prolog (temp_regno)
4186 int save_first, save_last, restore_first, restore_last;
4188 rtx insn, new_insn, next_insn;
4190 /* Recompute regs_ever_live data for special registers. */
4191 regs_ever_live[BASE_REGISTER] = 0;
4192 regs_ever_live[RETURN_REGNUM] = 0;
4193 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4195 /* If there is (possibly) any pool entry, we need to
4196 load the base register.
4197 ??? FIXME: this should be more precise. */
4198 if (get_pool_size ())
4199 regs_ever_live[BASE_REGISTER] = 1;
4201 /* In non-leaf functions, the prolog/epilog code relies
4202 on RETURN_REGNUM being saved in any case. */
4203 if (!current_function_is_leaf)
4204 regs_ever_live[RETURN_REGNUM] = 1;
4206 /* We need to save/restore the temporary register. */
4207 if (temp_regno >= 0)
4208 regs_ever_live[temp_regno] = 1;
4211 /* Find first and last gpr to be saved. */
4213 for (i = 6; i < 16; i++)
4214 if (regs_ever_live[i])
4217 for (j = 15; j > i; j--)
4218 if (regs_ever_live[j])
4223 /* Nothing to save/restore. */
4224 save_first = restore_first = -1;
4225 save_last = restore_last = -1;
4229 /* Save/restore from i to j. */
4230 save_first = restore_first = i;
4231 save_last = restore_last = j;
4234 /* Varargs functions need to save gprs 2 to 6. */
4235 if (current_function_stdarg)
4243 /* If all special registers are in fact used, there's nothing we
4244 can do, so no point in walking the insn list. */
4245 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4246 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4250 /* Search for prolog/epilog insns and replace them. */
4252 for (insn = get_insns (); insn; insn = next_insn)
4254 int first, last, off;
4255 rtx set, base, offset;
4257 next_insn = NEXT_INSN (insn);
4259 if (GET_CODE (insn) != INSN)
4261 if (GET_CODE (PATTERN (insn)) != PARALLEL)
 /* Prolog STM: decode its register range and stack slot, then
    replace it with a save of only the registers actually needed.  */
4264 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4266 set = XVECEXP (PATTERN (insn), 0, 0);
4267 first = REGNO (SET_SRC (set));
4268 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4269 offset = const0_rtx;
4270 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4271 off = INTVAL (offset) - first * UNITS_PER_WORD;
4273 if (GET_CODE (base) != REG || off < 0)
4275 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4277 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4280 if (save_first != -1)
4282 new_insn = save_gprs (base, off, save_first, save_last)
4283 new_insn = emit_insn_before (new_insn, insn);
4284 INSN_ADDRESSES_NEW (new_insn, -1);
 /* Epilog LM: same treatment for the restore side.  */
4290 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4292 set = XVECEXP (PATTERN (insn), 0, 0);
4293 first = REGNO (SET_DEST (set));
4294 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4295 offset = const0_rtx;
4296 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4297 off = INTVAL (offset) - first * UNITS_PER_WORD;
4299 if (GET_CODE (base) != REG || off < 0)
4301 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4303 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4306 if (restore_first != -1)
4308 new_insn = restore_gprs (base, off, restore_first, restore_last);
4309 new_insn = emit_insn_before (new_insn, insn);
4310 INSN_ADDRESSES_NEW (new_insn, -1);
4318 /* Check whether any insn in the function makes use of the original
4319 value of RETURN_REG (e.g. for __builtin_return_address).
4320 If so, insert an insn reloading that value.
4322 Return true if any such insn was found. */
4325 s390_fixup_clobbered_return_reg (return_reg)
4328 bool replacement_done = 0;
4331 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4333 rtx reg, off, new_insn;
4335 if (GET_CODE (insn) != INSN)
4337 if (!reg_referenced_p (return_reg, PATTERN (insn)))
4339 if (GET_CODE (PATTERN (insn)) == PARALLEL
4340 && store_multiple_operation (PATTERN (insn), VOIDmode))
 /* Compute the stack slot holding the saved return register.  */
4343 if (frame_pointer_needed)
4344 reg = hard_frame_pointer_rtx;
4346 reg = stack_pointer_rtx;
4348 off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
 /* Offsets >= 4096 do not fit the displacement field; load the
    offset from the literal pool into the register first.  */
4349 if (INTVAL (off) >= 4096)
4351 off = force_const_mem (Pmode, off);
4352 new_insn = gen_rtx_SET (Pmode, return_reg, off);
4353 new_insn = emit_insn_before (new_insn, insn);
4354 INSN_ADDRESSES_NEW (new_insn, -1);
 /* Reload the saved value from the stack slot before the use.  */
4358 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4359 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4360 new_insn = emit_insn_before (new_insn, insn);
4361 INSN_ADDRESSES_NEW (new_insn, -1);
4363 replacement_done = 1;
4366 return replacement_done;
4369 /* Perform machine-dependent processing. */
4372 s390_machine_dependent_reorg (first)
4373 rtx first ATTRIBUTE_UNUSED;
4375 bool fixed_up_clobbered_return_reg = 0;
/* RETURN_REGNUM doubles as the scratch register for pool reloads.  */
4376 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4379 /* Make sure all splits have been performed; splits after
4380 machine_dependent_reorg might confuse insn length counts. */
4381 split_all_insns_noflow ();
4384 /* There are two problematic situations we need to correct:
4386 - the literal pool might be > 4096 bytes in size, so that
4387 some of its elements cannot be directly accessed
4389 - a branch target might be > 64K away from the branch, so that
4390 it is not possible to use a PC-relative instruction.
4392 To fix those, we split the single literal pool into multiple
4393 pool chunks, reloading the pool base register at various
4394 points throughout the function to ensure it always points to
4395 the pool chunk the following code expects, and / or replace
4396 PC-relative branches by absolute branches.
4398 However, the two problems are interdependent: splitting the
4399 literal pool can move a branch further away from its target,
4400 causing the 64K limit to overflow, and on the other hand,
4401 replacing a PC-relative branch by an absolute branch means
4402 we need to put the branch target address into the literal
4403 pool, possibly causing it to overflow.
4405 So, we loop trying to fix up both problems until we manage
4406 to satisfy both conditions at the same time. Note that the
4407 loop is guaranteed to terminate as every pass of the loop
4408 strictly decreases the total number of PC-relative branches
4409 in the function. (This is not completely true as there
4410 might be branch-over-pool insns introduced by chunkify_start.
4411 Those never need to be split however.) */
4415 struct constant_pool *pool_list;
4417 /* Try to chunkify the literal pool. */
4418 pool_list = s390_chunkify_start (temp_reg, &temp_used);
4420 /* Split out-of-range branches. If this has created new
4421 literal pool entries, cancel current chunk list and
/* ... and retry from the top (loop body; enclosing loop construct is
   not visible in this extraction).  */
4423 if (s390_split_branches (temp_reg, &temp_used))
4426 s390_chunkify_cancel (pool_list);
4431 /* Check whether we have clobbered a use of the return
4432 register (e.g. for __builtin_return_address). If so,
4433 add insns reloading the register where necessary. */
4434 if (temp_used && !fixed_up_clobbered_return_reg
4435 && s390_fixup_clobbered_return_reg (temp_reg))
4437 fixed_up_clobbered_return_reg = 1;
4439 /* The fixup insns might have caused a jump to overflow. */
4441 s390_chunkify_cancel (pool_list);
4446 /* If we made it up to here, both conditions are satisfied.
4447 Finish up pool chunkification if required. */
4449 s390_chunkify_finish (pool_list, temp_reg);
/* Shrink the prologue/epilogue save ranges now that register usage
   is final; pass RETURN_REGNUM only if it was clobbered above.  */
4454 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
4458 /* Return an RTL expression representing the value of the return address
4459 for the frame COUNT steps up from the current frame. FRAME is the
4460 frame pointer of that frame. */
4463 s390_return_addr_rtx (count, frame)
4469 /* For the current frame, we use the initial value of RETURN_REGNUM.
4470 This works both in leaf and non-leaf functions. */
4473 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
4475 /* For frames farther back, we read the stack slot where the
4476 corresponding RETURN_REGNUM value was saved. */
4478 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
4479 addr = memory_address (Pmode, addr);
4480 return gen_rtx_MEM (Pmode, addr);
4483 /* Find first call clobbered register unused in a function.
4484 This could be used as base register in a leaf function
4485 or for holding the return address before epilogue. */
4488 find_unused_clobbered_reg ()
/* gprs 0..5 are call-clobbered; return the first one never used by
   this function.  NOTE(review): the loop's result statement(s) are
   missing from this extraction -- presumably `return i;` on the first
   hit, with a fallback when all six are live; confirm against the
   full source.  */
4491 for (i = 0; i < 6; i++)
4492 if (!regs_ever_live[i])
4497 /* Fill FRAME with info about frame of current function. */
/* NOTE(review): the function signature line (s390_frame_info, judging by
   the callers) is missing from this extraction; code below is untouched.  */
4502 char gprs_ever_live[16];
4504 HOST_WIDE_INT fsize = get_frame_size ();
4506 if (fsize > 0x7fff0000)
4507 fatal_error ("Total size of local variables exceeds architecture limit.");
4509 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
4510 cfun->machine->save_fprs_p = 0;
/* fprs 8-15 occupy hard-register numbers 24..31 in this port.  */
4512 for (i = 24; i < 32; i++)
4513 if (regs_ever_live[i])
4515 cfun->machine->save_fprs_p = 1;
/* Reserve 8 * 8 = 64 bytes below the frame for the fpr save area.  */
4519 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
4521 /* Does function need to setup frame and save area. */
4523 if (! current_function_is_leaf
4524 || cfun->machine->frame_size > 0
4525 || current_function_calls_alloca
4526 || current_function_stdarg)
4527 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
4529 /* Find first and last gpr to be saved. Note that at this point,
4530 we assume the return register and the base register always
4531 need to be saved. This is done because the usage of these
4532 register might change even after the prolog was emitted.
4533 If it turns out later that we really don't need them, the
4534 prolog/epilog code is modified again. */
4536 for (i = 0; i < 16; i++)
4537 gprs_ever_live[i] = regs_ever_live[i];
4539 gprs_ever_live[BASE_REGISTER] = 1;
4540 gprs_ever_live[RETURN_REGNUM] = 1;
4541 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
/* Call-saved gprs start at r6; scan inward from both ends.  */
4543 for (i = 6; i < 16; i++)
4544 if (gprs_ever_live[i])
4547 for (j = 15; j > i; j--)
4548 if (gprs_ever_live[j])
4552 /* Save / Restore from gpr i to j. */
4553 cfun->machine->first_save_gpr = i;
4554 cfun->machine->first_restore_gpr = i;
4555 cfun->machine->last_save_gpr = j;
4557 /* Varargs functions need to save gprs 2 to 6. */
4558 if (current_function_stdarg)
4559 cfun->machine->first_save_gpr = 2;
4562 /* Return offset between argument pointer and frame pointer
4563 initially after prologue. */
4566 s390_arg_frame_offset ()
4568 HOST_WIDE_INT fsize = get_frame_size ();
4571 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
/* This recomputes the same frame-size logic as the frame-info code
   above; keep the two in sync.  */
4574 for (i = 24; i < 32; i++)
4575 if (regs_ever_live[i])
4581 fsize = fsize + save_fprs_p * 64;
4583 /* Does function need to setup frame and save area. */
4585 if (! current_function_is_leaf
4587 || current_function_calls_alloca
4588 || current_function_stdarg)
4589 fsize += STARTING_FRAME_OFFSET;
4590 return fsize + STACK_POINTER_OFFSET;
4593 /* Emit insn to save fpr REGNUM at offset OFFSET relative
4594 to register BASE. Return generated insn. */
4597 save_fpr (base, offset, regnum)
/* Use the dedicated save-area alias set so these stores do not
   conflict with ordinary memory accesses.  */
4603 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4604 set_mem_alias_set (addr, s390_sr_alias_set);
4606 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
4609 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
4610 to register BASE. Return generated insn. */
4613 restore_fpr (base, offset, regnum)
/* Mirror of save_fpr: load DFmode value back from the save area.  */
4619 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
4620 set_mem_alias_set (addr, s390_sr_alias_set);
4622 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
4625 /* Generate insn to save registers FIRST to LAST into
4626 the register save area located at offset OFFSET
4627 relative to register BASE. */
4630 save_gprs (base, offset, first, last)
4636 rtx addr, insn, note;
/* Slot for gpr N lives at OFFSET + N * UNITS_PER_WORD.  */
4639 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4640 addr = gen_rtx_MEM (Pmode, addr);
4641 set_mem_alias_set (addr, s390_sr_alias_set);
4643 /* Special-case single register. */
4647 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
4649 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
4651 RTX_FRAME_RELATED_P (insn) = 1;
/* General case: STM/STMG of the whole range.  */
4656 insn = gen_store_multiple (addr,
4657 gen_rtx_REG (Pmode, first),
4658 GEN_INT (last - first + 1));
4661 /* We need to set the FRAME_RELATED flag on all SETs
4662 inside the store-multiple pattern.
4664 However, we must not emit DWARF records for registers 2..5
4665 if they are stored for use by variable arguments ...
4667 ??? Unfortunately, it is not enough to simply not set the
4668 FRAME_RELATED flags for those SETs, because the first SET
4669 of the PARALLEL is always treated as if it had the flag
4670 set, even if it does not. Therefore we emit a new pattern
4671 without those registers as REG_FRAME_RELATED_EXPR note. */
4675 rtx pat = PATTERN (insn);
4677 for (i = 0; i < XVECLEN (pat, 0); i++)
4678 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
4679 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
4681 RTX_FRAME_RELATED_P (insn) = 1;
/* Build the substitute DWARF note starting at gpr 6 (first
   call-saved gpr), excluding the varargs registers 2..5.  */
4685 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
4686 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
4687 gen_rtx_REG (Pmode, 6),
4688 GEN_INT (last - 6 + 1));
4689 note = PATTERN (note);
4692 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4693 note, REG_NOTES (insn));
4695 for (i = 0; i < XVECLEN (note, 0); i++)
4696 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
4697 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
4699 RTX_FRAME_RELATED_P (insn) = 1;
4705 /* Generate insn to restore registers FIRST to LAST from
4706 the register save area located at offset OFFSET
4707 relative to register BASE. */
4710 restore_gprs (base, offset, first, last)
4718 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
4719 addr = gen_rtx_MEM (Pmode, addr);
4720 set_mem_alias_set (addr, s390_sr_alias_set);
4722 /* Special-case single register. */
4726 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
4728 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
/* General case: LM/LMG of the whole range.  No FRAME_RELATED marking
   here -- epilogue insns do not carry DWARF frame notes.  */
4733 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
4735 GEN_INT (last - first + 1));
4739 /* Expand the prologue into a bunch of separate insns. */
4742 s390_emit_prologue ()
4746 rtx pool_start_label, pool_end_label;
4749 /* Compute frame_info. */
4753 /* Choose best register to use for temp use within prologue. */
/* Prefer RETURN_REGNUM when it is free: leaf functions and functions
   needing its initial value (__builtin_return_address) must fall back
   to gpr 1, as must functions whose pool is nearly full.  */
4755 if (!current_function_is_leaf
4756 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
4757 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
4758 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4760 temp_reg = gen_rtx_REG (Pmode, 1);
4762 /* Save call saved gprs. */
4764 insn = save_gprs (stack_pointer_rtx, 0,
4765 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
4768 /* Dump constant pool and set constant pool register. */
4770 pool_start_label = gen_label_rtx();
4771 pool_end_label = gen_label_rtx();
4772 cfun->machine->literal_pool_label = pool_start_label;
/* 64-bit vs. 31-bit pool-setup patterns (selected by surrounding
   TARGET_64BIT test not visible in this extraction).  */
4775 insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
4776 pool_start_label, pool_end_label));
4778 insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
4779 pool_start_label, pool_end_label));
4781 /* Save fprs for variable args. */
4783 if (current_function_stdarg)
4785 /* Save fpr 0 and 2. */
/* fpr 0/2 are hard regnos 16/17 in this port.  */
4787 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
4788 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
4792 /* Save fpr 4 and 6. */
4794 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4795 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4799 /* Save fprs 4 and 6 if used (31 bit ABI). */
4803 /* Save fpr 4 and 6. */
4804 if (regs_ever_live[18])
4806 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
4807 RTX_FRAME_RELATED_P (insn) = 1;
4809 if (regs_ever_live[19])
4811 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
4812 RTX_FRAME_RELATED_P (insn) = 1;
4816 /* Decrement stack pointer. */
4818 if (cfun->machine->frame_size > 0)
4820 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
4822 /* Save incoming stack pointer into temp reg. */
/* Needed later either to write the backchain word or to address the
   fpr 8-15 save area above the new stack pointer.  */
4824 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
4826 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
4829 /* Subtract frame size from stack pointer. */
4831 frame_off = GEN_INT (-cfun->machine->frame_size);
/* 'K' = 16-bit signed immediate; larger deltas come from the pool.  */
4832 if (!CONST_OK_FOR_LETTER_P (-cfun->machine->frame_size, 'K'))
4833 frame_off = force_const_mem (Pmode, frame_off);
4835 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
4836 RTX_FRAME_RELATED_P (insn) = 1;
/* Attach an explicit sp = sp - size note so DWARF CFI is correct even
   when the delta was loaded from the literal pool.  */
4838 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4839 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
4840 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4841 GEN_INT (-cfun->machine->frame_size))),
4844 /* Set backchain. */
4846 if (TARGET_BACKCHAIN)
4848 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
4849 set_mem_alias_set (addr, s390_sr_alias_set);
4850 insn = emit_insn (gen_move_insn (addr, temp_reg));
4854 /* Save fprs 8 - 15 (64 bit ABI). */
4856 if (cfun->machine->save_fprs_p)
/* temp_reg still holds the incoming sp; step it down to the start of
   the 64-byte fpr save area.  */
4858 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
4860 for (i = 24; i < 32; i++)
4861 if (regs_ever_live[i])
4863 rtx addr = plus_constant (stack_pointer_rtx,
4864 cfun->machine->frame_size - 64 + (i-24)*8);
4866 insn = save_fpr (temp_reg, (i-24)*8, i);
4867 RTX_FRAME_RELATED_P (insn) = 1;
/* DWARF note expresses the slot relative to sp, not temp_reg.  */
4869 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
4870 gen_rtx_SET (VOIDmode,
4871 gen_rtx_MEM (DFmode, addr),
4872 gen_rtx_REG (DFmode, i)),
4877 /* Set frame pointer, if needed. */
4879 if (frame_pointer_needed)
4881 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
4882 RTX_FRAME_RELATED_P (insn) = 1;
4885 /* Set up got pointer, if needed. */
4887 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
4889 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
4890 SYMBOL_REF_FLAG (got_symbol) = 1;
/* 64-bit: load the GOT address PC-relative in one insn.  */
4894 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
4897 /* It can happen that the GOT pointer isn't really needed ... */
4898 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit: fetch GOT offset from the pool and add the pool base.
   UNSPEC 100 marks the GOT-relative relocation.  */
4903 got_symbol = gen_rtx_UNSPEC (VOIDmode,
4904 gen_rtvec (1, got_symbol), 100);
4905 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
4906 got_symbol = force_const_mem (Pmode, got_symbol);
4907 insn = emit_move_insn (pic_offset_table_rtx,
4909 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4912 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
4913 gen_rtx_REG (Pmode, BASE_REGISTER)));
4914 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
4920 /* Expand the epilogue into a bunch of separate insns. */
4923 s390_emit_epilogue ()
4925 rtx frame_pointer, return_reg;
4926 int area_bottom, area_top, offset = 0;
4929 /* Check whether to use frame or stack pointer for restore. */
4931 frame_pointer = frame_pointer_needed ?
4932 hard_frame_pointer_rtx : stack_pointer_rtx;
4934 /* Compute which parts of the save area we need to access. */
/* [area_bottom, area_top) is the byte range of the save area we must
   reach, relative to the incoming stack pointer.  */
4936 if (cfun->machine->first_restore_gpr != -1)
4938 area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
4939 area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
/* No gprs to restore: start with an empty (inverted) range.  */
4943 area_bottom = INT_MAX;
/* fpr 8-15 save area sits 64 bytes below the incoming sp.  */
4949 if (cfun->machine->save_fprs_p)
4951 if (area_bottom > -64)
/* 31-bit ABI: fprs 4 and 6 live in the caller-allocated area.  */
4959 if (regs_ever_live[18])
4961 if (area_bottom > STACK_POINTER_OFFSET - 16)
4962 area_bottom = STACK_POINTER_OFFSET - 16;
4963 if (area_top < STACK_POINTER_OFFSET - 8)
4964 area_top = STACK_POINTER_OFFSET - 8;
4966 if (regs_ever_live[19])
4968 if (area_bottom > STACK_POINTER_OFFSET - 8)
4969 area_bottom = STACK_POINTER_OFFSET - 8;
4970 if (area_top < STACK_POINTER_OFFSET)
4971 area_top = STACK_POINTER_OFFSET;
4975 /* Check whether we can access the register save area.
4976 If not, increment the frame pointer as required. */
4978 if (area_top <= area_bottom)
4980 /* Nothing to restore. */
/* Whole area reachable with 0..4095 displacements from frame_pointer.  */
4982 else if (cfun->machine->frame_size + area_bottom >= 0
4983 && cfun->machine->frame_size + area_top <= 4096)
4985 /* Area is in range. */
4986 offset = cfun->machine->frame_size;
4990 rtx insn, frame_off;
/* Otherwise advance frame_pointer so the area starts at offset 0
   (or at -area_bottom when part of it lies below the frame).  */
4992 offset = area_bottom < 0 ? -area_bottom : 0;
4993 frame_off = GEN_INT (cfun->machine->frame_size - offset);
4995 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
4996 frame_off = force_const_mem (Pmode, frame_off);
4998 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
5001 /* Restore call saved fprs. */
5007 if (cfun->machine->save_fprs_p)
5008 for (i = 24; i < 32; i++)
5009 if (regs_ever_live[i] && !global_regs[i])
5010 restore_fpr (frame_pointer,
5011 offset - 64 + (i-24) * 8, i);
5015 if (regs_ever_live[18] && !global_regs[18])
5016 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5017 if (regs_ever_live[19] && !global_regs[19])
5018 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
5021 /* Return register. */
5023 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5025 /* Restore call saved gprs. */
5027 if (cfun->machine->first_restore_gpr != -1)
5032 /* Check for global register and save them
5033 to stack location from where they get restored. */
/* Global registers inside the load-multiple range must keep their
   current value, so store them into their slots first; the following
   LM then "restores" them to the same value.  */
5035 for (i = cfun->machine->first_restore_gpr;
5036 i <= cfun->machine->last_save_gpr;
5039 /* These registers are special and need to be
5040 restored in any case. */
5041 if (i == STACK_POINTER_REGNUM
5042 || i == RETURN_REGNUM
5043 || i == BASE_REGISTER
5044 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
5049 addr = plus_constant (frame_pointer,
5050 offset + i * UNITS_PER_WORD);
5051 addr = gen_rtx_MEM (Pmode, addr);
5052 set_mem_alias_set (addr, s390_sr_alias_set);
5053 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
5057 /* Fetch return address from stack before load multiple,
5058 this will do good for scheduling. */
5060 if (!current_function_is_leaf)
/* Use a scratch call-clobbered register so the load can be scheduled
   independently of the load-multiple below.  */
5062 int return_regnum = find_unused_clobbered_reg();
5065 return_reg = gen_rtx_REG (Pmode, return_regnum);
5067 addr = plus_constant (frame_pointer,
5068 offset + RETURN_REGNUM * UNITS_PER_WORD);
5069 addr = gen_rtx_MEM (Pmode, addr);
5070 set_mem_alias_set (addr, s390_sr_alias_set);
5071 emit_move_insn (return_reg, addr);
5074 /* ??? As references to the base register are not made
5075 explicit in insn RTX code, we have to add a barrier here
5076 to prevent incorrect scheduling. */
5078 emit_insn (gen_blockage());
5080 insn = restore_gprs (frame_pointer, offset,
5081 cfun->machine->first_restore_gpr,
5082 cfun->machine->last_save_gpr);
5086 /* Return to caller. */
/* PARALLEL of (return) + (use return_reg) keeps the branch register
   live up to the jump.  */
5088 p = rtvec_alloc (2);
5090 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5091 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
5092 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
5096 /* Return the size in bytes of a function argument of
5097 type TYPE and/or mode MODE. At least one of TYPE or
5098 MODE must be specified. */
5101 s390_function_arg_size (mode, type)
5102 enum machine_mode mode;
/* Prefer the precise type size when a type is available.  */
5106 return int_size_in_bytes (type);
5108 /* No type info available for some library calls ... */
5109 if (mode != BLKmode)
5110 return GET_MODE_SIZE (mode);
5112 /* If we have neither type nor mode, abort */
5116 /* Return 1 if a function argument of type TYPE and mode MODE
5117 is to be passed by reference. The ABI specifies that only
5118 structures of size 1, 2, 4, or 8 bytes are passed by value,
5119 all other structures (and complex numbers) are passed by
5123 s390_function_arg_pass_by_reference (mode, type)
5124 enum machine_mode mode;
5127 int size = s390_function_arg_size (mode, type);
/* Aggregates of any size other than 1/2/4/8 bytes go by reference.  */
5131 if (AGGREGATE_TYPE_P (type) &&
5132 size != 1 && size != 2 && size != 4 && size != 8)
/* Complex values are always passed by reference.  */
5135 if (TREE_CODE (type) == COMPLEX_TYPE)
5142 /* Update the data in CUM to advance over an argument of mode MODE and
5143 data type TYPE. (TYPE is null for libcalls where that information
5144 may not be available.). The boolean NAMED specifies whether the
5145 argument is a named argument (as opposed to an unnamed argument
5146 matching an ellipsis). */
5149 s390_function_arg_advance (cum, mode, type, named)
5150 CUMULATIVE_ARGS *cum;
5151 enum machine_mode mode;
5153 int named ATTRIBUTE_UNUSED;
/* Float args consume an fpr (body not visible in this extraction);
   by-reference args consume a single gpr for the pointer; everything
   else consumes one gpr per word.  */
5155 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5159 else if (s390_function_arg_pass_by_reference (mode, type))
5165 int size = s390_function_arg_size (mode, type);
5166 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
5170 /* Define where to put the arguments to a function.
5171 Value is zero to push the argument on the stack,
5172 or a hard register in which to store the argument.
5174 MODE is the argument's machine mode.
5175 TYPE is the data type of the argument (as a tree).
5176 This is null for libcalls where that information may
5178 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5179 the preceding args and about the function being called.
5180 NAMED is nonzero if this argument is a named parameter
5181 (otherwise it is an extra parameter matching an ellipsis).
5183 On S/390, we use general purpose registers 2 through 6 to
5184 pass integer, pointer, and certain structure arguments, and
5185 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5186 to pass floating point arguments. All remaining arguments
5187 are pushed to the stack. */
5190 s390_function_arg (cum, mode, type, named)
5191 CUMULATIVE_ARGS *cum;
5192 enum machine_mode mode;
5194 int named ATTRIBUTE_UNUSED;
/* By-reference args are handled through an invisible pointer.  */
5196 if (s390_function_arg_pass_by_reference (mode, type))
5199 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
/* Out of fp argument registers -> argument goes on the stack.  */
5201 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
/* fp argument registers are hard regnos 16 upward in this port.  */
5204 return gen_rtx (REG, mode, cum->fprs + 16);
5208 int size = s390_function_arg_size (mode, type);
5209 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
/* gprs 2..6 hold arguments; a multi-word arg must fit entirely.  */
5211 if (cum->gprs + n_gprs > 5)
5214 return gen_rtx (REG, mode, cum->gprs + 2);
5219 /* Create and return the va_list datatype.
5221 On S/390, va_list is an array type equivalent to
5223 typedef struct __va_list_tag
5227 void *__overflow_arg_area;
5228 void *__reg_save_area;
5232 where __gpr and __fpr hold the number of general purpose
5233 or floating point arguments used up to now, respectively,
5234 __overflow_arg_area points to the stack location of the
5235 next argument passed on the stack, and __reg_save_area
5236 always points to the start of the register area in the
5237 call frame of the current function. The function prologue
5238 saves all registers used for argument passing into this
5239 area if the function uses variable arguments. */
5242 s390_build_va_list ()
5244 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
5246 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5249 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Four fields, in declaration order: counters, overflow, save area.  */
5251 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5252 long_integer_type_node);
5253 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5254 long_integer_type_node);
5255 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5257 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
5260 DECL_FIELD_CONTEXT (f_gpr) = record;
5261 DECL_FIELD_CONTEXT (f_fpr) = record;
5262 DECL_FIELD_CONTEXT (f_ovf) = record;
5263 DECL_FIELD_CONTEXT (f_sav) = record;
5265 TREE_CHAIN (record) = type_decl;
5266 TYPE_NAME (record) = type_decl;
5267 TYPE_FIELDS (record) = f_gpr;
5268 TREE_CHAIN (f_gpr) = f_fpr;
5269 TREE_CHAIN (f_fpr) = f_ovf;
5270 TREE_CHAIN (f_ovf) = f_sav;
5272 layout_type (record);
5274 /* The correct type is an array type of one element. */
5275 return build_array_type (record, build_index_type (size_zero_node));
5278 /* Implement va_start by filling the va_list structure VALIST.
5279 STDARG_P is always true, and ignored.
5280 NEXTARG points to the first anonymous stack argument.
5282 The following global variables are used to initialize
5283 the va_list structure:
5285 current_function_args_info:
5286 holds number of gprs and fprs used for named arguments.
5287 current_function_arg_offset_rtx:
5288 holds the offset of the first anonymous stack argument
5289 (relative to the virtual arg pointer). */
5292 s390_va_start (valist, nextarg)
5294 rtx nextarg ATTRIBUTE_UNUSED;
5296 HOST_WIDE_INT n_gpr, n_fpr;
5298 tree f_gpr, f_fpr, f_ovf, f_sav;
5299 tree gpr, fpr, ovf, sav, t;
/* Field decls follow the chain built by s390_build_va_list.  */
5301 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5302 f_fpr = TREE_CHAIN (f_gpr);
5303 f_ovf = TREE_CHAIN (f_fpr);
5304 f_sav = TREE_CHAIN (f_ovf);
5306 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5307 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5308 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5309 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5310 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5312 /* Count number of gp and fp argument registers used. */
5314 n_gpr = current_function_args_info.gprs;
5315 n_fpr = current_function_args_info.fprs;
5317 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5318 TREE_SIDE_EFFECTS (t) = 1;
5319 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5321 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5322 TREE_SIDE_EFFECTS (t) = 1;
5323 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5325 /* Find the overflow area. */
5326 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Negative offsets mean all named args fit in registers; clamp to 0.  */
5328 off = INTVAL (current_function_arg_offset_rtx);
5329 off = off < 0 ? 0 : off;
5330 if (TARGET_DEBUG_ARG)
5331 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5332 (int)n_gpr, (int)n_fpr, off);
5334 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5336 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5337 TREE_SIDE_EFFECTS (t) = 1;
5338 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5340 /* Find the register save area. */
/* The save area starts STACK_POINTER_OFFSET below the incoming args.  */
5341 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5342 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5343 build_int_2 (-STACK_POINTER_OFFSET, -1));
5344 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5345 TREE_SIDE_EFFECTS (t) = 1;
5346 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5349 /* Implement va_arg by updating the va_list structure
5350 VALIST as required to retrieve an argument of type
5351 TYPE, and returning that argument.
5353 Generates code equivalent to:
5355 if (integral value) {
5356 if (size <= 4 && args.gpr < 5 ||
5357 size > 4 && args.gpr < 4 )
5358 ret = args.reg_save_area[args.gpr+8]
5360 ret = *args.overflow_arg_area++;
5361 } else if (float value) {
5363 ret = args.reg_save_area[args.fpr+64]
5365 ret = *args.overflow_arg_area++;
5366 } else if (aggregate value) {
5368 ret = *args.reg_save_area[args.gpr]
5370 ret = **args.overflow_arg_area++;
5374 s390_va_arg (valist, type)
5378 tree f_gpr, f_fpr, f_ovf, f_sav;
5379 tree gpr, fpr, ovf, sav, reg, t, u;
5380 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
5381 rtx lab_false, lab_over, addr_rtx, r;
5383 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5384 f_fpr = TREE_CHAIN (f_gpr);
5385 f_ovf = TREE_CHAIN (f_fpr);
5386 f_sav = TREE_CHAIN (f_ovf);
5388 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5389 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5390 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5391 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5392 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5394 size = int_size_in_bytes (type);
/* Case 1: by-reference -- fetch a pointer from a gpr slot and add
   an extra dereference at the end (indirect_p).  */
5396 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
5398 if (TARGET_DEBUG_ARG)
5400 fprintf (stderr, "va_arg: aggregate type");
5404 /* Aggregates are passed by reference. */
/* gpr arg slots start 2 words into the save area (gprs 0-1 skipped).  */
5408 sav_ofs = 2 * UNITS_PER_WORD;
5409 sav_scale = UNITS_PER_WORD;
5410 size = UNITS_PER_WORD;
/* Case 2: hardware float -- fetch from the fpr save slots.  */
5413 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
5415 if (TARGET_DEBUG_ARG)
5417 fprintf (stderr, "va_arg: float type");
5421 /* FP args go in FP registers, if present. */
/* fpr slots follow the 16 gpr-sized slots in the save area.  */
5425 sav_ofs = 16 * UNITS_PER_WORD;
5427 /* TARGET_64BIT has up to 4 parameter in fprs */
5428 max_reg = TARGET_64BIT ? 3 : 1;
/* Case 3: everything else -- fetch from gpr save slots.  */
5432 if (TARGET_DEBUG_ARG)
5434 fprintf (stderr, "va_arg: other type");
5438 /* Otherwise into GP registers. */
5441 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
5442 sav_ofs = 2 * UNITS_PER_WORD;
/* Small scalars are right-justified within their word slot; adjust
   the offset so we read the value's low-order bytes.  */
5444 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
5445 TYPE_MODE (type) == HImode ? 6 :
5446 TYPE_MODE (type) == QImode ? 7 : 0;
5448 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
5449 TYPE_MODE (type) == QImode ? 3 : 0;
5451 sav_scale = UNITS_PER_WORD;
5458 /* Pull the value out of the saved registers ... */
5460 lab_false = gen_label_rtx ();
5461 lab_over = gen_label_rtx ();
5462 addr_rtx = gen_reg_rtx (Pmode);
/* If the register counter exceeds max_reg, fall through to the
   overflow (stack) path at lab_false.  */
5464 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
5466 GT, const1_rtx, Pmode, 0, lab_false);
/* addr = sav + sav_ofs + reg * sav_scale  */
5469 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
5473 u = build (MULT_EXPR, long_integer_type_node,
5474 reg, build_int_2 (sav_scale, 0));
5475 TREE_SIDE_EFFECTS (u) = 1;
5477 t = build (PLUS_EXPR, ptr_type_node, t, u);
5478 TREE_SIDE_EFFECTS (t) = 1;
5480 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5482 emit_move_insn (addr_rtx, r);
5485 emit_jump_insn (gen_jump (lab_over));
5487 emit_label (lab_false);
5489 /* ... Otherwise out of the overflow area. */
5491 t = save_expr (ovf);
5494 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
/* Right-justify sub-word values within their stack slot too.  */
5495 if (size < UNITS_PER_WORD)
5497 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
5498 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5499 TREE_SIDE_EFFECTS (t) = 1;
5500 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5502 t = save_expr (ovf);
5505 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5507 emit_move_insn (addr_rtx, r);
/* Bump the overflow pointer past this argument.  */
5509 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5510 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5511 TREE_SIDE_EFFECTS (t) = 1;
5512 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5514 emit_label (lab_over);
5516 /* Advance the register counter past the n_reg register slots just
5517 consumed from the register save area. */
5519 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
5520 build_int_2 (n_reg, 0));
5521 TREE_SIDE_EFFECTS (u) = 1;
5522 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* For indirect (by-reference) args, do the extra dereference.  */
5526 r = gen_rtx_MEM (Pmode, addr_rtx);
5527 set_mem_alias_set (r, get_varargs_alias_set ());
5528 emit_move_insn (addr_rtx, r);
5536 /* Output assembly code for the trampoline template to
5539 On S/390, we use gpr 1 internally in the trampoline code;
5540 gpr 0 is used to hold the static chain. */
5543 s390_trampoline_template (file)
/* 64-bit variant: PC-relative larl, then two 8-byte literal slots
   (static chain, target address) patched by initialize_trampoline.  */
5548 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
5549 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
5550 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
5551 fprintf (file, "br\t%s\n", reg_names[1]);
5552 fprintf (file, "0:\t.quad\t0\n");
5553 fprintf (file, ".quad\t0\n");
/* 31-bit variant: basr to get the PC, then two 4-byte literal slots
   at fixed displacements 10 and 14 from the basr address.  */
5557 fprintf (file, "basr\t%s,0\n", reg_names[1]);
5558 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
5559 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
5560 fprintf (file, "br\t%s\n", reg_names[1]);
5561 fprintf (file, ".long\t0\n");
5562 fprintf (file, ".long\t0\n");
5566 /* Emit RTL insns to initialize the variable parts of a trampoline.
5567 FNADDR is an RTX for the address of the function's pure code.
5568 CXT is an RTX for the static chain value for the function. */
5571 s390_initialize_trampoline (addr, fnaddr, cxt)
/* Offsets 20/28 (64-bit) and 12/16 (31-bit) are the two literal
   slots emitted by s390_trampoline_template above.  */
5576 emit_move_insn (gen_rtx
5578 memory_address (Pmode,
5579 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
5580 emit_move_insn (gen_rtx
5582 memory_address (Pmode,
5583 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
5586 /* Return rtx for 64-bit constant formed from the 32-bit subwords
5587 LOW and HIGH, independent of the host word size. */
5590 s390_gen_rtx_const_DI (high, low)
/* On hosts with 64-bit HOST_WIDE_INT, combine the halves directly
   (the shift of HIGH is in an elided line of this extraction).  */
5594 #if HOST_BITS_PER_WIDE_INT >= 64
5596 val = (HOST_WIDE_INT)high;
5598 val |= (HOST_WIDE_INT)low;
5600 return GEN_INT (val);
/* Otherwise build a CONST_DOUBLE from the two 32-bit halves.  */
5602 #if HOST_BITS_PER_WIDE_INT >= 32
5603 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
5610 /* Output assembler code to FILE to increment profiler label # LABELNO
5611 for profiling a function entry. */
/* NOTE(review): the function header, local declarations, and the
   TARGET_64BIT / flag_pic guards separating the three code paths
   below are elided from this chunk (numbering jumps 5614 -> 5621,
   5645 -> 5649, 5659 -> 5663) -- confirm against the full file.  */
5614 s390_function_profiler (file, labelno)
5621 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
5623 fprintf (file, "# function profiler \n");
/* op[0]: return-address register, saved/restored around the call.
   op[1]: stack slot one word above the stack pointer used to spill it.  */
5625 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
5626 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
5627 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
/* op[2]: scratch gpr 1; op[3]: the per-call-site counter label.  */
5629 op[2] = gen_rtx_REG (Pmode, 1);
5630 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
5631 SYMBOL_REF_FLAG (op[3]) = 1;
/* op[4]: _mcount; under PIC it is wrapped in UNSPEC 113 -- presumably
   a GOT-entry relocation marker, same constant as used in
   s390_output_mi_thunk -- confirm.  */
5633 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
5636 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
5637 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit path: save return reg, materialize the label with LARL,
   call _mcount with BRASL, restore return reg.  */
5642 output_asm_insn ("stg\t%0,%1", op);
5643 output_asm_insn ("larl\t%2,%3", op);
5644 output_asm_insn ("brasl\t%0,%4", op);
5645 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit PIC path: BRAS over an inline literal pool holding the
   absolute addresses of _mcount and the label, then load both
   through the base register and call indirectly.  */
5649 op[6] = gen_label_rtx ();
5651 output_asm_insn ("st\t%0,%1", op);
5652 output_asm_insn ("bras\t%2,%l6", op);
5653 output_asm_insn (".long\t%4", op);
5654 output_asm_insn (".long\t%3", op);
5655 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
5656 output_asm_insn ("l\t%0,0(%2)", op);
5657 output_asm_insn ("l\t%2,4(%2)", op);
5658 output_asm_insn ("basr\t%0,%0", op);
5659 output_asm_insn ("l\t%0,%1", op);
/* 31-bit non-PIC path: same scheme, but the pool holds label-relative
   displacements (%4-%l5, %3-%l5) that are turned back into absolute
   addresses by adding the base register.  */
5663 op[5] = gen_label_rtx ();
5664 op[6] = gen_label_rtx ();
5666 output_asm_insn ("st\t%0,%1", op);
5667 output_asm_insn ("bras\t%2,%l6", op);
5668 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[5]));
5669 output_asm_insn (".long\t%4-%l5", op);
5670 output_asm_insn (".long\t%3-%l5", op);
5671 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
5672 output_asm_insn ("lr\t%0,%2", op);
5673 output_asm_insn ("a\t%0,0(%2)", op);
5674 output_asm_insn ("a\t%2,4(%2)", op);
5675 output_asm_insn ("basr\t%0,%0", op);
5676 output_asm_insn ("l\t%0,%1", op);
5680 /* Select section for constant in constant pool. In 32-bit mode,
5681 constants go in the function section; in 64-bit mode in .rodata. */
/* Implements TARGET_ASM_SELECT_RTX_SECTION.  NOTE(review): the
   if (TARGET_64BIT)/else guard between the two section calls is
   elided from this chunk (numbering jumps 5687 -> 5690 -> 5692) --
   confirm against the full file.  */
5684 s390_select_rtx_section (mode, x, align)
5685 enum machine_mode mode ATTRIBUTE_UNUSED;
5686 rtx x ATTRIBUTE_UNUSED;
5687 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
5690 readonly_data_section ();
5692 function_section (current_function_decl);
5695 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
5696 may access it directly in the GOT. */
/* Implements TARGET_ENCODE_SECTION_INFO.  DECL may be a decl or a
   constant; its RTL is fetched accordingly below.  */
5699 s390_encode_section_info (decl, first)
5701 int first ATTRIBUTE_UNUSED;
/* Constants ('d' is the tree-code class for declarations) keep their
   RTL in TREE_CST_RTL; real decls in DECL_RTL.  */
5705 rtx rtl = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5706 ? TREE_CST_RTL (decl) : DECL_RTL (decl));
5708 if (GET_CODE (rtl) == MEM)
/* Set SYMBOL_REF_FLAG when the symbol is local: either it is a
   constant (not a decl at all) or a non-public decl.  */
5710 SYMBOL_REF_FLAG (XEXP (rtl, 0))
5711 = (TREE_CODE_CLASS (TREE_CODE (decl)) != 'd'
5712 || ! TREE_PUBLIC (decl));
5717 /* Output thunk to FILE that implements a C++ virtual function call (with
5718 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
5719 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
5720 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
5721 relative to the resulting this pointer. */
/* Implements TARGET_ASM_OUTPUT_MI_THUNK.  NOTE(review): the function
   header, braces, and the TARGET_64BIT / vcall_offset / flag_pic
   guards separating the paths below are elided from this chunk (the
   embedded numbering jumps repeatedly) -- confirm against the full
   file before editing.  */
5724 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
5726 tree thunk ATTRIBUTE_UNUSED;
5727 HOST_WIDE_INT delta;
5728 HOST_WIDE_INT vcall_offset;
5733 /* Operand 0 is the target function. */
5734 op[0] = XEXP (DECL_RTL (function), 0);
/* Under PIC a non-local target must be reached through the GOT;
   UNSPEC 113 is presumably the GOT-entry relocation wrapper (same
   constant as in s390_function_profiler) -- confirm.  */
5735 if (flag_pic && !SYMBOL_REF_FLAG (op[0]))
5737 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]), 113);
5738 op[0] = gen_rtx_CONST (Pmode, op[0]);
5741 /* Operand 1 is the 'this' pointer. */
/* If the function returns an aggregate in memory, r2 holds the hidden
   return-value pointer, so 'this' is in r3; otherwise r2.  */
5742 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
5743 op[1] = gen_rtx_REG (Pmode, 3);
5745 op[1] = gen_rtx_REG (Pmode, 2);
5747 /* Operand 2 is the delta. */
5748 op[2] = GEN_INT (delta);
5750 /* Operand 3 is the vcall_offset. */
5751 op[3] = GEN_INT (vcall_offset);
5753 /* Operand 4 is the temporary register. */
5754 op[4] = gen_rtx_REG (Pmode, 1);
5756 /* Operands 5 to 8 can be used as labels. */
5762 /* Generate code. */
/* ---- 64-bit path ---- */
5765 /* Setup literal pool pointer if required. */
/* 'K' presumably means "fits a signed 16-bit immediate"; anything
   wider must come from an inline literal pool addressed via LARL.  */
5766 if (!CONST_OK_FOR_LETTER_P (delta, 'K')
5767 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5769 op[5] = gen_label_rtx ();
5770 output_asm_insn ("larl\t%4,%5", op);
5773 /* Add DELTA to this pointer. */
/* 'J' presumably means "fits a 12-bit unsigned displacement" (LA);
   'K' allows AGHI; otherwise add from the literal pool.  */
5776 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
5777 output_asm_insn ("la\t%1,%2(%1)", op);
5778 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
5779 output_asm_insn ("aghi\t%1,%2", op);
5782 op[6] = gen_label_rtx ();
5783 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
5787 /* Perform vcall adjustment. */
/* Load the vtable pointer from *this, then add the adjustment found
   at VCALL_OFFSET inside the vtable to 'this'.  */
5790 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
5792 output_asm_insn ("lg\t%4,0(%1)", op);
5793 output_asm_insn ("ag\t%1,%3(%4)", op);
5795 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5797 output_asm_insn ("lghi\t%4,%3", op);
5798 output_asm_insn ("ag\t%4,0(%1)", op);
5799 output_asm_insn ("ag\t%1,0(%4)", op);
5803 op[7] = gen_label_rtx ();
5804 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
5805 output_asm_insn ("ag\t%4,0(%1)", op);
5806 output_asm_insn ("ag\t%1,0(%4)", op);
5810 /* Jump to target. */
5811 output_asm_insn ("jg\t%0", op);
5813 /* Output literal pool if required. */
5816 output_asm_insn (".align\t4", op);
5817 (*targetm.asm_out.internal_label) (file, "L",
5818 CODE_LABEL_NUMBER (op[5]));
5822 (*targetm.asm_out.internal_label) (file, "L",
5823 CODE_LABEL_NUMBER (op[6]));
5824 output_asm_insn (".long\t%2", op);
5828 (*targetm.asm_out.internal_label) (file, "L",
5829 CODE_LABEL_NUMBER (op[7]));
5830 output_asm_insn (".long\t%3", op);
/* ---- 31-bit path: no LARL, so a BASR-established base register is
   used to reach the literal pool.  ---- */
5835 /* Setup base pointer if required. */
5837 || !CONST_OK_FOR_LETTER_P (delta, 'K')
5838 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K')
5840 op[5] = gen_label_rtx ();
5841 output_asm_insn ("basr\t%4,0", op);
5842 (*targetm.asm_out.internal_label) (file, "L",
5843 CODE_LABEL_NUMBER (op[5]));
5846 /* Add DELTA to this pointer. */
5849 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
5850 output_asm_insn ("la\t%1,%2(%1)", op);
5851 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
5852 output_asm_insn ("ahi\t%1,%2", op);
5855 op[6] = gen_label_rtx ();
5856 output_asm_insn ("a\t%1,%6-%5(%4)", op);
5860 /* Perform vcall adjustment. */
5863 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
/* NOTE(review): "lg" here in a 31-bit sequence alongside "a" looks
   inconsistent with the surrounding 32-bit instructions ("l" would be
   expected) -- verify against the upstream file; lines may be garbled.  */
5865 output_asm_insn ("lg\t%4,0(%1)", op);
5866 output_asm_insn ("a\t%1,%3(%4)", op);
5868 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
5870 output_asm_insn ("lhi\t%4,%3", op);
5871 output_asm_insn ("a\t%4,0(%1)", op);
5872 output_asm_insn ("a\t%1,0(%4)", op);
5876 op[7] = gen_label_rtx ();
5877 output_asm_insn ("l\t%4,%7-%5(%4)", op);
5878 output_asm_insn ("a\t%4,0(%1)", op);
5879 output_asm_insn ("a\t%1,0(%4)", op);
5882 /* We had to clobber the base pointer register.
5883 Re-setup the base pointer (with a different base). */
5884 op[5] = gen_label_rtx ();
5885 output_asm_insn ("basr\t%4,0", op);
5886 (*targetm.asm_out.internal_label) (file, "L",
5887 CODE_LABEL_NUMBER (op[5]));
5890 /* Jump to target. */
/* The target address sits in the pool at label 8: absolute under
   non-PIC ("l" + .long %0), base-relative under PIC ("a" + .long %0-%5).  */
5891 op[8] = gen_label_rtx ();
5893 output_asm_insn ("l\t%4,%8-%5(%4)", op);
5895 output_asm_insn ("a\t%4,%8-%5(%4)", op);
5896 output_asm_insn ("br\t%4", op);
5898 /* Output literal pool. */
5899 output_asm_insn (".align\t4", op);
5900 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[8]));
5902 output_asm_insn (".long\t%0", op);
5904 output_asm_insn (".long\t%0-%5", op);
5908 (*targetm.asm_out.internal_label) (file, "L",
5909 CODE_LABEL_NUMBER (op[6]));
5910 output_asm_insn (".long\t%2", op);
5914 (*targetm.asm_out.internal_label) (file, "L",
5915 CODE_LABEL_NUMBER (op[7]));
5916 output_asm_insn (".long\t%3", op);
5921 /* How to allocate a 'struct machine_function'. */
/* Returns a zero-initialized, garbage-collected machine_function;
   presumably installed as init_machine_status elsewhere in this file
   -- confirm.  */
5923 static struct machine_function *
5924 s390_init_machine_status ()
5926 return ggc_alloc_cleared (sizeof (struct machine_function));
5929 #include "gt-s390.h"