1 /* Subroutines used for code generation on IBM S/390 and zSeries
2 Copyright (C) 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Contributed by Hartmut Penner (hpenner@de.ibm.com) and
4 Ulrich Weigand (uweigand@de.ibm.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
25 #include "coretypes.h"
31 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
48 #include "target-def.h"
50 #include "langhooks.h"
/* Forward declarations of the static functions installed as target hooks
   below (old-style PARAMS prototypes, GCC 3.x era).
   NOTE(review): this extract has gaps in the embedded line numbers; some
   prototypes from the original file may be missing here.  */
53 static bool s390_assemble_integer PARAMS ((rtx, unsigned int, int));
54 static void s390_select_rtx_section PARAMS ((enum machine_mode, rtx,
55 unsigned HOST_WIDE_INT));
56 static void s390_encode_section_info PARAMS ((tree, int));
57 static const char *s390_strip_name_encoding PARAMS ((const char *));
58 static bool s390_cannot_force_const_mem PARAMS ((rtx));
59 static rtx s390_delegitimize_address PARAMS ((rtx));
60 static void s390_init_builtins PARAMS ((void));
61 static rtx s390_expand_builtin PARAMS ((tree, rtx, rtx,
62 enum machine_mode, int));
63 static void s390_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
64 HOST_WIDE_INT, tree));
65 static enum attr_type s390_safe_attr_type PARAMS ((rtx));
67 static int s390_adjust_cost PARAMS ((rtx, rtx, rtx, int));
68 static int s390_issue_rate PARAMS ((void));
69 static int s390_use_dfa_pipeline_interface PARAMS ((void));
70 static bool s390_rtx_costs PARAMS ((rtx, int, int, int *));
71 static int s390_address_cost PARAMS ((rtx));
/* Override the default target hook macros; these are collected into the
   `targetm' vector by the TARGET_INITIALIZER expansion further below.  */
74 #undef TARGET_ASM_ALIGNED_HI_OP
75 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
76 #undef TARGET_ASM_ALIGNED_DI_OP
77 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
78 #undef TARGET_ASM_INTEGER
79 #define TARGET_ASM_INTEGER s390_assemble_integer
/* The S/390 assembler does not want parentheses around expressions.  */
81 #undef TARGET_ASM_OPEN_PAREN
82 #define TARGET_ASM_OPEN_PAREN ""
84 #undef TARGET_ASM_CLOSE_PAREN
85 #define TARGET_ASM_CLOSE_PAREN ""
87 #undef TARGET_ASM_SELECT_RTX_SECTION
88 #define TARGET_ASM_SELECT_RTX_SECTION s390_select_rtx_section
90 #undef TARGET_ENCODE_SECTION_INFO
91 #define TARGET_ENCODE_SECTION_INFO s390_encode_section_info
92 #undef TARGET_STRIP_NAME_ENCODING
93 #define TARGET_STRIP_NAME_ENCODING s390_strip_name_encoding
/* Thread-local storage is supported on this target.  */
96 #undef TARGET_HAVE_TLS
97 #define TARGET_HAVE_TLS true
99 #undef TARGET_CANNOT_FORCE_CONST_MEM
100 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem
102 #undef TARGET_DELEGITIMIZE_ADDRESS
103 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address
105 #undef TARGET_INIT_BUILTINS
106 #define TARGET_INIT_BUILTINS s390_init_builtins
107 #undef TARGET_EXPAND_BUILTIN
108 #define TARGET_EXPAND_BUILTIN s390_expand_builtin
110 #undef TARGET_ASM_OUTPUT_MI_THUNK
111 #define TARGET_ASM_OUTPUT_MI_THUNK s390_output_mi_thunk
112 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
113 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
115 #undef TARGET_SCHED_ADJUST_COST
116 #define TARGET_SCHED_ADJUST_COST s390_adjust_cost
117 #undef TARGET_SCHED_ISSUE_RATE
118 #define TARGET_SCHED_ISSUE_RATE s390_issue_rate
119 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
120 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE s390_use_dfa_pipeline_interface
122 #undef TARGET_RTX_COSTS
123 #define TARGET_RTX_COSTS s390_rtx_costs
124 #undef TARGET_ADDRESS_COST
125 #define TARGET_ADDRESS_COST s390_address_cost
/* The target hook vector, built from the macro overrides above.  */
127 struct gcc_target targetm = TARGET_INITIALIZER;
/* From the reload pass; declared here rather than via a header.  */
129 extern int reload_completed;
131 /* The alias set for prologue/epilogue register save/restore. */
132 static int s390_sr_alias_set = 0;
134 /* Save information from a "cmpxx" operation until the branch or scc is
136 rtx s390_compare_op0, s390_compare_op1;
138 /* The encoding characters for the four TLS models present in ELF. */
/* Index into this string == TLS model; ' ' (index 0) means "not TLS".  */
139 static char const tls_model_chars[] = " GLil";
141 /* Structure used to hold the components of a S/390 memory
142 address. A legitimate address on S/390 is of the general
144 base + index + displacement
145 where any of the components is optional.
147 base and index are registers of the class ADDR_REGS,
148 displacement is an unsigned 12-bit immediate constant. */
/* NOTE(review): the struct s390_address definition itself is not visible
   in this extract (line-number gap).  */
158 /* Which cpu are we tuning for. */
159 enum processor_type s390_cpu;
160 /* Which instruction set architecture to use. */
161 enum processor_type s390_arch;
163 /* Strings to hold which cpu and instruction set architecture to use. */
164 const char *s390_tune_string; /* for -mtune=<xxx> */
165 const char *s390_arch_string; /* for -march=<xxx> */
167 /* Define the structure for the machine field in struct function. */
/* Per-function machine-specific data, garbage-collected (GTY).
   NOTE(review): the extract is missing the braces and several members
   of this struct (line-number gaps); members kept verbatim.  */
169 struct machine_function GTY(())
171 /* Label of start of initial literal pool. */
172 rtx literal_pool_label;
174 /* Set, if some of the fprs 8-15 need to be saved (64 bit abi). */
177 /* Number of first and last gpr to be saved, restored. */
179 int first_restore_gpr;
182 /* Size of stack frame. */
183 HOST_WIDE_INT frame_size;
185 /* Some local-dynamic TLS symbol name. */
186 const char *some_ld_name;
/* Forward declarations for the remaining static helpers in this file.  */
189 static int s390_match_ccmode_set PARAMS ((rtx, enum machine_mode));
190 static int s390_branch_condition_mask PARAMS ((rtx));
191 static const char *s390_branch_condition_mnemonic PARAMS ((rtx, int));
192 static int check_mode PARAMS ((rtx, enum machine_mode *));
193 static int general_s_operand PARAMS ((rtx, enum machine_mode, int));
194 static int s390_decompose_address PARAMS ((rtx, struct s390_address *));
195 static rtx get_thread_pointer PARAMS ((void));
196 static rtx legitimize_tls_address PARAMS ((rtx, rtx));
197 static const char *get_some_local_dynamic_name PARAMS ((void));
198 static int get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
199 static int reg_used_in_mem_p PARAMS ((int, rtx));
200 static int addr_generation_dependency_p PARAMS ((rtx, rtx));
201 static int s390_split_branches PARAMS ((rtx, bool *));
202 static void find_constant_pool_ref PARAMS ((rtx, rtx *));
203 static void replace_constant_pool_ref PARAMS ((rtx *, rtx, rtx));
204 static int find_base_register_in_addr PARAMS ((struct s390_address *));
205 static bool find_base_register_ref PARAMS ((rtx));
206 static void replace_base_register_ref PARAMS ((rtx *, rtx));
207 static void s390_optimize_prolog PARAMS ((int));
208 static bool s390_fixup_clobbered_return_reg PARAMS ((rtx));
209 static int find_unused_clobbered_reg PARAMS ((void));
210 static void s390_frame_info PARAMS ((void));
211 static rtx save_fpr PARAMS ((rtx, int, int));
212 static rtx restore_fpr PARAMS ((rtx, int, int));
213 static rtx save_gprs PARAMS ((rtx, int, int, int));
214 static rtx restore_gprs PARAMS ((rtx, int, int, int));
215 static int s390_function_arg_size PARAMS ((enum machine_mode, tree));
216 static struct machine_function * s390_init_machine_status PARAMS ((void));
218 /* Return true if SET either doesn't set the CC register, or else
219 the source and destination have matching CC modes and that
220 CC mode is at least as constrained as REQ_MODE. */
/* NOTE(review): this extract is missing several lines of the function
   (return type, parameter decl for SET, braces, early returns -- see
   the gaps in the embedded line numbers); code kept verbatim.  */
223 s390_match_ccmode_set (set, req_mode)
225 enum machine_mode req_mode;
227 enum machine_mode set_mode;
/* Ignore anything that is not a SET of the CC register.  */
229 if (GET_CODE (set) != SET)
232 if (GET_CODE (SET_DEST (set)) != REG || !CC_REGNO_P (REGNO (SET_DEST (set))))
235 set_mode = GET_MODE (SET_DEST (set));
248 if (req_mode != set_mode)
253 if (req_mode != CCSmode && req_mode != CCUmode && req_mode != CCTmode
254 && req_mode != CCSRmode && req_mode != CCURmode)
260 if (req_mode != CCAmode)
/* Accept only if the source carries the same CC mode as the dest.  */
268 return (GET_MODE (SET_SRC (set)) == set_mode);
271 /* Return true if every SET in INSN that sets the CC register
272 has source and destination with matching CC modes and that
273 CC mode is at least as constrained as REQ_MODE.
274 If REQ_MODE is VOIDmode, always return false. */
/* NOTE(review): lines are missing from this extract (function header,
   INSN parameter decl, loop braces, final return); code kept verbatim.  */
277 s390_match_ccmode (insn, req_mode)
279 enum machine_mode req_mode;
283 /* s390_tm_ccmode returns VOIDmode to indicate failure. */
284 if (req_mode == VOIDmode)
/* Single SET: check it directly.  */
287 if (GET_CODE (PATTERN (insn)) == SET)
288 return s390_match_ccmode_set (PATTERN (insn), req_mode);
/* PARALLEL: every member SET must satisfy the mode check.  */
290 if (GET_CODE (PATTERN (insn)) == PARALLEL)
291 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
293 rtx set = XVECEXP (PATTERN (insn), 0, i);
294 if (GET_CODE (set) == SET)
295 if (!s390_match_ccmode_set (set, req_mode))
302 /* If a test-under-mask instruction can be used to implement
303 (compare (and ... OP1) OP2), return the CC mode required
304 to do that. Otherwise, return VOIDmode.
305 MIXED is true if the instruction can distinguish between
306 CC1 and CC2 for mixed selected bits (TMxx), it is false
307 if the instruction cannot (TM). */
/* NOTE(review): function header/returns partly missing in this extract
   (line-number gaps); code kept verbatim.  */
310 s390_tm_ccmode (op1, op2, mixed)
317 /* ??? Fixme: should work on CONST_DOUBLE as well. */
318 if (GET_CODE (op1) != CONST_INT || GET_CODE (op2) != CONST_INT)
321 /* Selected bits all zero: CC0. */
322 if (INTVAL (op2) == 0)
325 /* Selected bits all one: CC3. */
326 if (INTVAL (op2) == INTVAL (op1))
329 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. */
332 bit1 = exact_log2 (INTVAL (op2));
333 bit0 = exact_log2 (INTVAL (op1) ^ INTVAL (op2));
334 if (bit0 != -1 && bit1 != -1)
335 return bit0 > bit1 ? CCT1mode : CCT2mode;
341 /* Given a comparison code OP (EQ, NE, etc.) and the operands
342 OP0 and OP1 of a COMPARE, return the mode to be used for the
/* NOTE(review): this extract is missing the enclosing switch on CODE and
   several case labels/returns (line-number gaps); code kept verbatim.
   The visible tests pick specialized CC modes for add-immediate, plus/
   minus/neg, AND (via test-under-mask), and sign/zero extensions.  */
346 s390_select_ccmode (code, op0, op1)
355 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
356 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
358 if (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS
359 || GET_CODE (op1) == NEG)
362 if (GET_CODE (op0) == AND)
364 /* Check whether we can potentially do it via TM. */
365 enum machine_mode ccmode;
366 ccmode = s390_tm_ccmode (XEXP (op0, 1), op1, 1);
367 if (ccmode != VOIDmode)
369 /* Relax CCTmode to CCZmode to allow fall-back to AND
370 if that turns out to be beneficial. */
371 return ccmode == CCTmode ? CCZmode : ccmode;
/* HImode/QImode register compared against all-ones constant.  */
375 if (register_operand (op0, HImode)
376 && GET_CODE (op1) == CONST_INT
377 && (INTVAL (op1) == -1 || INTVAL (op1) == 65535))
379 if (register_operand (op0, QImode)
380 && GET_CODE (op1) == CONST_INT
381 && (INTVAL (op1) == -1 || INTVAL (op1) == 255))
390 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT
391 && CONST_OK_FOR_LETTER_P (INTVAL (XEXP (op0, 1)), 'K'))
393 if (INTVAL (XEXP((op0), 1)) < 0)
/* Sign/zero extension of a register compared against a register.  */
406 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
407 && GET_CODE (op1) != CONST_INT)
413 if (GET_CODE (op0) == PLUS)
416 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
417 && GET_CODE (op1) != CONST_INT)
423 if (GET_CODE (op0) == MINUS)
426 if ((GET_CODE (op0) == SIGN_EXTEND || GET_CODE (op0) == ZERO_EXTEND)
427 && GET_CODE (op1) != CONST_INT)
436 /* Return branch condition mask to implement a branch
437 specified by CODE. */
/* NOTE(review): the switch/case scaffolding is partly missing from this
   extract (mode case labels, defaults, aborts); code kept verbatim.  */
440 s390_branch_condition_mask (code)
/* Mask bits are MSB-first: CC0 is bit 3, CC3 is bit 0, matching the
   4-bit mask field of the BRC/BRCL instructions.  */
443 const int CC0 = 1 << 3;
444 const int CC1 = 1 << 2;
445 const int CC2 = 1 << 1;
446 const int CC3 = 1 << 0;
/* CODE must be a comparison of the CC register against zero.  */
448 if (GET_CODE (XEXP (code, 0)) != REG
449 || REGNO (XEXP (code, 0)) != CC_REGNUM
450 || XEXP (code, 1) != const0_rtx)
/* Dispatch on the CC mode, then on the comparison code.  */
453 switch (GET_MODE (XEXP (code, 0)))
456 switch (GET_CODE (code))
459 case NE: return CC1 | CC2 | CC3;
466 switch (GET_CODE (code))
469 case NE: return CC0 | CC2 | CC3;
476 switch (GET_CODE (code))
479 case NE: return CC0 | CC1 | CC3;
486 switch (GET_CODE (code))
489 case NE: return CC0 | CC1 | CC2;
496 switch (GET_CODE (code))
498 case EQ: return CC0 | CC2;
499 case NE: return CC1 | CC3;
506 switch (GET_CODE (code))
508 case LTU: return CC2 | CC3; /* carry */
509 case GEU: return CC0 | CC1; /* no carry */
516 switch (GET_CODE (code))
518 case GTU: return CC0 | CC1; /* borrow */
519 case LEU: return CC2 | CC3; /* no borrow */
526 switch (GET_CODE (code))
529 case NE: return CC1 | CC2 | CC3;
530 case LTU: return CC1;
531 case GTU: return CC2;
532 case LEU: return CC0 | CC1;
533 case GEU: return CC0 | CC2;
/* The following group mirrors the one above with CC1/CC2 swapped
   (reversed-operand compare modes).  */
540 switch (GET_CODE (code))
543 case NE: return CC2 | CC1 | CC3;
544 case LTU: return CC2;
545 case GTU: return CC1;
546 case LEU: return CC0 | CC2;
547 case GEU: return CC0 | CC1;
554 switch (GET_CODE (code))
557 case NE: return CC1 | CC2 | CC3;
558 case LT: return CC1 | CC3;
560 case LE: return CC0 | CC1 | CC3;
561 case GE: return CC0 | CC2;
568 switch (GET_CODE (code))
571 case NE: return CC1 | CC2 | CC3;
573 case GT: return CC2 | CC3;
574 case LE: return CC0 | CC1;
575 case GE: return CC0 | CC2 | CC3;
/* Signed compare including the unordered/float relations.  */
582 switch (GET_CODE (code))
585 case NE: return CC1 | CC2 | CC3;
588 case LE: return CC0 | CC1;
589 case GE: return CC0 | CC2;
590 case UNORDERED: return CC3;
591 case ORDERED: return CC0 | CC1 | CC2;
592 case UNEQ: return CC0 | CC3;
593 case UNLT: return CC1 | CC3;
594 case UNGT: return CC2 | CC3;
595 case UNLE: return CC0 | CC1 | CC3;
596 case UNGE: return CC0 | CC2 | CC3;
597 case LTGT: return CC1 | CC2;
604 switch (GET_CODE (code))
607 case NE: return CC2 | CC1 | CC3;
610 case LE: return CC0 | CC2;
611 case GE: return CC0 | CC1;
612 case UNORDERED: return CC3;
613 case ORDERED: return CC0 | CC2 | CC1;
614 case UNEQ: return CC0 | CC3;
615 case UNLT: return CC2 | CC3;
616 case UNGT: return CC1 | CC3;
617 case UNLE: return CC0 | CC2 | CC3;
618 case UNGE: return CC0 | CC1 | CC3;
619 case LTGT: return CC2 | CC1;
630 /* If INV is false, return assembler mnemonic string to implement
631 a branch specified by CODE. If INV is true, return mnemonic
632 for the corresponding inverted branch. */
/* NOTE(review): lines missing from this extract (function header, the
   mask inversion using INV, abort on bad mask); code kept verbatim.  */
635 s390_branch_condition_mnemonic (code, inv)
/* Table indexed by 4-bit condition mask; entries 0 and 15 (never/always)
   are invalid here.  */
639 static const char *const mnemonic[16] =
641 NULL, "o", "h", "nle",
642 "l", "nhe", "lh", "ne",
643 "e", "nlh", "he", "nl",
644 "le", "nh", "no", NULL
647 int mask = s390_branch_condition_mask (code);
652 if (mask < 1 || mask > 14)
655 return mnemonic[mask];
658 /* If OP is an integer constant of mode MODE with exactly one
659 HImode subpart unequal to DEF, return the number of that
660 subpart. As a special case, all HImode subparts of OP are
661 equal to DEF, return zero. Otherwise, return -1. */
/* NOTE(review): loop bodies and the part-tracking logic are partly
   missing from this extract (line-number gaps); code kept verbatim.  */
664 s390_single_hi (op, mode, def)
666 enum machine_mode mode;
669 if (GET_CODE (op) == CONST_INT)
671 unsigned HOST_WIDE_INT value = 0;
/* Number of 16-bit halfwords in MODE.  */
672 int n_parts = GET_MODE_SIZE (mode) / 2;
675 for (i = 0; i < n_parts; i++)
678 value = (unsigned HOST_WIDE_INT) INTVAL (op);
682 if ((value & 0xffff) != (unsigned)(def & 0xffff))
/* Parts were scanned low-to-high; convert to big-endian numbering.  */
691 return part == -1 ? 0 : (n_parts - 1 - part);
694 else if (GET_CODE (op) == CONST_DOUBLE
695 && GET_MODE (op) == VOIDmode)
697 unsigned HOST_WIDE_INT value = 0;
698 int n_parts = GET_MODE_SIZE (mode) / 2;
701 for (i = 0; i < n_parts; i++)
/* Reload VALUE from the low/high word as we cross the boundary.  */
704 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
705 else if (i == HOST_BITS_PER_WIDE_INT / 16)
706 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
710 if ((value & 0xffff) != (unsigned)(def & 0xffff))
719 return part == -1 ? 0 : (n_parts - 1 - part);
725 /* Extract the HImode part number PART from integer
726 constant OP of mode MODE. */
/* NOTE(review): function header and abort paths are partly missing in
   this extract; code kept verbatim.  */
729 s390_extract_hi (op, mode, part)
731 enum machine_mode mode;
734 int n_parts = GET_MODE_SIZE (mode) / 2;
735 if (part < 0 || part >= n_parts)
/* PART is big-endian numbered; convert to shift count from the low end.  */
738 part = n_parts - 1 - part;
740 if (GET_CODE (op) == CONST_INT)
742 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
743 return ((value >> (16 * part)) & 0xffff);
745 else if (GET_CODE (op) == CONST_DOUBLE
746 && GET_MODE (op) == VOIDmode)
748 unsigned HOST_WIDE_INT value;
749 if (part < HOST_BITS_PER_WIDE_INT / 16)
750 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
/* Comma expression: pick the high word and rebase PART into it.  */
752 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
753 part -= HOST_BITS_PER_WIDE_INT / 16;
755 return ((value >> (16 * part)) & 0xffff);
761 /* If OP is an integer constant of mode MODE with exactly one
762 QImode subpart unequal to DEF, return the number of that
763 subpart. As a special case, all QImode subparts of OP are
764 equal to DEF, return zero. Otherwise, return -1. */
/* Byte-granular twin of s390_single_hi above; same extract gaps apply
   (line numbers skip) -- code kept verbatim.  */
767 s390_single_qi (op, mode, def)
769 enum machine_mode mode;
772 if (GET_CODE (op) == CONST_INT)
774 unsigned HOST_WIDE_INT value = 0;
775 int n_parts = GET_MODE_SIZE (mode);
778 for (i = 0; i < n_parts; i++)
781 value = (unsigned HOST_WIDE_INT) INTVAL (op);
785 if ((value & 0xff) != (unsigned)(def & 0xff))
794 return part == -1 ? 0 : (n_parts - 1 - part);
797 else if (GET_CODE (op) == CONST_DOUBLE
798 && GET_MODE (op) == VOIDmode)
800 unsigned HOST_WIDE_INT value = 0;
801 int n_parts = GET_MODE_SIZE (mode);
804 for (i = 0; i < n_parts; i++)
807 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
808 else if (i == HOST_BITS_PER_WIDE_INT / 8)
809 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op);
813 if ((value & 0xff) != (unsigned)(def & 0xff))
822 return part == -1 ? 0 : (n_parts - 1 - part);
828 /* Extract the QImode part number PART from integer
829 constant OP of mode MODE. */
/* Byte-granular twin of s390_extract_hi above; extract has line gaps,
   code kept verbatim.  */
832 s390_extract_qi (op, mode, part)
834 enum machine_mode mode;
837 int n_parts = GET_MODE_SIZE (mode);
838 if (part < 0 || part >= n_parts)
841 part = n_parts - 1 - part;
843 if (GET_CODE (op) == CONST_INT)
845 unsigned HOST_WIDE_INT value = (unsigned HOST_WIDE_INT) INTVAL (op);
846 return ((value >> (8 * part)) & 0xff);
848 else if (GET_CODE (op) == CONST_DOUBLE
849 && GET_MODE (op) == VOIDmode)
851 unsigned HOST_WIDE_INT value;
852 if (part < HOST_BITS_PER_WIDE_INT / 8)
853 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (op);
/* Comma expression: switch to the high word and rebase PART.  */
855 value = (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (op),
856 part -= HOST_BITS_PER_WIDE_INT / 8;
858 return ((value >> (8 * part)) & 0xff);
865 /* Change optimizations to be performed, depending on the
868 LEVEL is the optimization level specified; 2 if `-O2' is
869 specified, 1 if `-O' is specified, and 0 if neither is specified.
871 SIZE is nonzero if `-Os' is specified and zero otherwise. */
/* Both parameters are currently unused; the overrides apply at every
   optimization level.  */
874 optimization_options (level, size)
875 int level ATTRIBUTE_UNUSED;
876 int size ATTRIBUTE_UNUSED;
878 /* ??? There are apparently still problems with -fcaller-saves. */
879 flag_caller_saves = 0;
881 /* By default, always emit DWARF-2 unwind info. This allows debugging
882 without maintaining a stack frame back-chain. */
883 flag_asynchronous_unwind_tables = 1;
/* NOTE(review): the function header for this code is not visible in the
   extract -- from its contents (processing -march=/-mtune=, setting
   s390_arch/s390_cpu, validating MASK_ZARCH/MASK_64BIT) this is the
   interior of the target's override_options; confirm against the full
   file.  Many lines are missing (struct pta declaration, PTA_* enum,
   loop braces, error paths); code kept verbatim.  */
890 static const char * const cpu_names[] = TARGET_CPU_DEFAULT_NAMES;
893 const char *const name; /* processor name or nickname. */
894 const enum processor_type processor;
901 const processor_alias_table[] =
903 {"g5", PROCESSOR_9672_G5, PTA_IEEE_FLOAT},
904 {"g6", PROCESSOR_9672_G6, PTA_IEEE_FLOAT},
905 {"z900", PROCESSOR_2064_Z900, PTA_IEEE_FLOAT | PTA_ZARCH},
908 int const pta_size = ARRAY_SIZE (processor_alias_table);
910 /* Acquire a unique set number for our register saves and restores. */
911 s390_sr_alias_set = new_alias_set ();
913 /* Set up function hooks. */
914 init_machine_status = s390_init_machine_status;
916 /* Set cpu and arch, if only partially given. */
917 if (!s390_tune_string && s390_arch_string)
918 s390_tune_string = s390_arch_string;
919 if (!s390_tune_string)
920 s390_tune_string = cpu_names [TARGET_64BIT ? TARGET_CPU_DEFAULT_2064
921 : TARGET_CPU_DEFAULT_9672];
922 if (!s390_arch_string)
923 #ifdef DEFAULT_TARGET_64BIT
924 s390_arch_string = "z900";
926 s390_arch_string = "g5";
/* Resolve -march= against the alias table.  */
929 for (i = 0; i < pta_size; i++)
930 if (! strcmp (s390_arch_string, processor_alias_table[i].name))
932 s390_arch = processor_alias_table[i].processor;
933 /* Default cpu tuning to the architecture. */
934 s390_cpu = s390_arch;
936 if (!(processor_alias_table[i].flags & PTA_ZARCH)
938 error ("64-bit ABI not supported on %s", s390_arch_string);
940 if (!(processor_alias_table[i].flags & PTA_ZARCH)
942 error ("z/Architecture not supported on %s", s390_arch_string);
948 error ("bad value (%s) for -march= switch", s390_arch_string);
950 /* ESA implies 31 bit mode. */
951 if ((target_flags_explicit & MASK_ZARCH) && !TARGET_ZARCH)
953 if ((target_flags_explicit & MASK_64BIT) && TARGET_64BIT)
954 error ("64-bit ABI not possible in ESA/390 mode");
956 target_flags &= ~MASK_64BIT;
/* Resolve -mtune= against the alias table.  */
959 for (i = 0; i < pta_size; i++)
960 if (! strcmp (s390_tune_string, processor_alias_table[i].name))
962 s390_cpu = processor_alias_table[i].processor;
967 error ("bad value (%s) for -mtune= switch", s390_tune_string);
970 /* Map for smallest class containing reg regno. */
/* Layout: 16 GPRs (regno 0 is GENERAL_REGS, 1-15 ADDR_REGS since r0
   cannot be used as an address base), 16 FPRs, then the remaining
   special registers.  */
972 const enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
973 { GENERAL_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
974 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
975 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
976 ADDR_REGS, ADDR_REGS, ADDR_REGS, ADDR_REGS,
977 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
978 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
979 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
980 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
981 ADDR_REGS, NO_REGS, ADDR_REGS
984 /* Return attribute type of insn. */
/* "Safe" variant: falls back (fallback line not visible in this extract)
   when the insn is unrecognizable instead of aborting.  */
986 static enum attr_type
987 s390_safe_attr_type (insn)
990 if (recog_memoized (insn) >= 0)
991 return get_attr_type (insn);
996 /* Return true if OP a (const_int 0) operand.
997 OP is the current operation.
998 MODE is the current operation mode. */
1001 const0_operand (op, mode)
1003 enum machine_mode mode;
/* Pointer comparison works because CONST0_RTX objects are shared.  */
1005 return op == CONST0_RTX (mode);
1008 /* Return true if OP is constant.
1009 OP is the current operation.
1010 MODE is the current operation mode. */
1013 consttable_operand (op, mode)
1015 enum machine_mode mode ATTRIBUTE_UNUSED;
1017 return CONSTANT_P (op);
1020 /* Return true if the mode of operand OP matches MODE.
1021 If MODE is set to VOIDmode, set it to the mode of OP. */
1024 check_mode (op, mode)
1026 enum machine_mode *mode;
/* First call with VOIDmode latches OP's mode for later checks.  */
1028 if (*mode == VOIDmode)
1029 *mode = GET_MODE (op);
/* VOIDmode operands (e.g. const_int) are accepted for any mode.  */
1032 if (GET_MODE (op) != VOIDmode && GET_MODE (op) != *mode)
1038 /* Return true if OP a valid operand for the LARL instruction.
1039 OP is the current operation.
1040 MODE is the current operation mode. */
/* NOTE(review): return statements and some braces are missing from this
   extract (line-number gaps); code kept verbatim.  */
1043 larl_operand (op, mode)
1045 enum machine_mode mode;
1047 if (! check_mode (op, &mode))
1050 /* Allow labels and local symbols. */
1051 if (GET_CODE (op) == LABEL_REF)
1053 if (GET_CODE (op) == SYMBOL_REF
1054 && !tls_symbolic_operand (op)
1055 && (!flag_pic || SYMBOL_REF_FLAG (op)
1056 || CONSTANT_POOL_ADDRESS_P (op)))
1059 /* Everything else must have a CONST, so strip it. */
1060 if (GET_CODE (op) != CONST)
1064 /* Allow adding *even* constants. */
/* LARL addresses are halfword-aligned, hence the odd-offset reject.  */
1065 if (GET_CODE (op) == PLUS)
1067 if (GET_CODE (XEXP (op, 1)) != CONST_INT
1068 || (INTVAL (XEXP (op, 1)) & 1) != 0)
1073 /* Labels and local symbols allowed here as well. */
1074 if (GET_CODE (op) == LABEL_REF)
1076 if (GET_CODE (op) == SYMBOL_REF
1077 && !tls_symbolic_operand (op)
1078 && (!flag_pic || SYMBOL_REF_FLAG (op)
1079 || CONSTANT_POOL_ADDRESS_P (op)))
1082 /* Now we must have a @GOTENT offset or @PLT stub
1083 or an @INDNTPOFF TLS offset. */
/* 111/113 are the magic UNSPEC numbers for GOTENT/PLT in this port;
   compare bras_sym_operand below which also tests 113.  */
1084 if (GET_CODE (op) == UNSPEC
1085 && XINT (op, 1) == 111)
1087 if (GET_CODE (op) == UNSPEC
1088 && XINT (op, 1) == 113)
1090 if (GET_CODE (op) == UNSPEC
1091 && XINT (op, 1) == UNSPEC_INDNTPOFF)
1097 /* Helper routine to implement s_operand and s_imm_operand.
1098 OP is the current operation.
1099 MODE is the current operation mode.
1100 ALLOW_IMMEDIATE specifies whether immediate operands should
1101 be accepted or not. */
/* NOTE(review): switch case labels and returns are partly missing from
   this extract (line-number gaps); code kept verbatim.  */
1104 general_s_operand (op, mode, allow_immediate)
1106 enum machine_mode mode;
1107 int allow_immediate;
1109 struct s390_address addr;
1111 /* Call general_operand first, so that we don't have to
1112 check for many special cases. */
1113 if (!general_operand (op, mode))
1116 /* Just like memory_operand, allow (subreg (mem ...))
1118 if (reload_completed
1119 && GET_CODE (op) == SUBREG
1120 && GET_CODE (SUBREG_REG (op)) == MEM)
1121 op = SUBREG_REG (op);
1123 switch (GET_CODE (op))
1125 /* Constants that we are sure will be forced to the
1126 literal pool in reload are OK as s-operand. Note
1127 that we cannot call s390_preferred_reload_class here
1128 because it might not be known yet at this point
1129 whether the current function is a leaf or not. */
1132 if (!allow_immediate || reload_completed)
1134 if (!legitimate_reload_constant_p (op))
1140 /* Memory operands are OK unless they already use an
1143 if (GET_CODE (XEXP (op, 0)) == ADDRESSOF)
1145 if (s390_decompose_address (XEXP (op, 0), &addr)
1157 /* Return true if OP is a valid S-type operand.
1158 OP is the current operation.
1159 MODE is the current operation mode. */
1162 s_operand (op, mode)
1164 enum machine_mode mode;
/* Thin wrapper: no immediates allowed (third argument 0).  */
1166 return general_s_operand (op, mode, 0);
1169 /* Return true if OP is a valid S-type operand or an immediate
1170 operand that can be addressed as S-type operand by forcing
1171 it into the literal pool.
1172 OP is the current operation.
1173 MODE is the current operation mode. */
1176 s_imm_operand (op, mode)
1178 enum machine_mode mode;
/* Thin wrapper: immediates allowed (third argument 1).  */
1180 return general_s_operand (op, mode, 1);
1183 /* Return true if OP is a valid operand for a 'Q' constraint.
1184 This differs from s_operand in that only memory operands
1185 without index register are accepted, nothing else. */
/* NOTE(review): the function header line itself is missing from this
   extract; only the body fragment below is visible.  */
1191 struct s390_address addr;
1193 if (GET_CODE (op) != MEM)
1196 if (!s390_decompose_address (XEXP (op, 0), &addr))
1205 /* Compute a (partial) cost for rtx X. Return true if the complete
1206 cost has been computed, and false if subexpressions should be
1207 scanned. In either case, *TOTAL contains the cost result. */
/* NOTE(review): the switch on the rtx code and most case labels are
   missing from this extract (line-number gaps); code kept verbatim.  */
1210 s390_rtx_costs (x, code, outer_code, total)
1212 int code, outer_code;
1218 if (GET_CODE (XEXP (x, 0)) == MINUS
1219 && GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
1226 /* Force_const_mem does not work out of reload, because the
1227 saveable_obstack is set to reload_obstack, which does not
1228 live long enough. Because of this we cannot use force_const_mem
1229 in addsi3. This leads to problems with gen_add2_insn with a
1230 constant greater than a short. Because of that we give an
1231 addition of greater constants a cost of 3 (reload1.c 10096). */
1232 /* ??? saveable_obstack no longer exists. */
1233 if (outer_code == PLUS
1234 && (INTVAL (x) > 32767 || INTVAL (x) < -32768))
1235 *total = COSTS_N_INSNS (3);
1256 *total = COSTS_N_INSNS (1);
/* Division-like operations: DImode is far more expensive.  */
1260 if (GET_MODE (XEXP (x, 0)) == DImode)
1261 *total = COSTS_N_INSNS (40);
1263 *total = COSTS_N_INSNS (7);
1270 *total = COSTS_N_INSNS (33);
1278 /* Return the cost of an address rtx ADDR. */
1281 s390_address_cost (addr)
1284 struct s390_address ad;
1285 if (!s390_decompose_address (addr, &ad))
/* Indexed addresses cost one unit more than base+displacement.  */
1288 return ad.indx? COSTS_N_INSNS (1) + 1 : COSTS_N_INSNS (1);
1291 /* Return true if OP is a valid operand for the BRAS instruction.
1292 OP is the current operation.
1293 MODE is the current operation mode. */
/* NOTE(review): return statements and the CONST-stripping lines are
   missing from this extract; code kept verbatim.  */
1296 bras_sym_operand (op, mode)
1298 enum machine_mode mode ATTRIBUTE_UNUSED;
1300 register enum rtx_code code = GET_CODE (op);
1302 /* Allow SYMBOL_REFs. */
1303 if (code == SYMBOL_REF)
1306 /* Allow @PLT stubs. */
/* 113 is the @PLT UNSPEC number used throughout this port (see
   larl_operand above).  */
1308 && GET_CODE (XEXP (op, 0)) == UNSPEC
1309 && XINT (XEXP (op, 0), 1) == 113)
1314 /* If OP is a SYMBOL_REF of a thread-local symbol, return its TLS mode,
1315 otherwise return 0. */
1318 tls_symbolic_operand (op)
1321 const char *symbol_str;
1323 if (GET_CODE (op) != SYMBOL_REF)
1325 symbol_str = XSTR (op, 0);
/* TLS symbols are encoded with a leading '%' followed by a model
   character from tls_model_chars (see file-scope table above).  */
1327 if (symbol_str[0] != '%')
/* Index into tls_model_chars == TLS model number.  */
1329 return strchr (tls_model_chars, symbol_str[1]) - tls_model_chars;
1332 /* Return true if OP is a load multiple operation. It is known to be a
1333 PARALLEL and the first section will be tested.
1334 OP is the current operation.
1335 MODE is the current operation mode. */
/* NOTE(review): some declarations and returns are missing from this
   extract (line-number gaps); code kept verbatim.  */
1338 load_multiple_operation (op, mode)
1340 enum machine_mode mode ATTRIBUTE_UNUSED;
1342 int count = XVECLEN (op, 0);
1343 unsigned int dest_regno;
1348 /* Perform a quick check so we don't blow up below. */
1350 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1351 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
1352 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
1355 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
1356 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
1358 /* Check, is base, or base + displacement. */
1360 if (GET_CODE (src_addr) == REG)
1362 else if (GET_CODE (src_addr) == PLUS
1363 && GET_CODE (XEXP (src_addr, 0)) == REG
1364 && GET_CODE (XEXP (src_addr, 1)) == CONST_INT)
1366 off = INTVAL (XEXP (src_addr, 1));
1367 src_addr = XEXP (src_addr, 0);
1372 if (src_addr == frame_pointer_rtx || src_addr == arg_pointer_rtx)
/* Each subsequent element must load the next consecutive register
   from the next consecutive word.  */
1375 for (i = 1; i < count; i++)
1377 rtx elt = XVECEXP (op, 0, i);
1379 if (GET_CODE (elt) != SET
1380 || GET_CODE (SET_DEST (elt)) != REG
1381 || GET_MODE (SET_DEST (elt)) != Pmode
1382 || REGNO (SET_DEST (elt)) != dest_regno + i
1383 || GET_CODE (SET_SRC (elt)) != MEM
1384 || GET_MODE (SET_SRC (elt)) != Pmode
1385 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
1386 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
1387 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
1388 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1))
1389 != off + i * UNITS_PER_WORD)
1396 /* Return true if OP is a store multiple operation. It is known to be a
1397 PARALLEL and the first section will be tested.
1398 OP is the current operation.
1399 MODE is the current operation mode. */
/* Mirror image of load_multiple_operation above (MEM and REG swapped);
   same extract gaps apply -- code kept verbatim.  */
1402 store_multiple_operation (op, mode)
1404 enum machine_mode mode ATTRIBUTE_UNUSED;
1406 int count = XVECLEN (op, 0);
1407 unsigned int src_regno;
1411 /* Perform a quick check so we don't blow up below. */
1413 || GET_CODE (XVECEXP (op, 0, 0)) != SET
1414 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
1415 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
1418 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
1419 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
1421 /* Check, is base, or base + displacement. */
1423 if (GET_CODE (dest_addr) == REG)
1425 else if (GET_CODE (dest_addr) == PLUS
1426 && GET_CODE (XEXP (dest_addr, 0)) == REG
1427 && GET_CODE (XEXP (dest_addr, 1)) == CONST_INT)
1429 off = INTVAL (XEXP (dest_addr, 1));
1430 dest_addr = XEXP (dest_addr, 0);
1435 if (dest_addr == frame_pointer_rtx || dest_addr == arg_pointer_rtx)
/* Each subsequent element must store the next consecutive register
   to the next consecutive word.  */
1438 for (i = 1; i < count; i++)
1440 rtx elt = XVECEXP (op, 0, i);
1442 if (GET_CODE (elt) != SET
1443 || GET_CODE (SET_SRC (elt)) != REG
1444 || GET_MODE (SET_SRC (elt)) != Pmode
1445 || REGNO (SET_SRC (elt)) != src_regno + i
1446 || GET_CODE (SET_DEST (elt)) != MEM
1447 || GET_MODE (SET_DEST (elt)) != Pmode
1448 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
1449 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
1450 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
1451 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1))
1452 != off + i * UNITS_PER_WORD)
1459 /* Return true if OP contains a symbol reference */
/* Recursive walk over the rtx using its format string: 'E' entries are
   vectors, 'e' entries are sub-expressions.  */
1462 symbolic_reference_mentioned_p (op)
1465 register const char *fmt;
1468 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1471 fmt = GET_RTX_FORMAT (GET_CODE (op));
1472 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1478 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1479 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1483 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1490 /* Return true if OP contains a reference to a thread-local symbol. */
/* Same recursive rtx walk as symbolic_reference_mentioned_p above, but
   only SYMBOL_REFs that tls_symbolic_operand classifies as TLS count.  */
1493 tls_symbolic_reference_mentioned_p (op)
1496 register const char *fmt;
1499 if (GET_CODE (op) == SYMBOL_REF)
1500 return tls_symbolic_operand (op);
1502 fmt = GET_RTX_FORMAT (GET_CODE (op));
1503 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1509 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1510 if (tls_symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1514 else if (fmt[i] == 'e' && tls_symbolic_reference_mentioned_p (XEXP (op, i)))
1522 /* Return true if OP is a legitimate general operand when
1523 generating PIC code. It is given that flag_pic is on
1524 and that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
1527 legitimate_pic_operand_p (op)
1530 /* Accept all non-symbolic constants. */
1531 if (!SYMBOLIC_CONST (op))
1534 /* Reject everything else; must be handled
1535 via emit_symbolic_move. */
1539 /* Returns true if the constant value OP is a legitimate general operand.
1540 It is given that OP satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* NOTE(review): elided listing -- the return statements between the
   visible tests are missing from this excerpt.  */
1543 legitimate_constant_p (op)
1546 /* Accept all non-symbolic constants. */
1547 if (!SYMBOLIC_CONST (op))
1550 /* Accept immediate LARL operands. */
1551 if (TARGET_64BIT && larl_operand (op, VOIDmode))
1554 /* Thread-local symbols are never legal constants. This is
1555 so that emit_call knows that computing such addresses
1556 might require a function call. */
1557 if (TLS_SYMBOLIC_CONST (op))
1560 /* In the PIC case, symbolic constants must *not* be
1561 forced into the literal pool. We accept them here,
1562 so that they will be handled by emit_symbolic_move. */
1566 /* All remaining non-PIC symbolic constants are
1567 forced into the literal pool. */
1571 /* Determine if it's legal to put X into the constant pool. This
1572 is not possible if X contains the address of a symbol that is
1573 not constant (TLS) or not known at final link time (PIC). */
/* NOTE(review): elided listing -- the case labels (CONST_INT, LABEL_REF,
   SYMBOL_REF, CONST, PLUS/MINUS, UNSPEC) and several returns are missing;
   only the bodies survive.  Returns nonzero when X may NOT be pooled.  */
1576 s390_cannot_force_const_mem (x)
1579 switch (GET_CODE (x))
1583 /* Accept all non-symbolic constants. */
1587 /* Labels are OK iff we are non-PIC. */
1588 return flag_pic != 0;
1591 /* 'Naked' TLS symbol references are never OK,
1592 non-TLS symbols are OK iff we are non-PIC. */
1593 if (tls_symbolic_operand (x))
1596 return flag_pic != 0;
/* CONST: look through to the wrapped expression.  */
1599 return s390_cannot_force_const_mem (XEXP (x, 0));
/* PLUS/MINUS: both operands must be poolable.  */
1602 return s390_cannot_force_const_mem (XEXP (x, 0))
1603 || s390_cannot_force_const_mem (XEXP (x, 1));
1606 switch (XINT (x, 1))
1608 /* Only lt-relative or GOT-relative UNSPECs are OK. */
1617 case UNSPEC_GOTNTPOFF:
1618 case UNSPEC_INDNTPOFF:
1631 /* Returns true if the constant value OP is a legitimate general
1632 operand during and after reload. The difference to
1633 legitimate_constant_p is that this function will not accept
1634 a constant that would need to be forced to the literal pool
1635 before it can be used as operand. */
/* NOTE(review): elided listing -- returns and parts of the conditions
   (e.g. the TARGET_64BIT guards before lines 1648/1653) are missing.  */
1638 legitimate_reload_constant_p (op)
1641 /* Accept l(g)hi operands. */
1642 if (GET_CODE (op) == CONST_INT
1643 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1646 /* Accept lliXX operands. */
1648 && s390_single_hi (op, DImode, 0) >= 0)
1651 /* Accept larl operands. */
1653 && larl_operand (op, VOIDmode))
1656 /* Everything else cannot be handled without reload. */
1660 /* Given an rtx OP being reloaded into a reg required to be in class CLASS,
1661 return the class of reg to actually use. */
/* NOTE(review): elided listing -- the case labels of the switch and the
   returned class values are missing from this excerpt.  */
1664 s390_preferred_reload_class (op, class)
1666 enum reg_class class;
1668 /* This can happen if a floating point constant is being
1669 reloaded into an integer register. Leave well alone. */
1670 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1671 && class != FP_REGS)
1674 switch (GET_CODE (op))
1676 /* Constants we cannot reload must be forced into the
1681 if (legitimate_reload_constant_p (op))
1686 /* If a symbolic constant or a PLUS is reloaded,
1687 it is most likely being used as an address, so
1688 prefer ADDR_REGS. If 'class' is not a superset
1689 of ADDR_REGS, e.g. FP_REGS, reject this reload. */
1694 if (reg_class_subset_p (ADDR_REGS, class))
1706 /* Return the register class of a scratch register needed to
1707 load IN into a register of class CLASS in MODE.
1709 We need a temporary when loading a PLUS expression which
1710 is not a legitimate operand of the LOAD ADDRESS instruction. */
/* NOTE(review): elided listing -- the returned classes (for the
   s390_plus_operand case and the default) are missing here.  */
1713 s390_secondary_input_reload_class (class, mode, in)
1714 enum reg_class class ATTRIBUTE_UNUSED;
1715 enum machine_mode mode;
1718 if (s390_plus_operand (in, mode))
1724 /* Return true if OP is a PLUS that is not a legitimate
1725 operand for the LA instruction.
1726 OP is the current operation.
1727 MODE is the current operation mode. */
/* NOTE(review): elided listing -- the returns after each test are
   missing; the visible tests are all rejection conditions.  */
1730 s390_plus_operand (op, mode)
1732 enum machine_mode mode;
1734 if (!check_mode (op, &mode) || mode != Pmode)
1737 if (GET_CODE (op) != PLUS)
1740 if (legitimate_la_operand_p (op))
1746 /* Generate code to load SRC, which is PLUS that is not a
1747 legitimate operand for the LA instruction, into TARGET.
1748 SCRATCH may be used as scratch register. */
/* NOTE(review): elided listing -- aborts/assignments between some of the
   visible statements are missing from this excerpt.  */
1751 s390_expand_plus_operand (target, src, scratch)
1752 register rtx target;
1754 register rtx scratch;
1757 struct s390_address ad;
1759 /* src must be a PLUS; get its two operands. */
1760 if (GET_CODE (src) != PLUS || GET_MODE (src) != Pmode)
1763 /* Check if any of the two operands is already scheduled
1764 for replacement by reload. This can happen e.g. when
1765 float registers occur in an address. */
1766 sum1 = find_replacement (&XEXP (src, 0));
1767 sum2 = find_replacement (&XEXP (src, 1));
1768 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1770 /* If the address is already strictly valid, there's nothing to do. */
1771 if (!s390_decompose_address (src, &ad)
1772 || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
1773 || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
1775 /* Otherwise, one of the operands cannot be an address register;
1776 we reload its value into the scratch register. */
/* Hard regs 1..15 are the usable address registers; anything outside
   that range (reg 0 or a non-GPR) gets copied into SCRATCH.  */
1777 if (true_regnum (sum1) < 1 || true_regnum (sum1) > 15)
1779 emit_move_insn (scratch, sum1);
1782 if (true_regnum (sum2) < 1 || true_regnum (sum2) > 15)
1784 emit_move_insn (scratch, sum2);
1788 /* According to the way these invalid addresses are generated
1789 in reload.c, it should never happen (at least on s390) that
1790 *neither* of the PLUS components, after find_replacements
1791 was applied, is an address register. */
1792 if (sum1 == scratch && sum2 == scratch)
1798 src = gen_rtx_PLUS (Pmode, sum1, sum2);
1801 /* Emit the LOAD ADDRESS pattern. Note that reload of PLUS
1802 is only ever performed on addresses, so we can mark the
1803 sum as legitimate for LA in any case. */
1804 s390_load_address (target, src);
1808 /* Decompose a RTL expression ADDR for a memory address into
1809 its components, returned in OUT.
1811 Returns 0 if ADDR is not a valid memory address, nonzero
1812 otherwise. If OUT is NULL, don't return the components,
1813 but check for validity only.
1815 Note: Only addresses in canonical form are recognized.
1816 LEGITIMIZE_ADDRESS should convert non-canonical forms to the
1817 canonical form so that they will be recognized. */
/* NOTE(review): elided listing -- many lines (braces, 'return FALSE'
   statements, some else-branches) are missing; line-number gaps mark
   the elisions.  Comments describe only what is visible.  */
1820 s390_decompose_address (addr, out)
1822 struct s390_address *out;
1824 rtx base = NULL_RTX;
1825 rtx indx = NULL_RTX;
1826 rtx disp = NULL_RTX;
1827 int pointer = FALSE;
1829 /* Decompose address into base + index + displacement. */
1831 if (GET_CODE (addr) == REG || GET_CODE (addr) == UNSPEC)
1834 else if (GET_CODE (addr) == PLUS)
1836 rtx op0 = XEXP (addr, 0);
1837 rtx op1 = XEXP (addr, 1);
1838 enum rtx_code code0 = GET_CODE (op0);
1839 enum rtx_code code1 = GET_CODE (op1);
1841 if (code0 == REG || code0 == UNSPEC)
1843 if (code1 == REG || code1 == UNSPEC)
1845 indx = op0; /* index + base */
1851 base = op0; /* base + displacement */
1856 else if (code0 == PLUS)
1858 indx = XEXP (op0, 0); /* index + base + disp */
1859 base = XEXP (op0, 1);
1870 disp = addr; /* displacement */
1873 /* Prefer to use pointer as base, not index. */
1876 int base_ptr = GET_CODE (base) == UNSPEC
1877 || (REG_P (base) && REG_POINTER (base));
1878 int indx_ptr = GET_CODE (indx) == UNSPEC
1879 || (REG_P (indx) && REG_POINTER (indx));
/* Swap base and index (elided) so the pointer ends up as base.  */
1881 if (!base_ptr && indx_ptr)
1889 /* Validate base register. */
/* UNSPEC 101 wraps the literal-pool base register; unwrap it.
   (101 appears throughout this file as the base-reg UNSPEC code.)  */
1892 if (GET_CODE (base) == UNSPEC)
1894 if (XVECLEN (base, 0) != 1 || XINT (base, 1) != 101)
1896 base = XVECEXP (base, 0, 0);
1900 if (GET_CODE (base) != REG || GET_MODE (base) != Pmode)
/* Registers that are known to hold pointers (stack/frame/arg pointer,
   literal-pool base, virtual regs, PIC register).  */
1903 if (REGNO (base) == BASE_REGISTER
1904 || REGNO (base) == STACK_POINTER_REGNUM
1905 || REGNO (base) == FRAME_POINTER_REGNUM
1906 || ((reload_completed || reload_in_progress)
1907 && frame_pointer_needed
1908 && REGNO (base) == HARD_FRAME_POINTER_REGNUM)
1909 || REGNO (base) == ARG_POINTER_REGNUM
1910 || (REGNO (base) >= FIRST_VIRTUAL_REGISTER
1911 && REGNO (base) <= LAST_VIRTUAL_REGISTER)
1913 && REGNO (base) == PIC_OFFSET_TABLE_REGNUM))
1917 /* Validate index register. */
/* Same unwrapping and pointer test as for the base register.  */
1920 if (GET_CODE (indx) == UNSPEC)
1922 if (XVECLEN (indx, 0) != 1 || XINT (indx, 1) != 101)
1924 indx = XVECEXP (indx, 0, 0);
1928 if (GET_CODE (indx) != REG || GET_MODE (indx) != Pmode)
1931 if (REGNO (indx) == BASE_REGISTER
1932 || REGNO (indx) == STACK_POINTER_REGNUM
1933 || REGNO (indx) == FRAME_POINTER_REGNUM
1934 || ((reload_completed || reload_in_progress)
1935 && frame_pointer_needed
1936 && REGNO (indx) == HARD_FRAME_POINTER_REGNUM)
1937 || REGNO (indx) == ARG_POINTER_REGNUM
1938 || (REGNO (indx) >= FIRST_VIRTUAL_REGISTER
1939 && REGNO (indx) <= LAST_VIRTUAL_REGISTER)
1941 && REGNO (indx) == PIC_OFFSET_TABLE_REGNUM))
1945 /* Validate displacement. */
1948 /* Allow integer constant in range. */
1949 if (GET_CODE (disp) == CONST_INT)
1951 /* If the argument pointer is involved, the displacement will change
1952 later anyway as the argument pointer gets eliminated. This could
1953 make a valid displacement invalid, but it is more likely to make
1954 an invalid displacement valid, because we sometimes access the
1955 register save area via negative offsets to the arg pointer.
1956 Thus we don't check the displacement for validity here. If after
1957 elimination the displacement turns out to be invalid after all,
1958 this is fixed up by reload in any case. */
1959 if (base != arg_pointer_rtx && indx != arg_pointer_rtx)
/* S/390 D-field is an unsigned 12-bit value: 0..4095.  */
1961 if (INTVAL (disp) < 0 || INTVAL (disp) >= 4096)
1966 /* In the small-PIC case, the linker converts @GOT12
1967 and @GOTNTPOFF offsets to possible displacements. */
1968 else if (GET_CODE (disp) == CONST
1969 && GET_CODE (XEXP (disp, 0)) == UNSPEC
1970 && (XINT (XEXP (disp, 0), 1) == 110
1971 || XINT (XEXP (disp, 0), 1) == UNSPEC_GOTNTPOFF))
1979 /* Accept chunkified literal pool symbol references.  */
1980 else if (GET_CODE (disp) == CONST
1981 && GET_CODE (XEXP (disp, 0)) == MINUS
1982 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == LABEL_REF
1983 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == LABEL_REF)
1988 /* Likewise if a constant offset is present. */
1989 else if (GET_CODE (disp) == CONST
1990 && GET_CODE (XEXP (disp, 0)) == PLUS
1991 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT
1992 && GET_CODE (XEXP (XEXP (disp, 0), 0)) == MINUS
1993 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 0)) == LABEL_REF
1994 && GET_CODE (XEXP (XEXP (XEXP (disp, 0), 0), 1)) == LABEL_REF)
1999 /* We can convert literal pool addresses to
2000 displacements by basing them off the base register. */
2003 /* In some cases, we can accept an additional
2004 small constant offset. Split these off here. */
2006 unsigned int offset = 0;
2008 if (GET_CODE (disp) == CONST
2009 && GET_CODE (XEXP (disp, 0)) == PLUS
2010 && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
2012 offset = INTVAL (XEXP (XEXP (disp, 0), 1));
2013 disp = XEXP (XEXP (disp, 0), 0);
2016 /* Now we must have a literal pool address. */
2017 if (GET_CODE (disp) != SYMBOL_REF
2018 || !CONSTANT_POOL_ADDRESS_P (disp))
2021 /* If we have an offset, make sure it does not
2022 exceed the size of the constant pool entry. */
2023 if (offset && offset >= GET_MODE_SIZE (get_pool_mode (disp)))
2026 /* Either base or index must be free to
2027 hold the base register. */
2031 /* Convert the address. */
2033 indx = gen_rtx_REG (Pmode, BASE_REGISTER);
2035 base = gen_rtx_REG (Pmode, BASE_REGISTER);
/* Re-wrap the pool symbol as a literal-pool-relative UNSPEC (code 100)
   and re-apply the split-off offset.  */
2037 disp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, disp), 100);
2038 disp = gen_rtx_CONST (Pmode, disp);
2041 disp = plus_constant (disp, offset);
2055 out->pointer = pointer;
2061 /* Return nonzero if ADDR is a valid memory address.
2062 STRICT specifies whether strict register checking applies. */
/* NOTE(review): elided listing -- the 'if (strict)' / 'else' split and
   the return statements are missing; the two pairs of checks below are
   the strict and non-strict variants respectively.  */
2065 legitimate_address_p (mode, addr, strict)
2066 enum machine_mode mode ATTRIBUTE_UNUSED;
2070 struct s390_address ad;
2071 if (!s390_decompose_address (addr, &ad))
2076 if (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
2078 if (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx))
2083 if (ad.base && !REG_OK_FOR_BASE_NONSTRICT_P (ad.base))
2085 if (ad.indx && !REG_OK_FOR_INDEX_NONSTRICT_P (ad.indx))
2092 /* Return 1 if OP is a valid operand for the LA instruction.
2093 In 31-bit, we need to prove that the result is used as an
2094 address, as LA performs only a 31-bit addition. */
2097 legitimate_la_operand_p (op)
2100 struct s390_address addr;
2101 if (!s390_decompose_address (op, &addr))
/* 64-bit LA is safe always; in 31-bit the address must be a known
   pointer so the truncated high bit cannot matter.  */
2104 if (TARGET_64BIT || addr.pointer)
2110 /* Return 1 if OP is a valid operand for the LA instruction,
2111 and we prefer to use LA over addition to compute it. */
2114 preferred_la_operand_p (op)
2117 struct s390_address addr;
2118 if (!s390_decompose_address (op, &addr))
2121 if (!TARGET_64BIT && !addr.pointer)
/* Prefer LA when either address part is a known pointer register.  */
2127 if ((addr.base && REG_P (addr.base) && REG_POINTER (addr.base))
2128 || (addr.indx && REG_P (addr.indx) && REG_POINTER (addr.indx)))
2134 /* Emit a forced load-address operation to load SRC into DST.
2135 This will use the LOAD ADDRESS instruction even in situations
2136 where legitimate_la_operand_p (SRC) returns false. */
2139 s390_load_address (dst, src)
/* NOTE(review): the condition selecting between the two emits is elided;
   presumably a plain move in one case and the forced 31-bit LA pattern
   in the other -- confirm against the full source.  */
2144 emit_move_insn (dst, src);
2146 emit_insn (gen_force_la_31 (dst, src));
2149 /* Return a legitimate reference for ORIG (an address) using the
2150 register REG. If REG is 0, a new pseudo is generated.
2152 There are two types of references that must be handled:
2154 1. Global data references must load the address from the GOT, via
2155 the PIC reg. An insn is emitted to do this load, and the reg is
2158 2. Static data references, constant pool addresses, and code labels
2159 compute the address as an offset from the GOT, whose base is in
2160 the PIC reg. Static data objects have SYMBOL_REF_FLAG set to
2161 differentiate them from global data objects. The returned
2162 address is the PIC reg + an unspec constant.
2164 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2165 reg also appears in the address. */
/* NOTE(review): elided listing -- braces, returns, and several guard
   conditions (e.g. the TARGET_64BIT tests) are missing; the embedded
   line-number gaps mark each elision.  */
2168 legitimize_pic_address (orig, reg)
2176 if (GET_CODE (addr) == LABEL_REF
2177 || (GET_CODE (addr) == SYMBOL_REF
2178 && (SYMBOL_REF_FLAG (addr)
2179 || CONSTANT_POOL_ADDRESS_P (addr))))
2181 /* This is a local symbol. */
2184 /* Access local symbols PC-relative via LARL.
2185 This is the same as in the non-PIC case, so it is
2186 handled automatically ... */
2190 /* Access local symbols relative to the literal pool. */
2192 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* UNSPEC 100 = literal-pool-relative address (see s390_decompose_address);
   force it into the pool and load it into TEMP.  */
2194 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 100);
2195 addr = gen_rtx_CONST (SImode, addr);
2196 addr = force_const_mem (SImode, addr);
2197 emit_move_insn (temp, addr);
/* UNSPEC 101 wraps the literal-pool base register.  */
2199 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2200 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2201 new = gen_rtx_PLUS (Pmode, base, temp);
2205 emit_move_insn (reg, new);
2210 else if (GET_CODE (addr) == SYMBOL_REF)
2213 reg = gen_reg_rtx (Pmode);
2217 /* Assume GOT offset < 4k. This is handled the same way
2218 in both 31- and 64-bit code (@GOT12). */
2220 if (reload_in_progress || reload_completed)
2221 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 110 = @GOT12 small GOT offset; load via PIC register.  */
2223 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 110);
2224 new = gen_rtx_CONST (Pmode, new);
2225 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2226 new = gen_rtx_MEM (Pmode, new);
2227 RTX_UNCHANGING_P (new) = 1;
2228 emit_move_insn (reg, new);
2231 else if (TARGET_64BIT)
2233 /* If the GOT offset might be >= 4k, we determine the position
2234 of the GOT entry via a PC-relative LARL (@GOTENT). */
2236 rtx temp = gen_reg_rtx (Pmode);
/* UNSPEC 111 = @GOTENT.  */
2238 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), 111);
2239 new = gen_rtx_CONST (Pmode, new);
2240 emit_move_insn (temp, new);
2242 new = gen_rtx_MEM (Pmode, temp);
2243 RTX_UNCHANGING_P (new) = 1;
2244 emit_move_insn (reg, new);
2249 /* If the GOT offset might be >= 4k, we have to load it
2250 from the literal pool (@GOT). */
2252 rtx temp = gen_reg_rtx (Pmode);
2254 if (reload_in_progress || reload_completed)
2255 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
/* UNSPEC 112 = @GOT.  */
2257 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 112);
2258 addr = gen_rtx_CONST (SImode, addr);
2259 addr = force_const_mem (SImode, addr);
2260 emit_move_insn (temp, addr);
2262 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2263 new = gen_rtx_MEM (Pmode, new);
2264 RTX_UNCHANGING_P (new) = 1;
2265 emit_move_insn (reg, new);
2271 if (GET_CODE (addr) == CONST)
2273 addr = XEXP (addr, 0);
2274 if (GET_CODE (addr) == UNSPEC)
2276 if (XVECLEN (addr, 0) != 1)
2278 switch (XINT (addr, 1))
2280 /* If someone moved an @GOT or lt-relative UNSPEC
2281 out of the literal pool, force them back in. */
2285 new = force_const_mem (SImode, orig);
2288 /* @GOTENT is OK as is. */
2292 /* @PLT is OK as is on 64-bit, must be converted to
2293 lt-relative PLT on 31-bit. */
2297 rtx temp = reg? reg : gen_reg_rtx (Pmode);
/* UNSPEC 114 = lt-relative @PLT.  */
2299 addr = XVECEXP (addr, 0, 0);
2300 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, addr), 114);
2301 addr = gen_rtx_CONST (SImode, addr);
2302 addr = force_const_mem (SImode, addr);
2303 emit_move_insn (temp, addr);
2305 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2306 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2307 new = gen_rtx_PLUS (Pmode, base, temp);
2311 emit_move_insn (reg, new);
2317 /* Everything else cannot happen. */
2322 else if (GET_CODE (addr) != PLUS)
2325 if (GET_CODE (addr) == PLUS)
2327 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1);
2328 /* Check first to see if this is a constant offset
2329 from a local symbol reference. */
2330 if ((GET_CODE (op0) == LABEL_REF
2331 || (GET_CODE (op0) == SYMBOL_REF
2332 && (SYMBOL_REF_FLAG (op0)
2333 || CONSTANT_POOL_ADDRESS_P (op0))))
2334 && GET_CODE (op1) == CONST_INT)
2338 if (INTVAL (op1) & 1)
2340 /* LARL can't handle odd offsets, so emit a
2341 pair of LARL and LA. */
2342 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2344 if (INTVAL (op1) < 0 || INTVAL (op1) >= 4096)
/* Offset too large for LA's 12-bit field: fold all but 1 into the
   LARL target so the residual LA offset is just 1.  */
2346 int even = INTVAL (op1) - 1;
2347 op0 = gen_rtx_PLUS (Pmode, op0, GEN_INT (even));
2348 op0 = gen_rtx_CONST (Pmode, op0);
2352 emit_move_insn (temp, op0);
2353 new = gen_rtx_PLUS (Pmode, temp, op1);
2357 emit_move_insn (reg, new);
2363 /* If the offset is even, we can just use LARL.
2364 This will happen automatically. */
2369 /* Access local symbols relative to the literal pool. */
2371 rtx temp = reg? reg : gen_reg_rtx (Pmode);
2373 addr = gen_rtx_UNSPEC (SImode, gen_rtvec (1, op0), 100);
2374 addr = gen_rtx_PLUS (SImode, addr, op1);
2375 addr = gen_rtx_CONST (SImode, addr);
2376 addr = force_const_mem (SImode, addr);
2377 emit_move_insn (temp, addr);
2379 base = gen_rtx_REG (Pmode, BASE_REGISTER);
2380 base = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, base), 101);
2381 new = gen_rtx_PLUS (Pmode, base, temp);
2385 emit_move_insn (reg, new);
2391 /* Now, check whether it is an LT-relative symbol plus offset
2392 that was pulled out of the literal pool. Force it back in. */
2394 else if (GET_CODE (op0) == UNSPEC
2395 && GET_CODE (op1) == CONST_INT)
2397 if (XVECLEN (op0, 0) != 1)
2399 if (XINT (op0, 1) != 100)
2402 new = force_const_mem (SImode, orig);
2405 /* Otherwise, compute the sum. */
2408 base = legitimize_pic_address (XEXP (addr, 0), reg);
2409 new = legitimize_pic_address (XEXP (addr, 1),
2410 base == reg ? NULL_RTX : reg);
2411 if (GET_CODE (new) == CONST_INT)
2412 new = plus_constant (base, INTVAL (new));
/* Reassociate so the constant part ends up outermost.  */
2415 if (GET_CODE (new) == PLUS && CONSTANT_P (XEXP (new, 1)))
2417 base = gen_rtx_PLUS (Pmode, base, XEXP (new, 0));
2418 new = XEXP (new, 1);
2420 new = gen_rtx_PLUS (Pmode, base, new);
2423 if (GET_CODE (new) == CONST)
2424 new = XEXP (new, 0);
2425 new = force_operand (new, 0);
2432 /* Load the thread pointer into a register.  Returns the register
2433 (elided), marked as a pointer for alias analysis.  */
2435 get_thread_pointer ()
2439 tp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TP);
2440 tp = force_reg (Pmode, tp);
2441 mark_reg_pointer (tp, BITS_PER_WORD);
2446 /* Construct the SYMBOL_REF for the tls_get_offset function. */
2448 static GTY(()) rtx s390_tls_symbol;
2450 s390_tls_get_offset ()
/* Lazily create and cache the SYMBOL_REF; GTY keeps it across GC.  */
2452 if (!s390_tls_symbol)
2453 s390_tls_symbol = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_offset");
2455 return s390_tls_symbol;
2458 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2459 this (thread-local) address. REG may be used as temporary. */
/* NOTE(review): elided listing -- braces, returns, and several guards
   (e.g. the flag_pic tests selecting among the INITIAL_EXEC variants)
   are missing; line-number gaps mark each elision.  */
2462 legitimize_tls_address (addr, reg)
2466 rtx new, tls_call, temp, base, r2, insn;
2468 if (GET_CODE (addr) == SYMBOL_REF)
2469 switch (tls_symbolic_operand (addr))
2471 case TLS_MODEL_GLOBAL_DYNAMIC:
/* GD: call __tls_get_offset with the @TLSGD constant in r2, then add
   the thread pointer to the returned offset.  */
2473 r2 = gen_rtx_REG (Pmode, 2);
2474 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_TLSGD);
2475 new = gen_rtx_CONST (Pmode, tls_call);
2476 new = force_const_mem (Pmode, new);
2477 emit_move_insn (r2, new);
2478 emit_call_insn (gen_call_value_tls (r2, tls_call));
2479 insn = get_insns ();
2482 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2483 temp = gen_reg_rtx (Pmode);
2484 emit_libcall_block (insn, temp, r2, new);
2486 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2489 s390_load_address (reg, new);
2494 case TLS_MODEL_LOCAL_DYNAMIC:
/* LD: one __tls_get_offset call gets the module base; each symbol
   then adds its @DTPOFF offset to that shared base.  */
2496 r2 = gen_rtx_REG (Pmode, 2);
2497 tls_call = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM);
2498 new = gen_rtx_CONST (Pmode, tls_call);
2499 new = force_const_mem (Pmode, new);
2500 emit_move_insn (r2, new);
2501 emit_call_insn (gen_call_value_tls (r2, tls_call));
2502 insn = get_insns ();
2505 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), UNSPEC_TLSLDM_NTPOFF);
2506 temp = gen_reg_rtx (Pmode);
2507 emit_libcall_block (insn, temp, r2, new);
2509 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2510 base = gen_reg_rtx (Pmode);
2511 s390_load_address (base, new);
2513 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_DTPOFF);
2514 new = gen_rtx_CONST (Pmode, new);
2515 new = force_const_mem (Pmode, new);
2516 temp = gen_reg_rtx (Pmode);
2517 emit_move_insn (temp, new);
2519 new = gen_rtx_PLUS (Pmode, base, temp);
2522 s390_load_address (reg, new);
2527 case TLS_MODEL_INITIAL_EXEC:
2530 /* Assume GOT offset < 4k. This is handled the same way
2531 in both 31- and 64-bit code. */
2533 if (reload_in_progress || reload_completed)
2534 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2536 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2537 new = gen_rtx_CONST (Pmode, new);
2538 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, new);
2539 new = gen_rtx_MEM (Pmode, new);
2540 RTX_UNCHANGING_P (new) = 1;
2541 temp = gen_reg_rtx (Pmode);
2542 emit_move_insn (temp, new);
2544 else if (TARGET_64BIT)
2546 /* If the GOT offset might be >= 4k, we determine the position
2547 of the GOT entry via a PC-relative LARL. */
2549 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2550 new = gen_rtx_CONST (Pmode, new);
2551 temp = gen_reg_rtx (Pmode);
2552 emit_move_insn (temp, new);
2554 new = gen_rtx_MEM (Pmode, temp);
2555 RTX_UNCHANGING_P (new) = 1;
2556 temp = gen_reg_rtx (Pmode);
2557 emit_move_insn (temp, new);
2561 /* If the GOT offset might be >= 4k, we have to load it
2562 from the literal pool. */
2564 if (reload_in_progress || reload_completed)
2565 regs_ever_live[PIC_OFFSET_TABLE_REGNUM] = 1;
2567 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_GOTNTPOFF);
2568 new = gen_rtx_CONST (Pmode, new);
2569 new = force_const_mem (Pmode, new);
2570 temp = gen_reg_rtx (Pmode);
2571 emit_move_insn (temp, new);
2573 new = gen_rtx_PLUS (Pmode, pic_offset_table_rtx, temp);
2574 new = gen_rtx_MEM (Pmode, new);
2575 RTX_UNCHANGING_P (new) = 1;
2577 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2578 temp = gen_reg_rtx (Pmode);
2579 emit_insn (gen_rtx_SET (Pmode, temp, new));
2583 /* In position-dependent code, load the absolute address of
2584 the GOT entry from the literal pool. */
2586 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_INDNTPOFF);
2587 new = gen_rtx_CONST (Pmode, new);
2588 new = force_const_mem (Pmode, new);
2589 temp = gen_reg_rtx (Pmode);
2590 emit_move_insn (temp, new);
2593 new = gen_rtx_MEM (Pmode, new);
2594 RTX_UNCHANGING_P (new) = 1;
2596 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (2, new, addr), UNSPEC_TLS_LOAD);
2597 temp = gen_reg_rtx (Pmode);
2598 emit_insn (gen_rtx_SET (Pmode, temp, new));
2601 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2604 s390_load_address (reg, new);
2609 case TLS_MODEL_LOCAL_EXEC:
/* LE: the @NTPOFF offset is a link-time constant; just add it to the
   thread pointer.  */
2610 new = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), UNSPEC_NTPOFF);
2611 new = gen_rtx_CONST (Pmode, new);
2612 new = force_const_mem (Pmode, new);
2613 temp = gen_reg_rtx (Pmode);
2614 emit_move_insn (temp, new);
2616 new = gen_rtx_PLUS (Pmode, get_thread_pointer (), temp);
2619 s390_load_address (reg, new);
2628 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == UNSPEC)
2630 switch (XINT (XEXP (addr, 0), 1))
2632 case UNSPEC_INDNTPOFF:
2645 abort (); /* for now ... */
2650 /* Emit insns to move operands[1] into operands[0].  Dispatches a
2651 symbolic source to the TLS or PIC legitimizer as appropriate.  */
2653 emit_symbolic_move (operands)
/* During/after reload no new pseudos may be created, so reuse the
   destination itself as the temporary.  */
2656 rtx temp = no_new_pseudos ? operands[0] : gen_reg_rtx (Pmode);
2658 if (GET_CODE (operands[0]) == MEM)
2659 operands[1] = force_reg (Pmode, operands[1]);
2660 else if (TLS_SYMBOLIC_CONST (operands[1]))
2661 operands[1] = legitimize_tls_address (operands[1], temp);
2663 operands[1] = legitimize_pic_address (operands[1], temp);
2666 /* Try machine-dependent ways of modifying an illegitimate address X
2667 to be legitimate. If we find one, return the new, valid address.
2669 OLDX is the address as it was before break_out_memory_refs was called.
2670 In some cases it is useful to look at this to decide what needs to be done.
2672 MODE is the mode of the operand pointed to by X.
2674 When -fpic is used, special handling is needed for symbolic references.
2675 See comments by legitimize_pic_address for details. */
/* NOTE(review): elided listing -- early returns and the flag_pic guard
   before the PIC branch are missing from this excerpt.  */
2678 legitimize_address (x, oldx, mode)
2680 register rtx oldx ATTRIBUTE_UNUSED;
2681 enum machine_mode mode ATTRIBUTE_UNUSED;
2683 rtx constant_term = const0_rtx;
2685 if (TLS_SYMBOLIC_CONST (x))
2687 x = legitimize_tls_address (x, 0);
2689 if (legitimate_address_p (mode, x, FALSE))
2694 if (SYMBOLIC_CONST (x)
2695 || (GET_CODE (x) == PLUS
2696 && (SYMBOLIC_CONST (XEXP (x, 0))
2697 || SYMBOLIC_CONST (XEXP (x, 1)))))
2698 x = legitimize_pic_address (x, 0);
2700 if (legitimate_address_p (mode, x, FALSE))
2704 x = eliminate_constant_term (x, &constant_term);
2706 /* Optimize loading of large displacements by splitting them
2707 into the multiple of 4K and the rest; this allows the
2708 former to be CSE'd if possible.
2710 Don't do this if the displacement is added to a register
2711 pointing into the stack frame, as the offsets will
2712 change later anyway. */
2714 if (GET_CODE (constant_term) == CONST_INT
2715 && (INTVAL (constant_term) < 0
2716 || INTVAL (constant_term) >= 4096)
2717 && !(REG_P (x) && REGNO_PTR_FRAME_P (REGNO (x))))
/* XOR with the low 12 bits leaves the 4K-aligned upper part.  */
2719 HOST_WIDE_INT lower = INTVAL (constant_term) & 0xfff;
2720 HOST_WIDE_INT upper = INTVAL (constant_term) ^ lower;
2722 rtx temp = gen_reg_rtx (Pmode);
2723 rtx val = force_operand (GEN_INT (upper), temp);
2725 emit_move_insn (temp, val);
2727 x = gen_rtx_PLUS (Pmode, x, temp);
2728 constant_term = GEN_INT (lower);
/* Force the non-register half of a PLUS into a register.  */
2731 if (GET_CODE (x) == PLUS)
2733 if (GET_CODE (XEXP (x, 0)) == REG)
2735 register rtx temp = gen_reg_rtx (Pmode);
2736 register rtx val = force_operand (XEXP (x, 1), temp);
2738 emit_move_insn (temp, val);
2740 x = gen_rtx_PLUS (Pmode, XEXP (x, 0), temp);
2743 else if (GET_CODE (XEXP (x, 1)) == REG)
2745 register rtx temp = gen_reg_rtx (Pmode);
2746 register rtx val = force_operand (XEXP (x, 0), temp);
2748 emit_move_insn (temp, val);
2750 x = gen_rtx_PLUS (Pmode, temp, XEXP (x, 1));
2754 if (constant_term != const0_rtx)
2755 x = gen_rtx_PLUS (Pmode, x, constant_term);
2760 /* Emit code to move LEN bytes from DST to SRC.  */
/* NOTE(review): elided listing -- parameter declarations, braces and
   some statements are missing; gaps in the numbering mark elisions.
   Three strategies: MVC for constant LEN <= 256, MVCLE when enabled,
   otherwise a runtime loop of 256-byte MVC blocks.  */
2763 s390_expand_movstr (dst, src, len)
2768 rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2769 TARGET_64BIT ? gen_movstr_short_64 : gen_movstr_short_31;
2770 rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2771 TARGET_64BIT ? gen_movstr_long_64 : gen_movstr_long_31;
/* Case 1: small constant length -- a single MVC (length encoded as
   LEN - 1 in the instruction).  */
2774 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2776 if (INTVAL (len) > 0)
2777 emit_insn ((*gen_short) (dst, src, GEN_INT (INTVAL (len) - 1)));
/* Case 2: MVCLE -- address/length register pairs.  */
2780 else if (TARGET_MVCLE)
2782 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2783 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2784 rtx reg0 = gen_reg_rtx (double_mode);
2785 rtx reg1 = gen_reg_rtx (double_mode);
2787 emit_move_insn (gen_highpart (single_mode, reg0),
2788 force_operand (XEXP (dst, 0), NULL_RTX));
2789 emit_move_insn (gen_highpart (single_mode, reg1),
2790 force_operand (XEXP (src, 0), NULL_RTX));
2792 convert_move (gen_lowpart (single_mode, reg0), len, 1);
2793 convert_move (gen_lowpart (single_mode, reg1), len, 1);
2795 emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
/* Case 3: runtime length -- loop moving 256 bytes per iteration,
   then one final MVC for the remainder.  */
2800 rtx dst_addr, src_addr, count, blocks, temp;
2801 rtx end_label = gen_label_rtx ();
2802 enum machine_mode mode;
2805 mode = GET_MODE (len);
2806 if (mode == VOIDmode)
2809 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2813 dst_addr = gen_reg_rtx (Pmode);
2814 src_addr = gen_reg_rtx (Pmode);
2815 count = gen_reg_rtx (mode);
2816 blocks = gen_reg_rtx (mode);
2818 convert_move (count, len, 1);
2819 emit_cmp_and_jump_insns (count, const0_rtx,
2820 EQ, NULL_RTX, mode, 1, end_label);
2822 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2823 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX));
2824 dst = change_address (dst, VOIDmode, dst_addr);
2825 src = change_address (src, VOIDmode, src_addr);
/* count = LEN - 1; blocks = count >> 8 (number of full 256-byte blocks).  */
2827 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2829 emit_move_insn (count, temp);
2831 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2833 emit_move_insn (blocks, temp);
2835 expand_start_loop (1);
2836 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2837 make_tree (type, blocks),
2838 make_tree (type, const0_rtx)));
2840 emit_insn ((*gen_short) (dst, src, GEN_INT (255)));
2841 s390_load_address (dst_addr,
2842 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2843 s390_load_address (src_addr,
2844 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256)));
2846 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2848 emit_move_insn (blocks, temp);
/* Final partial block: low byte of COUNT holds the remainder.  */
2852 emit_insn ((*gen_short) (dst, src, convert_to_mode (word_mode, count, 1)));
2853 emit_label (end_label);
2857 /* Emit code to clear LEN bytes at DST.  */
/* NOTE(review): elided listing, same structure as s390_expand_movstr:
   XC for constant LEN <= 256, MVCLE with a zero-length source pair,
   otherwise a runtime loop of 256-byte clears.  */
2860 s390_expand_clrstr (dst, len)
2864 rtx (*gen_short) PARAMS ((rtx, rtx)) =
2865 TARGET_64BIT ? gen_clrstr_short_64 : gen_clrstr_short_31;
2866 rtx (*gen_long) PARAMS ((rtx, rtx, rtx)) =
2867 TARGET_64BIT ? gen_clrstr_long_64 : gen_clrstr_long_31;
2870 if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2872 if (INTVAL (len) > 0)
2873 emit_insn ((*gen_short) (dst, GEN_INT (INTVAL (len) - 1)));
2876 else if (TARGET_MVCLE)
2878 enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2879 enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2880 rtx reg0 = gen_reg_rtx (double_mode);
2881 rtx reg1 = gen_reg_rtx (double_mode);
2883 emit_move_insn (gen_highpart (single_mode, reg0),
2884 force_operand (XEXP (dst, 0), NULL_RTX));
2885 convert_move (gen_lowpart (single_mode, reg0), len, 1);
/* Source operand pair is address 0 / length 0: MVCLE pads with zeros.  */
2887 emit_move_insn (gen_highpart (single_mode, reg1), const0_rtx);
2888 emit_move_insn (gen_lowpart (single_mode, reg1), const0_rtx);
2890 emit_insn ((*gen_long) (reg0, reg1, reg0));
2895 rtx dst_addr, src_addr, count, blocks, temp;
2896 rtx end_label = gen_label_rtx ();
2897 enum machine_mode mode;
2900 mode = GET_MODE (len);
2901 if (mode == VOIDmode)
2904 type = (*lang_hooks.types.type_for_mode) (mode, 1);
2908 dst_addr = gen_reg_rtx (Pmode);
2909 src_addr = gen_reg_rtx (Pmode);
2910 count = gen_reg_rtx (mode);
2911 blocks = gen_reg_rtx (mode);
2913 convert_move (count, len, 1);
2914 emit_cmp_and_jump_insns (count, const0_rtx,
2915 EQ, NULL_RTX, mode, 1, end_label);
2917 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX));
2918 dst = change_address (dst, VOIDmode, dst_addr);
/* count = LEN - 1; blocks = count >> 8, as in s390_expand_movstr.  */
2920 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
2922 emit_move_insn (count, temp);
2924 temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
2926 emit_move_insn (blocks, temp);
2928 expand_start_loop (1);
2929 expand_exit_loop_top_cond (0, build (NE_EXPR, type,
2930 make_tree (type, blocks),
2931 make_tree (type, const0_rtx)));
2933 emit_insn ((*gen_short) (dst, GEN_INT (255)));
2934 s390_load_address (dst_addr,
2935 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256)));
2937 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
2939 emit_move_insn (blocks, temp);
2943 emit_insn ((*gen_short) (dst, convert_to_mode (word_mode, count, 1)));
2944 emit_label (end_label);
/* NOTE(review): lines are elided in this extract (embedded numbering is
   non-contiguous); only the visible statements are documented.
   Mirrors s390_expand_clrstr's structure: short-insn form for small
   constant lengths, MVCLE-style long form, and a 256-byte-block loop
   with early exit once a difference is found.  */
2948 /* Emit code to compare LEN bytes at OP0 with those at OP1,
2949    and return the result in TARGET.  */
2952 s390_expand_cmpstr (target, op0, op1, len)
2958   rtx (*gen_short) PARAMS ((rtx, rtx, rtx)) =
2959     TARGET_64BIT ? gen_cmpstr_short_64 : gen_cmpstr_short_31;
2960   rtx (*gen_long) PARAMS ((rtx, rtx, rtx, rtx)) =
2961     TARGET_64BIT ? gen_cmpstr_long_64 : gen_cmpstr_long_31;
/* gen_result converts the condition code into an integer in TARGET;
   variant chosen by TARGET's mode.  */
2962   rtx (*gen_result) PARAMS ((rtx)) =
2963     GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
2965   op0 = protect_from_queue (op0, 0);
2966   op1 = protect_from_queue (op1, 0);
2967   len = protect_from_queue (len, 0);
/* Constant length 0..256: one short compare; length 0 compares equal,
   so TARGET is simply set to zero.  */
2969   if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
2971       if (INTVAL (len) > 0)
2973           emit_insn ((*gen_short) (op0, op1, GEN_INT (INTVAL (len) - 1)));
2974           emit_insn ((*gen_result) (target));
2977         emit_move_insn (target, const0_rtx);
/* MVCLE-style long compare: each register pair holds address (high)
   and length (low) of one operand.  */
2980   else if (TARGET_MVCLE)
2982       enum machine_mode double_mode = TARGET_64BIT ? TImode : DImode;
2983       enum machine_mode single_mode = TARGET_64BIT ? DImode : SImode;
2984       rtx reg0 = gen_reg_rtx (double_mode);
2985       rtx reg1 = gen_reg_rtx (double_mode);
2987       emit_move_insn (gen_highpart (single_mode, reg0),
2988                       force_operand (XEXP (op0, 0), NULL_RTX));
2989       emit_move_insn (gen_highpart (single_mode, reg1),
2990                       force_operand (XEXP (op1, 0), NULL_RTX));
2992       convert_move (gen_lowpart (single_mode, reg0), len, 1);
2993       convert_move (gen_lowpart (single_mode, reg1), len, 1);
2995       emit_insn ((*gen_long) (reg0, reg1, reg0, reg1));
2996       emit_insn ((*gen_result) (target));
/* General case: compare 256 bytes per loop iteration, bail out to
   end_label as soon as the condition code says "not equal".  */
3001       rtx addr0, addr1, count, blocks, temp;
3002       rtx end_label = gen_label_rtx ();
3003       enum machine_mode mode;
3006       mode = GET_MODE (len);
3007       if (mode == VOIDmode)
3010       type = (*lang_hooks.types.type_for_mode) (mode, 1);
3014       addr0 = gen_reg_rtx (Pmode);
3015       addr1 = gen_reg_rtx (Pmode);
3016       count = gen_reg_rtx (mode);
3017       blocks = gen_reg_rtx (mode);
3019       convert_move (count, len, 1);
3020       emit_cmp_and_jump_insns (count, const0_rtx,
3021                                EQ, NULL_RTX, mode, 1, end_label);
3023       emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX));
3024       emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX));
3025       op0 = change_address (op0, VOIDmode, addr0);
3026       op1 = change_address (op1, VOIDmode, addr1);
/* count = len - 1; blocks = count >> 8 (full 256-byte blocks).  */
3028       temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0);
3030       emit_move_insn (count, temp);
3032       temp = expand_binop (mode, ashr_optab, count, GEN_INT (8), blocks, 1, 0);
3034       emit_move_insn (blocks, temp);
3036       expand_start_loop (1);
3037       expand_exit_loop_top_cond (0, build (NE_EXPR, type,
3038                                            make_tree (type, blocks),
3039                                            make_tree (type, const0_rtx)));
3041       emit_insn ((*gen_short) (op0, op1, GEN_INT (255)));
/* Hand-built conditional jump on CC register 33: if the block compare
   found a difference, exit the loop immediately.  */
3042       temp = gen_rtx_NE (VOIDmode, gen_rtx_REG (CCSmode, 33), const0_rtx);
3043       temp = gen_rtx_IF_THEN_ELSE (VOIDmode, temp,
3044                    gen_rtx_LABEL_REF (VOIDmode, end_label), pc_rtx);
3045       temp = gen_rtx_SET (VOIDmode, pc_rtx, temp);
3046       emit_jump_insn (temp);
3048       s390_load_address (addr0,
3049                          gen_rtx_PLUS (Pmode, addr0, GEN_INT (256)));
3050       s390_load_address (addr1,
3051                          gen_rtx_PLUS (Pmode, addr1, GEN_INT (256)));
3053       temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0);
3055       emit_move_insn (blocks, temp);
/* Tail compare of the remaining 0..255 bytes, then materialize the
   condition code into TARGET.  */
3059       emit_insn ((*gen_short) (op0, op1, convert_to_mode (word_mode, count, 1)));
3060       emit_label (end_label);
3062       emit_insn ((*gen_result) (target));
3066 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
3067    We need to emit DTP-relative relocations.  */
3070 s390_output_dwarf_dtprel (file, size, x)
/* NOTE(review): the size-dispatch control flow is elided in this
   extract; visibly, 4-byte entries use ".long" and 8-byte ".quad".  */
3078       fputs ("\t.long\t", file);
3081       fputs ("\t.quad\t", file);
3086   output_addr_const (file, x);
/* @DTPOFF asks the assembler for a DTP-relative (TLS) relocation.  */
3087   fputs ("@DTPOFF", file);
3090 /* In the name of slightly smaller debug output, and to cater to
3091    general assembler losage, recognize various UNSPEC sequences
3092    and turn them back into a direct symbol reference.  */
3095 s390_delegitimize_address (orig_x)
/* Only MEMs can contain the patterns we undo.  */
3100   if (GET_CODE (x) != MEM)
/* Pattern 1: (PLUS pic_reg (CONST (UNSPEC 110 [sym]))) — a
   GOT-register-relative reference; return the bare symbol.
   NOTE(review): 110/111 are magic unspec numbers used elsewhere in
   this file; their exact meaning isn't visible in this extract.  */
3104   if (GET_CODE (x) == PLUS
3105       && GET_CODE (XEXP (x, 1)) == CONST
3106       && GET_CODE (XEXP (x, 0)) == REG
3107       && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
3109       y = XEXP (XEXP (x, 1), 0);
3110       if (GET_CODE (y) == UNSPEC
3111           && XINT (y, 1) == 110)
3112         return XVECEXP (y, 0, 0);
/* Pattern 2: (CONST (UNSPEC 111 [sym])) — likewise unwrap.  */
3116   if (GET_CODE (x) == CONST)
3119       if (GET_CODE (y) == UNSPEC
3120           && XINT (y, 1) == 111)
3121         return XVECEXP (y, 0, 0);
3128 /* Locate some local-dynamic symbol still in use by this function
3129    so that we can print its name in local-dynamic base patterns.  */
3132 get_some_local_dynamic_name ()
/* Cached from a previous call for this function, if any.  */
3136   if (cfun->machine->some_ld_name)
3137     return cfun->machine->some_ld_name;
/* Walk every insn; the for_each_rtx callback stores the first
   local-dynamic TLS symbol it finds into cfun->machine.  */
3139   for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
3141         && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
3142       return cfun->machine->some_ld_name;
/* for_each_rtx callback for get_some_local_dynamic_name: look through
   constant-pool references, and record the first SYMBOL_REF with
   local-dynamic TLS model.  */
3148 get_some_local_dynamic_name_1 (px, data)
3150      void *data ATTRIBUTE_UNUSED;
/* Recurse into the pool constant behind a pool-address symbol.  */
3154   if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
3156       x = get_pool_constant (x);
3157       return for_each_rtx (&x, get_some_local_dynamic_name_1, 0);
3160   if (GET_CODE (x) == SYMBOL_REF
3161       && tls_symbolic_operand (x) == TLS_MODEL_LOCAL_DYNAMIC)
3163       cfun->machine->some_ld_name = XSTR (x, 0);
3170 /* Output symbolic constant X in assembler syntax to
3171    stdio stream FILE.  */
3174 s390_output_symbolic_const (file, x)
/* NOTE(review): several case labels and breaks are elided in this
   extract; the visible bodies show one emission sequence per rtx
   code / unspec number.  */
3178   switch (GET_CODE (x))
3183       s390_output_symbolic_const (file, XEXP (x, 0));
/* PLUS / MINUS: recurse on both operands around the operator.  */
3187       s390_output_symbolic_const (file, XEXP (x, 0));
3188       fprintf (file, "+");
3189       s390_output_symbolic_const (file, XEXP (x, 1));
3193       s390_output_symbolic_const (file, XEXP (x, 0));
3194       fprintf (file, "-");
3195       s390_output_symbolic_const (file, XEXP (x, 1));
3202       output_addr_const (file, x);
/* UNSPEC: all handled forms carry exactly one element.  */
3206       if (XVECLEN (x, 0) != 1)
3207         output_operand_lossage ("invalid UNSPEC as operand (1)");
3208       switch (XINT (x, 1))
/* Literal-pool-relative forms: symbol minus pool label, and the
   reverse.  */
3212           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3213           fprintf (file, "-");
3214           s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3217           s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
3218           fprintf (file, "-");
3219           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
/* GOT/PLT relocation modifiers.  */
3222           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3223           fprintf (file, "@GOT12");
3226           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3227           fprintf (file, "@GOTENT");
3230           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3231           fprintf (file, "@GOT");
3234           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3235           fprintf (file, "@PLT");
3238           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3239           fprintf (file, "@PLT-");
3240           s390_output_symbolic_const (file, cfun->machine->literal_pool_label);
/* TLS relocation modifiers.  */
3243           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3244           fprintf (file, "@TLSGD");
3247           assemble_name (file, get_some_local_dynamic_name ());
3248           fprintf (file, "@TLSLDM");
3251           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3252           fprintf (file, "@DTPOFF");
3255           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3256           fprintf (file, "@NTPOFF");
3258         case UNSPEC_GOTNTPOFF:
3259           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3260           fprintf (file, "@GOTNTPOFF");
3262         case UNSPEC_INDNTPOFF:
3263           s390_output_symbolic_const (file, XVECEXP (x, 0, 0));
3264           fprintf (file, "@INDNTPOFF");
3267           output_operand_lossage ("invalid UNSPEC as operand (2)");
3273       fatal_insn ("UNKNOWN in s390_output_symbolic_const !?", x);
3278 /* Output address operand ADDR in assembler syntax to
3279    stdio stream FILE.  */
3282 print_operand_address (file, addr)
3286   struct s390_address ad;
/* Reject addresses we can't decompose, or whose base/index are not
   strictly valid address registers.  */
3288   if (!s390_decompose_address (addr, &ad)
3289       || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3290       || (ad.indx && !REG_OK_FOR_INDEX_STRICT_P (ad.indx)))
3291     output_operand_lossage ("Cannot decompose address.");
/* Displacement first ("0" when absent), then "(index,base)" or
   "(base)" in S/390 assembler syntax.  */
3294     s390_output_symbolic_const (file, ad.disp);
3296     fprintf (file, "0");
3298   if (ad.base && ad.indx)
3299     fprintf (file, "(%s,%s)", reg_names[REGNO (ad.indx)],
3300                               reg_names[REGNO (ad.base)]);
3302     fprintf (file, "(%s)", reg_names[REGNO (ad.base)]);
3305 /* Output operand X in assembler syntax to stdio stream FILE.
3306    CODE specified the format flag.  The following format flags
3309     'C': print opcode suffix for branch condition.
3310     'D': print opcode suffix for inverse branch condition.
3311     'J': print tls_load/tls_gdcall/tls_ldcall suffix
3312     'O': print only the displacement of a memory reference.
3313     'R': print only the base register of a memory reference.
3314     'N': print the second word of a DImode operand.
3315     'M': print the second word of a TImode operand.
3317     'b': print integer X as if it's an unsigned byte.
3318     'x': print integer X as if it's an unsigned word.
3319     'h': print integer X as if it's a signed word.  */
3322 print_operand (file, x, code)
/* NOTE(review): switch labels and breaks are elided in this extract;
   each visible group below corresponds to one format flag.  */
/* 'C' / 'D': branch-condition mnemonic, normal or inverted.  */
3330       fprintf (file, s390_branch_condition_mnemonic (x, FALSE));
3334       fprintf (file, s390_branch_condition_mnemonic (x, TRUE));
/* 'J': TLS call/load annotations for the assembler.  */
3338       if (GET_CODE (x) == SYMBOL_REF)
3340           fprintf (file, "%s", ":tls_load:");
3341           output_addr_const (file, x);
3343       else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSGD)
3345           fprintf (file, "%s", ":tls_gdcall:");
3346           output_addr_const (file, XVECEXP (x, 0, 0));
3348       else if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_TLSLDM)
3350           fprintf (file, "%s", ":tls_ldcall:");
3351           assemble_name (file, get_some_local_dynamic_name ());
/* 'O': displacement only; "0" when there is none.  */
3359         struct s390_address ad;
3361         if (GET_CODE (x) != MEM
3362             || !s390_decompose_address (XEXP (x, 0), &ad)
3363             || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3368           s390_output_symbolic_const (file, ad.disp);
3370           fprintf (file, "0");
/* 'R': base register only; "0" when there is none.  */
3376         struct s390_address ad;
3378         if (GET_CODE (x) != MEM
3379             || !s390_decompose_address (XEXP (x, 0), &ad)
3380             || (ad.base && !REG_OK_FOR_BASE_STRICT_P (ad.base))
3385           fprintf (file, "%s", reg_names[REGNO (ad.base)]);
3387           fprintf (file, "0");
/* 'N': second word of a DImode operand — next register, or the
   memory word 4 bytes further on.  */
3392       if (GET_CODE (x) == REG)
3393         x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3394       else if (GET_CODE (x) == MEM)
3395         x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 4));
/* 'M': second word of a TImode operand — 8 bytes further on.  */
3401       if (GET_CODE (x) == REG)
3402         x = gen_rtx_REG (GET_MODE (x), REGNO (x) + 1);
3403       else if (GET_CODE (x) == MEM)
3404         x = change_address (x, VOIDmode, plus_constant (XEXP (x, 0), 8));
/* Default operand printing by rtx code.  */
3410   switch (GET_CODE (x))
3413       fprintf (file, "%s", reg_names[REGNO (x)]);
3417       output_address (XEXP (x, 0));
3424       s390_output_symbolic_const (file, x);
/* CONST_INT under 'b'/'x'/'h': masked, and for 'h' sign-extended
   from 16 bits.  */
3429         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xff);
3430       else if (code == 'x')
3431         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0xffff);
3432       else if (code == 'h')
3433         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((INTVAL (x) & 0xffff) ^ 0x8000) - 0x8000);
3435         fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
/* CONST_DOUBLE: same masking applied to the low word.  */
3439       if (GET_MODE (x) != VOIDmode)
3442         fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xff);
3443       else if (code == 'x')
3444         fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x) & 0xffff);
3445       else if (code == 'h')
3446         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ((CONST_DOUBLE_LOW (x) & 0xffff) ^ 0x8000) - 0x8000);
3452       fatal_insn ("UNKNOWN in print_operand !?", x);
3457 /* Target hook for assembling integer objects.  We need to define it
3458    here to work a round a bug in some versions of GAS, which couldn't
3459    handle values smaller than INT_MIN when printed in decimal.  */
3462 s390_assemble_integer (x, size, aligned_p)
/* For aligned 8-byte constants below INT_MIN, emit the value in hex
   ourselves to sidestep the GAS decimal bug; everything else goes
   through the default hook.  */
3467   if (size == 8 && aligned_p
3468       && GET_CODE (x) == CONST_INT && INTVAL (x) < INT_MIN)
3470       fputs ("\t.quad\t", asm_out_file);
3471       fprintf (asm_out_file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
3472       putc ('\n', asm_out_file);
3475   return default_assemble_integer (x, size, aligned_p);
3478 /* Returns true if register REGNO is used for forming
3479    a memory address in expression X.  */
3482 reg_used_in_mem_p (regno, x)
3486   enum rtx_code code = GET_CODE (x);
/* NOTE(review): the MEM-handling lines are partially elided in this
   extract; visibly, direct address references and jump targets are
   checked with refers_to_regno_p.  */
3492       if (refers_to_regno_p (regno, regno+1,
/* A SET of the PC (a jump) also counts as address use.  */
3496   else if (code == SET
3497            && GET_CODE (SET_DEST (x)) == PC)
3499       if (refers_to_regno_p (regno, regno+1,
/* Recurse over all sub-rtxes.  */
3504   fmt = GET_RTX_FORMAT (code);
3505   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3508         && reg_used_in_mem_p (regno, XEXP (x, i)))
3511     else if (fmt[i] == 'E')
3512       for (j = 0; j < XVECLEN (x, i); j++)
3513         if (reg_used_in_mem_p (regno, XVECEXP (x, i, j)))
3519 /* Returns true if expression DEP_RTX sets an address register
3520    used by instruction INSN to address memory.  */
3523 addr_generation_dependency_p (dep_rtx, insn)
3529   if (GET_CODE (dep_rtx) == INSN)
3530     dep_rtx = PATTERN (dep_rtx);
3532   if (GET_CODE (dep_rtx) == SET)
/* Strip STRICT_LOW_PART / SUBREG wrappers to find the real target
   register of the SET.  */
3534       target = SET_DEST (dep_rtx);
3535       if (GET_CODE (target) == STRICT_LOW_PART)
3536         target = XEXP (target, 0);
3537       while (GET_CODE (target) == SUBREG)
3538         target = SUBREG_REG (target);
3540       if (GET_CODE (target) == REG)
3542           int regno = REGNO (target);
/* LA-type insns: the register counts only if it feeds the address
   computation (the SET_SRC) of the LA pattern.  */
3544           if (s390_safe_attr_type (insn) == TYPE_LA)
3546               pat = PATTERN (insn);
3547               if (GET_CODE (pat) == PARALLEL)
3549                   if (XVECLEN (pat, 0) != 2)
3551                   pat = XVECEXP (pat, 0, 0);
3553               if (GET_CODE (pat) == SET)
3554                 return refers_to_regno_p (regno, regno+1, SET_SRC (pat), 0);
/* Otherwise: any agen-unit insn that uses the register in a memory
   address has the dependency.  */
3558           else if (get_attr_atype (insn) == ATYPE_AGEN)
3559             return reg_used_in_mem_p (regno, PATTERN (insn));
3565 /* Return 1, if dep_insn sets register used in insn in the agen unit.  */
3569 s390_agen_dep_p(dep_insn, insn)
3573   rtx dep_rtx = PATTERN (dep_insn);
/* A single SET, or any SET inside a PARALLEL, may create the
   address-generation dependency.  */
3576   if (GET_CODE (dep_rtx) == SET
3577       && addr_generation_dependency_p (dep_rtx, insn))
3579   else if (GET_CODE (dep_rtx) == PARALLEL)
3581       for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3583           if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3591 /* Return the modified cost of the dependency of instruction INSN
3592    on instruction DEP_INSN through the link LINK.  COST is the
3593    default cost of that dependency.
3595    Data dependencies are all handled without delay.  However, if a
3596    register is modified and subsequently used as base or index
3597    register of a memory reference, at least 4 cycles need to pass
3598    between setting and using the register to avoid pipeline stalls.
3599    An exception is the LA instruction.  An address generated by LA can
3600    be used by introducing only a one cycle stall on the pipeline.  */
3603 s390_adjust_cost (insn, link, dep_insn, cost)
3612   /* If the dependence is an anti-dependence, there is no cost.  For an
3613      output dependence, there is sometimes a cost, but it doesn't seem
3614      worth handling those few cases.  */
3616   if (REG_NOTE_KIND (link) != 0)
3619   /* If we can't recognize the insns, we can't really do anything.  */
3620   if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
3623   /* DFA based scheduling checks address dependency in md file.  */
3624   if (s390_use_dfa_pipeline_interface ())
3627   dep_rtx = PATTERN (dep_insn);
/* Address-generation dependency penalty: +1 cycle after LA,
   +4 cycles after any other setter.  Applied per matching SET
   when the dependent pattern is a PARALLEL.  */
3629   if (GET_CODE (dep_rtx) == SET
3630       && addr_generation_dependency_p (dep_rtx, insn))
3631     cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
3632   else if (GET_CODE (dep_rtx) == PARALLEL)
3634       for (i = 0; i < XVECLEN (dep_rtx, 0); i++)
3636           if (addr_generation_dependency_p (XVECEXP (dep_rtx, 0, i), insn))
3637             cost += (s390_safe_attr_type (dep_insn) == TYPE_LA) ? 1 : 4;
/* NOTE(review): the body of the issue-rate hook this comment belongs
   to (s390_issue_rate) is elided in this extract.  */
3644 /* The number of instructions that can be issued per cycle.  */
3652 /* If the following function returns TRUE, we will use the DFA
3656 s390_use_dfa_pipeline_interface ()
/* Only the z900 (2064) model has a DFA pipeline description here.  */
3658   if (s390_cpu == PROCESSOR_2064_Z900)
3664 /* Split all branches that exceed the maximum distance.
3665    Returns true if this created a new literal pool entry.
3667    Code generated by this routine is allowed to use
3668    TEMP_REG as temporary scratch register.  If this is
3669    done, TEMP_USED is set to true.  */
3672 s390_split_branches (temp_reg, temp_used)
3676   int new_literal = 0;
3677   rtx insn, pat, tmp, target;
3680   /* We need correct insn addresses.  */
3682   shorten_branches (get_insns ());
3684   /* Find all branches that exceed 64KB, and split them.  */
3686   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3688       if (GET_CODE (insn) != JUMP_INSN)
/* Look through a PARALLEL wrapper to find the branch SET.  */
3691       pat = PATTERN (insn);
3692       if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
3693         pat = XVECEXP (pat, 0, 0);
3694       if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
/* Locate the LABEL_REF inside an unconditional or conditional
   branch pattern.  */
3697       if (GET_CODE (SET_SRC (pat)) == LABEL_REF)
3699           label = &SET_SRC (pat);
3701       else if (GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
3703           if (GET_CODE (XEXP (SET_SRC (pat), 1)) == LABEL_REF)
3704             label = &XEXP (SET_SRC (pat), 1);
3705           else if (GET_CODE (XEXP (SET_SRC (pat), 2)) == LABEL_REF)
3706             label = &XEXP (SET_SRC (pat), 2);
/* Short branches (relaxed length within range) need no splitting.  */
3713       if (get_attr_length (insn) <= (TARGET_64BIT ? 6 : 4))
/* Strategy 1 (visible): load the target address into TEMP_REG
   directly before the branch.  */
3720           tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, *label), insn);
3721           INSN_ADDRESSES_NEW (tmp, -1);
/* Strategy 2: load the target address from the literal pool.  */
3728           tmp = force_const_mem (Pmode, *label);
3729           tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3730           INSN_ADDRESSES_NEW (tmp, -1);
/* Strategy 3 (PIC, presumably): pool a base-relative UNSPEC 104
   offset and add it to the base register.  */
3737           tmp = gen_rtx_UNSPEC (SImode, gen_rtvec (1, *label), 104);
3738           tmp = gen_rtx_CONST (SImode, tmp);
3739           tmp = force_const_mem (SImode, tmp);
3740           tmp = emit_insn_before (gen_rtx_SET (Pmode, temp_reg, tmp), insn);
3741           INSN_ADDRESSES_NEW (tmp, -1);
3743           target = gen_rtx_REG (Pmode, BASE_REGISTER);
3744           target = gen_rtx_PLUS (Pmode, target, temp_reg);
/* Rewrite the branch to go through the computed target.  */
3747       if (!validate_change (insn, label, target, 0))
3755 /* Find a literal pool symbol referenced in RTX X, and store
3756    it at REF.  Will abort if X contains references to more than
3757    one such pool symbol; multiple references to the same symbol
3758    are allowed, however.
3760    The rtx pointed to by REF must be initialized to NULL_RTX
3761    by the caller before calling this routine.  */
3764 find_constant_pool_ref (x, ref)
/* Record the pool symbol the first time; subsequent hits must be
   the same symbol (abort path is elided in this extract).  */
3771   if (GET_CODE (x) == SYMBOL_REF
3772       && CONSTANT_POOL_ADDRESS_P (x))
3774       if (*ref == NULL_RTX)
/* Recurse over all sub-rtxes.  */
3780   fmt = GET_RTX_FORMAT (GET_CODE (x));
3781   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3785           find_constant_pool_ref (XEXP (x, i), ref);
3787       else if (fmt[i] == 'E')
3789           for (j = 0; j < XVECLEN (x, i); j++)
3790             find_constant_pool_ref (XVECEXP (x, i, j), ref);
3795 /* Replace every reference to the literal pool symbol REF
3796    in X by the address ADDR.  Fix up MEMs as required.  */
3799 replace_constant_pool_ref (x, ref, addr)
3810   /* Literal pool references can only occur inside a MEM ...  */
3811   if (GET_CODE (*x) == MEM)
3813       rtx memref = XEXP (*x, 0);
/* Plain reference: swap in ADDR directly.  */
3817           *x = replace_equiv_address (*x, addr);
/* Reference with constant offset: preserve the offset on top of
   ADDR.  */
3821       if (GET_CODE (memref) == CONST
3822           && GET_CODE (XEXP (memref, 0)) == PLUS
3823           && GET_CODE (XEXP (XEXP (memref, 0), 1)) == CONST_INT
3824           && XEXP (XEXP (memref, 0), 0) == ref)
3826           HOST_WIDE_INT off = INTVAL (XEXP (XEXP (memref, 0), 1));
3827           *x = replace_equiv_address (*x, plus_constant (addr, off));
3832   /* ... or a load-address type pattern.  */
3833   if (GET_CODE (*x) == SET)
3835       rtx addrref = SET_SRC (*x);
3839           SET_SRC (*x) = addr;
3843       if (GET_CODE (addrref) == CONST
3844           && GET_CODE (XEXP (addrref, 0)) == PLUS
3845           && GET_CODE (XEXP (XEXP (addrref, 0), 1)) == CONST_INT
3846           && XEXP (XEXP (addrref, 0), 0) == ref)
3848           HOST_WIDE_INT off = INTVAL (XEXP (XEXP (addrref, 0), 1));
3849           SET_SRC (*x) = plus_constant (addr, off);
/* Recurse over all sub-rtxes.  */
3854   fmt = GET_RTX_FORMAT (GET_CODE (*x));
3855   for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
3859           replace_constant_pool_ref (&XEXP (*x, i), ref, addr);
3861       else if (fmt[i] == 'E')
3863           for (j = 0; j < XVECLEN (*x, i); j++)
3864             replace_constant_pool_ref (&XVECEXP (*x, i, j), ref, addr);
3869 /* Check whether ADDR is an address that uses the base register,
3870    without actually constituting a literal pool access.  (This happens
3871    in 31-bit PIC mode, where the base register is used as anchor for
3872    relative addressing of local symbols.)
3874    Returns 1 if the base register occupies the base slot,
3875    returns 2 if the base register occupies the index slot,
3876    returns 0 if the address is not of this form.  */
3879 find_base_register_in_addr (addr)
3880      struct s390_address *addr;
3882   /* If DISP is complex, we might have a literal pool reference.  */
3883   if (addr->disp && GET_CODE (addr->disp) != CONST_INT)
/* Base slot -> 1, index slot -> 2 (return statements elided in this
   extract).  */
3886   if (addr->base && REG_P (addr->base) && REGNO (addr->base) == BASE_REGISTER)
3889   if (addr->indx && REG_P (addr->indx) && REGNO (addr->indx) == BASE_REGISTER)
3895 /* Return true if X contains an address that uses the base register,
3896    without actually constituting a literal pool access.  */
3899 find_base_register_ref (x)
3903   struct s390_address addr;
3907   /* Addresses can only occur inside a MEM ...  */
3908   if (GET_CODE (x) == MEM)
3910       if (s390_decompose_address (XEXP (x, 0), &addr)
3911           && find_base_register_in_addr (&addr))
3915   /* ... or a load-address type pattern.  */
3916   if (GET_CODE (x) == SET && GET_CODE (SET_DEST (x)) == REG)
3918       if (s390_decompose_address (SET_SRC (x), &addr)
3919           && find_base_register_in_addr (&addr))
/* Recurse over sub-rtxes, OR-ing the results together.  */
3923   fmt = GET_RTX_FORMAT (GET_CODE (x));
3924   for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3928           retv |= find_base_register_ref (XEXP (x, i));
3930       else if (fmt[i] == 'E')
3932           for (j = 0; j < XVECLEN (x, i); j++)
3933             retv |= find_base_register_ref (XVECEXP (x, i, j));
3940 /* If X contains an address that uses the base register,
3941    without actually constituting a literal pool access,
3942    replace the base register with REPL in all such cases.
3944    Handles both MEMs and load address patterns.  */
3947 replace_base_register_ref (x, repl)
3951   struct s390_address addr;
3956   /* Addresses can only occur inside a MEM ...  */
3957   if (GET_CODE (*x) == MEM)
3959       if (s390_decompose_address (XEXP (*x, 0), &addr)
3960           && (pos = find_base_register_in_addr (&addr)))
/* Rebuild the address as base [+ index] [+ disp] with REPL
   substituted (the substitution itself is elided in this extract;
   pos presumably selects base vs. index slot).  */
3967           new_addr = addr.base;
3969             new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3971             new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3973           *x = replace_equiv_address (*x, new_addr);
3978   /* ... or a load-address type pattern.  */
3979   if (GET_CODE (*x) == SET && GET_CODE (SET_DEST (*x)) == REG)
3981       if (s390_decompose_address (SET_SRC (*x), &addr)
3982           && (pos = find_base_register_in_addr (&addr)))
3989           new_addr = addr.base;
3991             new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.indx);
3993             new_addr = gen_rtx_PLUS (Pmode, new_addr, addr.disp);
3995           SET_SRC (*x) = new_addr;
/* Recurse over all sub-rtxes.  */
4000   fmt = GET_RTX_FORMAT (GET_CODE (*x));
4001   for (i = GET_RTX_LENGTH (GET_CODE (*x)) - 1; i >= 0; i--)
4005           replace_base_register_ref (&XEXP (*x, i), repl);
4007       else if (fmt[i] == 'E')
4009           for (j = 0; j < XVECLEN (*x, i); j++)
4010             replace_base_register_ref (&XVECEXP (*x, i, j), repl);
4016 /* We keep a list of constants which we have to add to internal
4017    constant tables in the middle of large functions.  */
/* Number of distinct machine modes a pool entry may have; the mode
   table and the per-mode emitter table below are indexed in parallel.  */
4019 #define NR_C_MODES 6
4020 enum machine_mode constant_modes[NR_C_MODES] =
4028 rtx (*gen_consttable[NR_C_MODES])(rtx) =
4030   gen_consttable_df, gen_consttable_di,
4031   gen_consttable_sf, gen_consttable_si,
/* Singly-linked list node for one pool constant (value/label fields
   are elided in this extract).  */
4038   struct constant *next;
/* One literal pool chunk: chained to the next chunk, with per-mode
   constant lists.  */
4043 struct constant_pool
4045   struct constant_pool *next;
4050   struct constant *constants[NR_C_MODES];
/* Forward declarations for the pool-management helpers below.  */
4056 static struct constant_pool * s390_chunkify_start PARAMS ((rtx, bool *));
4057 static void s390_chunkify_finish PARAMS ((struct constant_pool *, rtx));
4058 static void s390_chunkify_cancel PARAMS ((struct constant_pool *));
4060 static struct constant_pool *s390_start_pool PARAMS ((struct constant_pool **, rtx));
4061 static void s390_end_pool PARAMS ((struct constant_pool *, rtx));
4062 static void s390_add_pool_insn PARAMS ((struct constant_pool *, rtx));
4063 static struct constant_pool *s390_find_pool PARAMS ((struct constant_pool *, rtx));
4064 static void s390_add_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4065 static rtx s390_find_constant PARAMS ((struct constant_pool *, rtx, enum machine_mode));
4066 static void s390_add_anchor PARAMS ((struct constant_pool *));
4067 static rtx s390_dump_pool PARAMS ((struct constant_pool *));
4068 static void s390_free_pool PARAMS ((struct constant_pool *));
4070 /* Create new constant pool covering instructions starting at INSN
4071    and chain it to the end of POOL_LIST.  */
4073 static struct constant_pool *
4074 s390_start_pool (pool_list, insn)
4075      struct constant_pool **pool_list;
4078   struct constant_pool *pool, **prev;
4081   pool = (struct constant_pool *) xmalloc (sizeof *pool);
/* Initialize all per-mode constant lists to empty.  */
4083   for (i = 0; i < NR_C_MODES; i++)
4084     pool->constants[i] = NULL;
4086   pool->label = gen_label_rtx ();
4087   pool->first_insn = insn;
4088   pool->pool_insn = NULL_RTX;
4089   pool->insns = BITMAP_XMALLOC ();
4091   pool->anchor = FALSE;
/* Append to the tail of POOL_LIST.  */
4093   for (prev = pool_list; *prev; prev = &(*prev)->next)
4100 /* End range of instructions covered by POOL at INSN and emit
4101    placeholder insn representing the pool.  */
4104 s390_end_pool (pool, insn)
4105      struct constant_pool *pool;
/* The +8 leaves slack for alignment padding when the pool is dumped.  */
4108   rtx pool_size = GEN_INT (pool->size + 8 /* alignment slop */);
/* NULL INSN means "end at the last insn of the function".  */
4111     insn = get_last_insn ();
4113   pool->pool_insn = emit_insn_after (gen_pool (pool_size), insn);
4114   INSN_ADDRESSES_NEW (pool->pool_insn, -1);
4117 /* Add INSN to the list of insns covered by POOL.  */
4120 s390_add_pool_insn (pool, insn)
4121      struct constant_pool *pool;
/* Membership is tracked as a bitmap over insn UIDs.  */
4124   bitmap_set_bit (pool->insns, INSN_UID (insn));
4127 /* Return pool out of POOL_LIST that covers INSN.  */
4129 static struct constant_pool *
4130 s390_find_pool (pool_list, insn)
4131      struct constant_pool *pool_list;
4134   struct constant_pool *pool;
/* Linear scan; coverage is the UID bitmap set by s390_add_pool_insn.  */
4136   for (pool = pool_list; pool; pool = pool->next)
4137     if (bitmap_bit_p (pool->insns, INSN_UID (insn)))
4143 /* Add constant VAL of mode MODE to the constant pool POOL.  */
4146 s390_add_constant (pool, val, mode)
4147      struct constant_pool *pool;
4149      enum machine_mode mode;
/* Map MODE to its slot index; an unsupported mode is an error
   (abort path elided in this extract).  */
4154   for (i = 0; i < NR_C_MODES; i++)
4155     if (constant_modes[i] == mode)
4157   if (i == NR_C_MODES)
/* Deduplicate: an equal constant already in the pool is reused.  */
4160   for (c = pool->constants[i]; c != NULL; c = c->next)
4161     if (rtx_equal_p (val, c->value))
/* Not found: prepend a new entry and grow the pool size.  */
4166       c = (struct constant *) xmalloc (sizeof *c);
4168       c->label = gen_label_rtx ();
4169       c->next = pool->constants[i];
4170       pool->constants[i] = c;
4171       pool->size += GET_MODE_SIZE (mode);
4175 /* Find constant VAL of mode MODE in the constant pool POOL.
4176    Return an RTX describing the distance from the start of
4177    the pool to the location of the new constant.  */
4180 s390_find_constant (pool, val, mode)
4181      struct constant_pool *pool;
4183      enum machine_mode mode;
4189   for (i = 0; i < NR_C_MODES; i++)
4190     if (constant_modes[i] == mode)
4192   if (i == NR_C_MODES)
4195   for (c = pool->constants[i]; c != NULL; c = c->next)
4196     if (rtx_equal_p (val, c->value))
/* Offset is expressed symbolically: constant's label minus the
   pool's start label, wrapped in CONST.  */
4202   offset = gen_rtx_MINUS (Pmode, gen_rtx_LABEL_REF (Pmode, c->label),
4203                                  gen_rtx_LABEL_REF (Pmode, pool->label));
4204   offset = gen_rtx_CONST (Pmode, offset);
4208 /* Set 'anchor' flag in POOL.  */
4211 s390_add_anchor (pool)
4212      struct constant_pool *pool;
/* Causes s390_dump_pool to emit an anchor entry for this pool.  */
4216       pool->anchor = TRUE;
4221 /* Dump out the constants in POOL.  */
4224 s390_dump_pool (pool)
4225      struct constant_pool *pool;
4231   /* Pool start insn switches to proper section
4232      and guarantees necessary alignment.  */
4234     insn = emit_insn_after (gen_pool_start_64 (), pool->pool_insn);
4236     insn = emit_insn_after (gen_pool_start_31 (), pool->pool_insn);
4237   INSN_ADDRESSES_NEW (insn, -1);
/* The pool's start label — target of pool-relative offsets.  */
4239   insn = emit_label_after (pool->label, insn);
4240   INSN_ADDRESSES_NEW (insn, -1);
4242   /* Emit anchor if we need one.  */
/* The anchor is a self-referential UNSPEC 105 around the pool label,
   stored as an SImode pool entry.  */
4245       rtx anchor = gen_rtx_LABEL_REF (VOIDmode, pool->label);
4246       anchor = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, anchor), 105);
4247       anchor = gen_rtx_CONST (VOIDmode, anchor);
4248       insn = emit_insn_after (gen_consttable_si (anchor), insn);
4249       INSN_ADDRESSES_NEW (insn, -1);
4252   /* Dump constants in descending alignment requirement order,
4253      ensuring proper alignment for every constant.  */
4254   for (i = 0; i < NR_C_MODES; i++)
4255     for (c = pool->constants[i]; c; c = c->next)
4257         /* Convert 104 unspecs to pool-relative references.  */
4258         rtx value = c->value;
4259         if (GET_CODE (value) == CONST
4260             && GET_CODE (XEXP (value, 0)) == UNSPEC
4261             && XINT (XEXP (value, 0), 1) == 104
4262             && XVECLEN (XEXP (value, 0), 0) == 1)
4264             value = gen_rtx_MINUS (Pmode, XVECEXP (XEXP (value, 0), 0, 0),
4265                                    gen_rtx_LABEL_REF (VOIDmode, pool->label));
4266             value = gen_rtx_CONST (VOIDmode, value);
/* Each entry: its label, then the mode-specific consttable insn.  */
4269         insn = emit_label_after (c->label, insn);
4270         INSN_ADDRESSES_NEW (insn, -1);
4271         insn = emit_insn_after (gen_consttable[i] (value), insn);
4272         INSN_ADDRESSES_NEW (insn, -1);
4275   /* Pool end insn switches back to previous section
4276      and guarantees necessary alignment.  */
4278     insn = emit_insn_after (gen_pool_end_64 (), insn);
4280     insn = emit_insn_after (gen_pool_end_31 (), insn);
4281   INSN_ADDRESSES_NEW (insn, -1);
4283   insn = emit_barrier_after (insn);
4284   INSN_ADDRESSES_NEW (insn, -1);
4286   /* Remove placeholder insn.  */
4287   remove_insn (pool->pool_insn);
4292 /* Free all memory used by POOL.  */
4295 s390_free_pool (pool)
4296      struct constant_pool *pool;
/* Walk each per-mode list, freeing entries via a saved next pointer
   (the free() calls themselves are elided in this extract).  */
4300   for (i = 0; i < NR_C_MODES; i++)
4302       struct constant *c = pool->constants[i];
4305           struct constant *next = c->next;
4311   BITMAP_XFREE (pool->insns);
4316 /* Chunkify the literal pool if required.
4318 Code generated by this routine is allowed to use
4319 TEMP_REG as temporary scratch register. If this is
4320 done, TEMP_USED is set to true. */
4322 #define S390_POOL_CHUNK_MIN 0xc00
4323 #define S390_POOL_CHUNK_MAX 0xe00
4325 static struct constant_pool *
/* Split the function's literal pool into chunks no larger than
   S390_POOL_CHUNK_MAX, building a linked list of constant_pool
   structures, and emit base/anchor register reload insns where the
   base register must be repointed.  TEMP_REG may be used as scratch;
   TEMP_USED presumably records whether it was (TODO confirm -- the
   parameter declarations are elided in this listing).  Returns the
   chunk list consumed by s390_chunkify_finish/_cancel.
   NOTE(review): this listing is abridged (non-contiguous original
   line numbers); some statements and braces are not visible.  */
4326 s390_chunkify_start (temp_reg, temp_used)
4330 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4332 struct constant_pool *curr_pool = NULL, *pool_list = NULL;
4337 rtx (*gen_reload_base) PARAMS ((rtx, rtx)) =
4338 TARGET_64BIT? gen_reload_base_64 : gen_reload_base_31;
/* Early exit: a pool below the chunk limit needs no splitting.  */
4341 /* Do we need to chunkify the literal pool? */
4343 if (get_pool_size () < S390_POOL_CHUNK_MAX)
4346 /* We need correct insn addresses. */
4348 shorten_branches (get_insns ());
4350 /* Scan all insns and move literals to pool chunks.
4351 Also, emit anchor reload insns before every insn that uses
4352 the literal pool base register as anchor pointer. */
4354 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4356 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4358 rtx pool_ref = NULL_RTX;
4359 find_constant_pool_ref (PATTERN (insn), &pool_ref);
4363 curr_pool = s390_start_pool (&pool_list, insn);
4365 s390_add_constant (curr_pool, get_pool_constant (pool_ref),
4366 get_pool_mode (pool_ref));
4367 s390_add_pool_insn (curr_pool, insn);
/* 31-bit PIC only: insns referencing the base register get an
   anchor reload emitted in front of them.  */
4370 else if (!TARGET_64BIT && flag_pic
4371 && find_base_register_ref (PATTERN (insn)))
4373 rtx new = gen_reload_anchor (temp_reg, base_reg);
4374 new = emit_insn_before (new, insn);
4375 INSN_ADDRESSES_NEW (new, INSN_ADDRESSES (INSN_UID (insn)));
4380 curr_pool = s390_start_pool (&pool_list, new);
4382 s390_add_anchor (curr_pool);
4383 s390_add_pool_insn (curr_pool, insn);
4387 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
4389 s390_add_pool_insn (curr_pool, insn);
4392 || INSN_ADDRESSES_SIZE () <= (size_t) INSN_UID (insn)
4393 || INSN_ADDRESSES (INSN_UID (insn)) == -1)
4398 if (curr_pool->size < S390_POOL_CHUNK_MAX)
4401 s390_end_pool (curr_pool, NULL_RTX);
4406 int chunk_size = INSN_ADDRESSES (INSN_UID (insn))
4407 - INSN_ADDRESSES (INSN_UID (curr_pool->first_insn))
4410 /* We will later have to insert base register reload insns.
4411 Those will have an effect on code size, which we need to
4412 consider here. This calculation makes rather pessimistic
4413 worst-case assumptions. */
4414 if (GET_CODE (insn) == CODE_LABEL)
4417 if (chunk_size < S390_POOL_CHUNK_MIN
4418 && curr_pool->size < S390_POOL_CHUNK_MIN)
4421 /* Pool chunks can only be inserted after BARRIERs ... */
4422 if (GET_CODE (insn) == BARRIER)
4424 s390_end_pool (curr_pool, insn)
4429 /* ... so if we don't find one in time, create one. */
4430 else if ((chunk_size > S390_POOL_CHUNK_MAX
4431 || curr_pool->size > S390_POOL_CHUNK_MAX))
4433 rtx label, jump, barrier;
4435 /* We can insert the barrier only after a 'real' insn. */
4436 if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
4438 if (get_attr_length (insn) == 0)
4441 /* Don't separate insns created by s390_split_branches. */
4442 if (GET_CODE (insn) == INSN
4443 && GET_CODE (PATTERN (insn)) == SET
4444 && rtx_equal_p (SET_DEST (PATTERN (insn)), temp_reg))
/* Force a chunk boundary: emit a jump around a fresh barrier so
   the pool can be dumped behind it.  */
4447 label = gen_label_rtx ();
4448 jump = emit_jump_insn_after (gen_jump (label), insn);
4449 barrier = emit_barrier_after (jump);
4450 insn = emit_label_after (label, barrier);
4451 JUMP_LABEL (jump) = label;
4452 LABEL_NUSES (label) = 1;
4454 INSN_ADDRESSES_NEW (jump, -1);
4455 INSN_ADDRESSES_NEW (barrier, -1);
4456 INSN_ADDRESSES_NEW (insn, -1);
4458 s390_end_pool (curr_pool, barrier);
4466 s390_end_pool (curr_pool, NULL_RTX);
4469 /* Find all labels that are branched into
4470 from an insn belonging to a different chunk. */
4472 far_labels = BITMAP_XMALLOC ();
4474 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4476 /* Labels marked with LABEL_PRESERVE_P can be target
4477 of non-local jumps, so we have to mark them.
4478 The same holds for named labels.
4480 Don't do that, however, if it is the label before
4483 if (GET_CODE (insn) == CODE_LABEL
4484 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
4486 rtx vec_insn = next_real_insn (insn);
4487 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4488 PATTERN (vec_insn) : NULL_RTX;
4490 || !(GET_CODE (vec_pat) == ADDR_VEC
4491 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4492 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (insn));
4495 /* If we have a direct jump (conditional or unconditional)
4496 or a casesi jump, check all potential targets. */
4497 else if (GET_CODE (insn) == JUMP_INSN)
4499 rtx pat = PATTERN (insn);
4500 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
4501 pat = XVECEXP (pat, 0, 0);
4503 if (GET_CODE (pat) == SET)
4505 rtx label = JUMP_LABEL (insn);
/* A label is "far" when its pool chunk differs from the
   jumping insn's chunk.  */
4508 if (s390_find_pool (pool_list, label)
4509 != s390_find_pool (pool_list, insn))
4510 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4513 else if (GET_CODE (pat) == PARALLEL
4514 && XVECLEN (pat, 0) == 2
4515 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
4516 && GET_CODE (XVECEXP (pat, 0, 1)) == USE
4517 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == LABEL_REF)
4519 /* Find the jump table used by this casesi jump. */
4520 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
4521 rtx vec_insn = next_real_insn (vec_label);
4522 rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
4523 PATTERN (vec_insn) : NULL_RTX;
4525 && (GET_CODE (vec_pat) == ADDR_VEC
4526 || GET_CODE (vec_pat) == ADDR_DIFF_VEC))
4528 int i, diff_p = GET_CODE (vec_pat) == ADDR_DIFF_VEC;
4530 for (i = 0; i < XVECLEN (vec_pat, diff_p); i++)
4532 rtx label = XEXP (XVECEXP (vec_pat, diff_p, i), 0);
4534 if (s390_find_pool (pool_list, label)
4535 != s390_find_pool (pool_list, insn))
4536 bitmap_set_bit (far_labels, CODE_LABEL_NUMBER (label));
4543 /* Insert base register reload insns before every pool. */
4545 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4547 rtx new_insn = gen_reload_base (base_reg, curr_pool->label);
4548 rtx insn = curr_pool->first_insn;
4549 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1);
4552 /* Insert base register reload insns at every far label. */
4554 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4555 if (GET_CODE (insn) == CODE_LABEL
4556 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
4558 struct constant_pool *pool = s390_find_pool (pool_list, insn);
4561 rtx new_insn = gen_reload_base (base_reg, pool->label);
4562 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1);
4567 BITMAP_XFREE (far_labels);
4570 /* Recompute insn addresses. */
4572 init_insn_lengths ();
4573 shorten_branches (get_insns ());
4578 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4579 After we have decided to use this list, finish implementing
4580 all changes to the current function as required.
4582 Code generated by this routine is allowed to use
4583 TEMP_REG as temporary scratch register. */
4586 s390_chunkify_finish (pool_list, temp_reg)
4587 struct constant_pool *pool_list;
4590 rtx base_reg = gen_rtx_REG (Pmode, BASE_REGISTER);
4591 struct constant_pool *curr_pool = NULL;
4595 /* Replace all literal pool references. */
4597 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4599 curr_pool = s390_find_pool (pool_list, insn);
4603 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
4605 rtx addr, pool_ref = NULL_RTX;
4606 find_constant_pool_ref (PATTERN (insn), &pool_ref);
/* Rewrite the pool reference as base_reg + offset-within-chunk
   and force the insn to be re-recognized (INSN_CODE = -1).  */
4609 addr = s390_find_constant (curr_pool, get_pool_constant (pool_ref),
4610 get_pool_mode (pool_ref));
4611 addr = gen_rtx_PLUS (Pmode, base_reg, addr);
4612 replace_constant_pool_ref (&PATTERN (insn), pool_ref, addr);
4613 INSN_CODE (insn) = -1;
4616 else if (!TARGET_64BIT && flag_pic
4617 && find_base_register_ref (PATTERN (insn)))
4619 replace_base_register_ref (&PATTERN (insn), temp_reg);
4624 /* Dump out all literal pools. */
4626 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4627 s390_dump_pool (curr_pool);
4629 /* Free pool list. */
4633 struct constant_pool *next = pool_list->next;
4634 s390_free_pool (pool_list);
4639 /* POOL_LIST is a chunk list as prepared by s390_chunkify_start.
4640 We have decided we cannot use this list, so revert all changes
4641 to the current function that were done by s390_chunkify_start. */
4644 s390_chunkify_cancel (pool_list)
4645 struct constant_pool *pool_list;
4647 struct constant_pool *curr_pool = NULL;
4650 /* Remove all pool placeholder insns. */
4652 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next)
4654 /* Did we insert an extra barrier? Remove it. */
4655 rtx barrier = PREV_INSN (curr_pool->pool_insn);
4656 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
4657 rtx label = NEXT_INSN (curr_pool->pool_insn);
/* Remove jump/barrier/label only if they form exactly the
   branch-over-pool sequence created by s390_chunkify_start.  */
4659 if (jump && GET_CODE (jump) == JUMP_INSN
4660 && barrier && GET_CODE (barrier) == BARRIER
4661 && label && GET_CODE (label) == CODE_LABEL
4662 && GET_CODE (PATTERN (jump)) == SET
4663 && SET_DEST (PATTERN (jump)) == pc_rtx
4664 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
4665 && XEXP (SET_SRC (PATTERN (jump)), 0) == label)
4668 remove_insn (barrier);
4669 remove_insn (label);
4672 remove_insn (curr_pool->pool_insn);
4675 /* Remove all base/anchor register reload insns. */
4677 for (insn = get_insns (); insn; )
4679 rtx next_insn = NEXT_INSN (insn);
/* UNSPEC numbers 210/211 identify the reload_base/reload_anchor
   patterns -- magic constants presumably matching s390.md; verify
   against the machine description before changing.  */
4681 if (GET_CODE (insn) == INSN
4682 && GET_CODE (PATTERN (insn)) == SET
4683 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
4684 && (XINT (SET_SRC (PATTERN (insn)), 1) == 210
4685 || XINT (SET_SRC (PATTERN (insn)), 1) == 211))
4691 /* Free pool list. */
4695 struct constant_pool *next = pool_list->next;
4696 s390_free_pool (pool_list);
4702 /* Index of constant pool chunk that is currently being processed.
4703 Set to -1 before function output has started. */
4704 int s390_pool_count = -1;
4706 /* Number of elements of current constant pool. */
/* NOTE(review): writers of s390_nr_constants are not visible in this
   listing; presumably maintained by the pool output machinery.  */
4707 int s390_nr_constants;
4709 /* Output main constant pool to stdio stream FILE. */
/* NOTE(review): output actually goes to asm_out_file; START_LABEL and
   END_LABEL delimit the emitted pool.  s390_pool_count != -1 signals
   to other output code that pool emission is in progress.  */
4712 s390_output_constant_pool (start_label, end_label)
4718 readonly_data_section ();
4719 ASM_OUTPUT_ALIGN (asm_out_file, 3);
4720 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4721 CODE_LABEL_NUMBER (start_label));
4725 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4726 CODE_LABEL_NUMBER (start_label));
4727 ASM_OUTPUT_ALIGN (asm_out_file, 2);
4730 s390_pool_count = 0;
4731 output_constant_pool (current_function_name, current_function_decl);
4732 s390_pool_count = -1;
4734 function_section (current_function_decl);
4737 ASM_OUTPUT_ALIGN (asm_out_file, 1);
4738 (*targetm.asm_out.internal_label) (asm_out_file, "L",
4739 CODE_LABEL_NUMBER (end_label));
4743 /* Rework the prolog/epilog to avoid saving/restoring
4744 registers unnecessarily. If TEMP_REGNO is nonnegative,
4745 it specifies the number of a caller-saved register used
4746 as temporary scratch register by code emitted during
4747 machine dependent reorg. */
4750 s390_optimize_prolog (temp_regno)
4753 int save_first, save_last, restore_first, restore_last;
4755 rtx insn, new_insn, next_insn;
4757 /* Recompute regs_ever_live data for special registers. */
4758 regs_ever_live[BASE_REGISTER] = 0;
4759 regs_ever_live[RETURN_REGNUM] = 0;
4760 regs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
4762 /* If there is (possibly) any pool entry, we need to
4763 load the base register.
4764 ??? FIXME: this should be more precise. */
4765 if (get_pool_size ())
4766 regs_ever_live[BASE_REGISTER] = 1;
4768 /* In non-leaf functions, the prolog/epilog code relies
4769 on RETURN_REGNUM being saved in any case. */
4770 if (!current_function_is_leaf)
4771 regs_ever_live[RETURN_REGNUM] = 1;
4773 /* We need to save/restore the temporary register. */
4774 if (temp_regno >= 0)
4775 regs_ever_live[temp_regno] = 1;
4778 /* Find first and last gpr to be saved. */
4780 for (i = 6; i < 16; i++)
4781 if (regs_ever_live[i])
4783 || i == STACK_POINTER_REGNUM
4784 || i == RETURN_REGNUM
4785 || i == BASE_REGISTER
4786 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
4789 for (j = 15; j > i; j--)
4790 if (regs_ever_live[j])
4792 || j == STACK_POINTER_REGNUM
4793 || j == RETURN_REGNUM
4794 || j == BASE_REGISTER
4795 || (flag_pic && j == (int)PIC_OFFSET_TABLE_REGNUM))
4800 /* Nothing to save/restore. */
4801 save_first = restore_first = -1;
4802 save_last = restore_last = -1;
4806 /* Save/restore from i to j. */
4807 save_first = restore_first = i;
4808 save_last = restore_last = j;
4811 /* Varargs functions need to save gprs 2 to 6. */
4812 if (current_function_stdarg)
4820 /* If all special registers are in fact used, there's nothing we
4821 can do, so no point in walking the insn list. */
4822 if (i <= BASE_REGISTER && j >= BASE_REGISTER
4823 && i <= RETURN_REGNUM && j >= RETURN_REGNUM)
4827 /* Search for prolog/epilog insns and replace them. */
4829 for (insn = get_insns (); insn; insn = next_insn)
4831 int first, last, off;
4832 rtx set, base, offset;
4834 next_insn = NEXT_INSN (insn);
4836 if (GET_CODE (insn) != INSN)
4838 if (GET_CODE (PATTERN (insn)) != PARALLEL)
/* Case 1: an STM emitted by the prologue -- replace it with a
   (possibly narrower) save_gprs covering only what is needed.  */
4841 if (store_multiple_operation (PATTERN (insn), VOIDmode))
4843 set = XVECEXP (PATTERN (insn), 0, 0);
4844 first = REGNO (SET_SRC (set));
4845 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4846 offset = const0_rtx;
4847 base = eliminate_constant_term (XEXP (SET_DEST (set), 0), &offset);
4848 off = INTVAL (offset) - first * UNITS_PER_WORD;
4850 if (GET_CODE (base) != REG || off < 0)
4852 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4854 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4857 if (save_first != -1)
4859 new_insn = save_gprs (base, off, save_first, save_last);
4860 new_insn = emit_insn_before (new_insn, insn);
4861 INSN_ADDRESSES_NEW (new_insn, -1);
/* Case 2: an LM emitted by the epilogue -- same treatment with
   restore_gprs.  */
4867 if (load_multiple_operation (PATTERN (insn), VOIDmode))
4869 set = XVECEXP (PATTERN (insn), 0, 0);
4870 first = REGNO (SET_DEST (set));
4871 last = first + XVECLEN (PATTERN (insn), 0) - 1;
4872 offset = const0_rtx;
4873 base = eliminate_constant_term (XEXP (SET_SRC (set), 0), &offset);
4874 off = INTVAL (offset) - first * UNITS_PER_WORD;
4876 if (GET_CODE (base) != REG || off < 0)
4878 if (first > BASE_REGISTER && first > RETURN_REGNUM)
4880 if (last < BASE_REGISTER && last < RETURN_REGNUM)
4883 if (restore_first != -1)
4885 new_insn = restore_gprs (base, off, restore_first, restore_last);
4886 new_insn = emit_insn_before (new_insn, insn);
4887 INSN_ADDRESSES_NEW (new_insn, -1);
4895 /* Check whether any insn in the function makes use of the original
4896 value of RETURN_REG (e.g. for __builtin_return_address).
4897 If so, insert an insn reloading that value.
4899 Return true if any such insn was found. */
4902 s390_fixup_clobbered_return_reg (return_reg)
4905 bool replacement_done = 0;
4908 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4910 rtx reg, off, new_insn;
4912 if (GET_CODE (insn) != INSN)
4914 if (!reg_referenced_p (return_reg, PATTERN (insn)))
4916 if (GET_CODE (PATTERN (insn)) == PARALLEL
4917 && store_multiple_operation (PATTERN (insn), VOIDmode))
/* Compute the save slot address of RETURN_REG relative to the
   frame or stack pointer.  */
4920 if (frame_pointer_needed)
4921 reg = hard_frame_pointer_rtx;
4923 reg = stack_pointer_rtx;
4925 off = GEN_INT (cfun->machine->frame_size + REGNO (return_reg) * UNITS_PER_WORD);
/* Offsets of 4096 or more cannot be used directly here, so the
   offset constant is routed through the literal pool first.  */
4926 if (INTVAL (off) >= 4096)
4928 off = force_const_mem (Pmode, off);
4929 new_insn = gen_rtx_SET (Pmode, return_reg, off);
4930 new_insn = emit_insn_before (new_insn, insn);
4931 INSN_ADDRESSES_NEW (new_insn, -1);
4935 new_insn = gen_rtx_MEM (Pmode, gen_rtx_PLUS (Pmode, reg, off));
4936 new_insn = gen_rtx_SET (Pmode, return_reg, new_insn);
4937 new_insn = emit_insn_before (new_insn, insn);
4938 INSN_ADDRESSES_NEW (new_insn, -1);
4940 replacement_done = 1;
4943 return replacement_done;
4946 /* Perform machine-dependent processing. */
/* Driver for the chunkify / split-branches fixed point iteration
   described in the long comment below; finishes by shrinking the
   prolog/epilog via s390_optimize_prolog.  */
4949 s390_machine_dependent_reorg (first)
4950 rtx first ATTRIBUTE_UNUSED;
4952 bool fixed_up_clobbered_return_reg = 0;
4953 rtx temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
4956 /* Make sure all splits have been performed; splits after
4957 machine_dependent_reorg might confuse insn length counts. */
4958 split_all_insns_noflow ();
4961 /* There are two problematic situations we need to correct:
4963 - the literal pool might be > 4096 bytes in size, so that
4964 some of its elements cannot be directly accessed
4966 - a branch target might be > 64K away from the branch, so that
4967 it is not possible to use a PC-relative instruction.
4969 To fix those, we split the single literal pool into multiple
4970 pool chunks, reloading the pool base register at various
4971 points throughout the function to ensure it always points to
4972 the pool chunk the following code expects, and / or replace
4973 PC-relative branches by absolute branches.
4975 However, the two problems are interdependent: splitting the
4976 literal pool can move a branch further away from its target,
4977 causing the 64K limit to overflow, and on the other hand,
4978 replacing a PC-relative branch by an absolute branch means
4979 we need to put the branch target address into the literal
4980 pool, possibly causing it to overflow.
4982 So, we loop trying to fix up both problems until we manage
4983 to satisfy both conditions at the same time. Note that the
4984 loop is guaranteed to terminate as every pass of the loop
4985 strictly decreases the total number of PC-relative branches
4986 in the function. (This is not completely true as there
4987 might be branch-over-pool insns introduced by chunkify_start.
4988 Those never need to be split however.) */
4992 struct constant_pool *pool_list;
4994 /* Try to chunkify the literal pool. */
4995 pool_list = s390_chunkify_start (temp_reg, &temp_used);
4997 /* Split out-of-range branches. If this has created new
4998 literal pool entries, cancel current chunk list and
5000 if (s390_split_branches (temp_reg, &temp_used))
5003 s390_chunkify_cancel (pool_list);
5008 /* Check whether we have clobbered a use of the return
5009 register (e.g. for __builtin_return_address). If so,
5010 add insns reloading the register where necessary. */
5011 if (temp_used && !fixed_up_clobbered_return_reg
5012 && s390_fixup_clobbered_return_reg (temp_reg))
5014 fixed_up_clobbered_return_reg = 1;
5016 /* The fixup insns might have caused a jump to overflow. */
5018 s390_chunkify_cancel (pool_list);
5023 /* If we made it up to here, both conditions are satisfied.
5024 Finish up pool chunkification if required. */
5026 s390_chunkify_finish (pool_list, temp_reg);
5031 s390_optimize_prolog (temp_used? RETURN_REGNUM : -1);
5035 /* Return an RTL expression representing the value of the return address
5036 for the frame COUNT steps up from the current frame. FRAME is the
5037 frame pointer of that frame. */
5040 s390_return_addr_rtx (count, frame)
5046 /* For the current frame, we use the initial value of RETURN_REGNUM.
5047 This works both in leaf and non-leaf functions. */
5050 return get_hard_reg_initial_val (Pmode, RETURN_REGNUM);
5052 /* For frames farther back, we read the stack slot where the
5053 corresponding RETURN_REGNUM value was saved. */
5055 addr = plus_constant (frame, RETURN_REGNUM * UNITS_PER_WORD);
5056 addr = memory_address (Pmode, addr);
5057 return gen_rtx_MEM (Pmode, addr);
5060 /* Find first call clobbered register unused in a function.
5061 This could be used as base register in a leaf function
5062 or for holding the return address before epilogue. */
/* Scans gprs 0..5; the not-found fallback is elided in this
   listing -- presumably returns a sentinel; confirm in full source.  */
5065 find_unused_clobbered_reg ()
5068 for (i = 0; i < 6; i++)
5069 if (!regs_ever_live[i])
5074 /* Fill FRAME with info about frame of current function. */
/* NOTE(review): the function header line is elided in this listing;
   from context this is the frame-info computation routine
   (presumably s390_frame_info -- confirm in full source).  It fills
   cfun->machine with frame_size, save_fprs_p and the first/last
   GPR save/restore range.  */
5079 char gprs_ever_live[16];
5081 HOST_WIDE_INT fsize = get_frame_size ();
5083 if (fsize > 0x7fff0000)
5084 fatal_error ("Total size of local variables exceeds architecture limit.");
5086 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5087 cfun->machine->save_fprs_p = 0;
5089 for (i = 24; i < 32; i++)
5090 if (regs_ever_live[i] && !global_regs[i])
5092 cfun->machine->save_fprs_p = 1;
/* 8 FPRs x 8 bytes = 64-byte FPR save area, if needed.  */
5096 cfun->machine->frame_size = fsize + cfun->machine->save_fprs_p * 64;
5098 /* Does function need to setup frame and save area. */
5100 if (! current_function_is_leaf
5101 || cfun->machine->frame_size > 0
5102 || current_function_calls_alloca
5103 || current_function_stdarg)
5104 cfun->machine->frame_size += STARTING_FRAME_OFFSET;
5106 /* Find first and last gpr to be saved. Note that at this point,
5107 we assume the return register and the base register always
5108 need to be saved. This is done because the usage of these
5109 register might change even after the prolog was emitted.
5110 If it turns out later that we really don't need them, the
5111 prolog/epilog code is modified again. */
5113 for (i = 0; i < 16; i++)
5114 gprs_ever_live[i] = regs_ever_live[i] && !global_regs[i];
5117 gprs_ever_live[PIC_OFFSET_TABLE_REGNUM] =
5118 regs_ever_live[PIC_OFFSET_TABLE_REGNUM];
5119 gprs_ever_live[BASE_REGISTER] = 1;
5120 gprs_ever_live[RETURN_REGNUM] = 1;
5121 gprs_ever_live[STACK_POINTER_REGNUM] = cfun->machine->frame_size > 0;
5123 for (i = 6; i < 16; i++)
5124 if (gprs_ever_live[i])
5127 for (j = 15; j > i; j--)
5128 if (gprs_ever_live[j])
5132 /* Save / Restore from gpr i to j. */
5133 cfun->machine->first_save_gpr = i;
5134 cfun->machine->first_restore_gpr = i;
5135 cfun->machine->last_save_gpr = j;
5137 /* Varargs functions need to save gprs 2 to 6. */
5138 if (current_function_stdarg)
5139 cfun->machine->first_save_gpr = 2;
5142 /* Return offset between argument pointer and frame pointer
5143 initially after prologue. */
/* Keep this computation in sync with the frame-size logic in the
   frame-info routine above -- it re-derives the same frame size.  */
5146 s390_arg_frame_offset ()
5148 HOST_WIDE_INT fsize = get_frame_size ();
5151 /* fprs 8 - 15 are caller saved for 64 Bit ABI. */
5154 for (i = 24; i < 32; i++)
5155 if (regs_ever_live[i] && !global_regs[i])
5161 fsize = fsize + save_fprs_p * 64;
5163 /* Does function need to setup frame and save area. */
5165 if (! current_function_is_leaf
5167 || current_function_calls_alloca
5168 || current_function_stdarg)
5169 fsize += STARTING_FRAME_OFFSET;
5170 return fsize + STACK_POINTER_OFFSET;
5173 /* Emit insn to save fpr REGNUM at offset OFFSET relative
5174 to register BASE. Return generated insn. */
/* The slot is tagged with the save-area alias set so the scheduler
   knows these stores do not conflict with user memory.  */
5177 save_fpr (base, offset, regnum)
5183 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5184 set_mem_alias_set (addr, s390_sr_alias_set);
5186 return emit_move_insn (addr, gen_rtx_REG (DFmode, regnum));
5189 /* Emit insn to restore fpr REGNUM from offset OFFSET relative
5190 to register BASE. Return generated insn. */
/* Mirror image of save_fpr above: load instead of store.  */
5193 restore_fpr (base, offset, regnum)
5199 addr = gen_rtx_MEM (DFmode, plus_constant (base, offset));
5200 set_mem_alias_set (addr, s390_sr_alias_set);
5202 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr);
5205 /* Generate insn to save registers FIRST to LAST into
5206 the register save area located at offset OFFSET
5207 relative to register BASE. */
5210 save_gprs (base, offset, first, last)
5216 rtx addr, insn, note;
5219 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5220 addr = gen_rtx_MEM (Pmode, addr);
5221 set_mem_alias_set (addr, s390_sr_alias_set);
5223 /* Special-case single register. */
5227 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first));
5229 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first));
5231 RTX_FRAME_RELATED_P (insn) = 1;
5236 insn = gen_store_multiple (addr,
5237 gen_rtx_REG (Pmode, first),
5238 GEN_INT (last - first + 1));
5241 /* We need to set the FRAME_RELATED flag on all SETs
5242 inside the store-multiple pattern.
5244 However, we must not emit DWARF records for registers 2..5
5245 if they are stored for use by variable arguments ...
5247 ??? Unfortunately, it is not enough to simply not set the
5248 FRAME_RELATED flags for those SETs, because the first SET
5249 of the PARALLEL is always treated as if it had the flag
5250 set, even if it does not. Therefore we emit a new pattern
5251 without those registers as REG_FRAME_RELATED_EXPR note. */
5255 rtx pat = PATTERN (insn);
5257 for (i = 0; i < XVECLEN (pat, 0); i++)
5258 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
5259 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1;
5261 RTX_FRAME_RELATED_P (insn) = 1;
/* Build a parallel DWARF note covering only gprs 6..LAST so the
   varargs registers 2..5 are excluded from unwind info.  */
5265 addr = plus_constant (base, offset + 6 * UNITS_PER_WORD);
5266 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr),
5267 gen_rtx_REG (Pmode, 6),
5268 GEN_INT (last - 6 + 1));
5269 note = PATTERN (note);
5272 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5273 note, REG_NOTES (insn));
5275 for (i = 0; i < XVECLEN (note, 0); i++)
5276 if (GET_CODE (XVECEXP (note, 0, i)) == SET)
5277 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1;
5279 RTX_FRAME_RELATED_P (insn) = 1;
5285 /* Generate insn to restore registers FIRST to LAST from
5286 the register save area located at offset OFFSET
5287 relative to register BASE. */
/* No FRAME_RELATED bookkeeping here -- restores do not carry
   unwind info (contrast save_gprs above).  */
5290 restore_gprs (base, offset, first, last)
5298 addr = plus_constant (base, offset + first * UNITS_PER_WORD);
5299 addr = gen_rtx_MEM (Pmode, addr);
5300 set_mem_alias_set (addr, s390_sr_alias_set);
5302 /* Special-case single register. */
5306 insn = gen_movdi (gen_rtx_REG (Pmode, first), addr);
5308 insn = gen_movsi (gen_rtx_REG (Pmode, first), addr);
5313 insn = gen_load_multiple (gen_rtx_REG (Pmode, first),
5315 GEN_INT (last - first + 1));
5319 /* Expand the prologue into a bunch of separate insns. */
5322 s390_emit_prologue ()
5326 rtx pool_start_label, pool_end_label;
5329 /* Compute frame_info. */
5333 /* Choose best register to use for temp use within prologue. */
/* RETURN_REGNUM is preferred when it is free; otherwise fall back
   to gpr 1.  */
5335 if (!current_function_is_leaf
5336 && !has_hard_reg_initial_val (Pmode, RETURN_REGNUM)
5337 && get_pool_size () < S390_POOL_CHUNK_MAX / 2)
5338 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM)
5340 temp_reg = gen_rtx_REG (Pmode, 1);
5342 /* Save call saved gprs. */
5344 insn = save_gprs (stack_pointer_rtx, 0,
5345 cfun->machine->first_save_gpr, cfun->machine->last_save_gpr);
5348 /* Dump constant pool and set constant pool register. */
5350 pool_start_label = gen_label_rtx();
5351 pool_end_label = gen_label_rtx();
5352 cfun->machine->literal_pool_label = pool_start_label;
5355 insn = emit_insn (gen_literal_pool_64 (gen_rtx_REG (Pmode, BASE_REGISTER),
5356 pool_start_label, pool_end_label));
5358 insn = emit_insn (gen_literal_pool_31 (gen_rtx_REG (Pmode, BASE_REGISTER),
5359 pool_start_label, pool_end_label));
5361 /* Save fprs for variable args. */
5363 if (current_function_stdarg)
5365 /* Save fpr 0 and 2. */
5367 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 32, 16);
5368 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 24, 17);
5372 /* Save fpr 4 and 6. */
5374 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5375 save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5379 /* Save fprs 4 and 6 if used (31 bit ABI). */
5383 /* Save fpr 4 and 6. */
5384 if (regs_ever_live[18] && !global_regs[18])
5386 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 16, 18);
5387 RTX_FRAME_RELATED_P (insn) = 1;
5389 if (regs_ever_live[19] && !global_regs[19])
5391 insn = save_fpr (stack_pointer_rtx, STACK_POINTER_OFFSET - 8, 19);
5392 RTX_FRAME_RELATED_P (insn) = 1;
5396 /* Decrement stack pointer. */
5398 if (cfun->machine->frame_size > 0)
5400 rtx frame_off = GEN_INT (-cfun->machine->frame_size);
5402 /* Save incoming stack pointer into temp reg. */
5404 if (TARGET_BACKCHAIN || cfun->machine->save_fprs_p)
5406 insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
5409 /* Subtract frame size from stack pointer. */
/* Frame sizes not valid as an immediate ('K' constraint) are
   routed through the literal pool.  */
5411 frame_off = GEN_INT (-cfun->machine->frame_size);
5412 if (!CONST_OK_FOR_LETTER_P (-cfun->machine->frame_size, 'K'))
5413 frame_off = force_const_mem (Pmode, frame_off);
5415 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
5416 RTX_FRAME_RELATED_P (insn) = 1;
5418 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5419 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
5420 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5421 GEN_INT (-cfun->machine->frame_size))),
5424 /* Set backchain. */
5426 if (TARGET_BACKCHAIN)
5428 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx);
5429 set_mem_alias_set (addr, s390_sr_alias_set);
5430 insn = emit_insn (gen_move_insn (addr, temp_reg));
5434 /* Save fprs 8 - 15 (64 bit ABI). */
5436 if (cfun->machine->save_fprs_p)
5438 insn = emit_insn (gen_add2_insn (temp_reg, GEN_INT(-64)));
5440 for (i = 24; i < 32; i++)
5441 if (regs_ever_live[i] && !global_regs[i])
5443 rtx addr = plus_constant (stack_pointer_rtx,
5444 cfun->machine->frame_size - 64 + (i-24)*8);
5446 insn = save_fpr (temp_reg, (i-24)*8, i);
5447 RTX_FRAME_RELATED_P (insn) = 1;
5449 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
5450 gen_rtx_SET (VOIDmode,
5451 gen_rtx_MEM (DFmode, addr),
5452 gen_rtx_REG (DFmode, i)),
5457 /* Set frame pointer, if needed. */
5459 if (frame_pointer_needed)
5461 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
5462 RTX_FRAME_RELATED_P (insn) = 1;
5465 /* Set up got pointer, if needed. */
5467 if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
5469 rtx got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
5470 SYMBOL_REF_FLAG (got_symbol) = 1;
5474 insn = emit_insn (gen_movdi (pic_offset_table_rtx,
5477 /* It can happen that the GOT pointer isn't really needed ... */
5478 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
/* 31-bit: GOT address = pool constant + base register.  UNSPEC
   number 100 is a magic constant presumably matching s390.md --
   verify against the machine description before changing.  */
5483 got_symbol = gen_rtx_UNSPEC (VOIDmode,
5484 gen_rtvec (1, got_symbol), 100);
5485 got_symbol = gen_rtx_CONST (VOIDmode, got_symbol);
5486 got_symbol = force_const_mem (Pmode, got_symbol);
5487 insn = emit_move_insn (pic_offset_table_rtx,
5489 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5492 insn = emit_insn (gen_add2_insn (pic_offset_table_rtx,
5493 gen_rtx_REG (Pmode, BASE_REGISTER)));
5494 REG_NOTES(insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, NULL_RTX,
5500 /* Expand the epilogue into a bunch of separate insns. */
5503 s390_emit_epilogue ()
5505 rtx frame_pointer, return_reg;
5506 int area_bottom, area_top, offset = 0;
5509 /* Check whether to use frame or stack pointer for restore. */
5511 frame_pointer = frame_pointer_needed ?
5512 hard_frame_pointer_rtx : stack_pointer_rtx;
5514 /* Compute which parts of the save area we need to access. */
5516 if (cfun->machine->first_restore_gpr != -1)
5518 area_bottom = cfun->machine->first_restore_gpr * UNITS_PER_WORD;
5519 area_top = (cfun->machine->last_save_gpr + 1) * UNITS_PER_WORD;
/* Empty range sentinel: bottom > top means nothing to restore.  */
5523 area_bottom = INT_MAX;
5529 if (cfun->machine->save_fprs_p)
5531 if (area_bottom > -64)
5539 if (regs_ever_live[18] && !global_regs[18])
5541 if (area_bottom > STACK_POINTER_OFFSET - 16)
5542 area_bottom = STACK_POINTER_OFFSET - 16;
5543 if (area_top < STACK_POINTER_OFFSET - 8)
5544 area_top = STACK_POINTER_OFFSET - 8;
5546 if (regs_ever_live[19] && !global_regs[19])
5548 if (area_bottom > STACK_POINTER_OFFSET - 8)
5549 area_bottom = STACK_POINTER_OFFSET - 8;
5550 if (area_top < STACK_POINTER_OFFSET)
5551 area_top = STACK_POINTER_OFFSET;
5555 /* Check whether we can access the register save area.
5556 If not, increment the frame pointer as required. */
5558 if (area_top <= area_bottom)
5560 /* Nothing to restore. */
/* The 0..4096 window corresponds to the addressable displacement
   range from the frame pointer.  */
5562 else if (cfun->machine->frame_size + area_bottom >= 0
5563 && cfun->machine->frame_size + area_top <= 4096)
5565 /* Area is in range. */
5566 offset = cfun->machine->frame_size;
5570 rtx insn, frame_off;
5572 offset = area_bottom < 0 ? -area_bottom : 0;
5573 frame_off = GEN_INT (cfun->machine->frame_size - offset);
5575 if (!CONST_OK_FOR_LETTER_P (INTVAL (frame_off), 'K'))
5576 frame_off = force_const_mem (Pmode, frame_off);
5578 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off));
5581 /* Restore call saved fprs. */
5587 if (cfun->machine->save_fprs_p)
5588 for (i = 24; i < 32; i++)
5589 if (regs_ever_live[i] && !global_regs[i])
5590 restore_fpr (frame_pointer,
5591 offset - 64 + (i-24) * 8, i);
5595 if (regs_ever_live[18] && !global_regs[18])
5596 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 16, 18);
5597 if (regs_ever_live[19] && !global_regs[19])
5598 restore_fpr (frame_pointer, offset + STACK_POINTER_OFFSET - 8, 19);
5601 /* Return register. */
5603 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM);
5605 /* Restore call saved gprs. */
5607 if (cfun->machine->first_restore_gpr != -1)
5612 /* Check for global register and save them
5613 to stack location from where they get restored. */
5615 for (i = cfun->machine->first_restore_gpr;
5616 i <= cfun->machine->last_save_gpr;
5619 /* These registers are special and need to be
5620 restored in any case. */
5621 if (i == STACK_POINTER_REGNUM
5622 || i == RETURN_REGNUM
5623 || i == BASE_REGISTER
5624 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM))
5629 addr = plus_constant (frame_pointer,
5630 offset + i * UNITS_PER_WORD);
5631 addr = gen_rtx_MEM (Pmode, addr);
5632 set_mem_alias_set (addr, s390_sr_alias_set);
5633 emit_move_insn (addr, gen_rtx_REG (Pmode, i));
5637 /* Fetch return address from stack before load multiple,
5638 this will do good for scheduling. */
5640 if (!current_function_is_leaf)
5642 int return_regnum = find_unused_clobbered_reg();
5645 return_reg = gen_rtx_REG (Pmode, return_regnum);
5647 addr = plus_constant (frame_pointer,
5648 offset + RETURN_REGNUM * UNITS_PER_WORD);
5649 addr = gen_rtx_MEM (Pmode, addr);
5650 set_mem_alias_set (addr, s390_sr_alias_set);
5651 emit_move_insn (return_reg, addr);
5654 /* ??? As references to the base register are not made
5655 explicit in insn RTX code, we have to add a barrier here
5656 to prevent incorrect scheduling. */
5658 emit_insn (gen_blockage());
5660 insn = restore_gprs (frame_pointer, offset,
5661 cfun->machine->first_restore_gpr,
5662 cfun->machine->last_save_gpr);
5666 /* Return to caller. */
/* PARALLEL of (return) + (use return_reg) so the return address
   register is not considered dead before the jump.  */
5668 p = rtvec_alloc (2);
5670 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
5671 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode, return_reg);
5672 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
5676 /* Return the size in bytes of a function argument of
5677 type TYPE and/or mode MODE. At least one of TYPE or
5678 MODE must be specified. */
/* Preference order: TYPE if present, then non-BLK mode, else abort.  */
5681 s390_function_arg_size (mode, type)
5682 enum machine_mode mode;
5686 return int_size_in_bytes (type);
5688 /* No type info available for some library calls ... */
5689 if (mode != BLKmode)
5690 return GET_MODE_SIZE (mode);
5692 /* If we have neither type nor mode, abort */
5696 /* Return 1 if a function argument of type TYPE and mode MODE
5697 is to be passed by reference. The ABI specifies that only
5698 structures of size 1, 2, 4, or 8 bytes are passed by value,
5699 all other structures (and complex numbers) are passed by
5703 s390_function_arg_pass_by_reference (mode, type)
5704 enum machine_mode mode;
5707 int size = s390_function_arg_size (mode, type);
/* Aggregates of a non-power-of-two or oversized layout, and all
   complex types, go by reference per the ABI comment above.  */
5711 if (AGGREGATE_TYPE_P (type) &&
5712 size != 1 && size != 2 && size != 4 && size != 8)
5715 if (TREE_CODE (type) == COMPLEX_TYPE)
/* NOTE(review): decimated listing (baked-in line numbers, missing
   return type/braces/increment statements); code kept byte-identical.  */
5722 /* Update the data in CUM to advance over an argument of mode MODE and
5723 data type TYPE. (TYPE is null for libcalls where that information
5724 may not be available.). The boolean NAMED specifies whether the
5725 argument is a named argument (as opposed to an unnamed argument
5726 matching an ellipsis). */
5729 s390_function_arg_advance (cum, mode, type, named)
5730 CUMULATIVE_ARGS *cum;
5731 enum machine_mode mode;
5733 int named ATTRIBUTE_UNUSED;
/* Hardware float args consume an FPR slot (branch bodies missing
   from this listing -- presumably cum->fprs is advanced here).  */
5735 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
/* By-reference args occupy a single pointer-sized GPR slot.  */
5739 else if (s390_function_arg_pass_by_reference (mode, type))
/* Otherwise advance the GPR count by the argument's size rounded
   up to whole words.  */
5745 int size = s390_function_arg_size (mode, type);
5746 cum->gprs += ((size + UNITS_PER_WORD-1) / UNITS_PER_WORD);
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces and the `return 0' fall-through paths are missing).
   Code kept byte-identical; comments only added.  */
5750 /* Define where to put the arguments to a function.
5751 Value is zero to push the argument on the stack,
5752 or a hard register in which to store the argument.
5754 MODE is the argument's machine mode.
5755 TYPE is the data type of the argument (as a tree).
5756 This is null for libcalls where that information may
5758 CUM is a variable of type CUMULATIVE_ARGS which gives info about
5759 the preceding args and about the function being called.
5760 NAMED is nonzero if this argument is a named parameter
5761 (otherwise it is an extra parameter matching an ellipsis).
5763 On S/390, we use general purpose registers 2 through 6 to
5764 pass integer, pointer, and certain structure arguments, and
5765 floating point registers 0 and 2 (0, 2, 4, and 6 on 64-bit)
5766 to pass floating point arguments. All remaining arguments
5767 are pushed to the stack. */
5770 s390_function_arg (cum, mode, type, named)
5771 CUMULATIVE_ARGS *cum;
5772 enum machine_mode mode;
5774 int named ATTRIBUTE_UNUSED;
5776 if (s390_function_arg_pass_by_reference (mode, type))
/* FP argument: uses FPRs 0/2 (plus 4/6 on 64-bit); FPR hard regs
   start at register number 16 in this back end.  */
5779 if (! TARGET_SOFT_FLOAT && (mode == DFmode || mode == SFmode))
5781 if (cum->fprs + 1 > (TARGET_64BIT? 4 : 2))
5784 return gen_rtx (REG, mode, cum->fprs + 16);
/* Integer argument: GPRs 2..6 hold at most 5 words in total.  */
5788 int size = s390_function_arg_size (mode, type);
5789 int n_gprs = (size + UNITS_PER_WORD-1) / UNITS_PER_WORD;
5791 if (cum->gprs + n_gprs > 5)
5794 return gen_rtx (REG, mode, cum->gprs + 2);
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces, and the types of the pointer fields are missing).
   Code kept byte-identical; comments only added.  */
5799 /* Create and return the va_list datatype.
5801 On S/390, va_list is an array type equivalent to
5803 typedef struct __va_list_tag
5807 void *__overflow_arg_area;
5808 void *__reg_save_area;
5812 where __gpr and __fpr hold the number of general purpose
5813 or floating point arguments used up to now, respectively,
5814 __overflow_arg_area points to the stack location of the
5815 next argument passed on the stack, and __reg_save_area
5816 always points to the start of the register area in the
5817 call frame of the current function. The function prologue
5818 saves all registers used for argument passing into this
5819 area if the function uses variable arguments. */
5822 s390_build_va_list ()
5824 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
/* Build the RECORD_TYPE node and its four FIELD_DECLs.  */
5826 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5829 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
5831 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"),
5832 long_integer_type_node);
5833 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"),
5834 long_integer_type_node);
5835 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"),
5837 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"),
/* Attach the fields to the record and chain them in declaration
   order: __gpr, __fpr, __overflow_arg_area, __reg_save_area.  */
5840 DECL_FIELD_CONTEXT (f_gpr) = record;
5841 DECL_FIELD_CONTEXT (f_fpr) = record;
5842 DECL_FIELD_CONTEXT (f_ovf) = record;
5843 DECL_FIELD_CONTEXT (f_sav) = record;
5845 TREE_CHAIN (record) = type_decl;
5846 TYPE_NAME (record) = type_decl;
5847 TYPE_FIELDS (record) = f_gpr;
5848 TREE_CHAIN (f_gpr) = f_fpr;
5849 TREE_CHAIN (f_fpr) = f_ovf;
5850 TREE_CHAIN (f_ovf) = f_sav;
5852 layout_type (record);
5854 /* The correct type is an array type of one element. */
5855 return build_array_type (record, build_index_type (size_zero_node));
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces and the declaration of `off' are missing).
   Code kept byte-identical; comments only added.  */
5858 /* Implement va_start by filling the va_list structure VALIST.
5859 STDARG_P is always true, and ignored.
5860 NEXTARG points to the first anonymous stack argument.
5862 The following global variables are used to initialize
5863 the va_list structure:
5865 current_function_args_info:
5866 holds number of gprs and fprs used for named arguments.
5867 current_function_arg_offset_rtx:
5868 holds the offset of the first anonymous stack argument
5869 (relative to the virtual arg pointer). */
5872 s390_va_start (valist, nextarg)
5874 rtx nextarg ATTRIBUTE_UNUSED;
5876 HOST_WIDE_INT n_gpr, n_fpr;
5878 tree f_gpr, f_fpr, f_ovf, f_sav;
5879 tree gpr, fpr, ovf, sav, t;
/* Locate the four fields of the __va_list_tag record built by
   s390_build_va_list, in chain order.  */
5881 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5882 f_fpr = TREE_CHAIN (f_gpr);
5883 f_ovf = TREE_CHAIN (f_fpr);
5884 f_sav = TREE_CHAIN (f_ovf);
5886 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5887 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5888 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5889 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5890 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5892 /* Count number of gp and fp argument registers used. */
5894 n_gpr = current_function_args_info.gprs;
5895 n_fpr = current_function_args_info.fprs;
/* __gpr = n_gpr; __fpr = n_fpr;  */
5897 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
5898 TREE_SIDE_EFFECTS (t) = 1;
5899 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5901 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
5902 TREE_SIDE_EFFECTS (t) = 1;
5903 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5905 /* Find the overflow area. */
5906 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
/* Offset of the first anonymous stack argument, clamped at zero.  */
5908 off = INTVAL (current_function_arg_offset_rtx);
5909 off = off < 0 ? 0 : off;
5910 if (TARGET_DEBUG_ARG)
5911 fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
5912 (int)n_gpr, (int)n_fpr, off);
5914 t = build (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_2 (off, 0));
5916 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5917 TREE_SIDE_EFFECTS (t) = 1;
5918 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5920 /* Find the register save area. */
5921 t = make_tree (TREE_TYPE (sav), virtual_incoming_args_rtx);
5922 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
5923 build_int_2 (-STACK_POINTER_OFFSET, -1));
5924 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
5925 TREE_SIDE_EFFECTS (t) = 1;
5926 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces, several assignments such as indirect_p/n_reg/max_reg in
   some branches, and the final return are missing).
   Code kept byte-identical; comments only added.  */
5929 /* Implement va_arg by updating the va_list structure
5930 VALIST as required to retrieve an argument of type
5931 TYPE, and returning that argument.
5933 Generates code equivalent to:
5935 if (integral value) {
5936 if (size <= 4 && args.gpr < 5 ||
5937 size > 4 && args.gpr < 4 )
5938 ret = args.reg_save_area[args.gpr+8]
5940 ret = *args.overflow_arg_area++;
5941 } else if (float value) {
5943 ret = args.reg_save_area[args.fpr+64]
5945 ret = *args.overflow_arg_area++;
5946 } else if (aggregate value) {
5948 ret = *args.reg_save_area[args.gpr]
5950 ret = **args.overflow_arg_area++;
5954 s390_va_arg (valist, type)
5958 tree f_gpr, f_fpr, f_ovf, f_sav;
5959 tree gpr, fpr, ovf, sav, reg, t, u;
5960 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg;
5961 rtx lab_false, lab_over, addr_rtx, r;
/* Locate the __va_list_tag fields, same layout as in s390_va_start.  */
5963 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
5964 f_fpr = TREE_CHAIN (f_gpr);
5965 f_ovf = TREE_CHAIN (f_fpr);
5966 f_sav = TREE_CHAIN (f_ovf);
5968 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
5969 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
5970 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
5971 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
5972 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
5974 size = int_size_in_bytes (type);
/* Case 1: by-reference aggregate -- fetch a pointer from the GPR
   save area (offset 2 words in) or the overflow area.  */
5976 if (s390_function_arg_pass_by_reference (TYPE_MODE (type), type))
5978 if (TARGET_DEBUG_ARG)
5980 fprintf (stderr, "va_arg: aggregate type");
5984 /* Aggregates are passed by reference. */
5988 sav_ofs = 2 * UNITS_PER_WORD;
5989 sav_scale = UNITS_PER_WORD;
5990 size = UNITS_PER_WORD;
/* Case 2: hardware float -- fetch from the FPR save area.  */
5993 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
5995 if (TARGET_DEBUG_ARG)
5997 fprintf (stderr, "va_arg: float type");
6001 /* FP args go in FP registers, if present. */
6005 sav_ofs = 16 * UNITS_PER_WORD;
6007 /* TARGET_64BIT has up to 4 parameter in fprs */
6008 max_reg = TARGET_64BIT ? 3 : 1;
/* Case 3: everything else goes through the GPR save area.  */
6012 if (TARGET_DEBUG_ARG)
6014 fprintf (stderr, "va_arg: other type");
6018 /* Otherwise into GP registers. */
6021 n_reg = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6022 sav_ofs = 2 * UNITS_PER_WORD;
/* Sub-word scalars are right-justified within their slot, so bias
   the save-area offset by the unused leading bytes.  */
6024 sav_ofs += TYPE_MODE (type) == SImode ? 4 :
6025 TYPE_MODE (type) == HImode ? 6 :
6026 TYPE_MODE (type) == QImode ? 7 : 0;
6028 sav_ofs += TYPE_MODE (type) == HImode ? 2 :
6029 TYPE_MODE (type) == QImode ? 3 : 0;
6031 sav_scale = UNITS_PER_WORD;
6038 /* Pull the value out of the saved registers ... */
6040 lab_false = gen_label_rtx ();
6041 lab_over = gen_label_rtx ();
6042 addr_rtx = gen_reg_rtx (Pmode);
/* if (reg > max_reg) goto lab_false;  (comparison operand line with
   max_reg appears to be missing from this listing).  */
6044 emit_cmp_and_jump_insns (expand_expr (reg, NULL_RTX, Pmode, EXPAND_NORMAL),
6046 GT, const1_rtx, Pmode, 0, lab_false);
/* addr = sav + sav_ofs + reg * sav_scale;  */
6049 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
6053 u = build (MULT_EXPR, long_integer_type_node,
6054 reg, build_int_2 (sav_scale, 0));
6055 TREE_SIDE_EFFECTS (u) = 1;
6057 t = build (PLUS_EXPR, ptr_type_node, t, u);
6058 TREE_SIDE_EFFECTS (t) = 1;
6060 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6062 emit_move_insn (addr_rtx, r);
6065 emit_jump_insn (gen_jump (lab_over));
6067 emit_label (lab_false);
6069 /* ... Otherwise out of the overflow area. */
6071 t = save_expr (ovf);
6074 /* In 64 BIT for each argument on stack, a full 64 bit slot is allocated. */
6075 if (size < UNITS_PER_WORD)
6077 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (UNITS_PER_WORD-size, 0));
6078 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6079 TREE_SIDE_EFFECTS (t) = 1;
6080 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6082 t = save_expr (ovf);
6085 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
6087 emit_move_insn (addr_rtx, r);
/* ovf += size;  advance past the argument just consumed.  */
6089 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
6090 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
6091 TREE_SIDE_EFFECTS (t) = 1;
6092 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6094 emit_label (lab_over);
6096 /* If less than max_regs a registers are retrieved out
6097 of register save area, increment. */
6099 u = build (PREINCREMENT_EXPR, TREE_TYPE (reg), reg,
6100 build_int_2 (n_reg, 0));
6101 TREE_SIDE_EFFECTS (u) = 1;
6102 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* For indirect (by-reference) arguments, dereference once more.  */
6106 r = gen_rtx_MEM (Pmode, addr_rtx);
6107 set_mem_alias_set (r, get_varargs_alias_set ());
6108 emit_move_insn (addr_rtx, r);
/* NOTE(review): fragment of the S/390 builtin-function enumeration and
   the two insn-code lookup tables (64-bit and 31-bit) indexed by it.
   The enum header, S390_BUILTIN_max, and the table entries are missing
   from this decimated listing; lines kept byte-identical.  */
6120 S390_BUILTIN_THREAD_POINTER,
6121 S390_BUILTIN_SET_THREAD_POINTER,
6126 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = {
6131 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = {
/* Register the machine-specific builtins __builtin_thread_pointer and
   __builtin_set_thread_pointer.  NOTE(review): decimated listing --
   return type, braces, the `tree ftype' declaration and the trailing
   arguments of each builtin_function call are missing; code kept
   byte-identical.  */
6137 s390_init_builtins ()
6141 ftype = build_function_type (ptr_type_node, void_list_node);
6142 builtin_function ("__builtin_thread_pointer", ftype,
6143 S390_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
6146 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6147 builtin_function ("__builtin_set_thread_pointer", ftype,
6148 S390_BUILTIN_SET_THREAD_POINTER, BUILT_IN_MD,
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces, MAX_ARGS definition, the CODE_FOR_nothing check, arity
   switch labels and the final return are missing).
   Code kept byte-identical; comments only added.  */
6152 /* Expand an expression EXP that calls a built-in function,
6153 with result going to TARGET if that's convenient
6154 (and in mode MODE if that's convenient).
6155 SUBTARGET may be used as the target for computing one of EXP's operands.
6156 IGNORE is nonzero if the value is to be ignored. */
6159 s390_expand_builtin (exp, target, subtarget, mode, ignore)
6162 rtx subtarget ATTRIBUTE_UNUSED;
6163 enum machine_mode mode ATTRIBUTE_UNUSED;
6164 int ignore ATTRIBUTE_UNUSED;
/* Pick the insn-code table matching the target word size.  */
6168 unsigned int const *code_for_builtin =
6169 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31;
6171 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6172 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6173 tree arglist = TREE_OPERAND (exp, 1);
6174 enum insn_code icode;
6175 rtx op[MAX_ARGS], pat;
/* Reject out-of-range builtin codes before indexing the table.  */
6179 if (fcode >= S390_BUILTIN_max)
6180 internal_error ("bad builtin fcode");
6181 icode = code_for_builtin[fcode];
6183 internal_error ("bad builtin fcode");
6185 nonvoid = TREE_TYPE (TREE_TYPE (fndecl)) != void_type_node;
/* Expand each argument into an rtx matching the insn's operand
   predicate; copy to a fresh register if the predicate rejects it.  */
6187 for (arglist = TREE_OPERAND (exp, 1), arity = 0;
6189 arglist = TREE_CHAIN (arglist), arity++)
6191 const struct insn_operand_data *insn_op;
6193 tree arg = TREE_VALUE (arglist);
6194 if (arg == error_mark_node)
6196 if (arity > MAX_ARGS)
6199 insn_op = &insn_data[icode].operand[arity + nonvoid];
6201 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0);
6203 if (!(*insn_op->predicate) (op[arity], insn_op->mode))
6204 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]);
/* For value-returning builtins, make sure TARGET is usable.  */
6209 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6211 || GET_MODE (target) != tmode
6212 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6213 target = gen_reg_rtx (tmode);
/* Generate the pattern for 0, 1 or 2 operands (switch labels are
   missing from this listing).  */
6219 pat = GEN_FCN (icode) (target);
6223 pat = GEN_FCN (icode) (target, op[0]);
6225 pat = GEN_FCN (icode) (op[0]);
6228 pat = GEN_FCN (icode) (target, op[0], op[1]);
6244 /* Output assembly code for the trampoline template to
6247 On S/390, we use gpr 1 internally in the trampoline code;
6248 gpr 0 is used to hold the static chain. */
6251 s390_trampoline_template (file)
6256 fprintf (file, "larl\t%s,0f\n", reg_names[1]);
6257 fprintf (file, "lg\t%s,0(%s)\n", reg_names[0], reg_names[1]);
6258 fprintf (file, "lg\t%s,8(%s)\n", reg_names[1], reg_names[1]);
6259 fprintf (file, "br\t%s\n", reg_names[1]);
6260 fprintf (file, "0:\t.quad\t0\n");
6261 fprintf (file, ".quad\t0\n");
6265 fprintf (file, "basr\t%s,0\n", reg_names[1]);
6266 fprintf (file, "l\t%s,10(%s)\n", reg_names[0], reg_names[1]);
6267 fprintf (file, "l\t%s,14(%s)\n", reg_names[1], reg_names[1]);
6268 fprintf (file, "br\t%s\n", reg_names[1]);
6269 fprintf (file, ".long\t0\n");
6270 fprintf (file, ".long\t0\n");
6274 /* Emit RTL insns to initialize the variable parts of a trampoline.
6275 FNADDR is an RTX for the address of the function's pure code.
6276 CXT is an RTX for the static chain value for the function. */
6279 s390_initialize_trampoline (addr, fnaddr, cxt)
6284 emit_move_insn (gen_rtx
6286 memory_address (Pmode,
6287 plus_constant (addr, (TARGET_64BIT ? 20 : 12) ))), cxt);
6288 emit_move_insn (gen_rtx
6290 memory_address (Pmode,
6291 plus_constant (addr, (TARGET_64BIT ? 28 : 16) ))), fnaddr);
6294 /* Return rtx for 64-bit constant formed from the 32-bit subwords
6295 LOW and HIGH, independent of the host word size. */
6298 s390_gen_rtx_const_DI (high, low)
6302 #if HOST_BITS_PER_WIDE_INT >= 64
6304 val = (HOST_WIDE_INT)high;
6306 val |= (HOST_WIDE_INT)low;
6308 return GEN_INT (val);
6310 #if HOST_BITS_PER_WIDE_INT >= 32
6311 return immed_double_const ((HOST_WIDE_INT)low, (HOST_WIDE_INT)high, DImode);
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   parameter declarations, `op'/`label' declarations, braces, and the
   TARGET_64BIT / !flag_pic branch structure are missing).
   Code kept byte-identical; comments only added.  */
6318 /* Output assembler code to FILE to increment profiler label # LABELNO
6319 for profiling a function entry. */
6322 s390_function_profiler (file, labelno)
6329 ASM_GENERATE_INTERNAL_LABEL (label, "LP", labelno);
6331 fprintf (file, "# function profiler \n");
/* op[0] = return register; op[1] = save slot one word above SP;
   op[2] = scratch gpr 1; op[3] = profiler label; op[4] = _mcount.  */
6333 op[0] = gen_rtx_REG (Pmode, RETURN_REGNUM);
6334 op[1] = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
6335 op[1] = gen_rtx_MEM (Pmode, plus_constant (op[1], UNITS_PER_WORD));
6337 op[2] = gen_rtx_REG (Pmode, 1);
6338 op[3] = gen_rtx_SYMBOL_REF (Pmode, label);
6339 SYMBOL_REF_FLAG (op[3]) = 1;
6341 op[4] = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
/* Under PIC, wrap _mcount in an UNSPEC (113 -- presumably the PLT
   unspec used elsewhere in this file; confirm against s390.md).  */
6344 op[4] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[4]), 113);
6345 op[4] = gen_rtx_CONST (Pmode, op[4]);
/* 64-bit sequence: save return reg, load label address, call
   _mcount, restore return reg.  */
6350 output_asm_insn ("stg\t%0,%1", op);
6351 output_asm_insn ("larl\t%2,%3", op);
6352 output_asm_insn ("brasl\t%0,%4", op);
6353 output_asm_insn ("lg\t%0,%1", op);
/* 31-bit non-PIC sequence: addresses embedded as literals after a
   bras over them.  */
6357 op[6] = gen_label_rtx ();
6359 output_asm_insn ("st\t%0,%1", op);
6360 output_asm_insn ("bras\t%2,%l6", op);
6361 output_asm_insn (".long\t%4", op);
6362 output_asm_insn (".long\t%3", op);
6363 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6364 output_asm_insn ("l\t%0,0(%2)", op);
6365 output_asm_insn ("l\t%2,4(%2)", op);
6366 output_asm_insn ("basr\t%0,%0", op);
6367 output_asm_insn ("l\t%0,%1", op);
/* 31-bit PIC sequence: literals hold label-relative offsets, made
   absolute at runtime by adding the base label address.  */
6371 op[5] = gen_label_rtx ();
6372 op[6] = gen_label_rtx ();
6374 output_asm_insn ("st\t%0,%1", op);
6375 output_asm_insn ("bras\t%2,%l6", op);
6376 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[5]));
6377 output_asm_insn (".long\t%4-%l5", op);
6378 output_asm_insn (".long\t%3-%l5", op);
6379 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[6]));
6380 output_asm_insn ("lr\t%0,%2", op);
6381 output_asm_insn ("a\t%0,0(%2)", op);
6382 output_asm_insn ("a\t%2,4(%2)", op);
6383 output_asm_insn ("basr\t%0,%0", op);
6384 output_asm_insn ("l\t%0,%1", op);
6388 /* Select section for constant in constant pool. In 32-bit mode,
6389 constants go in the function section; in 64-bit mode in .rodata. */
6392 s390_select_rtx_section (mode, x, align)
6393 enum machine_mode mode ATTRIBUTE_UNUSED;
6394 rtx x ATTRIBUTE_UNUSED;
6395 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
6398 readonly_data_section ();
6400 function_section (current_function_decl);
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   braces, local declarations of rtl/symbol/len/newstr, the switch
   header around the TLS-model cases, and early returns are missing).
   Code kept byte-identical; comments only added.  */
6403 /* Encode symbol attributes (local vs. global, tls model) of a SYMBOL_REF
6404 into its name and SYMBOL_REF_FLAG. */
6407 s390_encode_section_info (decl, first)
6409 int first ATTRIBUTE_UNUSED;
6411 bool local_p = (*targetm.binds_local_p) (decl);
/* Find the SYMBOL_REF inside the decl's RTL, bailing out if the
   shape is not MEM (SYMBOL_REF ...).  */
6414 rtl = DECL_P (decl) ? DECL_RTL (decl) : TREE_CST_RTL (decl);
6415 if (GET_CODE (rtl) != MEM)
6417 symbol = XEXP (rtl, 0);
6418 if (GET_CODE (symbol) != SYMBOL_REF)
6421 /* When using PIC, SYMBOL_REF_FLAG marks non-global symbols
6422 that can be accessed directly. */
6424 SYMBOL_REF_FLAG (symbol) = local_p;
6426 /* Encode thread-local data with %[GLil] for "global dynamic",
6427 "local dynamic", "initial exec" or "local exec" TLS models,
6430 if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL (decl))
6432 const char *symbol_str = XSTR (symbol, 0);
6435 enum tls_model kind = decl_tls_model (decl);
6439 /* We don't allow non-pic code for shared libraries,
6440 so don't generate GD/LD TLS models for non-pic code. */
6443 case TLS_MODEL_GLOBAL_DYNAMIC:
6444 kind = TLS_MODEL_INITIAL_EXEC; break;
6445 case TLS_MODEL_LOCAL_DYNAMIC:
6446 kind = TLS_MODEL_LOCAL_EXEC; break;
/* If already encoded with the same model character, nothing to do.  */
6452 if (symbol_str[0] == '%')
6454 if (symbol_str[1] == tls_model_chars[kind])
/* Otherwise prepend "%<model-char>" to the symbol name, allocating
   the new string in GC memory.  */
6458 len = strlen (symbol_str) + 1;
6459 newstr = alloca (len + 2);
6462 newstr[1] = tls_model_chars[kind];
6463 memcpy (newstr + 2, symbol_str, len);
6465 XSTR (symbol, 0) = ggc_alloc_string (newstr, len + 2 - 1);
/* NOTE(review): only the header of this function survives in the
   listing; the body (which strips the encoding prefix added by
   s390_encode_section_info) is missing.  Kept byte-identical.  */
6469 /* Undo the above when printing symbol names. */
6472 s390_strip_name_encoding (str)
/* NOTE(review): decimated listing (baked-in line numbers; return type,
   parameter declarations for file/function, braces, and the
   TARGET_64BIT / 31-bit branch headers plus several flag_pic guards
   are missing).  Code kept byte-identical; comments only added.  */
6482 /* Output thunk to FILE that implements a C++ virtual function call (with
6483 multiple inheritance) to FUNCTION. The thunk adjusts the this pointer
6484 by DELTA, and unless VCALL_OFFSET is zero, applies an additional adjustment
6485 stored at VCALL_OFFSET in the vtable whose address is located at offset 0
6486 relative to the resulting this pointer. */
6489 s390_output_mi_thunk (file, thunk, delta, vcall_offset, function)
6491 tree thunk ATTRIBUTE_UNUSED;
6492 HOST_WIDE_INT delta;
6493 HOST_WIDE_INT vcall_offset;
6498 /* Operand 0 is the target function. */
6499 op[0] = XEXP (DECL_RTL (function), 0);
6500 if (flag_pic && !SYMBOL_REF_FLAG (op[0]))
6502 op[0] = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, op[0]), 113);
6503 op[0] = gen_rtx_CONST (Pmode, op[0]);
6506 /* Operand 1 is the 'this' pointer. */
6507 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
6508 op[1] = gen_rtx_REG (Pmode, 3);
6510 op[1] = gen_rtx_REG (Pmode, 2);
6512 /* Operand 2 is the delta. */
6513 op[2] = GEN_INT (delta);
6515 /* Operand 3 is the vcall_offset. */
6516 op[3] = GEN_INT (vcall_offset);
6518 /* Operand 4 is the temporary register. */
6519 op[4] = gen_rtx_REG (Pmode, 1);
6521 /* Operands 5 to 8 can be used as labels. */
6527 /* Generate code. */
/* --- 64-bit code path --- */
6530 /* Setup literal pool pointer if required. */
6531 if (!CONST_OK_FOR_LETTER_P (delta, 'K')
6532 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6534 op[5] = gen_label_rtx ();
6535 output_asm_insn ("larl\t%4,%5", op);
6538 /* Add DELTA to this pointer. */
6541 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6542 output_asm_insn ("la\t%1,%2(%1)", op);
6543 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6544 output_asm_insn ("aghi\t%1,%2", op);
/* Large delta: fetch it from the literal pool.  */
6547 op[6] = gen_label_rtx ();
6548 output_asm_insn ("agf\t%1,%6-%5(%4)", op);
6552 /* Perform vcall adjustment. */
6555 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6557 output_asm_insn ("lg\t%4,0(%1)", op);
6558 output_asm_insn ("ag\t%1,%3(%4)", op);
6560 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6562 output_asm_insn ("lghi\t%4,%3", op);
6563 output_asm_insn ("ag\t%4,0(%1)", op);
6564 output_asm_insn ("ag\t%1,0(%4)", op);
/* Large vcall offset: fetch it from the literal pool.  */
6568 op[7] = gen_label_rtx ();
6569 output_asm_insn ("llgf\t%4,%7-%5(%4)", op);
6570 output_asm_insn ("ag\t%4,0(%1)", op);
6571 output_asm_insn ("ag\t%1,0(%4)", op);
6575 /* Jump to target. */
6576 output_asm_insn ("jg\t%0", op);
6578 /* Output literal pool if required. */
6581 output_asm_insn (".align\t4", op);
6582 (*targetm.asm_out.internal_label) (file, "L",
6583 CODE_LABEL_NUMBER (op[5]));
6587 (*targetm.asm_out.internal_label) (file, "L",
6588 CODE_LABEL_NUMBER (op[6]));
6589 output_asm_insn (".long\t%2", op);
6593 (*targetm.asm_out.internal_label) (file, "L",
6594 CODE_LABEL_NUMBER (op[7]));
6595 output_asm_insn (".long\t%3", op);
/* --- 31-bit code path --- */
6600 /* Setup base pointer if required. */
6602 || !CONST_OK_FOR_LETTER_P (delta, 'K')
6603 || !CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6605 op[5] = gen_label_rtx ();
6606 output_asm_insn ("basr\t%4,0", op);
6607 (*targetm.asm_out.internal_label) (file, "L",
6608 CODE_LABEL_NUMBER (op[5]));
6611 /* Add DELTA to this pointer. */
6614 if (CONST_OK_FOR_LETTER_P (delta, 'J'))
6615 output_asm_insn ("la\t%1,%2(%1)", op);
6616 else if (CONST_OK_FOR_LETTER_P (delta, 'K'))
6617 output_asm_insn ("ahi\t%1,%2", op);
6620 op[6] = gen_label_rtx ();
6621 output_asm_insn ("a\t%1,%6-%5(%4)", op);
6625 /* Perform vcall adjustment. */
6628 if (CONST_OK_FOR_LETTER_P (vcall_offset, 'J'))
6630 output_asm_insn ("lg\t%4,0(%1)", op);
6631 output_asm_insn ("a\t%1,%3(%4)", op);
6633 else if (CONST_OK_FOR_LETTER_P (vcall_offset, 'K'))
6635 output_asm_insn ("lhi\t%4,%3", op);
6636 output_asm_insn ("a\t%4,0(%1)", op);
6637 output_asm_insn ("a\t%1,0(%4)", op);
6641 op[7] = gen_label_rtx ();
6642 output_asm_insn ("l\t%4,%7-%5(%4)", op);
6643 output_asm_insn ("a\t%4,0(%1)", op);
6644 output_asm_insn ("a\t%1,0(%4)", op);
6647 /* We had to clobber the base pointer register.
6648 Re-setup the base pointer (with a different base). */
6649 op[5] = gen_label_rtx ();
6650 output_asm_insn ("basr\t%4,0", op);
6651 (*targetm.asm_out.internal_label) (file, "L",
6652 CODE_LABEL_NUMBER (op[5]));
6655 /* Jump to target. */
6656 op[8] = gen_label_rtx ();
/* Non-PIC loads the absolute address; PIC adds a base-relative
   offset (the surrounding flag_pic conditional is missing here).  */
6658 output_asm_insn ("l\t%4,%8-%5(%4)", op);
6660 output_asm_insn ("a\t%4,%8-%5(%4)", op);
6661 output_asm_insn ("br\t%4", op);
6663 /* Output literal pool. */
6664 output_asm_insn (".align\t4", op);
6665 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (op[8]));
6667 output_asm_insn (".long\t%0", op);
6669 output_asm_insn (".long\t%0-%5", op);
6673 (*targetm.asm_out.internal_label) (file, "L",
6674 CODE_LABEL_NUMBER (op[6]));
6675 output_asm_insn (".long\t%2", op);
6679 (*targetm.asm_out.internal_label) (file, "L",
6680 CODE_LABEL_NUMBER (op[7]));
6681 output_asm_insn (".long\t%3", op);
6686 /* How to allocate a 'struct machine_function'. */
6688 static struct machine_function *
6689 s390_init_machine_status ()
6691 return ggc_alloc_cleared (sizeof (struct machine_function));
6694 #include "gt-s390.h"