1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006
4 Free Software Foundation, Inc.
5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published
11 by the Free Software Foundation; either version 2, or (at your
12 option) any later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the
21 Free Software Foundation, 51 Franklin Street, Fifth Floor, Boston,
22 MA 02110-1301, USA. */
26 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "insn-attr.h"
44 #include "basic-block.h"
45 #include "integrate.h"
51 #include "target-def.h"
52 #include "langhooks.h"
54 #include "cfglayout.h"
55 #include "sched-int.h"
56 #include "tree-gimple.h"
59 #include "tm-constrs.h"
61 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
64 #include "gstab.h" /* for N_SLINE */
67 #ifndef TARGET_NO_PROTOTYPE
68 #define TARGET_NO_PROTOTYPE 0
/* Classic unguarded min/max macros.  NOTE(review): each argument is
   evaluated twice, so never pass expressions with side effects
   (e.g. min (i++, n)).  Kept as macros to match historical GCC usage
   in this file.  */
71 #define min(A,B) ((A) < (B) ? (A) : (B))
72 #define max(A,B) ((A) > (B) ? (A) : (B))
74 /* Structure used to define the rs6000 stack */
75 typedef struct rs6000_stack {
76 int first_gp_reg_save; /* first callee saved GP register used */
77 int first_fp_reg_save; /* first callee saved FP register used */
78 int first_altivec_reg_save; /* first callee saved AltiVec register used */
79 int lr_save_p; /* true if the link reg needs to be saved */
80 int cr_save_p; /* true if the CR reg needs to be saved */
81 unsigned int vrsave_mask; /* mask of vec registers to save */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 int world_save_p; /* true if we're saving *everything*:
85 r13-r31, cr, f14-f31, vrsave, v20-v31 */
86 enum rs6000_abi abi; /* which ABI to use */
87 int gp_save_offset; /* offset to save GP regs from initial SP */
88 int fp_save_offset; /* offset to save FP regs from initial SP */
89 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
90 int lr_save_offset; /* offset to save LR from initial SP */
91 int cr_save_offset; /* offset to save CR from initial SP */
92 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
93 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
94 int varargs_save_offset; /* offset to save the varargs registers */
95 int ehrd_offset; /* offset to EH return data */
96 int reg_size; /* register size (4 or 8) */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int vrsave_size; /* size to hold VRSAVE if not in save_size */
106 int altivec_padding_size; /* size of altivec alignment padding if
108 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
109 int spe_padding_size; /* presumably alignment padding for the SPE
                            save area, by analogy with
                            altivec_padding_size — TODO confirm */
110 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
111 int spe_64bit_regs_used; /* NOTE(review): semantics not visible here;
                               looks like a flag/count of 64-bit SPE GPR
                               usage — confirm against rs6000_stack_info */
114 /* A C structure for machine-specific, per-function data.
115 This is added to the cfun structure. */
116 typedef struct machine_function GTY(())
118 /* Flags if __builtin_return_address (n) with n >= 1 was used. */
119 int ra_needs_full_frame;
120 /* Some local-dynamic symbol. */
121 const char *some_ld_name;
122 /* Whether the instruction chain has been scanned already. */
123 int insn_chain_scanned_p;
124 /* Flags if __builtin_return_address (0) was used. */
/* NOTE(review): the field the preceding comment documents is not
   visible in this listing — the comment is orphaned here.  */
126 /* Offset from virtual_stack_vars_rtx to the start of the ABI_V4
127 varargs save area. */
128 HOST_WIDE_INT varargs_save_offset;
131 /* Target cpu type */
133 enum processor_type rs6000_cpu;
134 struct rs6000_cpu_select rs6000_select[3] =
136 /* switch name, tune arch */
137 { (const char *)0, "--with-cpu=", 1, 1 },
138 { (const char *)0, "-mcpu=", 1, 1 },
139 { (const char *)0, "-mtune=", 1, 0 },
142 /* Always emit branch hint bits. */
143 static GTY(()) bool rs6000_always_hint;
145 /* Schedule instructions for group formation. */
146 static GTY(()) bool rs6000_sched_groups;
148 /* Support for -msched-costly-dep option. */
149 const char *rs6000_sched_costly_dep_str;
150 enum rs6000_dependence_cost rs6000_sched_costly_dep;
152 /* Support for -minsert-sched-nops option. */
153 const char *rs6000_sched_insert_nops_str;
154 enum rs6000_nop_insertion rs6000_sched_insert_nops;
156 /* Support targetm.vectorize.builtin_mask_for_load. */
157 static GTY(()) tree altivec_builtin_mask_for_load;
159 /* Size of long double. */
160 int rs6000_long_double_type_size;
162 /* IEEE quad extended precision long double. */
165 /* Whether -mabi=altivec has appeared. */
166 int rs6000_altivec_abi;
168 /* Nonzero if we want SPE ABI extensions. */
171 /* Nonzero if floating point operations are done in the GPRs. */
172 int rs6000_float_gprs = 0;
174 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
175 int rs6000_darwin64_abi;
177 /* Set to nonzero once AIX common-mode calls have been defined. */
178 static GTY(()) int common_mode_defined;
180 /* Save information from a "cmpxx" operation until the branch or scc is
182 rtx rs6000_compare_op0, rs6000_compare_op1;
183 int rs6000_compare_fp_p;
185 /* Label number of label created for -mrelocatable, to call to so we can
186 get the address of the GOT section */
187 int rs6000_pic_labelno;
190 /* Which abi to adhere to */
191 const char *rs6000_abi_name;
193 /* Semantics of the small data area */
194 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
196 /* Which small data model to use */
197 const char *rs6000_sdata_name = (char *)0;
199 /* Counter for labels which are to be placed in .fixup. */
200 int fixuplabelno = 0;
203 /* Bit size of immediate TLS offsets and string from which it is decoded. */
204 int rs6000_tls_size = 32;
205 const char *rs6000_tls_size_string;
207 /* ABI enumeration available for subtarget to use. */
208 enum rs6000_abi rs6000_current_abi;
210 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
214 const char *rs6000_debug_name;
215 int rs6000_debug_stack; /* debug stack applications */
216 int rs6000_debug_arg; /* debug argument handling */
218 /* Value is TRUE if register/mode pair is acceptable. */
219 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
221 /* Built in types. */
223 tree rs6000_builtin_types[RS6000_BTI_MAX];
224 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
226 const char *rs6000_traceback_name;
228 traceback_default = 0,
234 /* Flag to say the TOC is initialized */
/* NOTE(review): the flag declaration this comment describes is not
   visible in this listing; only the label-name buffer follows.  */
236 char toc_label_name[10]; /* buffer holding the internal TOC label name;
                               NOTE(review): confirm 10 bytes covers the
                               longest name ASM_GENERATE_INTERNAL_LABEL
                               can produce for it.  */
238 static GTY(()) section *read_only_data_section;
239 static GTY(()) section *private_data_section;
240 static GTY(()) section *read_only_private_data_section;
241 static GTY(()) section *sdata2_section;
242 static GTY(()) section *toc_section;
244 /* Control alignment for fields within structures. */
245 /* String from -malign-XXXXX. */
246 int rs6000_alignment_flags;
248 /* True for any options that were explicitly set. */
250 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
251 bool alignment; /* True if -malign- was used. */
252 bool abi; /* True if -mabi=spe/nospe was used. */
253 bool spe; /* True if -mspe= was used. */
254 bool float_gprs; /* True if -mfloat-gprs= was used. */
255 bool isel; /* True if -misel was used. */
256 bool long_double; /* True if -mlong-double- was used. */
257 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
258 } rs6000_explicit_options;
260 struct builtin_description
262 /* mask is not const because we're going to alter it below. This
263 nonsense will go away when we rewrite the -march infrastructure
264 to give us more target flag bits. */
266 const enum insn_code icode;
267 const char *const name;
268 const enum rs6000_builtins code;
271 /* Target cpu costs. */
273 struct processor_costs {
274 const int mulsi; /* cost of SImode multiplication. */
275 const int mulsi_const; /* cost of SImode multiplication by constant. */
276 const int mulsi_const9; /* cost of SImode mult by short constant. */
277 const int muldi; /* cost of DImode multiplication. */
278 const int divsi; /* cost of SImode division. */
279 const int divdi; /* cost of DImode division. */
280 const int fp; /* cost of simple SFmode and DFmode insns. */
281 const int dmul; /* cost of DFmode multiplication (and fmadd). */
282 const int sdiv; /* cost of SFmode division (fdivs). */
283 const int ddiv; /* cost of DFmode division (fdiv). */
286 const struct processor_costs *rs6000_cost;
288 /* Processor costs (relative to an add) */
290 /* Instruction size costs on 32bit processors. */
292 struct processor_costs size32_cost = {
293 COSTS_N_INSNS (1), /* mulsi */
294 COSTS_N_INSNS (1), /* mulsi_const */
295 COSTS_N_INSNS (1), /* mulsi_const9 */
296 COSTS_N_INSNS (1), /* muldi */
297 COSTS_N_INSNS (1), /* divsi */
298 COSTS_N_INSNS (1), /* divdi */
299 COSTS_N_INSNS (1), /* fp */
300 COSTS_N_INSNS (1), /* dmul */
301 COSTS_N_INSNS (1), /* sdiv */
302 COSTS_N_INSNS (1), /* ddiv */
305 /* Instruction size costs on 64bit processors. */
307 struct processor_costs size64_cost = {
308 COSTS_N_INSNS (1), /* mulsi */
309 COSTS_N_INSNS (1), /* mulsi_const */
310 COSTS_N_INSNS (1), /* mulsi_const9 */
311 COSTS_N_INSNS (1), /* muldi */
312 COSTS_N_INSNS (1), /* divsi */
313 COSTS_N_INSNS (1), /* divdi */
314 COSTS_N_INSNS (1), /* fp */
315 COSTS_N_INSNS (1), /* dmul */
316 COSTS_N_INSNS (1), /* sdiv */
317 COSTS_N_INSNS (1), /* ddiv */
320 /* Instruction costs on RIOS1 processors. */
322 struct processor_costs rios1_cost = {
323 COSTS_N_INSNS (5), /* mulsi */
324 COSTS_N_INSNS (4), /* mulsi_const */
325 COSTS_N_INSNS (3), /* mulsi_const9 */
326 COSTS_N_INSNS (5), /* muldi */
327 COSTS_N_INSNS (19), /* divsi */
328 COSTS_N_INSNS (19), /* divdi */
329 COSTS_N_INSNS (2), /* fp */
330 COSTS_N_INSNS (2), /* dmul */
331 COSTS_N_INSNS (19), /* sdiv */
332 COSTS_N_INSNS (19), /* ddiv */
335 /* Instruction costs on RIOS2 processors. */
337 struct processor_costs rios2_cost = {
338 COSTS_N_INSNS (2), /* mulsi */
339 COSTS_N_INSNS (2), /* mulsi_const */
340 COSTS_N_INSNS (2), /* mulsi_const9 */
341 COSTS_N_INSNS (2), /* muldi */
342 COSTS_N_INSNS (13), /* divsi */
343 COSTS_N_INSNS (13), /* divdi */
344 COSTS_N_INSNS (2), /* fp */
345 COSTS_N_INSNS (2), /* dmul */
346 COSTS_N_INSNS (17), /* sdiv */
347 COSTS_N_INSNS (17), /* ddiv */
350 /* Instruction costs on RS64A processors. */
352 struct processor_costs rs64a_cost = {
353 COSTS_N_INSNS (20), /* mulsi */
354 COSTS_N_INSNS (12), /* mulsi_const */
355 COSTS_N_INSNS (8), /* mulsi_const9 */
356 COSTS_N_INSNS (34), /* muldi */
357 COSTS_N_INSNS (65), /* divsi */
358 COSTS_N_INSNS (67), /* divdi */
359 COSTS_N_INSNS (4), /* fp */
360 COSTS_N_INSNS (4), /* dmul */
361 COSTS_N_INSNS (31), /* sdiv */
362 COSTS_N_INSNS (31), /* ddiv */
365 /* Instruction costs on MPCCORE processors. */
367 struct processor_costs mpccore_cost = {
368 COSTS_N_INSNS (2), /* mulsi */
369 COSTS_N_INSNS (2), /* mulsi_const */
370 COSTS_N_INSNS (2), /* mulsi_const9 */
371 COSTS_N_INSNS (2), /* muldi */
372 COSTS_N_INSNS (6), /* divsi */
373 COSTS_N_INSNS (6), /* divdi */
374 COSTS_N_INSNS (4), /* fp */
375 COSTS_N_INSNS (5), /* dmul */
376 COSTS_N_INSNS (10), /* sdiv */
377 COSTS_N_INSNS (17), /* ddiv */
380 /* Instruction costs on PPC403 processors. */
382 struct processor_costs ppc403_cost = {
383 COSTS_N_INSNS (4), /* mulsi */
384 COSTS_N_INSNS (4), /* mulsi_const */
385 COSTS_N_INSNS (4), /* mulsi_const9 */
386 COSTS_N_INSNS (4), /* muldi */
387 COSTS_N_INSNS (33), /* divsi */
388 COSTS_N_INSNS (33), /* divdi */
389 COSTS_N_INSNS (11), /* fp */
390 COSTS_N_INSNS (11), /* dmul */
391 COSTS_N_INSNS (11), /* sdiv */
392 COSTS_N_INSNS (11), /* ddiv */
395 /* Instruction costs on PPC405 processors. */
397 struct processor_costs ppc405_cost = {
398 COSTS_N_INSNS (5), /* mulsi */
399 COSTS_N_INSNS (4), /* mulsi_const */
400 COSTS_N_INSNS (3), /* mulsi_const9 */
401 COSTS_N_INSNS (5), /* muldi */
402 COSTS_N_INSNS (35), /* divsi */
403 COSTS_N_INSNS (35), /* divdi */
404 COSTS_N_INSNS (11), /* fp */
405 COSTS_N_INSNS (11), /* dmul */
406 COSTS_N_INSNS (11), /* sdiv */
407 COSTS_N_INSNS (11), /* ddiv */
410 /* Instruction costs on PPC440 processors. */
412 struct processor_costs ppc440_cost = {
413 COSTS_N_INSNS (3), /* mulsi */
414 COSTS_N_INSNS (2), /* mulsi_const */
415 COSTS_N_INSNS (2), /* mulsi_const9 */
416 COSTS_N_INSNS (3), /* muldi */
417 COSTS_N_INSNS (34), /* divsi */
418 COSTS_N_INSNS (34), /* divdi */
419 COSTS_N_INSNS (5), /* fp */
420 COSTS_N_INSNS (5), /* dmul */
421 COSTS_N_INSNS (19), /* sdiv */
422 COSTS_N_INSNS (33), /* ddiv */
425 /* Instruction costs on PPC601 processors. */
427 struct processor_costs ppc601_cost = {
428 COSTS_N_INSNS (5), /* mulsi */
429 COSTS_N_INSNS (5), /* mulsi_const */
430 COSTS_N_INSNS (5), /* mulsi_const9 */
431 COSTS_N_INSNS (5), /* muldi */
432 COSTS_N_INSNS (36), /* divsi */
433 COSTS_N_INSNS (36), /* divdi */
434 COSTS_N_INSNS (4), /* fp */
435 COSTS_N_INSNS (5), /* dmul */
436 COSTS_N_INSNS (17), /* sdiv */
437 COSTS_N_INSNS (31), /* ddiv */
440 /* Instruction costs on PPC603 processors. */
442 struct processor_costs ppc603_cost = {
443 COSTS_N_INSNS (5), /* mulsi */
444 COSTS_N_INSNS (3), /* mulsi_const */
445 COSTS_N_INSNS (2), /* mulsi_const9 */
446 COSTS_N_INSNS (5), /* muldi */
447 COSTS_N_INSNS (37), /* divsi */
448 COSTS_N_INSNS (37), /* divdi */
449 COSTS_N_INSNS (3), /* fp */
450 COSTS_N_INSNS (4), /* dmul */
451 COSTS_N_INSNS (18), /* sdiv */
452 COSTS_N_INSNS (33), /* ddiv */
455 /* Instruction costs on PPC604 processors. */
457 struct processor_costs ppc604_cost = {
458 COSTS_N_INSNS (4), /* mulsi */
459 COSTS_N_INSNS (4), /* mulsi_const */
460 COSTS_N_INSNS (4), /* mulsi_const9 */
461 COSTS_N_INSNS (4), /* muldi */
462 COSTS_N_INSNS (20), /* divsi */
463 COSTS_N_INSNS (20), /* divdi */
464 COSTS_N_INSNS (3), /* fp */
465 COSTS_N_INSNS (3), /* dmul */
466 COSTS_N_INSNS (18), /* sdiv */
467 COSTS_N_INSNS (32), /* ddiv */
470 /* Instruction costs on PPC604e processors. */
472 struct processor_costs ppc604e_cost = {
473 COSTS_N_INSNS (2), /* mulsi */
474 COSTS_N_INSNS (2), /* mulsi_const */
475 COSTS_N_INSNS (2), /* mulsi_const9 */
476 COSTS_N_INSNS (2), /* muldi */
477 COSTS_N_INSNS (20), /* divsi */
478 COSTS_N_INSNS (20), /* divdi */
479 COSTS_N_INSNS (3), /* fp */
480 COSTS_N_INSNS (3), /* dmul */
481 COSTS_N_INSNS (18), /* sdiv */
482 COSTS_N_INSNS (32), /* ddiv */
485 /* Instruction costs on PPC620 processors. */
487 struct processor_costs ppc620_cost = {
488 COSTS_N_INSNS (5), /* mulsi */
489 COSTS_N_INSNS (4), /* mulsi_const */
490 COSTS_N_INSNS (3), /* mulsi_const9 */
491 COSTS_N_INSNS (7), /* muldi */
492 COSTS_N_INSNS (21), /* divsi */
493 COSTS_N_INSNS (37), /* divdi */
494 COSTS_N_INSNS (3), /* fp */
495 COSTS_N_INSNS (3), /* dmul */
496 COSTS_N_INSNS (18), /* sdiv */
497 COSTS_N_INSNS (32), /* ddiv */
500 /* Instruction costs on PPC630 processors. */
502 struct processor_costs ppc630_cost = {
503 COSTS_N_INSNS (5), /* mulsi */
504 COSTS_N_INSNS (4), /* mulsi_const */
505 COSTS_N_INSNS (3), /* mulsi_const9 */
506 COSTS_N_INSNS (7), /* muldi */
507 COSTS_N_INSNS (21), /* divsi */
508 COSTS_N_INSNS (37), /* divdi */
509 COSTS_N_INSNS (3), /* fp */
510 COSTS_N_INSNS (3), /* dmul */
511 COSTS_N_INSNS (17), /* sdiv */
512 COSTS_N_INSNS (21), /* ddiv */
515 /* Instruction costs on PPC750 and PPC7400 processors. */
517 struct processor_costs ppc750_cost = {
518 COSTS_N_INSNS (5), /* mulsi */
519 COSTS_N_INSNS (3), /* mulsi_const */
520 COSTS_N_INSNS (2), /* mulsi_const9 */
521 COSTS_N_INSNS (5), /* muldi */
522 COSTS_N_INSNS (17), /* divsi */
523 COSTS_N_INSNS (17), /* divdi */
524 COSTS_N_INSNS (3), /* fp */
525 COSTS_N_INSNS (3), /* dmul */
526 COSTS_N_INSNS (17), /* sdiv */
527 COSTS_N_INSNS (31), /* ddiv */
530 /* Instruction costs on PPC7450 processors. */
532 struct processor_costs ppc7450_cost = {
533 COSTS_N_INSNS (4), /* mulsi */
534 COSTS_N_INSNS (3), /* mulsi_const */
535 COSTS_N_INSNS (3), /* mulsi_const9 */
536 COSTS_N_INSNS (4), /* muldi */
537 COSTS_N_INSNS (23), /* divsi */
538 COSTS_N_INSNS (23), /* divdi */
539 COSTS_N_INSNS (5), /* fp */
540 COSTS_N_INSNS (5), /* dmul */
541 COSTS_N_INSNS (21), /* sdiv */
542 COSTS_N_INSNS (35), /* ddiv */
545 /* Instruction costs on PPC8540 processors. */
547 struct processor_costs ppc8540_cost = {
548 COSTS_N_INSNS (4), /* mulsi */
549 COSTS_N_INSNS (4), /* mulsi_const */
550 COSTS_N_INSNS (4), /* mulsi_const9 */
551 COSTS_N_INSNS (4), /* muldi */
552 COSTS_N_INSNS (19), /* divsi */
553 COSTS_N_INSNS (19), /* divdi */
554 COSTS_N_INSNS (4), /* fp */
555 COSTS_N_INSNS (4), /* dmul */
556 COSTS_N_INSNS (29), /* sdiv */
557 COSTS_N_INSNS (29), /* ddiv */
560 /* Instruction costs on POWER4 and POWER5 processors. */
562 struct processor_costs power4_cost = {
563 COSTS_N_INSNS (3), /* mulsi */
564 COSTS_N_INSNS (2), /* mulsi_const */
565 COSTS_N_INSNS (2), /* mulsi_const9 */
566 COSTS_N_INSNS (4), /* muldi */
567 COSTS_N_INSNS (18), /* divsi */
568 COSTS_N_INSNS (34), /* divdi */
569 COSTS_N_INSNS (3), /* fp */
570 COSTS_N_INSNS (3), /* dmul */
571 COSTS_N_INSNS (17), /* sdiv */
572 COSTS_N_INSNS (17), /* ddiv */
576 static bool rs6000_function_ok_for_sibcall (tree, tree);
577 static const char *rs6000_invalid_within_doloop (rtx);
578 static rtx rs6000_generate_compare (enum rtx_code);
579 static void rs6000_maybe_dead (rtx);
580 static void rs6000_emit_stack_tie (void);
581 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
582 static rtx spe_synthesize_frame_save (rtx);
583 static bool spe_func_has_64bit_regs_p (void);
584 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
586 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
587 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
588 static unsigned rs6000_hash_constant (rtx);
589 static unsigned toc_hash_function (const void *);
590 static int toc_hash_eq (const void *, const void *);
591 static int constant_pool_expr_1 (rtx, int *, int *);
592 static bool constant_pool_expr_p (rtx);
593 static bool legitimate_small_data_p (enum machine_mode, rtx);
594 static bool legitimate_indexed_address_p (rtx, int);
595 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
596 static struct machine_function * rs6000_init_machine_status (void);
597 static bool rs6000_assemble_integer (rtx, unsigned int, int);
598 static bool no_global_regs_above (int);
599 #ifdef HAVE_GAS_HIDDEN
600 static void rs6000_assemble_visibility (tree, int);
602 static int rs6000_ra_ever_killed (void);
603 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
604 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
605 static bool rs6000_ms_bitfield_layout_p (tree);
606 static tree rs6000_handle_struct_attribute (tree *, tree, tree, int, bool *);
607 static void rs6000_eliminate_indexed_memrefs (rtx operands[2]);
608 static const char *rs6000_mangle_fundamental_type (tree);
609 extern const struct attribute_spec rs6000_attribute_table[];
610 static void rs6000_set_default_type_attributes (tree);
611 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
612 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
613 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
615 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
616 static bool rs6000_return_in_memory (tree, tree);
617 static void rs6000_file_start (void);
619 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
620 static void rs6000_elf_asm_out_constructor (rtx, int);
621 static void rs6000_elf_asm_out_destructor (rtx, int);
622 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED;
623 static void rs6000_elf_asm_init_sections (void);
624 static section *rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
625 static void rs6000_elf_unique_section (tree, int);
626 static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
627 unsigned HOST_WIDE_INT);
628 static void rs6000_elf_encode_section_info (tree, rtx, int)
631 static bool rs6000_use_blocks_for_constant_p (enum machine_mode, rtx);
633 static void rs6000_xcoff_asm_output_anchor (rtx);
634 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
635 static void rs6000_xcoff_asm_init_sections (void);
636 static void rs6000_xcoff_asm_named_section (const char *, unsigned int, tree);
637 static section *rs6000_xcoff_select_section (tree, int,
638 unsigned HOST_WIDE_INT);
639 static void rs6000_xcoff_unique_section (tree, int);
640 static section *rs6000_xcoff_select_rtx_section
641 (enum machine_mode, rtx, unsigned HOST_WIDE_INT);
642 static const char * rs6000_xcoff_strip_name_encoding (const char *);
643 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
644 static void rs6000_xcoff_file_start (void);
645 static void rs6000_xcoff_file_end (void);
647 static int rs6000_variable_issue (FILE *, int, rtx, int);
648 static bool rs6000_rtx_costs (rtx, int, int, int *);
649 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
650 static bool is_microcoded_insn (rtx);
651 static int is_dispatch_slot_restricted (rtx);
652 static bool is_cracked_insn (rtx);
653 static bool is_branch_slot_insn (rtx);
654 static int rs6000_adjust_priority (rtx, int);
655 static int rs6000_issue_rate (void);
656 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
657 static rtx get_next_active_insn (rtx, rtx);
658 static bool insn_terminates_group_p (rtx , enum group_termination);
659 static bool is_costly_group (rtx *, rtx);
660 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
661 static int redefine_groups (FILE *, int, rtx, rtx);
662 static int pad_groups (FILE *, int, rtx, rtx);
663 static void rs6000_sched_finish (FILE *, int);
664 static int rs6000_use_sched_lookahead (void);
665 static tree rs6000_builtin_mask_for_load (void);
667 static void def_builtin (int, const char *, tree, int);
668 static void rs6000_init_builtins (void);
669 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
670 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
671 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
672 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
673 static void altivec_init_builtins (void);
674 static void rs6000_common_init_builtins (void);
675 static void rs6000_init_libfuncs (void);
677 static void enable_mask_for_builtins (struct builtin_description *, int,
678 enum rs6000_builtins,
679 enum rs6000_builtins);
680 static tree build_opaque_vector_type (tree, int);
681 static void spe_init_builtins (void);
682 static rtx spe_expand_builtin (tree, rtx, bool *);
683 static rtx spe_expand_stv_builtin (enum insn_code, tree);
684 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
685 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
686 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
687 static rs6000_stack_t *rs6000_stack_info (void);
688 static void debug_stack_info (rs6000_stack_t *);
690 static rtx altivec_expand_builtin (tree, rtx, bool *);
691 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
692 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
693 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
694 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
695 static rtx altivec_expand_predicate_builtin (enum insn_code,
696 const char *, tree, rtx);
697 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
698 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
699 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
700 static rtx altivec_expand_vec_set_builtin (tree);
701 static rtx altivec_expand_vec_ext_builtin (tree, rtx);
702 static int get_element_number (tree, tree);
703 static bool rs6000_handle_option (size_t, const char *, int);
704 static void rs6000_parse_tls_size_option (void);
705 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
706 static int first_altivec_reg_to_save (void);
707 static unsigned int compute_vrsave_mask (void);
708 static void compute_save_world_info (rs6000_stack_t *info_ptr);
709 static void is_altivec_return_reg (rtx, void *);
710 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
711 int easy_vector_constant (rtx, enum machine_mode);
712 static bool rs6000_is_opaque_type (tree);
713 static rtx rs6000_dwarf_register_span (rtx);
714 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
715 static void rs6000_output_dwarf_dtprel (FILE *, int, rtx) ATTRIBUTE_UNUSED;
716 static rtx rs6000_tls_get_addr (void);
717 static rtx rs6000_got_sym (void);
718 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
719 static const char *rs6000_get_some_local_dynamic_name (void);
720 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
721 static rtx rs6000_complex_function_value (enum machine_mode);
722 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
723 enum machine_mode, tree);
724 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
726 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
727 tree, HOST_WIDE_INT);
728 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
731 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
734 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, tree, int, bool);
735 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int);
736 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
737 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
738 enum machine_mode, tree,
740 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
742 static int rs6000_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
744 static const char *invalid_arg_for_unprototyped_fn (tree, tree, tree);
746 static void macho_branch_islands (void);
747 static void add_compiler_branch_island (tree, tree, int);
748 static int no_previous_def (tree function_name);
749 static tree get_prev_label (tree function_name);
750 static void rs6000_darwin_file_start (void);
753 static tree rs6000_build_builtin_va_list (void);
754 static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
755 static bool rs6000_must_pass_in_stack (enum machine_mode, tree);
756 static bool rs6000_scalar_mode_supported_p (enum machine_mode);
757 static bool rs6000_vector_mode_supported_p (enum machine_mode);
758 static int get_vec_cmp_insn (enum rtx_code, enum machine_mode,
760 static rtx rs6000_emit_vector_compare (enum rtx_code, rtx, rtx,
762 static int get_vsel_insn (enum machine_mode);
763 static void rs6000_emit_vector_select (rtx, rtx, rtx, rtx);
764 static tree rs6000_stack_protect_fail (void);
766 const int INSN_NOT_AVAILABLE = -1;
767 static enum machine_mode rs6000_eh_return_filter_mode (void);
769 /* Hash table stuff for keeping track of TOC entries. */
771 struct toc_hash_struct GTY(())
773 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
774 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
776 enum machine_mode key_mode;
780 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
782 /* Default register names. */
783 char rs6000_reg_names[][8] =
785 "0", "1", "2", "3", "4", "5", "6", "7",
786 "8", "9", "10", "11", "12", "13", "14", "15",
787 "16", "17", "18", "19", "20", "21", "22", "23",
788 "24", "25", "26", "27", "28", "29", "30", "31",
789 "0", "1", "2", "3", "4", "5", "6", "7",
790 "8", "9", "10", "11", "12", "13", "14", "15",
791 "16", "17", "18", "19", "20", "21", "22", "23",
792 "24", "25", "26", "27", "28", "29", "30", "31",
793 "mq", "lr", "ctr","ap",
794 "0", "1", "2", "3", "4", "5", "6", "7",
796 /* AltiVec registers. */
797 "0", "1", "2", "3", "4", "5", "6", "7",
798 "8", "9", "10", "11", "12", "13", "14", "15",
799 "16", "17", "18", "19", "20", "21", "22", "23",
800 "24", "25", "26", "27", "28", "29", "30", "31",
803 "spe_acc", "spefscr",
804 /* Soft frame pointer. */
808 #ifdef TARGET_REGNAMES
809 static const char alt_reg_names[][8] =
811 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
812 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
813 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
814 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
815 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
816 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
817 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
818 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
819 "mq", "lr", "ctr", "ap",
820 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
822 /* AltiVec registers. */
823 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
824 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
825 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
826 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
829 "spe_acc", "spefscr",
830 /* Soft frame pointer. */
835 #ifndef MASK_STRICT_ALIGN
836 #define MASK_STRICT_ALIGN 0
/* NOTE(review): every line in this region begins with a stray decimal
   number and that numbering is non-contiguous (e.g. 856 -> 858, 875 -> 879),
   so this looks like a corrupted extraction of rs6000.c: matching #endif
   lines, the #else of the OBJECT_FORMAT_ELF block, and several comment
   tails are missing.  Text is preserved as found; reconcile against the
   upstream file before building.  */
838 #ifndef TARGET_PROFILE_KERNEL
839 #define TARGET_PROFILE_KERNEL 0
842 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
843 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
/* Target-hook table: each #undef/#define pair installs an rs6000-specific
   implementation that TARGET_INITIALIZER picks up for `targetm' below.  */
845 /* Initialize the GCC target structure. */
846 #undef TARGET_ATTRIBUTE_TABLE
847 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
848 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
849 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
851 #undef TARGET_ASM_ALIGNED_DI_OP
852 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
854 /* Default unaligned ops are only provided for ELF. Find the ops needed
855 for non-ELF systems. */
856 #ifndef OBJECT_FORMAT_ELF
858 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
860 #undef TARGET_ASM_UNALIGNED_HI_OP
861 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
862 #undef TARGET_ASM_UNALIGNED_SI_OP
863 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
864 #undef TARGET_ASM_UNALIGNED_DI_OP
865 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
/* NOTE(review): the #else separating the XCOFF and generic-ELF variants of
   these unaligned-op strings is not visible here (866-867 missing).  */
868 #undef TARGET_ASM_UNALIGNED_HI_OP
869 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
870 #undef TARGET_ASM_UNALIGNED_SI_OP
871 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
872 #undef TARGET_ASM_UNALIGNED_DI_OP
873 #define TARGET_ASM_UNALIGNED_DI_OP "\t.quad\t"
874 #undef TARGET_ASM_ALIGNED_DI_OP
875 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
879 /* This hook deals with fixups for relocatable code and DI-mode objects
881 #undef TARGET_ASM_INTEGER
882 #define TARGET_ASM_INTEGER rs6000_assemble_integer
884 #ifdef HAVE_GAS_HIDDEN
885 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
886 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
889 #undef TARGET_HAVE_TLS
890 #define TARGET_HAVE_TLS HAVE_AS_TLS
892 #undef TARGET_CANNOT_FORCE_CONST_MEM
893 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
895 #undef TARGET_ASM_FUNCTION_PROLOGUE
896 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
897 #undef TARGET_ASM_FUNCTION_EPILOGUE
898 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
/* Instruction-scheduler hooks.  */
900 #undef TARGET_SCHED_VARIABLE_ISSUE
901 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
903 #undef TARGET_SCHED_ISSUE_RATE
904 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
905 #undef TARGET_SCHED_ADJUST_COST
906 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
907 #undef TARGET_SCHED_ADJUST_PRIORITY
908 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
909 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
910 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
911 #undef TARGET_SCHED_FINISH
912 #define TARGET_SCHED_FINISH rs6000_sched_finish
914 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
915 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
917 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
918 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
920 #undef TARGET_INIT_BUILTINS
921 #define TARGET_INIT_BUILTINS rs6000_init_builtins
923 #undef TARGET_EXPAND_BUILTIN
924 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
926 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
927 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
929 #undef TARGET_INIT_LIBFUNCS
930 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
/* NOTE(review): this darwin_binds_local_p hook is normally guarded by
   #if TARGET_MACHO; the guard lines (931-932, 935) are not visible here.  */
933 #undef TARGET_BINDS_LOCAL_P
934 #define TARGET_BINDS_LOCAL_P darwin_binds_local_p
937 #undef TARGET_MS_BITFIELD_LAYOUT_P
938 #define TARGET_MS_BITFIELD_LAYOUT_P rs6000_ms_bitfield_layout_p
940 #undef TARGET_ASM_OUTPUT_MI_THUNK
941 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
943 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
944 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
946 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
947 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
949 #undef TARGET_INVALID_WITHIN_DOLOOP
950 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
952 #undef TARGET_RTX_COSTS
953 #define TARGET_RTX_COSTS rs6000_rtx_costs
954 #undef TARGET_ADDRESS_COST
955 #define TARGET_ADDRESS_COST hook_int_rtx_0
957 #undef TARGET_VECTOR_OPAQUE_P
958 #define TARGET_VECTOR_OPAQUE_P rs6000_is_opaque_type
960 #undef TARGET_DWARF_REGISTER_SPAN
961 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
/* Calling-convention hooks.  */
963 /* On rs6000, function arguments are promoted, as are function return
965 #undef TARGET_PROMOTE_FUNCTION_ARGS
966 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
967 #undef TARGET_PROMOTE_FUNCTION_RETURN
968 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
970 #undef TARGET_RETURN_IN_MEMORY
971 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
973 #undef TARGET_SETUP_INCOMING_VARARGS
974 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
976 /* Always strict argument naming on rs6000. */
977 #undef TARGET_STRICT_ARGUMENT_NAMING
978 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
979 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
980 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
981 #undef TARGET_SPLIT_COMPLEX_ARG
982 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
983 #undef TARGET_MUST_PASS_IN_STACK
984 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
985 #undef TARGET_PASS_BY_REFERENCE
986 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
987 #undef TARGET_ARG_PARTIAL_BYTES
988 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
990 #undef TARGET_BUILD_BUILTIN_VA_LIST
991 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
993 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
994 #define TARGET_GIMPLIFY_VA_ARG_EXPR rs6000_gimplify_va_arg
996 #undef TARGET_EH_RETURN_FILTER_MODE
997 #define TARGET_EH_RETURN_FILTER_MODE rs6000_eh_return_filter_mode
999 #undef TARGET_SCALAR_MODE_SUPPORTED_P
1000 #define TARGET_SCALAR_MODE_SUPPORTED_P rs6000_scalar_mode_supported_p
1002 #undef TARGET_VECTOR_MODE_SUPPORTED_P
1003 #define TARGET_VECTOR_MODE_SUPPORTED_P rs6000_vector_mode_supported_p
1005 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1006 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1008 #undef TARGET_HANDLE_OPTION
1009 #define TARGET_HANDLE_OPTION rs6000_handle_option
/* NOTE(review): the next #define ends in a backslash continuation but its
   continuation line(s) (1013-1014) are missing from this extraction, so the
   macro body is currently the following #undef line -- must be restored.  */
1011 #undef TARGET_DEFAULT_TARGET_FLAGS
1012 #define TARGET_DEFAULT_TARGET_FLAGS \
1015 #undef TARGET_STACK_PROTECT_FAIL
1016 #define TARGET_STACK_PROTECT_FAIL rs6000_stack_protect_fail
1018 /* MPC604EUM 3.5.2 Weak Consistency between Multiple Processors
1019 The PowerPC architecture requires only weak consistency among
1020 processors--that is, memory accesses between processors need not be
1021 sequentially consistent and memory accesses among processors can occur
1022 in any order. The ability to order memory accesses weakly provides
1023 opportunities for more efficient use of the system bus. Unless a
1024 dependency exists, the 604e allows read operations to precede store
1026 #undef TARGET_RELAXED_ORDERING
1027 #define TARGET_RELAXED_ORDERING true
/* NOTE(review): this DTPREL hook is normally guarded by
   #ifdef HAVE_AS_TLS; the guard lines are not visible here.  */
1030 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
1031 #define TARGET_ASM_OUTPUT_DWARF_DTPREL rs6000_output_dwarf_dtprel
1034 /* Use a 32-bit anchor range. This leads to sequences like:
1036 addis tmp,anchor,high
1039 where tmp itself acts as an anchor, and can be shared between
1040 accesses to the same 64k page. */
1041 #undef TARGET_MIN_ANCHOR_OFFSET
1042 #define TARGET_MIN_ANCHOR_OFFSET -0x7fffffff - 1
1043 #undef TARGET_MAX_ANCHOR_OFFSET
1044 #define TARGET_MAX_ANCHOR_OFFSET 0x7fffffff
1045 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
1046 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P rs6000_use_blocks_for_constant_p
/* The one global target-vector object; TARGET_INITIALIZER expands to an
   aggregate initializer built from all the TARGET_* macros above.  */
1048 struct gcc_target targetm = TARGET_INITIALIZER;
1051 /* Value is 1 if hard register REGNO can hold a value of machine-mode
/* NOTE(review): the embedded numbering jumps (1051 -> 1054, 1063 -> 1065,
   1083 -> 1086), so the return type, opening brace, the `return' keyword of
   the FP_REGNO_P branch, and the SPE `return 1;' appear to be missing from
   this extraction.  Tokens below are preserved exactly as found.  */
1054 rs6000_hard_regno_mode_ok (int regno, enum machine_mode mode)
1056 /* The GPRs can hold any mode, but values bigger than one register
1057 cannot go past R31. */
1058 if (INT_REGNO_P (regno))
1059 return INT_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1);
1061 /* The float registers can only hold floating modes and DImode.
1062 This also excludes decimal float modes. */
1063 if (FP_REGNO_P (regno))
1065 (SCALAR_FLOAT_MODE_P (mode)
1066 && !DECIMAL_FLOAT_MODE_P (mode)
1067 && FP_REGNO_P (regno + HARD_REGNO_NREGS (regno, mode) - 1))
1068 || (GET_MODE_CLASS (mode) == MODE_INT
1069 && GET_MODE_SIZE (mode) == UNITS_PER_FP_WORD);
1071 /* The CR register can only hold CC modes. */
1072 if (CR_REGNO_P (regno))
1073 return GET_MODE_CLASS (mode) == MODE_CC;
/* The XER (fixed-point exception register) only holds PSImode.  */
1075 if (XER_REGNO_P (regno))
1076 return mode == PSImode;
1078 /* AltiVec modes only in AltiVec registers. */
1079 if (ALTIVEC_REGNO_P (regno))
1080 return ALTIVEC_VECTOR_MODE (mode);
1082 /* ...but GPRs can hold SIMD data on the SPE in one register. */
1083 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
1086 /* We cannot put TImode anywhere except general register and it must be
1087 able to fit within the register set. */
1089 return GET_MODE_SIZE (mode) <= UNITS_PER_WORD;
1092 /* Initialize rs6000_hard_regno_mode_ok_p table. */
1094 rs6000_init_hard_regno_mode_ok (void)
1098 for (r = 0; r < FIRST_PSEUDO_REGISTER; ++r)
1099 for (m = 0; m < NUM_MACHINE_MODES; ++m)
1100 if (rs6000_hard_regno_mode_ok (r, m))
1101 rs6000_hard_regno_mode_ok_p[m][r] = true;
1104 /* If not otherwise specified by a target, make 'long double' equivalent to
/* NOTE(review): the tail of the comment above ("...ordinary 'double'.")
   and the matching #endif for this #ifndef are not visible in this
   extraction (numbering jumps 1104 -> 1107 and 1108 -> 1111).  */
1107 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
1108 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
1111 /* Override command line options. Mostly we process the processor
1112 type and sometimes adjust other TARGET_ options. */
/* NOTE(review): the embedded original line numbers jump throughout this
   function (1112 -> 1115, 1125 -> 1128, ...), so the function header's
   return type, braces, many `break;'/`return'/`#endif' lines and several
   statements are missing from this extraction.  All surviving tokens are
   preserved exactly; reconcile with upstream rs6000.c before relying on
   control flow here.  */
1115 rs6000_override_options (const char *default_cpu)
1118 struct rs6000_cpu_select *ptr;
1121 /* Simplifications for entries below. */
1124 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1125 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1128 /* This table occasionally claims that a processor does not support
1129 a particular feature even though it does, but the feature is slower
1130 than the alternative. Thus, it shouldn't be relied on as a
1131 complete description of the processor's support.
1133 Please keep this list in order, and don't forget to update the
1134 documentation in invoke.texi when adding a new processor or
/* Table mapping -mcpu=/-mtune= names to a processor enum value and the
   default target_flags mask to enable for that CPU.  */
1138 const char *const name; /* Canonical processor name. */
1139 const enum processor_type processor; /* Processor type enum value. */
1140 const int target_enable; /* Target flags to enable. */
1141 } const processor_target_table[]
1142 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1143 {"403", PROCESSOR_PPC403,
1144 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1145 {"405", PROCESSOR_PPC405,
1146 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1147 {"405fp", PROCESSOR_PPC405,
1148 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1149 {"440", PROCESSOR_PPC440,
1150 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1151 {"440fp", PROCESSOR_PPC440,
1152 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1153 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1154 {"601", PROCESSOR_PPC601,
1155 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1156 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1157 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1158 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1159 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1160 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1161 {"620", PROCESSOR_PPC620,
1162 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1163 {"630", PROCESSOR_PPC630,
1164 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1165 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1166 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1167 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1168 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1169 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1170 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1171 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1172 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1173 /* 8548 has a dummy entry for now. */
1174 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1175 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1176 {"970", PROCESSOR_POWER4,
1177 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1178 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1179 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1180 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1181 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1182 {"G5", PROCESSOR_POWER4,
1183 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1184 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1185 {"power2", PROCESSOR_POWER,
1186 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1187 {"power3", PROCESSOR_PPC630,
1188 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1189 {"power4", PROCESSOR_POWER4,
1190 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POWERPC64},
1191 {"power5", PROCESSOR_POWER5,
1192 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1193 | MASK_MFCRF | MASK_POPCNTB},
1194 {"power5+", PROCESSOR_POWER5,
1195 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GFXOPT
1196 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
1197 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1198 {"powerpc64", PROCESSOR_POWERPC64,
1199 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1200 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1201 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1202 {"rios2", PROCESSOR_RIOS2,
1203 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1204 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1205 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1206 {"rs64", PROCESSOR_RS64A,
1207 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1210 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
1212 /* Some OSs don't support saving the high part of 64-bit registers on
1213 context switch. Other OSs don't support saving Altivec registers.
1214 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
1215 settings; if the user wants either, the user must explicitly specify
1216 them and we won't interfere with the user's specification. */
1219 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1220 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
1221 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1222 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
1226 rs6000_init_hard_regno_mode_ok ();
1228 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
1229 #ifdef OS_MISSING_POWERPC64
1230 if (OS_MISSING_POWERPC64)
1231 set_masks &= ~MASK_POWERPC64;
1233 #ifdef OS_MISSING_ALTIVEC
1234 if (OS_MISSING_ALTIVEC)
1235 set_masks &= ~MASK_ALTIVEC;
1238 /* Don't override by the processor default if given explicitly. */
1239 set_masks &= ~target_flags_explicit;
1241 /* Identify the processor type. */
1242 rs6000_select[0].string = default_cpu;
1243 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
1245 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1247 ptr = &rs6000_select[i];
1248 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1250 for (j = 0; j < ptt_size; j++)
1251 if (! strcmp (ptr->string, processor_target_table[j].name))
1253 if (ptr->set_tune_p)
1254 rs6000_cpu = processor_target_table[j].processor;
1256 if (ptr->set_arch_p)
1258 target_flags &= ~set_masks;
1259 target_flags |= (processor_target_table[j].target_enable
/* NOTE(review): the loop-exit logic between the table hit above and the
   error below (original lines 1260-1265) is missing here.  */
1266 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
1273 /* If we are optimizing big endian systems for space, use the load/store
1274 multiple and string instructions. */
1275 if (BYTES_BIG_ENDIAN && optimize_size)
1276 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
1278 /* Don't allow -mmultiple or -mstring on little endian systems
1279 unless the cpu is a 750, because the hardware doesn't support the
1280 instructions used in little endian mode, and causes an alignment
1281 trap. The 750 does not cause an alignment trap (except when the
1282 target is unaligned). */
1284 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
1286 if (TARGET_MULTIPLE)
1288 target_flags &= ~MASK_MULTIPLE;
1289 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
1290 warning (0, "-mmultiple is not supported on little endian systems");
/* NOTE(review): the matching `if (TARGET_STRING)' guard for the block
   below (original lines 1291-1294) is missing here.  */
1295 target_flags &= ~MASK_STRING;
1296 if ((target_flags_explicit & MASK_STRING) != 0)
1297 warning (0, "-mstring is not supported on little endian systems");
1301 /* Set debug flags */
1302 if (rs6000_debug_name)
1304 if (! strcmp (rs6000_debug_name, "all"))
1305 rs6000_debug_stack = rs6000_debug_arg = 1;
1306 else if (! strcmp (rs6000_debug_name, "stack"))
1307 rs6000_debug_stack = 1;
1308 else if (! strcmp (rs6000_debug_name, "arg"))
1309 rs6000_debug_arg = 1;
1311 error ("unknown -mdebug-%s switch", rs6000_debug_name);
1314 if (rs6000_traceback_name)
1316 if (! strncmp (rs6000_traceback_name, "full", 4))
1317 rs6000_traceback = traceback_full;
1318 else if (! strncmp (rs6000_traceback_name, "part", 4))
1319 rs6000_traceback = traceback_part;
1320 else if (! strncmp (rs6000_traceback_name, "no", 2))
1321 rs6000_traceback = traceback_none;
1323 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>",
1324 rs6000_traceback_name);
1327 if (!rs6000_explicit_options.long_double)
1328 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1330 #ifndef POWERPC_LINUX
1331 if (!rs6000_explicit_options.ieee)
1332 rs6000_ieeequad = 1;
1335 /* Set Altivec ABI as default for powerpc64 linux. */
1336 if (TARGET_ELF && TARGET_64BIT)
1338 rs6000_altivec_abi = 1;
1339 TARGET_ALTIVEC_VRSAVE = 1;
1342 /* Set the Darwin64 ABI as default for 64-bit Darwin. */
1343 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1345 rs6000_darwin64_abi = 1;
1347 darwin_one_byte_bool = 1;
1349 /* Default to natural alignment, for better performance. */
1350 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1353 /* Handle -mtls-size option. */
1354 rs6000_parse_tls_size_option ();
1356 #ifdef SUBTARGET_OVERRIDE_OPTIONS
1357 SUBTARGET_OVERRIDE_OPTIONS;
1359 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
1360 SUBSUBTARGET_OVERRIDE_OPTIONS;
1362 #ifdef SUB3TARGET_OVERRIDE_OPTIONS
1363 SUB3TARGET_OVERRIDE_OPTIONS;
/* NOTE(review): the `if (TARGET_E500)' guard and the AltiVec-ABI check
   preceding this error (original lines 1364-1368) are missing here.  */
1369 error ("AltiVec and E500 instructions cannot coexist");
1371 /* The e500 does not have string instructions, and we set
1372 MASK_STRING above when optimizing for size. */
1373 if ((target_flags & MASK_STRING) != 0)
1374 target_flags = target_flags & ~MASK_STRING;
1376 else if (rs6000_select[1].string != NULL)
1378 /* For the powerpc-eabispe configuration, we set all these by
1379 default, so let's unset them if we manually set another
1380 CPU that is not the E500. */
1381 if (!rs6000_explicit_options.abi)
1383 if (!rs6000_explicit_options.spe)
1385 if (!rs6000_explicit_options.float_gprs)
1386 rs6000_float_gprs = 0;
1387 if (!rs6000_explicit_options.isel)
1389 if (!rs6000_explicit_options.long_double)
1390 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1393 rs6000_always_hint = (rs6000_cpu != PROCESSOR_POWER4
1394 && rs6000_cpu != PROCESSOR_POWER5);
1395 rs6000_sched_groups = (rs6000_cpu == PROCESSOR_POWER4
1396 || rs6000_cpu == PROCESSOR_POWER5);
1398 rs6000_sched_restricted_insns_priority
1399 = (rs6000_sched_groups ? 1 : 0);
1401 /* Handle -msched-costly-dep option. */
1402 rs6000_sched_costly_dep
1403 = (rs6000_sched_groups ? store_to_load_dep_costly : no_dep_costly);
1405 if (rs6000_sched_costly_dep_str)
1407 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
1408 rs6000_sched_costly_dep = no_dep_costly;
1409 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
1410 rs6000_sched_costly_dep = all_deps_costly;
1411 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
1412 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
1413 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
1414 rs6000_sched_costly_dep = store_to_load_dep_costly;
1416 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
1419 /* Handle -minsert-sched-nops option. */
1420 rs6000_sched_insert_nops
1421 = (rs6000_sched_groups ? sched_finish_regroup_exact : sched_finish_none);
1423 if (rs6000_sched_insert_nops_str)
1425 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
1426 rs6000_sched_insert_nops = sched_finish_none;
1427 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
1428 rs6000_sched_insert_nops = sched_finish_pad_groups;
1429 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
1430 rs6000_sched_insert_nops = sched_finish_regroup_exact;
1432 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
1435 #ifdef TARGET_REGNAMES
1436 /* If the user desires alternate register names, copy in the
1437 alternate names now. */
1438 if (TARGET_REGNAMES)
1439 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
1442 /* Set aix_struct_return last, after the ABI is determined.
1443 If -maix-struct-return or -msvr4-struct-return was explicitly
1444 used, don't override with the ABI default. */
1445 if (!rs6000_explicit_options.aix_struct_ret)
1446 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
1448 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
1449 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1452 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1454 /* We can only guarantee the availability of DI pseudo-ops when
1455 assembling for 64-bit targets. */
1458 targetm.asm_out.aligned_op.di = NULL;
1459 targetm.asm_out.unaligned_op.di = NULL;
1462 /* Set branch target alignment, if not optimizing for size. */
1465 if (rs6000_sched_groups)
1467 if (align_functions <= 0)
1468 align_functions = 16;
1469 if (align_jumps <= 0)
1471 if (align_loops <= 0)
1474 if (align_jumps_max_skip <= 0)
1475 align_jumps_max_skip = 15;
1476 if (align_loops_max_skip <= 0)
1477 align_loops_max_skip = 15;
1480 /* Arrange to save and restore machine status around nested functions. */
1481 init_machine_status = rs6000_init_machine_status;
1483 /* We should always be splitting complex arguments, but we can't break
1484 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1485 if (DEFAULT_ABI != ABI_AIX)
1486 targetm.calls.split_complex_arg = NULL;
1488 /* Initialize rs6000_cost with the appropriate target costs. */
1490 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
/* NOTE(review): the `else switch (rs6000_cpu)' header for the cost
   selection below (original lines 1491-1493) is missing here.  */
1494 case PROCESSOR_RIOS1:
1495 rs6000_cost = &rios1_cost;
1498 case PROCESSOR_RIOS2:
1499 rs6000_cost = &rios2_cost;
1502 case PROCESSOR_RS64A:
1503 rs6000_cost = &rs64a_cost;
1506 case PROCESSOR_MPCCORE:
1507 rs6000_cost = &mpccore_cost;
1510 case PROCESSOR_PPC403:
1511 rs6000_cost = &ppc403_cost;
1514 case PROCESSOR_PPC405:
1515 rs6000_cost = &ppc405_cost;
1518 case PROCESSOR_PPC440:
1519 rs6000_cost = &ppc440_cost;
1522 case PROCESSOR_PPC601:
1523 rs6000_cost = &ppc601_cost;
1526 case PROCESSOR_PPC603:
1527 rs6000_cost = &ppc603_cost;
1530 case PROCESSOR_PPC604:
1531 rs6000_cost = &ppc604_cost;
1534 case PROCESSOR_PPC604e:
1535 rs6000_cost = &ppc604e_cost;
1538 case PROCESSOR_PPC620:
1539 rs6000_cost = &ppc620_cost;
1542 case PROCESSOR_PPC630:
1543 rs6000_cost = &ppc630_cost;
1546 case PROCESSOR_PPC750:
1547 case PROCESSOR_PPC7400:
1548 rs6000_cost = &ppc750_cost;
1551 case PROCESSOR_PPC7450:
1552 rs6000_cost = &ppc7450_cost;
1555 case PROCESSOR_PPC8540:
1556 rs6000_cost = &ppc8540_cost;
1559 case PROCESSOR_POWER4:
1560 case PROCESSOR_POWER5:
1561 rs6000_cost = &power4_cost;
1569 /* Implement targetm.vectorize.builtin_mask_for_load. */
/* NOTE(review): the return type (presumably `static tree') and the guard
   around this return (numbering jumps 1571 -> 1574) are missing from
   this extraction -- confirm against upstream before relying on it.  */
1571 rs6000_builtin_mask_for_load (void)
1574 return altivec_builtin_mask_for_load;
1579 /* Handle generic options of the form -mfoo=yes/no.
1580 NAME is the option name.
1581 VALUE is the option value.
1582 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1583 whether the option value is 'yes' or 'no' respectively. */
/* NOTE(review): the return type, braces, and the *flag assignments in the
   "yes"/"no" branches are missing here (numbering jumps 1585 -> 1589,
   1589 -> 1591).  Only the string comparisons and the error survived.  */
1585 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1589 else if (!strcmp (value, "yes"))
1591 else if (!strcmp (value, "no"))
1594 error ("unknown -m%s= option specified: '%s'", name, value);
1597 /* Validate and record the size specified with the -mtls-size option. */
/* Accepts "16", "32" or "64" (bits) into rs6000_tls_size; anything else
   is rejected with an error.  NOTE(review): the return type, braces, and
   the body of the string==0 branch are missing from this extraction.  */
1600 rs6000_parse_tls_size_option (void)
1602 if (rs6000_tls_size_string == 0)
1604 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1605 rs6000_tls_size = 16;
1606 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1607 rs6000_tls_size = 32;
1608 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1609 rs6000_tls_size = 64;
1611 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
/* Implement OPTIMIZATION_OPTIONS: per-optimization-level default tweaks.
   NOTE(review): the return type and braces are missing from this
   extraction (numbering jumps 1615 -> 1617, 1620 -> 1622).  */
1615 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1617 if (DEFAULT_ABI == ABI_DARWIN)
1618 /* The Darwin libraries never set errno, so we might as well
1619 avoid calling them when that's the only reason we would. */
1620 flag_errno_math = 0;
1622 /* Double growth factor to counter reduced min jump length. */
1623 set_param_value ("max-grow-copy-bb-insns", 16);
1626 /* Implement TARGET_HANDLE_OPTION. */
/* Per-option processing for rs6000 -m switches.  CODE is the OPT_* enum,
   ARG the string argument (if any), VALUE its integer form.
   NOTE(review): this is the body of a large `switch (code)' whose header,
   many `case'/`break;' lines, the closing `return true;' and several
   #endif lines are missing from this extraction (the embedded numbering
   jumps throughout).  Surviving tokens are preserved exactly.  */
1629 rs6000_handle_option (size_t code, const char *arg, int value)
1634 target_flags &= ~(MASK_POWER | MASK_POWER2
1635 | MASK_MULTIPLE | MASK_STRING);
1636 target_flags_explicit |= (MASK_POWER | MASK_POWER2
1637 | MASK_MULTIPLE | MASK_STRING);
1639 case OPT_mno_powerpc:
1640 target_flags &= ~(MASK_POWERPC | MASK_PPC_GPOPT
1641 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1642 target_flags_explicit |= (MASK_POWERPC | MASK_PPC_GPOPT
1643 | MASK_PPC_GFXOPT | MASK_POWERPC64);
1646 target_flags &= ~MASK_MINIMAL_TOC;
1647 TARGET_NO_FP_IN_TOC = 0;
1648 TARGET_NO_SUM_IN_TOC = 0;
1649 target_flags_explicit |= MASK_MINIMAL_TOC;
1650 #ifdef TARGET_USES_SYSV4_OPT
1651 /* Note, V.4 no longer uses a normal TOC, so make -mfull-toc, be
1652 just the same as -mminimal-toc. */
1653 target_flags |= MASK_MINIMAL_TOC;
1654 target_flags_explicit |= MASK_MINIMAL_TOC;
1658 #ifdef TARGET_USES_SYSV4_OPT
1660 /* Make -mtoc behave like -mminimal-toc. */
1661 target_flags |= MASK_MINIMAL_TOC;
1662 target_flags_explicit |= MASK_MINIMAL_TOC;
1666 #ifdef TARGET_USES_AIX64_OPT
1671 target_flags |= MASK_POWERPC64 | MASK_POWERPC;
1672 target_flags |= ~target_flags_explicit & MASK_PPC_GFXOPT;
1673 target_flags_explicit |= MASK_POWERPC64 | MASK_POWERPC;
1676 #ifdef TARGET_USES_AIX64_OPT
1681 target_flags &= ~MASK_POWERPC64;
1682 target_flags_explicit |= MASK_POWERPC64;
1685 case OPT_minsert_sched_nops_:
1686 rs6000_sched_insert_nops_str = arg;
1689 case OPT_mminimal_toc:
1692 TARGET_NO_FP_IN_TOC = 0;
1693 TARGET_NO_SUM_IN_TOC = 0;
1700 target_flags |= (MASK_MULTIPLE | MASK_STRING);
1701 target_flags_explicit |= (MASK_MULTIPLE | MASK_STRING);
1708 target_flags |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1709 target_flags_explicit |= (MASK_POWER | MASK_MULTIPLE | MASK_STRING);
1713 case OPT_mpowerpc_gpopt:
1714 case OPT_mpowerpc_gfxopt:
1717 target_flags |= MASK_POWERPC;
1718 target_flags_explicit |= MASK_POWERPC;
1722 case OPT_maix_struct_return:
1723 case OPT_msvr4_struct_return:
1724 rs6000_explicit_options.aix_struct_ret = true;
1728 rs6000_parse_yes_no_option ("vrsave", arg, &(TARGET_ALTIVEC_VRSAVE));
1732 rs6000_explicit_options.isel = true;
1733 rs6000_parse_yes_no_option ("isel", arg, &(rs6000_isel));
1737 rs6000_explicit_options.spe = true;
1738 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
1739 /* No SPE means 64-bit long doubles, even if an E500. */
1741 rs6000_long_double_type_size = 64;
1745 rs6000_debug_name = arg;
1748 #ifdef TARGET_USES_SYSV4_OPT
1750 rs6000_abi_name = arg;
1754 rs6000_sdata_name = arg;
1757 case OPT_mtls_size_:
1758 rs6000_tls_size_string = arg;
1761 case OPT_mrelocatable:
1764 target_flags |= MASK_MINIMAL_TOC;
1765 target_flags_explicit |= MASK_MINIMAL_TOC;
1766 TARGET_NO_FP_IN_TOC = 1;
1770 case OPT_mrelocatable_lib:
1773 target_flags |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1774 target_flags_explicit |= MASK_RELOCATABLE | MASK_MINIMAL_TOC;
1775 TARGET_NO_FP_IN_TOC = 1;
1779 target_flags &= ~MASK_RELOCATABLE;
1780 target_flags_explicit |= MASK_RELOCATABLE;
/* -mabi= sub-argument dispatch.  */
1786 if (!strcmp (arg, "altivec"))
1788 rs6000_explicit_options.abi = true;
1789 rs6000_altivec_abi = 1;
1792 else if (! strcmp (arg, "no-altivec"))
1794 /* ??? Don't set rs6000_explicit_options.abi here, to allow
1795 the default for rs6000_spe_abi to be chosen later. */
1796 rs6000_altivec_abi = 0;
1798 else if (! strcmp (arg, "spe"))
1800 rs6000_explicit_options.abi = true;
1802 rs6000_altivec_abi = 0;
1803 if (!TARGET_SPE_ABI)
1804 error ("not configured for ABI: '%s'", arg);
1806 else if (! strcmp (arg, "no-spe"))
1808 rs6000_explicit_options.abi = true;
1812 /* These are here for testing during development only, do not
1813 document in the manual please. */
1814 else if (! strcmp (arg, "d64"))
1816 rs6000_darwin64_abi = 1;
1817 warning (0, "Using darwin64 ABI");
1819 else if (! strcmp (arg, "d32"))
1821 rs6000_darwin64_abi = 0;
1822 warning (0, "Using old darwin ABI");
1825 else if (! strcmp (arg, "ibmlongdouble"))
1827 rs6000_explicit_options.ieee = true;
1828 rs6000_ieeequad = 0;
1829 warning (0, "Using IBM extended precision long double");
1831 else if (! strcmp (arg, "ieeelongdouble"))
1833 rs6000_explicit_options.ieee = true;
1834 rs6000_ieeequad = 1;
1835 warning (0, "Using IEEE extended precision long double");
1840 error ("unknown ABI specified: '%s'", arg);
1846 rs6000_select[1].string = arg;
1850 rs6000_select[2].string = arg;
1853 case OPT_mtraceback_:
1854 rs6000_traceback_name = arg;
1857 case OPT_mfloat_gprs_:
1858 rs6000_explicit_options.float_gprs = true;
1859 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
1860 rs6000_float_gprs = 1;
1861 else if (! strcmp (arg, "double"))
1862 rs6000_float_gprs = 2;
1863 else if (! strcmp (arg, "no"))
1864 rs6000_float_gprs = 0;
1867 error ("invalid option for -mfloat-gprs: '%s'", arg);
1872 case OPT_mlong_double_:
1873 rs6000_explicit_options.long_double = true;
1874 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1875 if (value != 64 && value != 128)
1877 error ("Unknown switch -mlong-double-%s", arg);
1878 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
1882 rs6000_long_double_type_size = value;
1885 case OPT_msched_costly_dep_:
1886 rs6000_sched_costly_dep_str = arg;
1890 rs6000_explicit_options.alignment = true;
1891 if (! strcmp (arg, "power"))
1893 /* On 64-bit Darwin, power alignment is ABI-incompatible with
1894 some C library functions, so warn about it. The flag may be
1895 useful for performance studies from time to time though, so
1896 don't disable it entirely. */
1897 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT)
1898 warning (0, "-malign-power is not supported for 64-bit Darwin;"
1899 " it is incompatible with the installed C and C++ libraries");
1900 rs6000_alignment_flags = MASK_ALIGN_POWER;
1902 else if (! strcmp (arg, "natural"))
1903 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1906 error ("unknown -malign-XXXXX option specified: '%s'", arg);
1914 /* Do anything needed at the start of the asm file. */
/* Emits a verbose-asm banner listing the -mcpu/-mtune selections and any
   sdata/-G settings, then (for AIX-style or PIC ELF) switches through the
   TOC section so it comes first in the output file.
   NOTE(review): the return type, braces, local declarations around
   `buffer', and several `start = "";' resets are missing from this
   extraction (numbering jumps 1917 -> 1921, 1943 -> 1948, ...).  */
1917 rs6000_file_start (void)
1921 const char *start = buffer;
1922 struct rs6000_cpu_select *ptr;
1923 const char *default_cpu = TARGET_CPU_DEFAULT;
1924 FILE *file = asm_out_file;
1926 default_file_start ();
1928 #ifdef TARGET_BI_ARCH
1929 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1933 if (flag_verbose_asm)
1935 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1936 rs6000_select[0].string = default_cpu;
1938 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1940 ptr = &rs6000_select[i];
1941 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1943 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1948 if (PPC405_ERRATUM77)
1950 fprintf (file, "%s PPC405CR_ERRATUM77", start);
1954 #ifdef USING_ELFOS_H
1955 switch (rs6000_sdata)
1957 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1958 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1959 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1960 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1963 if (rs6000_sdata && g_switch_value)
1965 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1975 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
1977 switch_to_section (toc_section);
1978 switch_to_section (text_section);
1983 /* Return nonzero if this function is known to have a null epilogue. */
/* True only after reload, when the computed stack frame shows nothing to
   restore: no callee-saved GPRs/FPRs/AltiVec regs, no LR/CR save, empty
   VRSAVE mask.  NOTE(review): the return type, braces, the remaining
   conjuncts of this condition and the `return' statements are missing
   from this extraction (numbering jumps 1986 -> 1988, 1988 -> 1990).  */
1986 direct_return (void)
1988 if (reload_completed)
1990 rs6000_stack_t *info = rs6000_stack_info ();
1992 if (info->first_gp_reg_save == 32
1993 && info->first_fp_reg_save == 64
1994 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1995 && ! info->lr_save_p
1996 && ! info->cr_save_p
1997 && info->vrsave_mask == 0
2005 /* Return the number of instructions it takes to form a constant in an
2006 integer register. */
/* One insn for a 16-bit signed value (addi) or a shifted 16-bit value
   (addis); otherwise, on 64-bit targets, recurse on the high and low
   halves.  NOTE(review): the return type, braces, the `return 1;' /
   `return 2;' statements and the closing #endif are missing from this
   extraction (numbering jumps 2012 -> 2015, 2026 -> 2032, ...).  */
2009 num_insns_constant_wide (HOST_WIDE_INT value)
2011 /* signed constant loadable with {cal|addi} */
2012 if ((unsigned HOST_WIDE_INT) (value + 0x8000) < 0x10000)
2015 /* constant loadable with {cau|addis} */
2016 else if ((value & 0xffff) == 0
2017 && (value >> 31 == -1 || value >> 31 == 0))
2020 #if HOST_BITS_PER_WIDE_INT == 64
2021 else if (TARGET_POWERPC64)
2023 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
2024 HOST_WIDE_INT high = value >> 31;
2026 if (high == 0 || high == -1)
2032 return num_insns_constant_wide (high) + 1;
2034 return (num_insns_constant_wide (high)
2035 + num_insns_constant_wide (low) + 1);
/* Return the number of insns needed to load constant rtx OP of mode MODE
   into a GPR, dispatching on CONST_INT vs. CONST_DOUBLE (and, for the
   latter, SF/DF vs. DImode images).
   NOTE(review): the return type, braces, `case CONST_INT:' /
   `case CONST_DOUBLE:' labels, local declarations of `rv' and `l', and
   several returns/#endif lines are missing from this extraction (the
   embedded numbering jumps 2044 -> 2046, 2057 -> 2065, 2083 -> 2087...).  */
2044 num_insns_constant (rtx op, enum machine_mode mode)
2046 HOST_WIDE_INT low, high;
2048 switch (GET_CODE (op))
2051 #if HOST_BITS_PER_WIDE_INT == 64
2052 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
2053 && mask64_operand (op, mode))
2057 return num_insns_constant_wide (INTVAL (op));
/* Single-float image: convert to target SF bits and count as a word.  */
2065 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2066 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2067 return num_insns_constant_wide ((HOST_WIDE_INT) l);
2070 if (mode == VOIDmode || mode == DImode)
2072 high = CONST_DOUBLE_HIGH (op);
2073 low = CONST_DOUBLE_LOW (op);
/* Double-float image: split into the two target words, endian-aware.  */
2080 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
2081 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
2082 high = l[WORDS_BIG_ENDIAN == 0];
2083 low = l[WORDS_BIG_ENDIAN != 0];
2087 return (num_insns_constant_wide (low)
2088 + num_insns_constant_wide (high));
2091 if ((high == 0 && low >= 0)
2092 || (high == -1 && low < 0))
2093 return num_insns_constant_wide (low);
2095 else if (mask64_operand (op, mode))
2099 return num_insns_constant_wide (high) + 1;
2102 return (num_insns_constant_wide (high)
2103 + num_insns_constant_wide (low) + 1);
2112 /* Return true if OP can be synthesized with a particular vspltisb, vspltish
2113 or vspltisw instruction. OP is a CONST_VECTOR. Which instruction is used
2114 depends on STEP and COPIES, one of which will be 1. If COPIES > 1,
2115 all items are set to the same value and contain COPIES replicas of the
2116 vsplt's operand; if STEP > 1, one in STEP elements is set to the vsplt's
2117 operand and the others are set to the value of the operand's msb. */
2120 vspltis_constant (rtx op, unsigned step, unsigned copies)
2122 enum machine_mode mode = GET_MODE (op);
2123 enum machine_mode inner = GET_MODE_INNER (mode);
2126 unsigned nunits = GET_MODE_NUNITS (mode);
2127 unsigned bitsize = GET_MODE_BITSIZE (inner);
2128 unsigned mask = GET_MODE_MASK (inner);
/* The last element determines the candidate splat value; MSB_VAL is
   the sign-fill value for the non-splatted positions.  */
2130 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2131 HOST_WIDE_INT val = INTVAL (last);
2132 HOST_WIDE_INT splat_val = val;
2133 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1;
2135 /* Construct the value to be splatted, if possible. If not, return 0. */
2136 for (i = 2; i <= copies; i *= 2)
2138 HOST_WIDE_INT small_val;
2140 small_val = splat_val >> bitsize;
/* Each halving must find two identical copies of the smaller value.  */
2142 if (splat_val != ((small_val << bitsize) | (small_val & mask)))
2144 splat_val = small_val;
2147 /* Check if SPLAT_VAL can really be the operand of a vspltis[bhw]. */
2148 if (EASY_VECTOR_15 (splat_val))
2151 /* Also check if we can splat, and then add the result to itself. Do so if
2152 the value is positive, of if the splat instruction is using OP's mode;
2153 for splat_val < 0, the splat and the add should use the same mode. */
2154 else if (EASY_VECTOR_15_ADD_SELF (splat_val)
2155 && (splat_val >= 0 || (step == 1 && copies == 1)))
2161 /* Check if VAL is present in every STEP-th element, and the
2162 other elements are filled with its most significant bit. */
2163 for (i = 0; i < nunits - 1; ++i)
2165 HOST_WIDE_INT desired_val;
2166 if (((i + 1) & (step - 1)) == 0)
2169 desired_val = msb_val;
2171 if (desired_val != INTVAL (CONST_VECTOR_ELT (op, i)))
2179 /* Return true if OP is of the given MODE and can be synthesized
2180 with a vspltisb, vspltish or vspltisw. */
/* Tries the three splat widths in order: word (step = nunits/4),
   then halfword, then byte; STEP/COPIES updates between tries are
   elided in this excerpt.  */
2183 easy_altivec_constant (rtx op, enum machine_mode mode)
2185 unsigned step, copies;
2187 if (mode == VOIDmode)
2188 mode = GET_MODE (op);
2189 else if (mode != GET_MODE (op))
2192 /* Start with a vspltisw. */
2193 step = GET_MODE_NUNITS (mode) / 4;
2196 if (vspltis_constant (op, step, copies))
2199 /* Then try with a vspltish. */
2205 if (vspltis_constant (op, step, copies))
2208 /* And finally a vspltisb. */
2214 if (vspltis_constant (op, step, copies))
2220 /* Generate a VEC_DUPLICATE representing a vspltis[bhw] instruction whose
2221 result is OP. Abort if it is not possible. */
/* Mirrors easy_altivec_constant: probes word, halfword and byte splat
   widths and returns the VEC_DUPLICATE in the matching vector mode,
   built from the low part of OP's last element.  */
2224 gen_easy_altivec_constant (rtx op)
2226 enum machine_mode mode = GET_MODE (op);
2227 int nunits = GET_MODE_NUNITS (mode);
2228 rtx last = CONST_VECTOR_ELT (op, nunits - 1);
2229 unsigned step = nunits / 4;
2230 unsigned copies = 1;
2232 /* Start with a vspltisw. */
2233 if (vspltis_constant (op, step, copies))
2234 return gen_rtx_VEC_DUPLICATE (V4SImode, gen_lowpart (SImode, last));
2236 /* Then try with a vspltish. */
2242 if (vspltis_constant (op, step, copies))
2243 return gen_rtx_VEC_DUPLICATE (V8HImode, gen_lowpart (HImode, last));
2245 /* And finally a vspltisb. */
2251 if (vspltis_constant (op, step, copies))
2252 return gen_rtx_VEC_DUPLICATE (V16QImode, gen_lowpart (QImode, last));
/* Return the assembler template for moving a vector constant into a
   register: vxor for zero, vspltis[bhw] for easy AltiVec constants,
   or li/evmergelo sequences on SPE.  NOTE(review): the declarations of
   dest/vec/cst/cst2 and some branches are elided in this excerpt.  */
2258 output_vec_const_move (rtx *operands)
2261 enum machine_mode mode;
2266 mode = GET_MODE (dest);
2271 if (zero_constant (vec, mode))
2272 return "vxor %0,%0,%0";
2274 splat_vec = gen_easy_altivec_constant (vec);
2275 gcc_assert (GET_CODE (splat_vec) == VEC_DUPLICATE);
2276 operands[1] = XEXP (splat_vec, 0);
2277 if (!EASY_VECTOR_15 (INTVAL (operands[1])))
/* Pick the splat opcode from the mode chosen above.  */
2280 switch (GET_MODE (splat_vec))
2283 return "vspltisw %0,%1";
2286 return "vspltish %0,%1";
2289 return "vspltisb %0,%1";
2296 gcc_assert (TARGET_SPE);
2298 /* Vector constant 0 is handled as a splitter of V2SI, and in the
2299 pattern of V1DI, V4HI, and V2SF.
2301 FIXME: We should probably return # and add post reload
2302 splitters for these, but this way is so easy ;-). */
2303 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
2304 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
2305 operands[1] = CONST_VECTOR_ELT (vec, 0);
2306 operands[2] = CONST_VECTOR_ELT (vec, 1);
2308 return "li %0,%1\n\tevmergelo %0,%0,%0";
2310 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
2313 /* Initialize vector TARGET to VALS. */
/* Strategy, in order: all-zero -> xor; easy constant -> splat insn;
   all-same variable element -> store/load/splat via stack temp;
   all-constant -> constant pool; one variable field -> init constants
   then rs6000_expand_vector_set; otherwise build the vector in memory
   field by field and load it whole.  */
2316 rs6000_expand_vector_init (rtx target, rtx vals)
2318 enum machine_mode mode = GET_MODE (target);
2319 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2320 int n_elts = GET_MODE_NUNITS (mode);
2321 int n_var = 0, one_var = -1;
2322 bool all_same = true, all_const_zero = true;
/* Classify the initializer elements.  */
2326 for (i = 0; i < n_elts; ++i)
2328 x = XVECEXP (vals, 0, i);
2329 if (!CONSTANT_P (x))
2330 ++n_var, one_var = i;
2331 else if (x != CONST0_RTX (inner_mode))
2332 all_const_zero = false;
2334 if (i > 0 && !rtx_equal_p (x, XVECEXP (vals, 0, 0)))
2340 if (mode != V4SFmode && all_const_zero)
2342 /* Zero register. */
2343 emit_insn (gen_rtx_SET (VOIDmode, target,
2344 gen_rtx_XOR (mode, target, target)));
2347 else if (mode != V4SFmode && easy_vector_constant (vals, mode))
2349 /* Splat immediate. */
2350 emit_insn (gen_rtx_SET (VOIDmode, target, vals));
2354 ; /* Splat vector element. */
2357 /* Load from constant pool. */
2358 emit_move_insn (target, gen_rtx_CONST_VECTOR (mode, XVEC (vals, 0)));
2363 /* Store value to stack temp. Load vector element. Splat. */
2366 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2367 emit_move_insn (adjust_address_nv (mem, inner_mode, 0),
2368 XVECEXP (vals, 0, 0));
2369 x = gen_rtx_UNSPEC (VOIDmode,
2370 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2371 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2373 gen_rtx_SET (VOIDmode,
/* Duplicate element 0 of TARGET across all lanes.  */
2376 x = gen_rtx_VEC_SELECT (inner_mode, target,
2377 gen_rtx_PARALLEL (VOIDmode,
2378 gen_rtvec (1, const0_rtx)));
2379 emit_insn (gen_rtx_SET (VOIDmode, target,
2380 gen_rtx_VEC_DUPLICATE (mode, x)));
2384 /* One field is non-constant. Load constant then overwrite
2388 rtx copy = copy_rtx (vals);
2390 /* Load constant part of vector, substitute neighboring value for
2392 XVECEXP (copy, 0, one_var) = XVECEXP (vals, 0, (one_var + 1) % n_elts);
2393 rs6000_expand_vector_init (target, copy);
2395 /* Insert variable. */
2396 rs6000_expand_vector_set (target, XVECEXP (vals, 0, one_var), one_var);
2400 /* Construct the vector in memory one field at a time
2401 and load the whole vector. */
2402 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2403 for (i = 0; i < n_elts; i++)
2404 emit_move_insn (adjust_address_nv (mem, inner_mode,
2405 i * GET_MODE_SIZE (inner_mode)),
2406 XVECEXP (vals, 0, i));
2407 emit_move_insn (target, mem);
2410 /* Set field ELT of TARGET to VAL. */
/* Loads VAL into a scratch vector via a stack temp + UNSPEC_LVE, then
   builds a vperm mask that selects TARGET's bytes everywhere except
   ELT's byte range, which it takes from the scratch register.  */
2413 rs6000_expand_vector_set (rtx target, rtx val, int elt)
2415 enum machine_mode mode = GET_MODE (target);
2416 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2417 rtx reg = gen_reg_rtx (mode);
2419 int width = GET_MODE_SIZE (inner_mode);
2422 /* Load single variable value. */
2423 mem = assign_stack_temp (mode, GET_MODE_SIZE (inner_mode), 0);
2424 emit_move_insn (adjust_address_nv (mem, inner_mode, 0), val);
2425 x = gen_rtx_UNSPEC (VOIDmode,
2426 gen_rtvec (1, const0_rtx), UNSPEC_LVE);
2427 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2429 gen_rtx_SET (VOIDmode,
2433 /* Linear sequence. */
/* Identity permute: byte i of the result comes from byte i of TARGET.  */
2434 mask = gen_rtx_PARALLEL (V16QImode, rtvec_alloc (16));
2435 for (i = 0; i < 16; ++i)
2436 XVECEXP (mask, 0, i) = GEN_INT (i);
2438 /* Set permute mask to insert element into target. */
/* Indices 0x10..0x1f select bytes from the second vperm operand.  */
2439 for (i = 0; i < width; ++i)
2440 XVECEXP (mask, 0, elt*width + i)
2441 = GEN_INT (i + 0x10);
2442 x = gen_rtx_CONST_VECTOR (V16QImode, XVEC (mask, 0));
2443 x = gen_rtx_UNSPEC (mode,
2444 gen_rtvec (3, target, reg,
2445 force_reg (V16QImode, x)),
2447 emit_insn (gen_rtx_SET (VOIDmode, target, x));
2450 /* Extract field ELT from VEC into TARGET. */
/* Spills the element to a stack temp with UNSPEC_STVE at the offset
   matching ELT, then reloads just that field into TARGET.  */
2453 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
2455 enum machine_mode mode = GET_MODE (vec);
2456 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2459 /* Allocate mode-sized buffer. */
2460 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2462 /* Add offset to field within buffer matching vector element. */
2463 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode));
2465 /* Store single field into mode-sized buffer. */
2466 x = gen_rtx_UNSPEC (VOIDmode,
2467 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
2468 emit_insn (gen_rtx_PARALLEL (VOIDmode,
2470 gen_rtx_SET (VOIDmode,
2473 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
2476 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2477 implement ANDing by the mask IN. */
/* OUT receives: [0] = rotate count (64 - shift), [1] = first mask,
   [2] = inverse rotate, [3] = second mask.  The worked hex examples in
   the comments below trace one concrete input through each step.  */
2479 build_mask64_2_operands (rtx in, rtx *out)
2481 #if HOST_BITS_PER_WIDE_INT >= 64
2482 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2485 gcc_assert (GET_CODE (in) == CONST_INT);
2490 /* Assume c initially something like 0x00fff000000fffff. The idea
2491 is to rotate the word so that the middle ^^^^^^ group of zeros
2492 is at the MS end and can be cleared with an rldicl mask. We then
2493 rotate back and clear off the MS ^^ group of zeros with a
2495 c = ~c; /* c == 0xff000ffffff00000 */
2496 lsb = c & -c; /* lsb == 0x0000000000100000 */
2497 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2498 c = ~c; /* c == 0x00fff000000fffff */
2499 c &= -lsb; /* c == 0x00fff00000000000 */
2500 lsb = c & -c; /* lsb == 0x0000100000000000 */
2501 c = ~c; /* c == 0xff000fffffffffff */
2502 c &= -lsb; /* c == 0xff00000000000000 */
2504 while ((lsb >>= 1) != 0)
2505 shift++; /* shift == 44 on exit from loop */
2506 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2507 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2508 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2512 /* Assume c initially something like 0xff000f0000000000. The idea
2513 is to rotate the word so that the ^^^ middle group of zeros
2514 is at the LS end and can be cleared with an rldicr mask. We then
2515 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2517 lsb = c & -c; /* lsb == 0x0000010000000000 */
2518 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2519 c = ~c; /* c == 0x00fff0ffffffffff */
2520 c &= -lsb; /* c == 0x00fff00000000000 */
2521 lsb = c & -c; /* lsb == 0x0000100000000000 */
2522 c = ~c; /* c == 0xff000fffffffffff */
2523 c &= -lsb; /* c == 0xff00000000000000 */
2525 while ((lsb >>= 1) != 0)
2526 shift++; /* shift == 44 on exit from loop */
2527 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2528 m1 >>= shift; /* m1 == 0x0000000000000fff */
2529 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2532 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2533 masks will be all 1's. We are guaranteed more than one transition. */
2534 out[0] = GEN_INT (64 - shift);
2535 out[1] = GEN_INT (m1);
2536 out[2] = GEN_INT (shift);
2537 out[3] = GEN_INT (m2);
2545 /* Return TRUE if OP is an invalid SUBREG operation on the e500. */
/* The e500 keeps DFmode values in GPR pairs with special handling, so
   narrowing/widening subregs between DF and SI/DI registers cannot be
   expressed.  NOTE(review): the mode checks on the subreg itself are
   elided in this excerpt.  */
2548 invalid_e500_subreg (rtx op, enum machine_mode mode)
2550 /* Reject (subreg:SI (reg:DF)). */
2551 if (GET_CODE (op) == SUBREG
2553 && REG_P (SUBREG_REG (op))
2554 && GET_MODE (SUBREG_REG (op)) == DFmode)
2557 /* Reject (subreg:DF (reg:DI)). */
2558 if (GET_CODE (op) == SUBREG
2560 && REG_P (SUBREG_REG (op))
2561 && GET_MODE (SUBREG_REG (op)) == DImode)
2567 /* Darwin, AIX increases natural record alignment to doubleword if the first
2568 field is an FP double while the FP fields remain word aligned. */
/* Returns the adjusted alignment: MAX of COMPUTED/SPECIFIED, bumped to
   64 bits when the first FIELD_DECL (looking through arrays) is a
   double.  */
2571 rs6000_special_round_type_align (tree type, unsigned int computed,
2572 unsigned int specified)
2574 unsigned int align = MAX (computed, specified);
2575 tree field = TYPE_FIELDS (type);
2577 /* Skip all non field decls */
2578 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
2579 field = TREE_CHAIN (field);
2581 if (field != NULL && field != type)
2583 type = TREE_TYPE (field);
/* Strip array dimensions to reach the element type.  */
2584 while (TREE_CODE (type) == ARRAY_TYPE)
2585 type = TREE_TYPE (type);
2587 if (type != error_mark_node && TYPE_MODE (type) == DFmode)
2588 align = MAX (align, 64);
2594 /* Return 1 for an operand in small memory on V.4/eabi. */
/* Accepts a SYMBOL_REF, or (const (plus symbol const_int)) whose
   addend keeps the referenced address within the -G small-data window,
   provided the ABI is V.4 and -msdata is eabi/sysv.  */
2597 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2598 enum machine_mode mode ATTRIBUTE_UNUSED)
2603 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2606 if (DEFAULT_ABI != ABI_V4)
2609 if (GET_CODE (op) == SYMBOL_REF)
2612 else if (GET_CODE (op) != CONST
2613 || GET_CODE (XEXP (op, 0)) != PLUS
2614 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2615 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2620 rtx sum = XEXP (op, 0);
2621 HOST_WIDE_INT summand;
2623 /* We have to be careful here, because it is the referenced address
2624 that must be 32k from _SDA_BASE_, not just the symbol. */
2625 summand = INTVAL (XEXP (sum, 1));
2626 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2629 sym_ref = XEXP (sum, 0);
2632 return SYMBOL_REF_SMALL_P (sym_ref);
2638 /* Return true if either operand is a general purpose register. */
2641 gpr_or_gpr_p (rtx op0, rtx op1)
2643 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2644 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2648 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
/* Walk OP, setting *HAVE_SYM when a constant-pool symbol is found and
   *HAVE_TOC when the TOC label is referenced; recurses through PLUS
   and unary wrappers.  NOTE(review): the return values of the leaf
   cases are elided in this excerpt.  */
2651 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2653 switch (GET_CODE (op))
2656 if (RS6000_SYMBOL_REF_TLS_P (op))
2658 else if (CONSTANT_POOL_ADDRESS_P (op))
2660 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2668 else if (! strcmp (XSTR (op, 0), toc_label_name))
2677 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2678 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc))
2680 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
/* Return nonzero if OP refers to a constant-pool symbol.  */
2689 constant_pool_expr_p (rtx op)
2693 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
/* Return nonzero if OP references the TOC base label.  */
2697 toc_relative_expr_p (rtx op)
2701 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
/* Return nonzero if X is a (plus TOC-reg constant-pool-expr) address.
   NOTE(review): the leading condition of the conjunction is elided in
   this excerpt.  */
2705 legitimate_constant_pool_address_p (rtx x)
2708 && GET_CODE (x) == PLUS
2709 && GET_CODE (XEXP (x, 0)) == REG
2710 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2711 && constant_pool_expr_p (XEXP (x, 1)));
/* Return nonzero if X is a small-data reference usable as an address:
   V.4 ABI, non-PIC, non-TOC, and within the -G threshold.  */
2715 legitimate_small_data_p (enum machine_mode mode, rtx x)
2717 return (DEFAULT_ABI == ABI_V4
2718 && !flag_pic && !TARGET_TOC
2719 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2720 && small_data_operand (x, mode));
2723 /* SPE offset addressing is limited to 5-bits worth of double words. */
2724 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
/* Return nonzero if X is a valid (reg + const_int) address for MODE.
   The allowed offset range depends on the mode: SPE/e500-double modes
   use the 5-bit doubleword window above; multi-register modes reserve
   EXTRA bytes so the last word is still addressable; everything must
   fit in a signed 16-bit displacement.  NOTE(review): the switch head
   and several case labels are elided in this excerpt.  */
2727 rs6000_legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2729 unsigned HOST_WIDE_INT offset, extra;
2731 if (GET_CODE (x) != PLUS)
2733 if (GET_CODE (XEXP (x, 0)) != REG)
2735 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2737 if (legitimate_constant_pool_address_p (x))
2739 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2742 offset = INTVAL (XEXP (x, 1));
2750 /* AltiVec vector modes. Only reg+reg addressing is valid and
2751 constant offset zero should not occur due to canonicalization.
2752 Allow any offset when not strict before reload. */
2759 /* SPE vector modes. */
2760 return SPE_CONST_OFFSET_OK (offset);
2763 if (TARGET_E500_DOUBLE)
2764 return SPE_CONST_OFFSET_OK (offset);
2767 /* On e500v2, we may have:
2769 (subreg:DF (mem:DI (plus (reg) (const_int))) 0).
2771 Which gets addressed with evldd instructions. */
2772 if (TARGET_E500_DOUBLE)
2773 return SPE_CONST_OFFSET_OK (offset);
2775 if (mode == DFmode || !TARGET_POWERPC64)
2777 else if (offset & 3)
2783 if (mode == TFmode || !TARGET_POWERPC64)
2785 else if (offset & 3)
2796 return (offset < 0x10000) && (offset + extra < 0x10000);
/* Return nonzero if X is a valid reg+reg (indexed) address; either
   operand may serve as base with the other as index.  NOTE(review):
   the op0/op1 extraction lines are elided in this excerpt.  */
2800 legitimate_indexed_address_p (rtx x, int strict)
2804 if (GET_CODE (x) != PLUS)
2810 /* Recognize the rtl generated by reload which we know will later be
2811 replaced with proper base and index regs. */
2813 && reload_in_progress
2814 && (REG_P (op0) || GET_CODE (op0) == PLUS)
2818 return (REG_P (op0) && REG_P (op1)
2819 && ((INT_REG_OK_FOR_BASE_P (op0, strict)
2820 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2821 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2822 && INT_REG_OK_FOR_INDEX_P (op0, strict))));
/* Return nonzero if X is a plain register-indirect address.  */
2826 legitimate_indirect_address_p (rtx x, int strict)
2828 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
/* Return nonzero if X is a Darwin PIC (mem (lo_sum reg constant))
   SImode operand.  NOTE(review): the step that strips the MEM and the
   extraction of the LO_SUM second operand are elided in this
   excerpt.  */
2832 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2834 if (!TARGET_MACHO || !flag_pic
2835 || mode != SImode || GET_CODE (x) != MEM)
2839 if (GET_CODE (x) != LO_SUM)
2841 if (GET_CODE (XEXP (x, 0)) != REG)
2843 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2847 return CONSTANT_P (x);
/* Return nonzero if X is a valid (lo_sum reg constant) address for
   MODE.  Restricted on e500-double, limited to scalar modes that fit
   the available registers, and requires an ELF or Mach-O target.  */
2851 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2853 if (GET_CODE (x) != LO_SUM)
2855 if (GET_CODE (XEXP (x, 0)) != REG)
2857 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2859 /* Restrict addressing for DI because of our SUBREG hackery. */
2860 if (TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
2864 if (TARGET_ELF || TARGET_MACHO)
2866 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2870 if (GET_MODE_NUNITS (mode) != 1)
2872 if (GET_MODE_BITSIZE (mode) > 64
2873 || (GET_MODE_BITSIZE (mode) > 32 && !TARGET_POWERPC64
2874 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode)))
2877 return CONSTANT_P (x);
2884 /* Try machine-dependent ways of modifying an illegitimate address
2885 to be legitimate. If we find one, return the new, valid address.
2886 This is used from only one place: `memory_address' in explow.c.
2888 OLDX is the address as it was before break_out_memory_refs was
2889 called. In some cases it is useful to look at this to decide what
2892 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2894 It is always safe for this function to do nothing. It exists to
2895 recognize opportunities to optimize the output.
2897 On RS/6000, first check for the sum of a register with a constant
2898 integer that is out of range. If so, generate code to add the
2899 constant with the low-order 16 bits masked to the register and force
2900 this result into another register (this can be done with `cau').
2901 Then generate an address of REG+(CONST&0xffff), allowing for the
2902 possibility of bit 16 being a one.
2904 Then check for the sum of a register and something not constant, try to
2905 load the other things into a register and return the sum. */
2908 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2909 enum machine_mode mode)
/* TLS symbols go through the dedicated TLS legitimizer.  */
2911 if (GET_CODE (x) == SYMBOL_REF)
2913 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2915 return rs6000_legitimize_tls_address (x, model);
2918 if (GET_CODE (x) == PLUS
2919 && GET_CODE (XEXP (x, 0)) == REG
2920 && GET_CODE (XEXP (x, 1)) == CONST_INT
2921 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
/* Out-of-range displacement: split into a sign-adjusted high part
   added to the base register and a 16-bit low part in the address.  */
2923 HOST_WIDE_INT high_int, low_int;
2925 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2926 high_int = INTVAL (XEXP (x, 1)) - low_int;
2927 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2928 GEN_INT (high_int)), 0);
2929 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2931 else if (GET_CODE (x) == PLUS
2932 && GET_CODE (XEXP (x, 0)) == REG
2933 && GET_CODE (XEXP (x, 1)) != CONST_INT
2934 && GET_MODE_NUNITS (mode) == 1
2935 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2937 || (((mode != DImode && mode != DFmode) || TARGET_E500_DOUBLE)
2939 && (TARGET_POWERPC64 || mode != DImode)
/* reg + non-constant: force the addend into a register (reg+reg).  */
2942 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2943 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2945 else if (ALTIVEC_VECTOR_MODE (mode))
2949 /* Make sure both operands are registers. */
2950 if (GET_CODE (x) == PLUS)
2951 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2952 force_reg (Pmode, XEXP (x, 1)));
2954 reg = force_reg (Pmode, x);
2957 else if (SPE_VECTOR_MODE (mode)
2958 || (TARGET_E500_DOUBLE && (mode == DFmode
2959 || mode == DImode)))
2963 /* We accept [reg + reg] and [reg + OFFSET]. */
2965 if (GET_CODE (x) == PLUS)
2967 rtx op1 = XEXP (x, 0);
2968 rtx op2 = XEXP (x, 1);
2970 op1 = force_reg (Pmode, op1);
/* Keep a small SPE-legal constant offset; otherwise register it.  */
2972 if (GET_CODE (op2) != REG
2973 && (GET_CODE (op2) != CONST_INT
2974 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2975 op2 = force_reg (Pmode, op2);
2977 return gen_rtx_PLUS (Pmode, op1, op2);
2980 return force_reg (Pmode, x);
/* ELF small-model: build HIGH/LO_SUM pair via elf_high.  */
2986 && GET_CODE (x) != CONST_INT
2987 && GET_CODE (x) != CONST_DOUBLE
2989 && GET_MODE_NUNITS (mode) == 1
2990 && (GET_MODE_BITSIZE (mode) <= 32
2991 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2993 rtx reg = gen_reg_rtx (Pmode);
2994 emit_insn (gen_elf_high (reg, x));
2995 return gen_rtx_LO_SUM (Pmode, reg, x);
2997 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
3000 && ! MACHO_DYNAMIC_NO_PIC_P
3002 && GET_CODE (x) != CONST_INT
3003 && GET_CODE (x) != CONST_DOUBLE
3005 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
3009 rtx reg = gen_reg_rtx (Pmode);
3010 emit_insn (gen_macho_high (reg, x));
3011 return gen_rtx_LO_SUM (Pmode, reg, x);
/* TOC-addressable constant-pool entry.  */
3014 && constant_pool_expr_p (x)
3015 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
3017 return create_TOC_reference (x);
3023 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
3024 We need to emit DTP-relative relocations. */
/* SIZE selects the directive (.long for 4 bytes, DOUBLE_INT_ASM_OP for
   8); the operand is emitted with an @dtprel+0x8000 relocation.  */
3027 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
3032 fputs ("\t.long\t", file);
3035 fputs (DOUBLE_INT_ASM_OP, file);
3040 output_addr_const (file, x);
3041 fputs ("@dtprel+0x8000", file);
3044 /* Construct the SYMBOL_REF for the tls_get_addr function. */
/* Lazily created and cached in a GC-rooted static.  */
3046 static GTY(()) rtx rs6000_tls_symbol;
3048 rs6000_tls_get_addr (void)
3050 if (!rs6000_tls_symbol)
3051 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
3053 return rs6000_tls_symbol;
3056 /* Construct the SYMBOL_REF for TLS GOT references. */
/* Lazily created, GC-rooted; marked both local and external so it is
   resolved like the linker-defined _GLOBAL_OFFSET_TABLE_ symbol.  */
3058 static GTY(()) rtx rs6000_got_symbol;
3060 rs6000_got_sym (void)
3062 if (!rs6000_got_symbol)
3064 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
3065 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
3066 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
3069 return rs6000_got_symbol;
3072 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
3073 this (thread-local) address. */
/* Emits the insn sequence for the given TLS MODEL: local-exec uses the
   thread pointer (r13 on 64-bit, r2 on 32-bit) directly with 16- or
   32-bit offsets; global-dynamic and local-dynamic call
   __tls_get_addr through the GOT pointer; initial-exec loads the
   offset from the GOT and adds the thread pointer.  Returns DEST.
   NOTE(review): some emit/return lines are elided in this excerpt.  */
3076 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
3080 dest = gen_reg_rtx (Pmode);
3081 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
3087 tlsreg = gen_rtx_REG (Pmode, 13);
3088 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
3092 tlsreg = gen_rtx_REG (Pmode, 2);
3093 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
3097 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
3101 tmp = gen_reg_rtx (Pmode);
3104 tlsreg = gen_rtx_REG (Pmode, 13);
3105 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
3109 tlsreg = gen_rtx_REG (Pmode, 2);
3110 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
3114 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
3116 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
3121 rtx r3, got, tga, tmp1, tmp2, eqv;
3123 /* We currently use relocations like @got@tlsgd for tls, which
3124 means the linker will handle allocation of tls entries, placing
3125 them in the .got section. So use a pointer to the .got section,
3126 not one to secondary TOC sections used by 64-bit -mminimal-toc,
3127 or to secondary GOT sections used by 32-bit -fPIC. */
3129 got = gen_rtx_REG (Pmode, 2);
3133 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
3136 rtx gsym = rs6000_got_sym ();
3137 got = gen_reg_rtx (Pmode);
3139 rs6000_emit_move (got, gsym, Pmode);
/* Materialize the GOT pointer by hand, wrapping the sequence in
   REG_LIBCALL/REG_RETVAL notes so it can be treated as a libcall.  */
3142 rtx tempLR, tmp3, mem;
3145 tempLR = gen_reg_rtx (Pmode);
3146 tmp1 = gen_reg_rtx (Pmode);
3147 tmp2 = gen_reg_rtx (Pmode);
3148 tmp3 = gen_reg_rtx (Pmode);
3149 mem = gen_const_mem (Pmode, tmp1);
3151 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, gsym));
3152 emit_move_insn (tmp1, tempLR);
3153 emit_move_insn (tmp2, mem);
3154 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
3155 last = emit_move_insn (got, tmp3);
3156 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
3158 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
3160 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
3166 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
3168 r3 = gen_rtx_REG (Pmode, 3);
3170 insn = gen_tls_gd_64 (r3, got, addr);
3172 insn = gen_tls_gd_32 (r3, got, addr);
3175 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3176 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3177 insn = emit_call_insn (insn);
3178 CONST_OR_PURE_CALL_P (insn) = 1;
3179 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3180 insn = get_insns ();
3182 emit_libcall_block (insn, dest, r3, addr);
3184 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
3186 r3 = gen_rtx_REG (Pmode, 3);
3188 insn = gen_tls_ld_64 (r3, got);
3190 insn = gen_tls_ld_32 (r3, got);
3193 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
3194 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
3195 insn = emit_call_insn (insn);
3196 CONST_OR_PURE_CALL_P (insn) = 1;
3197 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
3198 insn = get_insns ();
3200 tmp1 = gen_reg_rtx (Pmode);
3201 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
3203 emit_libcall_block (insn, tmp1, r3, eqv);
/* Add the module base in TMP1 to the DTP-relative offset of ADDR,
   using 16-bit, 32-bit, or GOT-loaded offsets per -mtls-size.  */
3204 if (rs6000_tls_size == 16)
3207 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3209 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3211 else if (rs6000_tls_size == 32)
3213 tmp2 = gen_reg_rtx (Pmode);
3215 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3217 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3220 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3222 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3226 tmp2 = gen_reg_rtx (Pmode);
3228 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3230 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3232 insn = gen_rtx_SET (Pmode, dest,
3233 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3239 /* IE, or 64 bit offset LE. */
3240 tmp2 = gen_reg_rtx (Pmode);
3242 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3244 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3247 insn = gen_tls_tls_64 (dest, tmp2, addr);
3249 insn = gen_tls_tls_32 (dest, tmp2, addr);
3257 /* Return 1 if X contains a thread-local symbol. */
/* Cheap early exit when the target has no TLS support at all.  */
3260 rs6000_tls_referenced_p (rtx x)
3262 if (! TARGET_HAVE_TLS)
3265 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3268 /* Return 1 if *X is a thread-local symbol. This is the same as
3269 rs6000_tls_symbol_ref except for the type of the unused argument. */
/* for_each_rtx callback; DATA is unused.  */
3272 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3274 return RS6000_SYMBOL_REF_TLS_P (*x);
3277 /* The convention appears to be to define this wherever it is used.
3278 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3279 is now used here. */
3280 #ifndef REG_MODE_OK_FOR_BASE_P
3281 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3284 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3285 replace the input X, or the original X if no replacement is called for.
3286 The output parameter *WIN is 1 if the calling macro should goto WIN,
3289 For RS/6000, we wish to handle large displacements off a base
3290 register by splitting the addend across an addiu/addis and the mem insn.
3291 This cuts number of extra insns needed from 3 to 1.
3293 On Darwin, we use this to generate code for floating point constants.
3294 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3295 The Darwin code is inside #if TARGET_MACHO because only then is
3296 machopic_function_base_name() defined. */
3298 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3299 int opnum, int type,
3300 int ind_levels ATTRIBUTE_UNUSED, int *win)
3302 /* We must recognize output that we have already generated ourselves. */
3303 if (GET_CODE (x) == PLUS
3304 && GET_CODE (XEXP (x, 0)) == PLUS
3305 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3306 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3307 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3309 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3310 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3311 opnum, (enum reload_type)type);
3317 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3318 && GET_CODE (x) == LO_SUM
3319 && GET_CODE (XEXP (x, 0)) == PLUS
3320 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3321 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3322 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3323 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3324 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3325 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3326 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3328 /* Result of previous invocation of this function on Darwin
3329 floating point constant. */
3330 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3331 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3332 opnum, (enum reload_type)type);
3338 /* Force ld/std non-word aligned offset into base register by wrapping
3340 if (GET_CODE (x) == PLUS
3341 && GET_CODE (XEXP (x, 0)) == REG
3342 && REGNO (XEXP (x, 0)) < 32
3343 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3344 && GET_CODE (XEXP (x, 1)) == CONST_INT
3345 && (INTVAL (XEXP (x, 1)) & 3) != 0
3346 && !ALTIVEC_VECTOR_MODE (mode)
3347 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
3348 && TARGET_POWERPC64)
/* Wrap in (plus X 0) so the whole sum is reloaded into a base reg.  */
3350 x = gen_rtx_PLUS (GET_MODE (x), x, GEN_INT (0));
3351 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3352 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3353 opnum, (enum reload_type) type);
3358 if (GET_CODE (x) == PLUS
3359 && GET_CODE (XEXP (x, 0)) == REG
3360 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3361 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3362 && GET_CODE (XEXP (x, 1)) == CONST_INT
3363 && !SPE_VECTOR_MODE (mode)
3364 && !(TARGET_E500_DOUBLE && (mode == DFmode
3366 && !ALTIVEC_VECTOR_MODE (mode))
/* Large displacement: split into sign-adjusted high and 16-bit low.  */
3368 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3369 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3371 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3373 /* Check for 32-bit overflow. */
3374 if (high + low != val)
3380 /* Reload the high part into a base reg; leave the low part
3381 in the mem directly. */
3383 x = gen_rtx_PLUS (GET_MODE (x),
3384 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3388 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3389 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3390 opnum, (enum reload_type)type);
3395 if (GET_CODE (x) == SYMBOL_REF
3396 && !ALTIVEC_VECTOR_MODE (mode)
3398 && DEFAULT_ABI == ABI_DARWIN
3399 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3401 && DEFAULT_ABI == ABI_V4
3404 /* Don't do this for TFmode, since the result isn't offsettable.
3405 The same goes for DImode without 64-bit gprs and DFmode
3408 && (mode != DImode || TARGET_POWERPC64)
3409 && (mode != DFmode || TARGET_POWERPC64
3410 || (TARGET_FPRS && TARGET_HARD_FLOAT)))
/* Darwin PIC: address the constant relative to the picbase.  */
3415 rtx offset = gen_rtx_CONST (Pmode,
3416 gen_rtx_MINUS (Pmode, x,
3417 machopic_function_base_sym ()));
3418 x = gen_rtx_LO_SUM (GET_MODE (x),
3419 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3420 gen_rtx_HIGH (Pmode, offset)), offset);
3424 x = gen_rtx_LO_SUM (GET_MODE (x),
3425 gen_rtx_HIGH (Pmode, x), x);
3427 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3428 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3429 opnum, (enum reload_type)type);
3434 /* Reload an offset address wrapped by an AND that represents the
3435 masking of the lower bits. Strip the outer AND and let reload
3436 convert the offset address into an indirect address. */
3438 && ALTIVEC_VECTOR_MODE (mode)
3439 && GET_CODE (x) == AND
3440 && GET_CODE (XEXP (x, 0)) == PLUS
3441 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3442 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3443 && GET_CODE (XEXP (x, 1)) == CONST_INT
3444 && INTVAL (XEXP (x, 1)) == -16
/* TOC-addressable constant pool entry.  */
3452 && constant_pool_expr_p (x)
3453 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3455 (x) = create_TOC_reference (x);
3463 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3464 that is a valid memory address for an instruction.
3465 The MODE argument is the machine mode for the MEM expression
3466 that wants to use this address.
3468 On the RS/6000, there are four valid addresses: a SYMBOL_REF that
3469 refers to a constant pool entry of an address (or the sum of it
3470 plus a constant), a short (16-bit signed) constant plus a register,
3471 the sum of two registers, or a register indirect, possibly with an
3472 auto-increment. For DFmode and DImode with a constant plus register,
3473 we must ensure that both words are addressable or PowerPC64 with offset
3476 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3477 32-bit DImode, TImode, TFmode), indexed addressing cannot be used because
3478 adjacent memory cells are accessed by adding word-sized offsets
3479 during assembly output. */
3481 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3483 /* If this is an unaligned stvx/ldvx type address, discard the outer AND. */
3485 && ALTIVEC_VECTOR_MODE (mode)
3486 && GET_CODE (x) == AND
3487 && GET_CODE (XEXP (x, 1)) == CONST_INT
3488 && INTVAL (XEXP (x, 1)) == -16)
3491 if (RS6000_SYMBOL_REF_TLS_P (x))
3493 if (legitimate_indirect_address_p (x, reg_ok_strict))
3495 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3496 && !ALTIVEC_VECTOR_MODE (mode)
3497 && !SPE_VECTOR_MODE (mode)
3498 /* Restrict addressing for DI because of our SUBREG hackery. */
3499 && !(TARGET_E500_DOUBLE && (mode == DFmode || mode == DImode))
3501 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3503 if (legitimate_small_data_p (mode, x))
3505 if (legitimate_constant_pool_address_p (x))
3507 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3509 && GET_CODE (x) == PLUS
3510 && GET_CODE (XEXP (x, 0)) == REG
3511 && (XEXP (x, 0) == virtual_stack_vars_rtx
3512 || XEXP (x, 0) == arg_pointer_rtx)
3513 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3515 if (rs6000_legitimate_offset_address_p (mode, x, reg_ok_strict))
3519 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3521 || ((mode != DFmode || TARGET_E500_DOUBLE) && mode != TFmode))
3522 && (TARGET_POWERPC64 || mode != DImode)
3523 && legitimate_indexed_address_p (x, reg_ok_strict))
3525 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3530 /* Go to LABEL if ADDR (a legitimate address expression)
3531 has an effect that depends on the machine mode it is used for.
3533 On the RS/6000 this is true of all integral offsets (since AltiVec
3534 modes don't allow them) or is a pre-increment or decrement.
3536 ??? Except that due to conceptual problems in offsettable_address_p
3537 we can't really report the problems of integral offsets. So leave
3538 this assuming that the adjustable offset must be valid for the
3539 sub-words of a TFmode operand, which is what we had before. */
3542 rs6000_mode_dependent_address (rtx addr)
3544 switch (GET_CODE (addr))
3547 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3549 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3550 return val + 12 + 0x8000 >= 0x10000;
3559 return TARGET_UPDATE;
3568 /* More elaborate version of recog's offsettable_memref_p predicate
3569 that works around the ??? note of rs6000_mode_dependent_address.
3570 In particular it accepts
3572 (mem:DI (plus:SI (reg/f:SI 31 31) (const_int 32760 [0x7ff8])))
3574 in 32-bit mode, that the recog predicate rejects. */
3577 rs6000_offsettable_memref_p (rtx op)
3582 /* First mimic offsettable_memref_p. */
3583 if (offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)))
3586 /* offsettable_address_p invokes rs6000_mode_dependent_address, but
3587 the latter predicate knows nothing about the mode of the memory
3588 reference and, therefore, assumes that it is the largest supported
3589 mode (TFmode). As a consequence, legitimate offsettable memory
3590 references are rejected. rs6000_legitimate_offset_address_p contains
3591 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
3592 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
3595 /* Return number of consecutive hard regs needed starting at reg REGNO
3596 to hold something of mode MODE.
3597 This is ordinarily the length in words of a value of mode MODE
3598 but can be less for certain modes in special long registers.
3600 For the SPE, GPRs are 64 bits but only 32 bits are visible in
3601 scalar instructions. The upper 32 bits are only available to the
3604 POWER and PowerPC GPRs hold 32 bits worth;
3605 PowerPC64 GPRs and FPRs hold 64 bits worth. */
3608 rs6000_hard_regno_nregs (int regno, enum machine_mode mode)
3610 if (FP_REGNO_P (regno))
3611 return (GET_MODE_SIZE (mode) + UNITS_PER_FP_WORD - 1) / UNITS_PER_FP_WORD;
3613 if (TARGET_E500_DOUBLE && mode == DFmode)
3616 if (SPE_SIMD_REGNO_P (regno) && TARGET_SPE && SPE_VECTOR_MODE (mode))
3617 return (GET_MODE_SIZE (mode) + UNITS_PER_SPE_WORD - 1) / UNITS_PER_SPE_WORD;
3619 if (ALTIVEC_REGNO_P (regno))
3621 (GET_MODE_SIZE (mode) + UNITS_PER_ALTIVEC_WORD - 1) / UNITS_PER_ALTIVEC_WORD;
3623 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3626 /* Change register usage conditional on target flags. */
3628 rs6000_conditional_register_usage (void)
3632 /* Set MQ register fixed (already call_used) if not POWER
3633 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
3638 /* 64-bit AIX and Linux reserve GPR13 for thread-private data. */
3640 fixed_regs[13] = call_used_regs[13]
3641 = call_really_used_regs[13] = 1;
3643 /* Conditionally disable FPRs. */
3644 if (TARGET_SOFT_FLOAT || !TARGET_FPRS)
3645 for (i = 32; i < 64; i++)
3646 fixed_regs[i] = call_used_regs[i]
3647 = call_really_used_regs[i] = 1;
3649 /* The TOC register is not killed across calls in a way that is
3650 visible to the compiler. */
3651 if (DEFAULT_ABI == ABI_AIX)
3652 call_really_used_regs[2] = 0;
3654 if (DEFAULT_ABI == ABI_V4
3655 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3657 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3659 if (DEFAULT_ABI == ABI_V4
3660 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3662 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3663 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3664 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3666 if (DEFAULT_ABI == ABI_DARWIN
3667 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
3668 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3669 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3670 = call_really_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3672 if (TARGET_TOC && TARGET_MINIMAL_TOC)
3673 fixed_regs[RS6000_PIC_OFFSET_TABLE_REGNUM]
3674 = call_used_regs[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
3677 global_regs[VSCR_REGNO] = 1;
3681 global_regs[SPEFSCR_REGNO] = 1;
3682 fixed_regs[FIXED_SCRATCH]
3683 = call_used_regs[FIXED_SCRATCH]
3684 = call_really_used_regs[FIXED_SCRATCH] = 1;
3687 if (! TARGET_ALTIVEC)
3689 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
3690 fixed_regs[i] = call_used_regs[i] = call_really_used_regs[i] = 1;
3691 call_really_used_regs[VRSAVE_REGNO] = 1;
3694 if (TARGET_ALTIVEC_ABI)
3695 for (i = FIRST_ALTIVEC_REGNO; i < FIRST_ALTIVEC_REGNO + 20; ++i)
3696 call_used_regs[i] = call_really_used_regs[i] = 1;
3699 /* Try to output insns to set TARGET equal to the constant C if it can
3700 be done in less than N insns. Do all computations in MODE.
3701 Returns the place where the output has been placed if it can be
3702 done and the insns have been emitted. If it would take more than N
3703 insns, zero is returned and no insns are emitted. */
3706 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3707 rtx source, int n ATTRIBUTE_UNUSED)
3709 rtx result, insn, set;
3710 HOST_WIDE_INT c0, c1;
3717 dest = gen_reg_rtx (mode);
3718 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3722 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3724 emit_insn (gen_rtx_SET (VOIDmode, result,
3725 GEN_INT (INTVAL (source)
3726 & (~ (HOST_WIDE_INT) 0xffff))));
3727 emit_insn (gen_rtx_SET (VOIDmode, dest,
3728 gen_rtx_IOR (SImode, result,
3729 GEN_INT (INTVAL (source) & 0xffff))));
3734 switch (GET_CODE (source))
3737 c0 = INTVAL (source);
3742 #if HOST_BITS_PER_WIDE_INT >= 64
3743 c0 = CONST_DOUBLE_LOW (source);
3746 c0 = CONST_DOUBLE_LOW (source);
3747 c1 = CONST_DOUBLE_HIGH (source);
3755 result = rs6000_emit_set_long_const (dest, c0, c1);
3762 insn = get_last_insn ();
3763 set = single_set (insn);
3764 if (! CONSTANT_P (SET_SRC (set)))
3765 set_unique_reg_note (insn, REG_EQUAL, source);
3770 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3771 fall back to a straight forward decomposition. We do this to avoid
3772 exponential run times encountered when looking for longer sequences
3773 with rs6000_emit_set_const. */
3775 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3777 if (!TARGET_POWERPC64)
3779 rtx operand1, operand2;
3781 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3783 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3785 emit_move_insn (operand1, GEN_INT (c1));
3786 emit_move_insn (operand2, GEN_INT (c2));
3790 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3793 ud2 = (c1 & 0xffff0000) >> 16;
3794 #if HOST_BITS_PER_WIDE_INT >= 64
3798 ud4 = (c2 & 0xffff0000) >> 16;
3800 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3801 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3804 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3806 emit_move_insn (dest, GEN_INT (ud1));
3809 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3810 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3813 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3816 emit_move_insn (dest, GEN_INT (ud2 << 16));
3818 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3820 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3821 || (ud4 == 0 && ! (ud3 & 0x8000)))
3824 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3827 emit_move_insn (dest, GEN_INT (ud3 << 16));
3830 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3831 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3833 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3838 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3841 emit_move_insn (dest, GEN_INT (ud4 << 16));
3844 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3846 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3848 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3849 GEN_INT (ud2 << 16)));
3851 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3857 /* Helper for the following. Get rid of [r+r] memory refs
3858 in cases where it won't work (TImode, TFmode). */
3861 rs6000_eliminate_indexed_memrefs (rtx operands[2])
3863 if (GET_CODE (operands[0]) == MEM
3864 && GET_CODE (XEXP (operands[0], 0)) != REG
3865 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0))
3866 && ! reload_in_progress)
3868 = replace_equiv_address (operands[0],
3869 copy_addr_to_reg (XEXP (operands[0], 0)));
3871 if (GET_CODE (operands[1]) == MEM
3872 && GET_CODE (XEXP (operands[1], 0)) != REG
3873 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0))
3874 && ! reload_in_progress)
3876 = replace_equiv_address (operands[1],
3877 copy_addr_to_reg (XEXP (operands[1], 0)));
3880 /* Emit a move from SOURCE to DEST in mode MODE. */
3882 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3886 operands[1] = source;
3888 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3889 if (GET_CODE (operands[1]) == CONST_DOUBLE
3890 && ! FLOAT_MODE_P (mode)
3891 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3893 /* FIXME. This should never happen. */
3894 /* Since it seems that it does, do the safe thing and convert
3896 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3898 gcc_assert (GET_CODE (operands[1]) != CONST_DOUBLE
3899 || FLOAT_MODE_P (mode)
3900 || ((CONST_DOUBLE_HIGH (operands[1]) != 0
3901 || CONST_DOUBLE_LOW (operands[1]) < 0)
3902 && (CONST_DOUBLE_HIGH (operands[1]) != -1
3903 || CONST_DOUBLE_LOW (operands[1]) >= 0)));
3905 /* Check if GCC is setting up a block move that will end up using FP
3906 registers as temporaries. We must make sure this is acceptable. */
3907 if (GET_CODE (operands[0]) == MEM
3908 && GET_CODE (operands[1]) == MEM
3910 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3911 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3912 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3913 ? 32 : MEM_ALIGN (operands[0])))
3914 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3916 : MEM_ALIGN (operands[1]))))
3917 && ! MEM_VOLATILE_P (operands [0])
3918 && ! MEM_VOLATILE_P (operands [1]))
3920 emit_move_insn (adjust_address (operands[0], SImode, 0),
3921 adjust_address (operands[1], SImode, 0));
3922 emit_move_insn (adjust_address (operands[0], SImode, 4),
3923 adjust_address (operands[1], SImode, 4));
3927 if (!no_new_pseudos && GET_CODE (operands[0]) == MEM
3928 && !gpc_reg_operand (operands[1], mode))
3929 operands[1] = force_reg (mode, operands[1]);
3931 if (mode == SFmode && ! TARGET_POWERPC
3932 && TARGET_HARD_FLOAT && TARGET_FPRS
3933 && GET_CODE (operands[0]) == MEM)
3937 if (reload_in_progress || reload_completed)
3938 regnum = true_regnum (operands[1]);
3939 else if (GET_CODE (operands[1]) == REG)
3940 regnum = REGNO (operands[1]);
3944 /* If operands[1] is a register, on POWER it may have
3945 double-precision data in it, so truncate it to single
3947 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3950 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3951 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3952 operands[1] = newreg;
3956 /* Recognize the case where operand[1] is a reference to thread-local
3957 data and load its address to a register. */
3958 if (rs6000_tls_referenced_p (operands[1]))
3960 enum tls_model model;
3961 rtx tmp = operands[1];
3964 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
3966 addend = XEXP (XEXP (tmp, 0), 1);
3967 tmp = XEXP (XEXP (tmp, 0), 0);
3970 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
3971 model = SYMBOL_REF_TLS_MODEL (tmp);
3972 gcc_assert (model != 0);
3974 tmp = rs6000_legitimize_tls_address (tmp, model);
3977 tmp = gen_rtx_PLUS (mode, tmp, addend);
3978 tmp = force_operand (tmp, operands[0]);
3983 /* Handle the case where reload calls us with an invalid address. */
3984 if (reload_in_progress && mode == Pmode
3985 && (! general_operand (operands[1], mode)
3986 || ! nonimmediate_operand (operands[0], mode)))
3989 /* 128-bit constant floating-point values on Darwin should really be
3990 loaded as two parts. */
3991 if (!TARGET_IEEEQUAD
3992 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3993 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3995 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3996 know how to get a DFmode SUBREG of a TFmode. */
3997 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3998 simplify_gen_subreg (DImode, operands[1], mode, 0),
4000 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
4001 GET_MODE_SIZE (DImode)),
4002 simplify_gen_subreg (DImode, operands[1], mode,