/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
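/* For example, CEIL (5, 4) == 2 and CEIL (8, 4) == 2: the quotient is
   rounded up, which is how the number of words needed to hold a value
   of a given byte size is computed below, as in
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */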
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx,
					 rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
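/* For example, with MOVE_RATIO of 15 on a 32-bit target, copying a
   16-byte struct costs 4 SImode moves; since 4 < 15, emit_block_move
   below expands it inline via move_by_pieces rather than emitting a
   movstr pattern or a library call.  */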
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
}
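/* The DEF_MODEMAP/#include pairing above is the classic X-macro idiom:
   modemap.def holds one DEF_MODEMAP line per machine mode, and each
   includer defines DEF_MODEMAP to extract the columns it needs.  The
   lines below sketch the shape of such entries; the opcode names here
   are illustrative, not the actual contents of modemap.def.  */
#if 0
DEF_MODEMAP (SImode, compareSI, compareuSI, constSI, loadSI, storeSI)
DEF_MODEMAP (DImode, compareDI, compareuDI, constDI, loadDI, storeDI)
#endif
/* With the definition above, each such line expands to three array
   assignments filling the const, load, and store maps for that mode.  */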
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
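/* A sketch of the intended protocol (illustrative only; VAR stands for
   any incrementable lvalue rtx): the expander queues an increment,
   uses the protected pre-increment value in an insn, and finally
   flushes the queue.  */
#if 0
  rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));
  rtx val = protect_from_queue (q, 0);	/* pre-increment value of VAR */
  emit_move_insn (target, val);		/* safe to use in an insn now */
  emit_queue ();			/* the increment itself is emitted */
#endif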
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value;
      rtx libcall = (rtx) 0;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      /* No insn for this conversion; fall back on a library call.  */
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;
	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;
	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;
	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
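/* A sketch of typical use (illustrative only): widen a QImode pseudo
   into an SImode pseudo with sign extension.  */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx word = gen_reg_rtx (SImode);
  convert_move (word, byte, 0);		/* UNSIGNEDP 0: sign-extend */
#endif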
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
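/* For example (illustrative only): converting the CONST_INT 0xff from
   QImode to SImode exercises the sign/zero-extension arm above, and
   yields GEN_INT (-1) when signed (the QImode sign bit is set) but
   GEN_INT (255) when UNSIGNEDP is nonzero.  */
#if 0
  rtx s = convert_modes (SImode, QImode, GEN_INT (0xff), 0);	/* -1 */
  rtx u = convert_modes (SImode, QImode, GEN_INT (0xff), 1);	/* 255 */
#endif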
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
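/* For example, on a 32-bit target with full alignment, L == 11 costs
   2 SImode moves (8 bytes, leaving 3), 1 HImode move (leaving 1), and
   1 QImode move: 4 insns in all, as the loop above walks max_size down
   through each integer mode.  */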
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr,
						data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
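/* A sketch of typical use (illustrative only): copy an 8-byte,
   word-aligned block from SRC to DST, both BLKmode MEMs.  A constant
   size this small falls through to move_by_pieces above.  */
#if 0
  emit_block_move (dst, src, GEN_INT (8), UNITS_PER_WORD);
#endif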
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
			 GEN_INT (size), ptr_mode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), ptr_mode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      if (stack)
	{
	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
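/* On a STACK_GROWS_DOWNWARD target, the rtx built above is
   (pre_dec:Pmode (reg sp)), so (mem (pre_dec (reg sp))) used as a move
   destination is a push insn; with upward stack growth it is
   (pre_inc ...) instead, per STACK_PUSH_CODE above.  */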
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr,
							   args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#endif
	  OK_DEFER_POP;
	}
    }
2312 else if (partial > 0)
2314 /* Scalar partly in registers. */
2316 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2319 /* # words of start of argument
2320 that we must make space for but need not store. */
2321 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2322 int args_offset = INTVAL (args_so_far);
2325 /* Push padding now if padding above and stack grows down,
2326 or if padding below and stack grows up.
2327 But if space already allocated, this has already been done. */
2328 if (extra && args_addr == 0
2329 && where_pad != none && where_pad != stack_direction)
2330 anti_adjust_stack (GEN_INT (extra));
2332 /* If we make space by pushing it, we might as well push
2333 the real data. Otherwise, we can leave OFFSET nonzero
2334 and leave the space uninitialized. */
2338 /* Now NOT_STACK gets the number of words that we don't need to
2339 allocate on the stack. */
2340 not_stack = partial - offset;
2342 /* If the partial register-part of the arg counts in its stack size,
2343 skip the part of stack space corresponding to the registers.
2344 Otherwise, start copying to the beginning of the stack space,
2345 by setting SKIP to 0. */
2346 #ifndef REG_PARM_STACK_SPACE
2352 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2353 x = validize_mem (force_const_mem (mode, x));
2355 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2356 SUBREGs of such registers are not allowed. */
2357 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2358 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2359 x = copy_to_reg (x);
2361 /* Loop over all the words allocated on the stack for this arg. */
2362 /* We can do it by words, because any scalar bigger than a word
2363 has a size a multiple of a word. */
2364 #ifndef PUSH_ARGS_REVERSED
2365 for (i = not_stack; i < size; i++)
2367 for (i = size - 1; i >= not_stack; i--)
2369 if (i >= not_stack + offset)
2370 emit_push_insn (operand_subword_force (x, i, mode),
2371 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2373 GEN_INT (args_offset + ((i - not_stack + skip)
2374 * UNITS_PER_WORD)));
2380 /* Push padding now if padding above and stack grows down,
2381 or if padding below and stack grows up.
2382 But if space already allocated, this has already been done. */
2383 if (extra && args_addr == 0
2384 && where_pad != none && where_pad != stack_direction)
2385 anti_adjust_stack (GEN_INT (extra));
2387 #ifdef PUSH_ROUNDING
2389 addr = gen_push_operand ();
2392 if (GET_CODE (args_so_far) == CONST_INT)
2394 = memory_address (mode,
2395 plus_constant (args_addr, INTVAL (args_so_far)));
2397 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2400 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2404 /* If part should go in registers, copy that part
2405 into the appropriate registers. Do this now, at the end,
2406 since mem-to-mem copies above may do function calls. */
2407 if (partial > 0 && reg != 0)
2408 move_block_to_reg (REGNO (reg), x, partial, mode);
2410 if (extra && args_addr == 0 && where_pad == stack_direction)
2411 anti_adjust_stack (GEN_INT (extra));
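
/* Editorial sketch (not part of the original file): a plain-C model of the
   word-by-word push loop above.  SKETCH_WORD_BYTES stands in for
   UNITS_PER_WORD, and the function merely prints the byte offset chosen
   for each stack-resident word of a scalar that is split between
   registers and stack, mirroring the NOT_STACK / OFFSET / SKIP
   arithmetic.  All names here are illustrative only.  */

#include <stdio.h>

#define SKETCH_WORD_BYTES 4	/* stand-in for UNITS_PER_WORD */

static void
sketch_push_offsets (int size, int partial, int offset, int skip,
		     int args_offset)
{
  /* Words below NOT_STACK live in registers; the next OFFSET words get
     stack space but are never stored.  */
  int not_stack = partial - offset;
  int i;

  for (i = not_stack; i < size; i++)
    if (i >= not_stack + offset)
      printf ("word %d stored at byte offset %d\n",
	      i, args_offset + (i - not_stack + skip) * SKETCH_WORD_BYTES);
}
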
2414 /* Expand an assignment that stores the value of FROM into TO.
2415 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2416 (This may contain a QUEUED rtx;
2417 if the value is constant, this rtx is a constant.)
2418 Otherwise, the returned value is NULL_RTX.
2420 SUGGEST_REG is no longer actually used.
2421 It used to mean, copy the value through a register
2422 and return that register, if that is possible.
2423 We now use WANT_VALUE to decide whether to do this. */
2426 expand_assignment (to, from, want_value, suggest_reg)
2431 register rtx to_rtx = 0;
2434 /* Don't crash if the lhs of the assignment was erroneous. */
2436 if (TREE_CODE (to) == ERROR_MARK)
2438 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2439 return want_value ? result : NULL_RTX;
2442 if (output_bytecode)
2444 tree dest_innermost;
2446 bc_expand_expr (from);
2447 bc_emit_instruction (duplicate);
2449 dest_innermost = bc_expand_address (to);
2451 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2452 take care of it here. */
2454 bc_store_memory (TREE_TYPE (to), dest_innermost);
2458 /* Assignment of a structure component needs special treatment
2459 if the structure component's rtx is not simply a MEM.
2460 Assignment of an array element at a constant index, and assignment of
2461 an array element in an unaligned packed structure field, has the same problem. */
2464 if (TREE_CODE (to) == COMPONENT_REF
2465 || TREE_CODE (to) == BIT_FIELD_REF
2466 || (TREE_CODE (to) == ARRAY_REF
2467 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2468 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2469 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2471 enum machine_mode mode1;
2481 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2482 &mode1, &unsignedp, &volatilep);
2484 /* If we are going to use store_bit_field and extract_bit_field,
2485 make sure to_rtx will be safe for multiple use. */
2487 if (mode1 == VOIDmode && want_value)
2488 tem = stabilize_reference (tem);
2490 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2491 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2494 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2496 if (GET_CODE (to_rtx) != MEM)
2498 to_rtx = change_address (to_rtx, VOIDmode,
2499 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2500 force_reg (ptr_mode, offset_rtx)));
2501 /* If we have a variable offset, the known alignment
2502 is only that of the innermost structure containing the field.
2503 (Actually, we could sometimes do better by using the
2504 align of an element of the innermost array, but no need.) */
2505 if (TREE_CODE (to) == COMPONENT_REF
2506 || TREE_CODE (to) == BIT_FIELD_REF)
2508 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2512 if (GET_CODE (to_rtx) == MEM)
2514 /* When the offset is zero, to_rtx is the address of the
2515 structure we are storing into, and hence may be shared.
2516 We must make a new MEM before setting the volatile bit. */
2518 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2519 MEM_VOLATILE_P (to_rtx) = 1;
2521 #if 0 /* This was turned off because, when a field is volatile
2522 in an object which is not volatile, the object may be in a register,
2523 and then we would abort over here. */
2529 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2531 /* Spurious cast makes HPUX compiler happy. */
2532 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2535 /* Required alignment of containing datum. */
2537 int_size_in_bytes (TREE_TYPE (tem)));
2538 preserve_temp_slots (result);
2542 /* If the value is meaningful, convert RESULT to the proper mode.
2543 Otherwise, return nothing. */
2544 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2545 TYPE_MODE (TREE_TYPE (from)),
2547 TREE_UNSIGNED (TREE_TYPE (to)))
2551 /* If the rhs is a function call and its value is not an aggregate,
2552 call the function before we start to compute the lhs.
2553 This is needed for correct code for cases such as
2554 val = setjmp (buf) on machines where reference to val
2555 requires loading up part of an address in a separate insn.
2557 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2558 a promoted variable where the zero- or sign- extension needs to be done.
2559 Handling this in the normal way is safe because no computation is done before the call. */
2561 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2562 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2567 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2569 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2571 if (GET_MODE (to_rtx) == BLKmode)
2573 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2574 emit_block_move (to_rtx, value, expr_size (from), align);
2577 emit_move_insn (to_rtx, value);
2578 preserve_temp_slots (to_rtx);
2581 return want_value ? to_rtx : NULL_RTX;
2584 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2585 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2588 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2590 /* Don't move directly into a return register. */
2591 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2596 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2597 emit_move_insn (to_rtx, temp);
2598 preserve_temp_slots (to_rtx);
2601 return want_value ? to_rtx : NULL_RTX;
2604 /* In case we are returning the contents of an object which overlaps
2605 the place the value is being stored, use a safe function when copying
2606 a value through a pointer into a structure value return block. */
2607 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2608 && current_function_returns_struct
2609 && !current_function_returns_pcc_struct)
2614 size = expr_size (from);
2615 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2617 #ifdef TARGET_MEM_FUNCTIONS
2618 emit_library_call (memcpy_libfunc, 0,
2619 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2620 XEXP (from_rtx, 0), Pmode,
2621 convert_to_mode (TYPE_MODE (sizetype),
2622 size, TREE_UNSIGNED (sizetype)),
2623 TYPE_MODE (sizetype));
2625 emit_library_call (bcopy_libfunc, 0,
2626 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2627 XEXP (to_rtx, 0), Pmode,
2628 convert_to_mode (TYPE_MODE (sizetype),
2629 size, TREE_UNSIGNED (sizetype)),
2630 TYPE_MODE (sizetype));
2633 preserve_temp_slots (to_rtx);
2636 return want_value ? to_rtx : NULL_RTX;
2639 /* Compute FROM and store the value in the rtx we got. */
2642 result = store_expr (from, to_rtx, want_value);
2643 preserve_temp_slots (result);
2646 return want_value ? result : NULL_RTX;
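
/* Editorial sketch (not part of the original file): the WANT_VALUE protocol
   in miniature, using plain C stand-ins.  The store routine hands back the
   stored value only when asked, just as expand_assignment above returns
   `want_value ? result : NULL_RTX', so a containing expression such as
   `a = b = c' can reuse the value without rereading the destination.  */

#include <stddef.h>

static int *
sketch_assign (int *to, int from, int want_value)
{
  *to = from;
  return want_value ? to : NULL;	/* NULL plays the role of NULL_RTX */
}
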
2649 /* Generate code for computing expression EXP,
2650 and storing the value into TARGET.
2651 TARGET may contain a QUEUED rtx.
2653 If WANT_VALUE is nonzero, return a copy of the value
2654 not in TARGET, so that we can be sure to use the proper
2655 value in a containing expression even if TARGET has something
2656 else stored in it. If possible, we copy the value through a pseudo
2657 and return that pseudo. Or, if the value is constant, we try to
2658 return the constant. In some cases, we return a pseudo
2659 copied *from* TARGET.
2661 If the mode is BLKmode then we may return TARGET itself.
2662 It turns out that in BLKmode it doesn't cause a problem,
2663 because C has no operators that could combine two different
2664 assignments into the same BLKmode object with different values
2665 with no sequence point. Will other languages need this to be more thorough?
2668 If WANT_VALUE is 0, we return NULL, to make sure
2669 to catch quickly any cases where the caller uses the value
2670 and fails to set WANT_VALUE. */
2673 store_expr (exp, target, want_value)
2675 register rtx target;
2679 int dont_return_target = 0;
2681 if (TREE_CODE (exp) == COMPOUND_EXPR)
2683 /* Perform first part of compound expression, then assign from second part. */
2685 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2687 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2689 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2691 /* For conditional expression, get safe form of the target. Then
2692 test the condition, doing the appropriate assignment on either
2693 side. This avoids the creation of unnecessary temporaries.
2694 For non-BLKmode, it is more efficient not to do this. */
2696 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2699 target = protect_from_queue (target, 1);
2702 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2703 store_expr (TREE_OPERAND (exp, 1), target, 0);
2705 emit_jump_insn (gen_jump (lab2));
2708 store_expr (TREE_OPERAND (exp, 2), target, 0);
2712 return want_value ? target : NULL_RTX;
2714 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2715 && GET_MODE (target) != BLKmode)
2716 /* If target is in memory and caller wants value in a register instead,
2717 arrange that. Pass TARGET as target for expand_expr so that,
2718 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2719 We know expand_expr will not use the target in that case.
2720 Don't do this if TARGET is volatile because we are supposed
2721 to write it and then read it. */
2723 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2724 GET_MODE (target), 0);
2725 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2726 temp = copy_to_reg (temp);
2727 dont_return_target = 1;
2729 else if (queued_subexp_p (target))
2730 /* If target contains a postincrement, let's not risk
2731 using it as the place to generate the rhs. */
2733 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2735 /* Expand EXP into a new pseudo. */
2736 temp = gen_reg_rtx (GET_MODE (target));
2737 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2740 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2742 /* If target is volatile, ANSI requires accessing the value
2743 *from* the target, if it is accessed. So make that happen.
2744 In no case return the target itself. */
2745 if (! MEM_VOLATILE_P (target) && want_value)
2746 dont_return_target = 1;
2748 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2749 /* If this is a scalar in a register that is stored in a wider mode
2750 than the declared mode, compute the result into its declared mode
2751 and then convert to the wider mode. Our value is the computed expression. */
2754 /* If we don't want a value, we can do the conversion inside EXP,
2755 which will often result in some optimizations. Do the conversion
2756 in two steps: first change the signedness, if needed, then the extend. */
2760 if (TREE_UNSIGNED (TREE_TYPE (exp))
2761 != SUBREG_PROMOTED_UNSIGNED_P (target))
2764 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2768 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2769 SUBREG_PROMOTED_UNSIGNED_P (target)),
2773 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2775 /* If TEMP is a volatile MEM and we want a result value, make
2776 the access now so it gets done only once. */
2777 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2778 temp = copy_to_reg (temp);
2780 /* If TEMP is a VOIDmode constant, use convert_modes to make
2781 sure that we properly convert it. */
2782 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2783 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2784 TYPE_MODE (TREE_TYPE (exp)), temp,
2785 SUBREG_PROMOTED_UNSIGNED_P (target));
2787 convert_move (SUBREG_REG (target), temp,
2788 SUBREG_PROMOTED_UNSIGNED_P (target));
2789 return want_value ? temp : NULL_RTX;
2793 temp = expand_expr (exp, target, GET_MODE (target), 0);
2794 /* Return TARGET if it's a specified hardware register.
2795 If TARGET is a volatile mem ref, either return TARGET
2796 or return a reg copied *from* TARGET; ANSI requires this.
2798 Otherwise, if TEMP is not TARGET, return TEMP
2799 if it is constant (for efficiency),
2800 or if we really want the correct value. */
2801 if (!(target && GET_CODE (target) == REG
2802 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2803 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2805 && (CONSTANT_P (temp) || want_value))
2806 dont_return_target = 1;
2809 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2810 the same as that of TARGET, adjust the constant. This is needed, for
2811 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
2813 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2814 && TREE_CODE (exp) != ERROR_MARK
2815 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2816 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2817 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2819 /* If value was not generated in the target, store it there.
2820 Convert the value to TARGET's type first if necessary. */
2822 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2824 target = protect_from_queue (target, 1);
2825 if (GET_MODE (temp) != GET_MODE (target)
2826 && GET_MODE (temp) != VOIDmode)
2828 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2829 if (dont_return_target)
2831 /* In this case, we will return TEMP,
2832 so make sure it has the proper mode.
2833 But don't forget to store the value into TARGET. */
2834 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2835 emit_move_insn (target, temp);
2838 convert_move (target, temp, unsignedp);
2841 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2843 /* Handle copying a string constant into an array.
2844 The string constant may be shorter than the array.
2845 So copy just the string's actual length, and clear the rest. */
2849 /* Get the size of the data type of the string,
2850 which is actually the size of the target. */
2851 size = expr_size (exp);
2852 if (GET_CODE (size) == CONST_INT
2853 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2854 emit_block_move (target, temp, size,
2855 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2858 /* Compute the size of the data to copy from the string. */
2860 = size_binop (MIN_EXPR,
2861 make_tree (sizetype, size),
2863 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2864 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2868 /* Copy that much. */
2869 emit_block_move (target, temp, copy_size_rtx,
2870 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2872 /* Figure out how much is left in TARGET that we have to clear.
2873 Do all calculations in ptr_mode. */
2875 addr = XEXP (target, 0);
2876 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2878 if (GET_CODE (copy_size_rtx) == CONST_INT)
2880 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2881 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2885 addr = force_reg (ptr_mode, addr);
2886 addr = expand_binop (ptr_mode, add_optab, addr,
2887 copy_size_rtx, NULL_RTX, 0,
2890 size = expand_binop (ptr_mode, sub_optab, size,
2891 copy_size_rtx, NULL_RTX, 0,
2894 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2895 GET_MODE (size), 0, 0);
2896 label = gen_label_rtx ();
2897 emit_jump_insn (gen_blt (label));
2900 if (size != const0_rtx)
2902 #ifdef TARGET_MEM_FUNCTIONS
2903 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2904 Pmode, const0_rtx, Pmode, size, ptr_mode);
2906 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2907 addr, Pmode, size, ptr_mode);
2915 else if (GET_MODE (temp) == BLKmode)
2916 emit_block_move (target, temp, expr_size (exp),
2917 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2919 emit_move_insn (target, temp);
2922 /* If we don't want a value, return NULL_RTX. */
2926 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2927 ??? The latter test doesn't seem to make sense. */
2928 else if (dont_return_target && GET_CODE (temp) != MEM)
2931 /* Return TARGET itself if it is a hard register. */
2932 else if (want_value && GET_MODE (target) != BLKmode
2933 && ! (GET_CODE (target) == REG
2934 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2935 return copy_to_reg (target);
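
/* Editorial sketch (not part of the original file): the string-constant
   case of store_expr in plain C.  Copy only the string's actual length
   into the target array, then zero whatever is left; this is the
   emit_block_move followed by the memset/bzero library call generated
   above.  */

#include <string.h>

static void
sketch_store_string (char *target, size_t target_size,
		     const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);			/* emit_block_move */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);	/* clear the rest */
}
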
2941 /* Store the value of constructor EXP into the rtx TARGET.
2942 TARGET is either a REG or a MEM. */
2945 store_constructor (exp, target)
2949 tree type = TREE_TYPE (exp);
2951 /* We know our target cannot conflict, since safe_from_p has been called. */
2953 /* Don't try copying piece by piece into a hard register
2954 since that is vulnerable to being clobbered by EXP.
2955 Instead, construct in a pseudo register and then copy it all. */
2956 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2958 rtx temp = gen_reg_rtx (GET_MODE (target));
2959 store_constructor (exp, temp);
2960 emit_move_insn (target, temp);
2965 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2966 || TREE_CODE (type) == QUAL_UNION_TYPE)
2970 /* Inform later passes that the whole union value is dead. */
2971 if (TREE_CODE (type) == UNION_TYPE
2972 || TREE_CODE (type) == QUAL_UNION_TYPE)
2973 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2975 /* If we are building a static constructor into a register,
2976 set the initial value as zero so we can fold the value into a constant. */
2978 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2979 emit_move_insn (target, const0_rtx);
2981 /* If the constructor has fewer fields than the structure,
2982 clear the whole structure first. */
2983 else if (list_length (CONSTRUCTOR_ELTS (exp))
2984 != list_length (TYPE_FIELDS (type)))
2985 clear_storage (target, int_size_in_bytes (type));
2987 /* Inform later passes that the old value is dead. */
2988 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2990 /* Store each element of the constructor into
2991 the corresponding field of TARGET. */
2993 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2995 register tree field = TREE_PURPOSE (elt);
2996 register enum machine_mode mode;
3000 tree pos, constant = 0, offset = 0;
3001 rtx to_rtx = target;
3003 /* Just ignore missing fields.
3004 We cleared the whole structure, above,
3005 if any fields are missing. */
3009 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3010 unsignedp = TREE_UNSIGNED (field);
3011 mode = DECL_MODE (field);
3012 if (DECL_BIT_FIELD (field))
3015 pos = DECL_FIELD_BITPOS (field);
3016 if (TREE_CODE (pos) == INTEGER_CST)
3018 else if (TREE_CODE (pos) == PLUS_EXPR
3019 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3020 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3025 bitpos = TREE_INT_CST_LOW (constant);
3031 if (contains_placeholder_p (offset))
3032 offset = build (WITH_RECORD_EXPR, sizetype,
3035 offset = size_binop (FLOOR_DIV_EXPR, offset,
3036 size_int (BITS_PER_UNIT));
3038 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3039 if (GET_CODE (to_rtx) != MEM)
3043 = change_address (to_rtx, VOIDmode,
3044 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3045 force_reg (ptr_mode, offset_rtx)));
3048 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3049 /* The alignment of TARGET is
3050 at least what its type requires. */
3052 TYPE_ALIGN (type) / BITS_PER_UNIT,
3053 int_size_in_bytes (type));
3056 else if (TREE_CODE (type) == ARRAY_TYPE)
3060 tree domain = TYPE_DOMAIN (type);
3061 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3062 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3063 tree elttype = TREE_TYPE (type);
3065 /* If the constructor has fewer fields than the structure,
3066 clear the whole structure first. Similarly if this is a
3067 static constructor of a non-BLKmode object. */
3069 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3070 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3071 clear_storage (target, int_size_in_bytes (type));
3073 /* Inform later passes that the old value is dead. */
3074 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3076 /* Store each element of the constructor into
3077 the corresponding element of TARGET, determined
3078 by counting the elements. */
3079 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3081 elt = TREE_CHAIN (elt), i++)
3083 register enum machine_mode mode;
3087 tree index = TREE_PURPOSE (elt);
3088 rtx xtarget = target;
3090 mode = TYPE_MODE (elttype);
3091 bitsize = GET_MODE_BITSIZE (mode);
3092 unsignedp = TREE_UNSIGNED (elttype);
3094 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3095 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3097 rtx pos_rtx, addr, xtarget;
3101 index = size_int (i);
3103 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3104 size_int (BITS_PER_UNIT));
3105 position = size_binop (MULT_EXPR, index, position);
3106 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3107 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3108 xtarget = change_address (target, mode, addr);
3109 store_expr (TREE_VALUE (elt), xtarget, 0);
3114 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3115 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3117 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3119 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3120 /* The alignment of TARGET is
3121 at least what its type requires. */
3123 TYPE_ALIGN (type) / BITS_PER_UNIT,
3124 int_size_in_bytes (type));
3128 /* set constructor assignments */
3129 else if (TREE_CODE (type) == SET_TYPE)
3132 rtx xtarget = XEXP (target, 0);
3133 int set_word_size = TYPE_ALIGN (type);
3134 int nbytes = int_size_in_bytes (type);
3135 tree non_const_elements;
3136 int need_to_clear_first;
3137 tree domain = TYPE_DOMAIN (type);
3138 tree domain_min, domain_max, bitlength;
3140 /* The default implementation strategy is to extract the constant
3141 parts of the constructor, use that to initialize the target,
3142 and then "or" in whatever non-constant ranges we need in addition.
3144 If a large set is all zero or all ones, it is
3145 probably better to set it using memset (if available) or bzero.
3146 Also, if a large set has just a single range, it may be better
3147 to first clear the whole set (using bzero/memset) and then set
3148 the bits we want. */
3150 /* Check for all zeros. */
3151 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3153 clear_storage (target, nbytes);
3160 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3161 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3162 bitlength = size_binop (PLUS_EXPR,
3163 size_binop (MINUS_EXPR, domain_max, domain_min),
3166 /* Check for range all ones, or at most a single range.
3167 (This optimization is only a win for big sets.) */
3168 if (GET_MODE (target) == BLKmode && nbytes > 16
3169 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3171 need_to_clear_first = 1;
3172 non_const_elements = CONSTRUCTOR_ELTS (exp);
3176 int nbits = nbytes * BITS_PER_UNIT;
3177 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3178 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3179 char *bit_buffer = (char *) alloca (nbits);
3180 HOST_WIDE_INT word = 0;
3183 int offset = 0; /* In bytes from beginning of set. */
3184 non_const_elements = get_set_constructor_bits (exp,
3188 if (bit_buffer[ibit])
3190 if (BITS_BIG_ENDIAN)
3191 word |= (1 << (set_word_size - 1 - bit_pos));
3193 word |= 1 << bit_pos;
3196 if (bit_pos >= set_word_size || ibit == nbits)
3198 rtx datum = GEN_INT (word);
3200 /* The assumption here is that it is safe to use XEXP if
3201 the set is multi-word, but not if it's single-word. */
3202 if (GET_CODE (target) == MEM)
3203 to_rtx = change_address (target, mode,
3204 plus_constant (XEXP (target, 0),
3206 else if (offset == 0)
3210 emit_move_insn (to_rtx, datum);
3215 offset += set_word_size / BITS_PER_UNIT;
3218 need_to_clear_first = 0;
3221 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3223 /* start of range of element or NULL */
3224 tree startbit = TREE_PURPOSE (elt);
3225 /* end of range of element, or element value */
3226 tree endbit = TREE_VALUE (elt);
3227 HOST_WIDE_INT startb, endb;
3228 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3230 bitlength_rtx = expand_expr (bitlength,
3231 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3233 /* handle non-range tuple element like [ expr ] */
3234 if (startbit == NULL_TREE)
3236 startbit = save_expr (endbit);
3239 startbit = convert (sizetype, startbit);
3240 endbit = convert (sizetype, endbit);
3241 if (! integer_zerop (domain_min))
3243 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3244 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3246 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3247 EXPAND_CONST_ADDRESS);
3248 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3249 EXPAND_CONST_ADDRESS);
3253 targetx = assign_stack_temp (GET_MODE (target),
3254 GET_MODE_SIZE (GET_MODE (target)),
3256 emit_move_insn (targetx, target);
3258 else if (GET_CODE (target) == MEM)
3263 #ifdef TARGET_MEM_FUNCTIONS
3264 /* Optimization: If startbit and endbit are
3265 constants divisible by BITS_PER_UNIT,
3266 call memset instead. */
3267 if (TREE_CODE (startbit) == INTEGER_CST
3268 && TREE_CODE (endbit) == INTEGER_CST
3269 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3270 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3273 if (need_to_clear_first
3274 && endb - startb != nbytes * BITS_PER_UNIT)
3275 clear_storage (target, nbytes);
3276 need_to_clear_first = 0;
3277 emit_library_call (memset_libfunc, 0,
3279 plus_constant (XEXP (targetx, 0), startb),
3282 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3288 if (need_to_clear_first)
3290 clear_storage (target, nbytes);
3291 need_to_clear_first = 0;
3293 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3294 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3295 bitlength_rtx, TYPE_MODE (sizetype),
3296 startbit_rtx, TYPE_MODE (sizetype),
3297 endbit_rtx, TYPE_MODE (sizetype));
3300 emit_move_insn (target, targetx);
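
/* Editorial sketch (not part of the original file): packing the constant
   bits of a set constructor into words, as the bit_buffer loop above does.
   SKETCH_WORD_BITS and SKETCH_BITS_BIG_ENDIAN are illustrative stand-ins
   for the set_word_size and BITS_BIG_ENDIAN of the target.  */

#define SKETCH_WORD_BITS 32
#define SKETCH_BITS_BIG_ENDIAN 0

static void
sketch_pack_set_bits (const char *bit_buffer, int nbits, unsigned long *out)
{
  unsigned long word = 0;
  int bit_pos = 0, iword = 0, ibit;

  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
	{
	  if (SKETCH_BITS_BIG_ENDIAN)
	    word |= 1UL << (SKETCH_WORD_BITS - 1 - bit_pos);
	  else
	    word |= 1UL << bit_pos;
	}
      if (++bit_pos >= SKETCH_WORD_BITS)
	{
	  out[iword++] = word;	/* one emit_move_insn per filled word */
	  word = 0;
	  bit_pos = 0;
	}
    }
  if (bit_pos > 0)
    out[iword] = word;		/* flush a final partial word */
}
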
3308 /* Store the value of EXP (an expression tree)
3309 into a subfield of TARGET which has mode MODE and occupies
3310 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3311 If MODE is VOIDmode, it means that we are storing into a bit-field.
3313 If VALUE_MODE is VOIDmode, return nothing in particular.
3314 UNSIGNEDP is not used in this case.
3316 Otherwise, return an rtx for the value stored. This rtx
3317 has mode VALUE_MODE if that is convenient to do.
3318 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3320 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3321 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3324 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3325 unsignedp, align, total_size)
3327 int bitsize, bitpos;
3328 enum machine_mode mode;
3330 enum machine_mode value_mode;
3335 HOST_WIDE_INT width_mask = 0;
3337 if (bitsize < HOST_BITS_PER_WIDE_INT)
3338 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3340 /* If we are storing into an unaligned field of an aligned union that is
3341 in a register, we may have the mode of TARGET being an integer mode but
3342 MODE == BLKmode. In that case, get an aligned object whose size and
3343 alignment are the same as TARGET and store TARGET into it (we can avoid
3344 the store if the field being stored is the entire width of TARGET). Then
3345 call ourselves recursively to store the field into a BLKmode version of
3346 that object. Finally, load from the object into TARGET. This is not
3347 very efficient in general, but should only be slightly more expensive
3348 than the otherwise-required unaligned accesses. Perhaps this can be
3349 cleaned up later. */
3352 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3354 rtx object = assign_stack_temp (GET_MODE (target),
3355 GET_MODE_SIZE (GET_MODE (target)), 0);
3356 rtx blk_object = copy_rtx (object);
3358 MEM_IN_STRUCT_P (object) = 1;
3359 MEM_IN_STRUCT_P (blk_object) = 1;
3360 PUT_MODE (blk_object, BLKmode);
3362 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3363 emit_move_insn (object, target);
3365 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3368 /* Even though we aren't returning target, we need to
3369 give it the updated value. */
3370 emit_move_insn (target, object);
3375 /* If the structure is in a register or if the component
3376 is a bit field, we cannot use addressing to access it.
3377 Use bit-field techniques or SUBREG to store in it. */
3379 if (mode == VOIDmode
3380 || (mode != BLKmode && ! direct_store[(int) mode])
3381 || GET_CODE (target) == REG
3382 || GET_CODE (target) == SUBREG
3383 /* If the field isn't aligned enough to store as an ordinary memref,
3384 store it as a bit field. */
3385 || (SLOW_UNALIGNED_ACCESS
3386 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3387 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3389 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3391 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
3393 if (mode != VOIDmode && mode != BLKmode
3394 && mode != TYPE_MODE (TREE_TYPE (exp)))
3395 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3397 /* Store the value in the bitfield. */
3398 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3399 if (value_mode != VOIDmode)
3401 /* The caller wants an rtx for the value. */
3402 /* If possible, avoid refetching from the bitfield itself. */
3404 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3407 enum machine_mode tmode;
3410 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3411 tmode = GET_MODE (temp);
3412 if (tmode == VOIDmode)
3414 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3415 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3416 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3418 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3419 NULL_RTX, value_mode, 0, align,
3426 rtx addr = XEXP (target, 0);
3429 /* If a value is wanted, it must be the lhs;
3430 so make the address stable for multiple use. */
3432 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3433 && ! CONSTANT_ADDRESS_P (addr)
3434 /* A frame-pointer reference is already stable. */
3435 && ! (GET_CODE (addr) == PLUS
3436 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3437 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3438 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3439 addr = copy_to_reg (addr);
3441 /* Now build a reference to just the desired component. */
3443 to_rtx = change_address (target, mode,
3444 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3445 MEM_IN_STRUCT_P (to_rtx) = 1;
3447 return store_expr (exp, to_rtx, value_mode != VOIDmode);
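
/* Editorial sketch (not part of the original file): the two ways store_field
   above returns the just-stored bit-field value without refetching it.  An
   unsigned field is masked with the WIDTH_MASK computed at the top of the
   function; a signed one is normalized with the expand_shift pair (left
   shift, then arithmetic right shift).  Assumes BITSIZE is smaller than
   the word width; the signed case relies on an arithmetic right shift,
   which the generated RTL guarantees but ISO C leaves implementation
   defined.  */

#include <limits.h>

static long
sketch_field_value (long temp, int bitsize, int is_unsigned)
{
  if (is_unsigned)
    return temp & (((long) 1 << bitsize) - 1);	/* width_mask */

  {
    int count = (int) sizeof (long) * CHAR_BIT - bitsize;
    return (temp << count) >> count;	/* LSHIFT_EXPR then RSHIFT_EXPR */
  }
}
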
3451 /* Return true if any object containing the innermost array is an unaligned
3452 packed structure field. */
3455 get_inner_unaligned_p (exp)
3458 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3462 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3464 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3468 else if (TREE_CODE (exp) != ARRAY_REF
3469 && TREE_CODE (exp) != NON_LVALUE_EXPR
3470 && ! ((TREE_CODE (exp) == NOP_EXPR
3471 || TREE_CODE (exp) == CONVERT_EXPR)
3472 && (TYPE_MODE (TREE_TYPE (exp))
3473 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3476 exp = TREE_OPERAND (exp, 0);
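
/* Editorial sketch (not part of the original file): the loop shape of
   get_inner_unaligned_p.  Walk outward through the objects containing the
   innermost array and report any container whose alignment falls below
   what the array's type needs.  The struct is a stand-in for the chain of
   COMPONENT_REF nodes.  */

struct sketch_ref
{
  int container_align;		/* TYPE_ALIGN of the containing object */
  struct sketch_ref *outer;	/* next enclosing reference, or null   */
};

static int
sketch_inner_unaligned_p (struct sketch_ref *ref, int needed_align)
{
  for (; ref != 0; ref = ref->outer)
    if (ref->container_align < needed_align)
      return 1;			/* found an under-aligned container */
  return 0;
}
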
3482 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3483 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3484 ARRAY_REFs and find the ultimate containing object, which we return.
3486 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3487 bit position, and *PUNSIGNEDP to the signedness of the field.
3488 If the position of the field is variable, we store a tree
3489 giving the variable offset (in units) in *POFFSET.
3490 This offset is in addition to the bit position.
3491 If the position is not variable, we store 0 in *POFFSET.
3493 If any of the extraction expressions is volatile,
3494 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3496 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3497 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
3500 If the field describes a variable-sized object, *PMODE is set to
3501 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3502 this case, but the address of the object can be found. */
3505 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3506 punsignedp, pvolatilep)
3511 enum machine_mode *pmode;
3515 tree orig_exp = exp;
3517 enum machine_mode mode = VOIDmode;
3518 tree offset = integer_zero_node;
3520 if (TREE_CODE (exp) == COMPONENT_REF)
3522 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3523 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3524 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3525 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3527 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3529 size_tree = TREE_OPERAND (exp, 1);
3530 *punsignedp = TREE_UNSIGNED (exp);
3534 mode = TYPE_MODE (TREE_TYPE (exp));
3535 *pbitsize = GET_MODE_BITSIZE (mode);
3536 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3541 if (TREE_CODE (size_tree) != INTEGER_CST)
3542 mode = BLKmode, *pbitsize = -1;
3544 *pbitsize = TREE_INT_CST_LOW (size_tree);
3547 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3548 and find the ultimate containing object. */
3554 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3556 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3557 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3558 : TREE_OPERAND (exp, 2));
3559 tree constant = integer_zero_node, var = pos;
3561 /* If this field hasn't been filled in yet, don't go
3562 past it. This should only happen when folding expressions
3563 made during type construction. */
3567 /* Assume here that the offset is a multiple of a unit.
3568 If not, there should be an explicitly added constant. */
3569 if (TREE_CODE (pos) == PLUS_EXPR
3570 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3571 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3572 else if (TREE_CODE (pos) == INTEGER_CST)
3573 constant = pos, var = integer_zero_node;
3575 *pbitpos += TREE_INT_CST_LOW (constant);
3578 offset = size_binop (PLUS_EXPR, offset,
3579 size_binop (EXACT_DIV_EXPR, var,
3580 size_int (BITS_PER_UNIT)));
3583 else if (TREE_CODE (exp) == ARRAY_REF)
3585 /* This code is based on the code in case ARRAY_REF in expand_expr
3586 below. We assume here that the size of an array element is
3587 always an integral multiple of BITS_PER_UNIT. */
3589 tree index = TREE_OPERAND (exp, 1);
3590 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3592 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3593 tree index_type = TREE_TYPE (index);
3595 if (! integer_zerop (low_bound))
3596 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3598 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3600 index = convert (type_for_size (POINTER_SIZE, 0), index);
3601 index_type = TREE_TYPE (index);
3604 index = fold (build (MULT_EXPR, index_type, index,
3605 TYPE_SIZE (TREE_TYPE (exp))));
3607 if (TREE_CODE (index) == INTEGER_CST
3608 && TREE_INT_CST_HIGH (index) == 0)
3609 *pbitpos += TREE_INT_CST_LOW (index);
3611 offset = size_binop (PLUS_EXPR, offset,
3612 size_binop (FLOOR_DIV_EXPR, index,
3613 size_int (BITS_PER_UNIT)));
3615 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3616 && ! ((TREE_CODE (exp) == NOP_EXPR
3617 || TREE_CODE (exp) == CONVERT_EXPR)
3618 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3619 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3621 && (TYPE_MODE (TREE_TYPE (exp))
3622 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3625 /* If any reference in the chain is volatile, the effect is volatile. */
3626 if (TREE_THIS_VOLATILE (exp))
3628 exp = TREE_OPERAND (exp, 0);
3631 /* If this was a bit-field, see if there is a mode that allows direct
3632 access in case EXP is in memory. */
3633 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3635 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3636 if (mode == BLKmode)
3640 if (integer_zerop (offset))
3643 if (offset != 0 && contains_placeholder_p (offset))
3644 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
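
/* Editorial sketch (not part of the original file): the accumulation the
   loop above performs for a nested reference such as `s.a[3].b', with
   hypothetical layout numbers.  Each COMPONENT_REF contributes its
   DECL_FIELD_BITPOS, a constant ARRAY_REF contributes index times the
   element size in bits, and any variable part would go to *POFFSET
   instead.  */

#include <stdio.h>

int
main (void)
{
  long bitpos = 0;

  bitpos += 8;		/* DECL_FIELD_BITPOS of `b' (outermost tree node) */
  bitpos += 3 * 32;	/* ARRAY_REF: index 3, 32-bit elements            */
  bitpos += 64;		/* DECL_FIELD_BITPOS of `a' within `s'            */

  printf ("bit position: %ld\n", bitpos);	/* 8 + 96 + 64 = 168 */
  return 0;
}
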
3651 /* Given an rtx VALUE that may contain additions and multiplications,
3652 return an equivalent value that just refers to a register or memory.
3653 This is done by generating instructions to perform the arithmetic
3654 and returning a pseudo-register containing the value.
3656 The returned value may be a REG, SUBREG, MEM or constant. */
3659 force_operand (value, target)
3662 register optab binoptab = 0;
3663 /* Use a temporary to force order of execution of calls to `force_operand'. */
3667 /* Use subtarget as the target for operand 0 of a binary operation. */
3668 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3670 if (GET_CODE (value) == PLUS)
3671 binoptab = add_optab;
3672 else if (GET_CODE (value) == MINUS)
3673 binoptab = sub_optab;
3674 else if (GET_CODE (value) == MULT)
3676 op2 = XEXP (value, 1);
3677 if (!CONSTANT_P (op2)
3678 && !(GET_CODE (op2) == REG && op2 != subtarget))
3680 tmp = force_operand (XEXP (value, 0), subtarget);
3681 return expand_mult (GET_MODE (value), tmp,
3682 force_operand (op2, NULL_RTX),
3688 op2 = XEXP (value, 1);
3689 if (!CONSTANT_P (op2)
3690 && !(GET_CODE (op2) == REG && op2 != subtarget))
3692 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3694 binoptab = add_optab;
3695 op2 = negate_rtx (GET_MODE (value), op2);
3698 /* Check for an addition with OP2 a constant integer and our first
3699 operand a PLUS of a virtual register and something else. In that
3700 case, we want to emit the sum of the virtual register and the
3701 constant first and then add the other value. This allows virtual
3702 register instantiation to simply modify the constant rather than
3703 creating another one around this addition. */
3704 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3705 && GET_CODE (XEXP (value, 0)) == PLUS
3706 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3707 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3708 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3710 rtx temp = expand_binop (GET_MODE (value), binoptab,
3711 XEXP (XEXP (value, 0), 0), op2,
3712 subtarget, 0, OPTAB_LIB_WIDEN);
3713 return expand_binop (GET_MODE (value), binoptab, temp,
3714 force_operand (XEXP (XEXP (value, 0), 1), 0),
3715 target, 0, OPTAB_LIB_WIDEN);
3718 tmp = force_operand (XEXP (value, 0), subtarget);
3719 return expand_binop (GET_MODE (value), binoptab, tmp,
3720 force_operand (op2, NULL_RTX),
3721 target, 0, OPTAB_LIB_WIDEN);
3722 /* We give UNSIGNEDP = 0 to expand_binop
3723 because the only operations we are expanding here are signed ones. */
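
/* Editorial sketch (not part of the original file): the reassociation done
   in the addition case above.  For `(v + x) + c', where V is a virtual
   register and C a constant, the sum `v + c' is emitted first so virtual
   register instantiation can simply fold C into V's replacement offset;
   only then is X added.  The value is unchanged; only the emission order
   matters.  */

static long
sketch_force_plus (long v, long x, long c)
{
  long temp = v + c;	/* combine the virtual register and the constant */
  return temp + x;	/* then add the remaining operand                */
}
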
3728 /* Subroutine of expand_expr:
3729 save the non-copied parts (LIST) of an expr (LHS), and return a list
3730 which can restore these values to their previous values,
3731 should something modify their storage. */
3734 save_noncopied_parts (lhs, list)
3741 for (tail = list; tail; tail = TREE_CHAIN (tail))
3742 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3743 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3746 tree part = TREE_VALUE (tail);
3747 tree part_type = TREE_TYPE (part);
3748 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3749 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3750 int_size_in_bytes (part_type), 0);
3751 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3752 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3753 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3754 parts = tree_cons (to_be_saved,
3755 build (RTL_EXPR, part_type, NULL_TREE,
3758 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3763 /* Subroutine of expand_expr:
3764 record the non-copied parts (LIST) of an expr (LHS), and return a list
3765 which specifies the initial values of these parts. */
3768 init_noncopied_parts (lhs, list)
3775 for (tail = list; tail; tail = TREE_CHAIN (tail))
3776 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3777 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3780 tree part = TREE_VALUE (tail);
3781 tree part_type = TREE_TYPE (part);
3782 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3783 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
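
/* Editorial sketch (not part of the original file): the idea behind
   save_noncopied_parts and init_noncopied_parts in plain C.  Before an
   assignment may clobber a structure, each part that must not be copied
   is stashed in a temporary so that its previous value can later be
   reinstated.  */

struct sketch_part
{
  int *where;			/* the field inside the lhs        */
  int saved;			/* temporary holding its old value */
};

static void
sketch_save_part (struct sketch_part *p, int *field)
{
  p->where = field;
  p->saved = *field;		/* store_expr into the stack temporary */
}

static void
sketch_restore_part (struct sketch_part *p)
{
  *p->where = p->saved;		/* write the previous value back */
}
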
3788 /* Subroutine of expand_expr: return nonzero iff there is no way that
3789 EXP can reference X, which is being modified. */
3792 safe_from_p (x, exp)
3800 /* If EXP has varying size, we MUST use a target since we currently
3801 have no way of allocating temporaries of variable size. So we
3802 assume here that something at a higher level has prevented a
3803 clash. This is somewhat bogus, but the best we can do. */
3804 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3805 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3808 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3809 find the underlying pseudo. */
3810 if (GET_CODE (x) == SUBREG)
3813 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3817 /* If X is a location in the outgoing argument area, it is always safe. */
3818 if (GET_CODE (x) == MEM
3819 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3820 || (GET_CODE (XEXP (x, 0)) == PLUS
3821 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3824 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3827 exp_rtl = DECL_RTL (exp);
3834 if (TREE_CODE (exp) == TREE_LIST)
3835 return ((TREE_VALUE (exp) == 0
3836 || safe_from_p (x, TREE_VALUE (exp)))
3837 && (TREE_CHAIN (exp) == 0
3838 || safe_from_p (x, TREE_CHAIN (exp))));
3843 return safe_from_p (x, TREE_OPERAND (exp, 0));
3847 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3848 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3852 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3853 the expression. If it is set, we conflict iff we are that rtx or
3854 both are in memory. Otherwise, we check all operands of the
3855 expression recursively. */
3857 switch (TREE_CODE (exp))
3860 return (staticp (TREE_OPERAND (exp, 0))
3861 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3864 if (GET_CODE (x) == MEM)
3869 exp_rtl = CALL_EXPR_RTL (exp);
3872 /* Assume that the call will clobber all hard registers and all memory. */
3874 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3875 || GET_CODE (x) == MEM)
3882 exp_rtl = RTL_EXPR_RTL (exp);
3884 /* We don't know what this can modify. */
3889 case WITH_CLEANUP_EXPR:
3890 exp_rtl = RTL_EXPR_RTL (exp);
3893 case CLEANUP_POINT_EXPR:
3894 return safe_from_p (x, TREE_OPERAND (exp, 0));
3897 exp_rtl = SAVE_EXPR_RTL (exp);
3901 /* The only operand we look at is operand 1. The rest aren't
3902 part of the expression. */
3903 return safe_from_p (x, TREE_OPERAND (exp, 1));
3905 case METHOD_CALL_EXPR:
3906 /* This takes an rtx argument, but shouldn't appear here. */
3910 /* If we have an rtx, we do not need to scan our operands. */
3914 nops = tree_code_length[(int) TREE_CODE (exp)];
3915 for (i = 0; i < nops; i++)
3916 if (TREE_OPERAND (exp, i) != 0
3917 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3921 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3925 if (GET_CODE (exp_rtl) == SUBREG)
3927 exp_rtl = SUBREG_REG (exp_rtl);
3928 if (GET_CODE (exp_rtl) == REG
3929 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3933 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3934 are memory and EXP is not readonly. */
3935 return ! (rtx_equal_p (x, exp_rtl)
3936 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3937 && ! TREE_READONLY (exp)));
3940 /* If we reach here, it is safe. */
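
/* Editorial sketch (not part of the original file): the final conflict test
   of safe_from_p reduced to booleans.  X is unsafe only if it is the very
   rtx the expression uses, or if both are memory references, which must
   conservatively be assumed to overlap, and the expression is writable.  */

static int
sketch_safe_from_p (int same_rtx, int x_is_mem, int exp_rtl_is_mem,
		    int exp_readonly)
{
  return ! (same_rtx || (x_is_mem && exp_rtl_is_mem && ! exp_readonly));
}
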
3944 /* Subroutine of expand_expr: return nonzero iff EXP is an
3945 expression whose type is statically determinable. */
3951 if (TREE_CODE (exp) == PARM_DECL
3952 || TREE_CODE (exp) == VAR_DECL
3953 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3954 || TREE_CODE (exp) == COMPONENT_REF
3955 || TREE_CODE (exp) == ARRAY_REF)
3960 /* expand_expr: generate code for computing expression EXP.
3961 An rtx for the computed value is returned. The value is never null.
3962 In the case of a void EXP, const0_rtx is returned.
3964 The value may be stored in TARGET if TARGET is nonzero.
3965 TARGET is just a suggestion; callers must assume that
3966 the rtx returned may not be the same as TARGET.
3968 If TARGET is CONST0_RTX, it means that the value will be ignored.
3970 If TMODE is not VOIDmode, it suggests generating the
3971 result in mode TMODE. But this is done only when convenient.
3972 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3973 TMODE is just a suggestion; callers must assume that
3974 the rtx returned may not have mode TMODE.
3976 Note that TARGET may have neither TMODE nor MODE. In that case, it
3977 probably will not be used.
3979 If MODIFIER is EXPAND_SUM then when EXP is an addition
3980 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3981 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3982 products as above, or REG or MEM, or constant.
3983 Ordinarily in such cases we would output mul or add instructions
3984 and then return a pseudo reg containing the sum.
3986 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3987 it also marks a label as absolutely required (it can't be dead).
3988 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3989 This is used for outputting expressions used in initializers.
3991 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3992 with a constant address even if that address is not normally legitimate.
3993 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
3996 expand_expr (exp, target, tmode, modifier)
3999 enum machine_mode tmode;
4000 enum expand_modifier modifier;
4002 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4003 This is static so it will be accessible to our recursive callees. */
4004 static tree placeholder_list = 0;
4005 register rtx op0, op1, temp;
4006 tree type = TREE_TYPE (exp);
4007 int unsignedp = TREE_UNSIGNED (type);
4008 register enum machine_mode mode = TYPE_MODE (type);
4009 register enum tree_code code = TREE_CODE (exp);
4011 /* Use subtarget as the target for operand 0 of a binary operation. */
4012 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4013 rtx original_target = target;
4014 /* Maybe defer this until sure not doing bytecode? */
4015 int ignore = (target == const0_rtx
4016 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4017 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4018 || code == COND_EXPR)
4019 && TREE_CODE (type) == VOID_TYPE));
4023 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4025 bc_expand_expr (exp);
4029 /* Don't use hard regs as subtargets, because the combiner
4030 can only handle pseudo regs. */
4031 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4033 /* Avoid subtargets inside loops,
4034 since they hide some invariant expressions. */
4035 if (preserve_subexpressions_p ())
4038 /* If we are going to ignore this result, we need only do something
4039 if there is a side-effect somewhere in the expression. If there
4040 is, short-circuit the most common cases here. Note that we must
4041 not call expand_expr with anything but const0_rtx in case this
4042 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4046 if (! TREE_SIDE_EFFECTS (exp))
4049 /* Ensure we reference a volatile object even if value is ignored. */
4050 if (TREE_THIS_VOLATILE (exp)
4051 && TREE_CODE (exp) != FUNCTION_DECL
4052 && mode != VOIDmode && mode != BLKmode)
4054 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4055 if (GET_CODE (temp) == MEM)
4056 temp = copy_to_reg (temp);
4060 if (TREE_CODE_CLASS (code) == '1')
4061 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4062 VOIDmode, modifier);
4063 else if (TREE_CODE_CLASS (code) == '2'
4064 || TREE_CODE_CLASS (code) == '<')
4066 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4067 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4070 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4072 /* If the second operand has no side effects, just evaluate the first. */
4074 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4075 VOIDmode, modifier);
4080 /* If we will do cse, generate all results into pseudo registers
4081 since 1) that allows cse to find more things
4082 and 2) otherwise cse could produce an insn the machine cannot support. */
4085 if (! cse_not_expected && mode != BLKmode && target
4086 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4093 tree function = decl_function_context (exp);
4094 /* Handle using a label in a containing function. */
4095 if (function != current_function_decl && function != 0)
4097 struct function *p = find_function_data (function);
4098 /* Allocate in the memory associated with the function
4099 that the label is in. */
4100 push_obstacks (p->function_obstack,
4101 p->function_maybepermanent_obstack);
4103 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4104 label_rtx (exp), p->forced_labels);
4107 else if (modifier == EXPAND_INITIALIZER)
4108 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4109 label_rtx (exp), forced_labels);
4110 temp = gen_rtx (MEM, FUNCTION_MODE,
4111 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4112 if (function != current_function_decl && function != 0)
4113 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4118 if (DECL_RTL (exp) == 0)
4120 error_with_decl (exp, "prior parameter's size depends on `%s'");
4121 return CONST0_RTX (mode);
4124 /* ... fall through ... */
4127 /* If a static var's type was incomplete when the decl was written,
4128 but the type is complete now, lay out the decl now. */
4129 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4130 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4132 push_obstacks_nochange ();
4133 end_temporary_allocation ();
4134 layout_decl (exp, 0);
4135 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4139 /* ... fall through ... */
4143 if (DECL_RTL (exp) == 0)
4146 /* Ensure variable marked as used even if it doesn't go through
4147 a parser. If it hasn't been used yet, write out an external definition. */
4149 if (! TREE_USED (exp))
4151 assemble_external (exp);
4152 TREE_USED (exp) = 1;
4155 /* Handle variables inherited from containing functions. */
4156 context = decl_function_context (exp);
4158 /* We treat inline_function_decl as an alias for the current function
4159 because that is the inline function whose vars, types, etc.
4160 are being merged into the current function.
4161 See expand_inline_function. */
4163 if (context != 0 && context != current_function_decl
4164 && context != inline_function_decl
4165 /* If var is static, we don't need a static chain to access it. */
4166 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4167 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4171 /* Mark as non-local and addressable. */
4172 DECL_NONLOCAL (exp) = 1;
4173 mark_addressable (exp);
4174 if (GET_CODE (DECL_RTL (exp)) != MEM)
4176 addr = XEXP (DECL_RTL (exp), 0);
4177 if (GET_CODE (addr) == MEM)
4178 addr = gen_rtx (MEM, Pmode,
4179 fix_lexical_addr (XEXP (addr, 0), exp));
4181 addr = fix_lexical_addr (addr, exp);
4182 return change_address (DECL_RTL (exp), mode, addr);
4185 /* This is the case of an array whose size is to be determined
4186 from its initializer, while the initializer is still being parsed. See expand_decl. */
4189 if (GET_CODE (DECL_RTL (exp)) == MEM
4190 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4191 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4192 XEXP (DECL_RTL (exp), 0));
4194 /* If DECL_RTL is memory, we are in the normal case and either
4195 the address is not valid or it is not a register and -fforce-addr
4196 is specified, get the address into a register. */
4198 if (GET_CODE (DECL_RTL (exp)) == MEM
4199 && modifier != EXPAND_CONST_ADDRESS
4200 && modifier != EXPAND_SUM
4201 && modifier != EXPAND_INITIALIZER
4202 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4205 return change_address (DECL_RTL (exp), VOIDmode,
4206 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4208 /* If the mode of DECL_RTL does not match that of the decl, it
4209 must be a promoted value. We return a SUBREG of the wanted mode,
4210 but mark it so that we know that it was already extended. */
4212 if (GET_CODE (DECL_RTL (exp)) == REG
4213 && GET_MODE (DECL_RTL (exp)) != mode)
4215 /* Get the signedness used for this variable. Ensure we get the
4216 same mode we got when the variable was declared. */
4217 if (GET_MODE (DECL_RTL (exp))
4218 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4221 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4222 SUBREG_PROMOTED_VAR_P (temp) = 1;
4223 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4227 return DECL_RTL (exp);
4230 return immed_double_const (TREE_INT_CST_LOW (exp),
4231 TREE_INT_CST_HIGH (exp),
4235 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4238 /* If optimized, generate immediate CONST_DOUBLE
4239 which will be turned into memory by reload if necessary.
4241 We used to force a register so that loop.c could see it. But
4242 this does not allow gen_* patterns to perform optimizations with
4243 the constants. It also produces two insns in cases like "x = 1.0;".
4244 On most machines, floating-point constants are not permitted in
4245 many insns, so we'd end up copying it to a register in any case.
4247 Now, we do the copying in expand_binop, if appropriate. */
4248 return immed_real_const (exp);
4252 if (! TREE_CST_RTL (exp))
4253 output_constant_def (exp);
4255 /* TREE_CST_RTL probably contains a constant address.
4256 On RISC machines where a constant address isn't valid,
4257 make some insns to get that address into a register. */
4258 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4259 && modifier != EXPAND_CONST_ADDRESS
4260 && modifier != EXPAND_INITIALIZER
4261 && modifier != EXPAND_SUM
4262 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4264 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4265 return change_address (TREE_CST_RTL (exp), VOIDmode,
4266 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4267 return TREE_CST_RTL (exp);
4270 context = decl_function_context (exp);
4272 /* We treat inline_function_decl as an alias for the current function
4273 because that is the inline function whose vars, types, etc.
4274 are being merged into the current function.
4275 See expand_inline_function. */
4276 if (context == current_function_decl || context == inline_function_decl)
4279 /* If this is non-local, handle it. */
4282 temp = SAVE_EXPR_RTL (exp);
4283 if (temp && GET_CODE (temp) == REG)
4285 put_var_into_stack (exp);
4286 temp = SAVE_EXPR_RTL (exp);
4288 if (temp == 0 || GET_CODE (temp) != MEM)
4290 return change_address (temp, mode,
4291 fix_lexical_addr (XEXP (temp, 0), exp));
4293 if (SAVE_EXPR_RTL (exp) == 0)
4295 if (mode == BLKmode)
4298 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4299 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4302 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4304 SAVE_EXPR_RTL (exp) = temp;
4305 if (!optimize && GET_CODE (temp) == REG)
4306 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4309 /* If the mode of TEMP does not match that of the expression, it
4310 must be a promoted value. We pass store_expr a SUBREG of the
4311 wanted mode but mark it so that we know that it was already
4312 extended. Note that `unsignedp' was modified above in this case. */
4315 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4317 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4318 SUBREG_PROMOTED_VAR_P (temp) = 1;
4319 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4322 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4325 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4326 must be a promoted value. We return a SUBREG of the wanted mode,
4327 but mark it so that we know that it was already extended. */
4329 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4330 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4332 /* Compute the signedness and make the proper SUBREG. */
4333 promote_mode (type, mode, &unsignedp, 0);
4334 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4335 SUBREG_PROMOTED_VAR_P (temp) = 1;
4336 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4340 return SAVE_EXPR_RTL (exp);
4342 case PLACEHOLDER_EXPR:
4343 /* If there is an object on the head of the placeholder list,
4344 see if some object in its references is of type TYPE. For
4345 further information, see tree.def. */
4346 if (placeholder_list)
4349 tree old_list = placeholder_list;
4351 for (object = TREE_PURPOSE (placeholder_list);
4352 TREE_TYPE (object) != type
4353 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4354 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4355 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4356 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4357 object = TREE_OPERAND (object, 0))
4360 if (object && TREE_TYPE (object) == type)
4362 /* Expand this object skipping the list entries before
4363 it was found in case it is also a PLACEHOLDER_EXPR.
4364 In that case, we want to translate it using subsequent entries. */
4366 placeholder_list = TREE_CHAIN (placeholder_list);
4367 temp = expand_expr (object, original_target, tmode, modifier);
4368 placeholder_list = old_list;
4373 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4376 case WITH_RECORD_EXPR:
4377 /* Put the object on the placeholder list, expand our first operand,
4378 and pop the list. */
4379 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4381 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4383 placeholder_list = TREE_CHAIN (placeholder_list);
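/* Sketch of how the two codes pair up: for a self-referential size
   such as (WITH_RECORD_EXPR (PLUS_EXPR (PLACEHOLDER_EXPR) 1) REC),
   we push REC, expand the PLUS_EXPR -- during which the
   PLACEHOLDER_EXPR case above substitutes REC -- and pop the list
   again.  */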
4387 expand_exit_loop_if_false (NULL_PTR,
4388 invert_truthvalue (TREE_OPERAND (exp, 0)));
4393 expand_start_loop (1);
4394 expand_expr_stmt (TREE_OPERAND (exp, 0));
4402 tree vars = TREE_OPERAND (exp, 0);
4403 int vars_need_expansion = 0;
4405 /* Need to open a binding contour here because
4406 if there are any cleanups they must be contained here. */
4407 expand_start_bindings (0);
4409 /* Mark the corresponding BLOCK for output in its proper place. */
4410 if (TREE_OPERAND (exp, 2) != 0
4411 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4412 insert_block (TREE_OPERAND (exp, 2));
4414 /* If VARS have not yet been expanded, expand them now. */
4417 if (DECL_RTL (vars) == 0)
4419 vars_need_expansion = 1;
4422 expand_decl_init (vars);
4423 vars = TREE_CHAIN (vars);
4426 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4428 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4434 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4436 emit_insns (RTL_EXPR_SEQUENCE (exp));
4437 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4438 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4439 free_temps_for_rtl_expr (exp);
4440 return RTL_EXPR_RTL (exp);
4443 /* If we don't need the result, just ensure we evaluate any subexpressions. */
4448 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4449 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4453 /* All elts simple constants => refer to a constant in memory. But
4454 if this is a non-BLKmode mode, let it store a field at a time
4455 since that should make a CONST_INT or CONST_DOUBLE when we
4456 fold. Likewise, if we have a target we can use, it is best to
4457 store directly into the target unless the type is large enough
4458 that memcpy will be used. If we are making an initializer and
4459 all operands are constant, put it in memory as well. */
4460 else if ((TREE_STATIC (exp)
4461 && ((mode == BLKmode
4462 && ! (target != 0 && safe_from_p (target, exp)))
4463 || TREE_ADDRESSABLE (exp)
4464 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4465 && (move_by_pieces_ninsns
4466 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4469 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4471 rtx constructor = output_constant_def (exp);
4472 if (modifier != EXPAND_CONST_ADDRESS
4473 && modifier != EXPAND_INITIALIZER
4474 && modifier != EXPAND_SUM
4475 && (! memory_address_p (GET_MODE (constructor),
4476 XEXP (constructor, 0))
4478 && GET_CODE (XEXP (constructor, 0)) != REG)))
4479 constructor = change_address (constructor, VOIDmode,
4480 XEXP (constructor, 0));
4486 if (target == 0 || ! safe_from_p (target, exp))
4488 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4489 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4493 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4494 if (AGGREGATE_TYPE_P (type))
4495 MEM_IN_STRUCT_P (target) = 1;
4498 store_constructor (exp, target);
4504 tree exp1 = TREE_OPERAND (exp, 0);
4507 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4508 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4509 This code has the same general effect as simply doing
4510 expand_expr on the save expr, except that the expression PTR
4511 is computed for use as a memory address. This means different
4512 code, suitable for indexing, may be generated. */
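/* Illustrative case: for `*p += 1' the front end may wrap P in a
   SAVE_EXPR; expanding its operand with EXPAND_SUM lets a sum such as
   (plus (reg) (const_int)) serve directly as the memory address on
   machines with indexed addressing, instead of first being forced
   into a register.  */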
4513 if (TREE_CODE (exp1) == SAVE_EXPR
4514 && SAVE_EXPR_RTL (exp1) == 0
4515 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4517 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4518 VOIDmode, EXPAND_SUM);
4519 op0 = memory_address (mode, temp);
4520 op0 = copy_all_regs (op0);
4521 SAVE_EXPR_RTL (exp1) = op0;
4525 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4526 op0 = memory_address (mode, op0);
4529 temp = gen_rtx (MEM, mode, op0);
4530 /* If address was computed by addition,
4531 mark this as an element of an aggregate. */
4532 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4533 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4534 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4535 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4536 || (TREE_CODE (exp1) == ADDR_EXPR
4537 && (exp2 = TREE_OPERAND (exp1, 0))
4538 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4539 MEM_IN_STRUCT_P (temp) = 1;
4540 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4541 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4542 a location is accessed through a pointer to const does not mean
4543 that the value there can never change. */
4544 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4550 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4554 tree array = TREE_OPERAND (exp, 0);
4555 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4556 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4557 tree index = TREE_OPERAND (exp, 1);
4558 tree index_type = TREE_TYPE (index);
4561 if (TREE_CODE (low_bound) != INTEGER_CST
4562 && contains_placeholder_p (low_bound))
4563 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4565 /* Optimize the special-case of a zero lower bound.
4567 We convert the low_bound to sizetype to avoid some problems
4568 with constant folding. (E.g. suppose the lower bound is 1,
4569 and its mode is QI. Without the conversion, (ARRAY
4570 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4571 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4573 But sizetype isn't quite right either (especially if
4574 the lowbound is negative). FIXME */
4576 if (! integer_zerop (low_bound))
4577 index = fold (build (MINUS_EXPR, index_type, index,
4578 convert (sizetype, low_bound)));
4580 if ((TREE_CODE (index) != INTEGER_CST
4581 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4582 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4584 /* Nonconstant array index or nonconstant element size, and
4585 not an array in an unaligned (packed) structure field.
4586 Generate the tree for *(&array+index) and expand that,
4587 except do it in a language-independent way
4588 and don't complain about non-lvalue arrays.
4589 `mark_addressable' should already have been called
4590 for any array for which this case will be reached. */
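/* E.g., for `a[i]' with 4-byte elements this builds the equivalent
   of `*(&a + i*4)': an INDIRECT_REF of a PLUS_EXPR of the array's
   address and the scaled index, which is then expanded below.  */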
4592 /* Don't forget the const or volatile flag from the array element. */
4594 tree variant_type = build_type_variant (type,
4595 TREE_READONLY (exp),
4596 TREE_THIS_VOLATILE (exp));
4597 tree array_adr = build1 (ADDR_EXPR,
4598 build_pointer_type (variant_type), array);
4600 tree size = size_in_bytes (type);
4602 /* Convert the integer argument to a type the same size as a
4603 pointer so the multiply won't overflow spuriously. */
4604 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4605 index = convert (type_for_size (POINTER_SIZE, 0), index);
4607 if (TREE_CODE (size) != INTEGER_CST
4608 && contains_placeholder_p (size))
4609 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4611 /* Don't think the address has side effects
4612 just because the array does.
4613 (In some cases the address might have side effects,
4614 and we fail to record that fact here. However, it should not
4615 matter, since expand_expr should not care.) */
4616 TREE_SIDE_EFFECTS (array_adr) = 0;
4618 elt = build1 (INDIRECT_REF, type,
4619 fold (build (PLUS_EXPR,
4620 TYPE_POINTER_TO (variant_type),
4622 fold (build (MULT_EXPR,
4623 TYPE_POINTER_TO (variant_type),
4626 /* Volatility, etc., of new expression is same as old expression. */
4628 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4629 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4630 TREE_READONLY (elt) = TREE_READONLY (exp);
4632 return expand_expr (elt, target, tmode, modifier);
4635 /* Fold an expression like: "foo"[2].
4636 This is not done in fold so it won't happen inside &.
4637 Don't fold if this is for wide characters since it's too
4638 difficult to do correctly and this is a very rare case. */
4640 if (TREE_CODE (array) == STRING_CST
4641 && TREE_CODE (index) == INTEGER_CST
4642 && !TREE_INT_CST_HIGH (index)
4643 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4644 && GET_MODE_CLASS (mode) == MODE_INT
4645 && GET_MODE_SIZE (mode) == 1)
4646 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4648 /* If this is a constant index into a constant array,
4649 just get the value from the array. Handle both the cases when
4650 we have an explicit constructor and when our operand is a variable
4651 that was declared const. */
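/* Example: given `static const int tbl[] = {2, 3, 5};', a reference
   `tbl[1]' can be expanded directly to the constant 3 with no memory
   reference, since DECL_INITIAL of TBL is a CONSTRUCTOR whose
   elements are all known.  */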
4653 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4655 if (TREE_CODE (index) == INTEGER_CST
4656 && TREE_INT_CST_HIGH (index) == 0)
4658 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4660 i = TREE_INT_CST_LOW (index);
4662 elem = TREE_CHAIN (elem);
4664 return expand_expr (fold (TREE_VALUE (elem)), target,
4669 else if (optimize >= 1
4670 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4671 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4672 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4674 if (TREE_CODE (index) == INTEGER_CST
4675 && TREE_INT_CST_HIGH (index) == 0)
4677 tree init = DECL_INITIAL (array);
4679 i = TREE_INT_CST_LOW (index);
4680 if (TREE_CODE (init) == CONSTRUCTOR)
4682 tree elem = CONSTRUCTOR_ELTS (init);
4685 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4686 elem = TREE_CHAIN (elem);
4688 return expand_expr (fold (TREE_VALUE (elem)), target,
4691 else if (TREE_CODE (init) == STRING_CST
4692 && i < TREE_STRING_LENGTH (init))
4693 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4698 /* Treat array-ref with constant index as a component-ref. */
4702 /* If the operand is a CONSTRUCTOR, we can just extract the
4703 appropriate field if it is present. Don't do this if we have
4704 already written the data since we want to refer to that copy
4705 and varasm.c assumes that's what we'll do. */
4706 if (code != ARRAY_REF
4707 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4708 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4712 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4713 elt = TREE_CHAIN (elt))
4714 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4715 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4719 enum machine_mode mode1;
4724 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4725 &mode1, &unsignedp, &volatilep);
4728 /* If we got back the original object, something is wrong. Perhaps
4729 we are evaluating an expression too early. In any event, don't
4730 infinitely recurse. */
4734 /* In some cases, we will be offsetting OP0's address by a constant.
4735 So get it as a sum, if possible. If we will be using it
4736 directly in an insn, we validate it. */
4737 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4739 /* If this is a constant, put it into a register if it is a
4740 legitimate constant and memory if it isn't. */
4741 if (CONSTANT_P (op0))
4743 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4744 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4745 op0 = force_reg (mode, op0);
4747 op0 = validize_mem (force_const_mem (mode, op0));
4750 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4753 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4755 if (GET_CODE (op0) != MEM)
4757 op0 = change_address (op0, VOIDmode,
4758 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4759 force_reg (ptr_mode, offset_rtx)));
4760 /* If we have a variable offset, the known alignment
4761 is only that of the innermost structure containing the field.
4762 (Actually, we could sometimes do better by using the
4763 size of an element of the innermost array, but no need.) */
4764 if (TREE_CODE (exp) == COMPONENT_REF
4765 || TREE_CODE (exp) == BIT_FIELD_REF)
4766 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4770 /* Don't forget about volatility even if this is a bitfield. */
4771 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4773 op0 = copy_rtx (op0);
4774 MEM_VOLATILE_P (op0) = 1;
4777 /* In cases where an aligned union has an unaligned object
4778 as a field, we might be extracting a BLKmode value from
4779 an integer-mode (e.g., SImode) object. Handle this case
4780 by doing the extract into an object as wide as the field
4781 (which we know to be the width of a basic mode), then
4782 storing into memory, and changing the mode to BLKmode. */
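/* Illustrative scenario: an SImode-aligned union containing a packed
   BLKmode field.  We extract the bits into an SImode temporary,
   spill it to a stack slot, and hand back that slot with its mode
   changed to BLKmode, so the caller sees an ordinary BLKmode memory
   reference.  */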
4783 if (mode1 == VOIDmode
4784 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4785 || (modifier != EXPAND_CONST_ADDRESS
4786 && modifier != EXPAND_SUM
4787 && modifier != EXPAND_INITIALIZER
4788 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4789 /* If the field isn't aligned enough to fetch as a memref,
4790 fetch it as a bit field. */
4791 || (SLOW_UNALIGNED_ACCESS
4792 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4793 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4795 enum machine_mode ext_mode = mode;
4797 if (ext_mode == BLKmode)
4798 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4800 if (ext_mode == BLKmode)
4803 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4804 unsignedp, target, ext_mode, ext_mode,
4806 int_size_in_bytes (TREE_TYPE (tem)));
4807 if (mode == BLKmode)
4809 rtx new = assign_stack_temp (ext_mode,
4810 bitsize / BITS_PER_UNIT, 0);
4812 emit_move_insn (new, op0);
4813 op0 = copy_rtx (new);
4814 PUT_MODE (op0, BLKmode);
4815 MEM_IN_STRUCT_P (op0) = 1;
4821 /* Get a reference to just this component. */
4822 if (modifier == EXPAND_CONST_ADDRESS
4823 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4824 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4825 (bitpos / BITS_PER_UNIT)));
4827 op0 = change_address (op0, mode1,
4828 plus_constant (XEXP (op0, 0),
4829 (bitpos / BITS_PER_UNIT)));
4830 MEM_IN_STRUCT_P (op0) = 1;
4831 MEM_VOLATILE_P (op0) |= volatilep;
4832 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4835 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4836 convert_move (target, op0, unsignedp);
4842 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4843 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4844 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4845 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4846 MEM_IN_STRUCT_P (temp) = 1;
4847 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4848 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4849 a location is accessed through a pointer to const does not mean
4850 that the value there can never change. */
4851 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4856 /* Intended for a reference to a buffer of a file-object in Pascal.
4857 But it's not certain that a special tree code will really be
4858 necessary for these. INDIRECT_REF might work for them. */
4864 /* Pascal set IN expression.
4867 rlo = set_low - (set_low%bits_per_word);
4868 the_word = set [ (index - rlo)/bits_per_word ];
4869 bit_index = index % bits_per_word;
4870 bitmask = 1 << bit_index;
4871 return !!(the_word & bitmask); */
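/* Worked instance of the above: testing `5 in s' with set_low == 0
   and 8-bit units gives rlo = 0, the_word = s[5/8] = s[0],
   bit_index = 5, bitmask = 1 << 5; the result is nonzero iff bit 5
   of the first byte is set.  */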
4873 tree set = TREE_OPERAND (exp, 0);
4874 tree index = TREE_OPERAND (exp, 1);
4875 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4876 tree set_type = TREE_TYPE (set);
4877 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4878 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4879 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4880 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4881 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4882 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4883 rtx setaddr = XEXP (setval, 0);
4884 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4886 rtx diff, quo, rem, addr, bit, result;
4888 preexpand_calls (exp);
4890 /* If domain is empty, answer is no. Likewise if index is constant
4891 and out of bounds. */
4892 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4893 && TREE_CODE (set_low_bound) == INTEGER_CST
4894 && tree_int_cst_lt (set_high_bound, set_low_bound)
4895 || (TREE_CODE (index) == INTEGER_CST
4896 && TREE_CODE (set_low_bound) == INTEGER_CST
4897 && tree_int_cst_lt (index, set_low_bound))
4898 || (TREE_CODE (set_high_bound) == INTEGER_CST
4899 && TREE_CODE (index) == INTEGER_CST
4900 && tree_int_cst_lt (set_high_bound, index))))
4904 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4906 /* If we get here, we have to generate the code for both cases
4907 (in range and out of range). */
4909 op0 = gen_label_rtx ();
4910 op1 = gen_label_rtx ();
4912 if (! (GET_CODE (index_val) == CONST_INT
4913 && GET_CODE (lo_r) == CONST_INT))
4915 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4916 GET_MODE (index_val), iunsignedp, 0);
4917 emit_jump_insn (gen_blt (op1));
4920 if (! (GET_CODE (index_val) == CONST_INT
4921 && GET_CODE (hi_r) == CONST_INT))
4923 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4924 GET_MODE (index_val), iunsignedp, 0);
4925 emit_jump_insn (gen_bgt (op1));
4928 /* Calculate the element number of bit zero in the first word of the set. */
4930 if (GET_CODE (lo_r) == CONST_INT)
4931 rlow = GEN_INT (INTVAL (lo_r)
4932 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4934 rlow = expand_binop (index_mode, and_optab, lo_r,
4935 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4936 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4938 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4939 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4941 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4942 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4943 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4944 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4946 addr = memory_address (byte_mode,
4947 expand_binop (index_mode, add_optab, diff,
4948 setaddr, NULL_RTX, iunsignedp,
4951 /* Extract the bit we want to examine */
4952 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4953 gen_rtx (MEM, byte_mode, addr),
4954 make_tree (TREE_TYPE (index), rem),
4956 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4957 GET_MODE (target) == byte_mode ? target : 0,
4958 1, OPTAB_LIB_WIDEN);
4960 if (result != target)
4961 convert_move (target, result, 1);
4963 /* Output the code to handle the out-of-range case. */
4966 emit_move_insn (target, const0_rtx);
4971 case WITH_CLEANUP_EXPR:
4972 if (RTL_EXPR_RTL (exp) == 0)
4975 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4977 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4978 /* That's it for this cleanup. */
4979 TREE_OPERAND (exp, 2) = 0;
4980 (*interim_eh_hook) (NULL_TREE);
4982 return RTL_EXPR_RTL (exp);
4984 case CLEANUP_POINT_EXPR:
4986 extern int temp_slot_level;
4987 tree old_cleanups = cleanups_this_call;
4988 int old_temp_level = target_temp_slot_level;
4990 target_temp_slot_level = temp_slot_level;
4991 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4992 /* If we're going to use this value, load it up now. */
4994 op0 = force_not_mem (op0);
4995 expand_cleanups_to (old_cleanups);
4996 preserve_temp_slots (op0);
4999 target_temp_slot_level = old_temp_level;
5004 /* Check for a built-in function. */
5005 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5006 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5008 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5009 return expand_builtin (exp, target, subtarget, tmode, ignore);
5011 /* If this call was expanded already by preexpand_calls,
5012 just return the result we got. */
5013 if (CALL_EXPR_RTL (exp) != 0)
5014 return CALL_EXPR_RTL (exp);
5016 return expand_call (exp, target, ignore);
5018 case NON_LVALUE_EXPR:
5021 case REFERENCE_EXPR:
5022 if (TREE_CODE (type) == UNION_TYPE)
5024 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5027 if (mode == BLKmode)
5029 if (TYPE_SIZE (type) == 0
5030 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5032 target = assign_stack_temp (BLKmode,
5033 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5034 + BITS_PER_UNIT - 1)
5035 / BITS_PER_UNIT, 0);
5036 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5039 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5042 if (GET_CODE (target) == MEM)
5043 /* Store data into beginning of memory target. */
5044 store_expr (TREE_OPERAND (exp, 0),
5045 change_address (target, TYPE_MODE (valtype), 0), 0);
5047 else if (GET_CODE (target) == REG)
5048 /* Store this field into a union of the proper type. */
5049 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5050 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5052 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5056 /* Return the entire union. */
5060 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5062 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5065 /* If the signedness of the conversion differs and OP0 is
5066 a promoted SUBREG, clear that indication since we now
5067 have to do the proper extension. */
5068 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5069 && GET_CODE (op0) == SUBREG)
5070 SUBREG_PROMOTED_VAR_P (op0) = 0;
5075 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5076 if (GET_MODE (op0) == mode)
5079 /* If OP0 is a constant, just convert it into the proper mode. */
5080 if (CONSTANT_P (op0))
5082 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5083 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5085 if (modifier == EXPAND_INITIALIZER)
5086 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5088 if (flag_force_mem && GET_CODE (op0) == MEM)
5089 op0 = copy_to_reg (op0);
5093 convert_to_mode (mode, op0,
5094 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5096 convert_move (target, op0,
5097 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5101 /* We come here from MINUS_EXPR when the second operand is a constant. */
5103 this_optab = add_optab;
5105 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5106 something else, make sure we add the register to the constant and
5107 then to the other thing. This case can occur during strength
5108 reduction and doing it this way will produce better code if the
5109 frame pointer or argument pointer is eliminated.
5111 fold-const.c will ensure that the constant is always in the inner
5112 PLUS_EXPR, so the only case we need to do anything about is if
5113 sp, ap, or fp is our second argument, in which case we must swap
5114 the innermost first argument and our second argument. */
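/* For instance, if strength reduction produced (x + 16) + fp, where
   FP is the frame pointer wrapped in an RTL_EXPR, we swap operands to
   get (fp + 16) + x, keeping the register-plus-constant pair together
   so it simplifies cleanly when the frame pointer is eliminated.  */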
5116 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5117 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5118 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5119 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5120 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5121 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5123 tree t = TREE_OPERAND (exp, 1);
5125 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5126 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5129 /* If the result is to be ptr_mode and we are adding an integer to
5130 something, we might be forming a constant. So try to use
5131 plus_constant. If it produces a sum and we can't accept it,
5132 use force_operand. This allows P = &ARR[const] to generate
5133 efficient code on machines where a SYMBOL_REF is not a valid address.
5136 If this is an EXPAND_SUM call, always return the sum. */
5137 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5138 || mode == ptr_mode)
5140 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5141 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5142 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5144 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5146 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5147 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5148 op1 = force_operand (op1, target);
5152 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5153 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5154 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5156 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5158 if (! CONSTANT_P (op0))
5160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5161 VOIDmode, modifier);
5162 /* Don't go to both_summands if modifier
5163 says it's not right to return a PLUS. */
5164 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5168 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5169 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5170 op0 = force_operand (op0, target);
5175 /* No sense saving up arithmetic to be done
5176 if it's all in the wrong mode to form part of an address.
5177 And force_operand won't know whether to sign-extend or zero-extend. */
5179 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5180 || mode != ptr_mode)
5183 preexpand_calls (exp);
5184 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5187 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5188 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5191 /* Make sure any term that's a sum with a constant comes last. */
5192 if (GET_CODE (op0) == PLUS
5193 && CONSTANT_P (XEXP (op0, 1)))
5199 /* If adding to a sum including a constant,
5200 associate it to put the constant outside. */
5201 if (GET_CODE (op1) == PLUS
5202 && CONSTANT_P (XEXP (op1, 1)))
5204 rtx constant_term = const0_rtx;
5206 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5209 /* Ensure that MULT comes first if there is one. */
5210 else if (GET_CODE (op0) == MULT)
5211 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5213 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5215 /* Let's also eliminate constants from op0 if possible. */
5216 op0 = eliminate_constant_term (op0, &constant_term);
5218 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5219 their sum should be a constant. Form it into OP1, since the
5220 result we want will then be OP0 + OP1. */
5222 temp = simplify_binary_operation (PLUS, mode, constant_term,
5227 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5230 /* Put a constant term last and put a multiplication first. */
5231 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5232 temp = op1, op1 = op0, op0 = temp;
5234 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5235 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5238 /* For initializers, we are allowed to return a MINUS of two
5239 symbolic constants. Here we handle all cases when both operands are constant. */
5241 /* Handle difference of two symbolic constants,
5242 for the sake of an initializer. */
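/* E.g., an initializer such as
   `static long d = (char *) &a - (char *) &b;' arrives here with two
   really_constant_p operands; for an initializer we may return a
   literal (minus ...) of the two addresses instead of emitting insns
   to compute the difference.  */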
5243 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5244 && really_constant_p (TREE_OPERAND (exp, 0))
5245 && really_constant_p (TREE_OPERAND (exp, 1)))
5247 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5248 VOIDmode, modifier);
5249 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5250 VOIDmode, modifier);
5252 /* If the last operand is a CONST_INT, use plus_constant of
5253 the negated constant. Else make the MINUS. */
5254 if (GET_CODE (op1) == CONST_INT)
5255 return plus_constant (op0, - INTVAL (op1));
5257 return gen_rtx (MINUS, mode, op0, op1);
5259 /* Convert A - const to A + (-const). */
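/* Illustrative: `a - 5' becomes `a + (-5)' so the PLUS_EXPR code
   above can fold it into addresses.  For an unsigned type, where -5
   is not representable, the operands are first converted to the
   corresponding signed type, which yields the same bit pattern.  */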
5260 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5262 tree negated = fold (build1 (NEGATE_EXPR, type,
5263 TREE_OPERAND (exp, 1)));
5265 /* Deal with the case where we can't negate the constant in TYPE. */
5267 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5269 tree newtype = signed_type (type);
5270 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5271 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5272 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5274 if (! TREE_OVERFLOW (newneg))
5275 return expand_expr (convert (type,
5276 build (PLUS_EXPR, newtype,
5278 target, tmode, modifier);
5282 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5286 this_optab = sub_optab;
5290 preexpand_calls (exp);
5291 /* If first operand is constant, swap them.
5292 Thus the following special case checks need only
5293 check the second operand. */
5294 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5296 register tree t1 = TREE_OPERAND (exp, 0);
5297 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5298 TREE_OPERAND (exp, 1) = t1;
5301 /* Attempt to return something suitable for generating an
5302 indexed address, for machines that support that. */
5304 if (modifier == EXPAND_SUM && mode == ptr_mode
5305 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5308 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5310 /* Apply distributive law if OP0 is x+c. */
5311 if (GET_CODE (op0) == PLUS
5312 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5313 return gen_rtx (PLUS, mode,
5314 gen_rtx (MULT, mode, XEXP (op0, 0),
5315 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5316 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5317 * INTVAL (XEXP (op0, 1))));
5319 if (GET_CODE (op0) != REG)
5320 op0 = force_operand (op0, NULL_RTX);
5321 if (GET_CODE (op0) != REG)
5322 op0 = copy_to_mode_reg (mode, op0);
5324 return gen_rtx (MULT, mode, op0,
5325 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5328 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5331 /* Check for multiplying things that have been extended
5332 from a narrower type. If this machine supports multiplying
5333 in that narrower type with a result in the desired type,
5334 do it that way, and avoid the explicit type-conversion. */
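/* Example: for `(int) s1 * (int) s2' where S1 and S2 are `short', a
   machine providing a mulhisi3 pattern can use smul_widen_optab to
   multiply the HImode operands directly into an SImode result,
   skipping both sign extensions.  */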
5335 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5336 && TREE_CODE (type) == INTEGER_TYPE
5337 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5338 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5339 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5340 && int_fits_type_p (TREE_OPERAND (exp, 1),
5341 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5342 /* Don't use a widening multiply if a shift will do. */
5343 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5344 > HOST_BITS_PER_WIDE_INT)
5345 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5347 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5348 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5350 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5351 /* If both operands are extended, they must either both
5352 be zero-extended or both be sign-extended. */
5353 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5355 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5357 enum machine_mode innermode
5358 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5359 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5360 ? umul_widen_optab : smul_widen_optab);
5361 if (mode == GET_MODE_WIDER_MODE (innermode)
5362 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5364 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5365 NULL_RTX, VOIDmode, 0);
5366 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5367 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5370 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5371 NULL_RTX, VOIDmode, 0);
5375 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5376 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5377 return expand_mult (mode, op0, op1, target, unsignedp);
5379 case TRUNC_DIV_EXPR:
5380 case FLOOR_DIV_EXPR:
5382 case ROUND_DIV_EXPR:
5383 case EXACT_DIV_EXPR:
5384 preexpand_calls (exp);
5385 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5387 /* Possible optimization: compute the dividend with EXPAND_SUM
5388 then, if the divisor is constant, optimize the case
5389 where some terms of the dividend have coefficients divisible by it. */
5390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5391 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5392 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5395 this_optab = flodiv_optab;
5398 case TRUNC_MOD_EXPR:
5399 case FLOOR_MOD_EXPR:
5401 case ROUND_MOD_EXPR:
5402 preexpand_calls (exp);
5403 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5405 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5406 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5407 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5409 case FIX_ROUND_EXPR:
5410 case FIX_FLOOR_EXPR:
5412 abort (); /* Not used for C. */
5414 case FIX_TRUNC_EXPR:
5415 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5417 target = gen_reg_rtx (mode);
5418 expand_fix (target, op0, unsignedp);
5422 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5424 target = gen_reg_rtx (mode);
5425 /* expand_float can't figure out what to do if FROM has VOIDmode.
5426 So give it the correct mode. With -O, cse will optimize this. */
5427 if (GET_MODE (op0) == VOIDmode)
5428 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5430 expand_float (target, op0,
5431 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5435 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5436 temp = expand_unop (mode, neg_optab, op0, target, 0);
5442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5444 /* Handle complex values specially. */
5445 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5446 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5447 return expand_complex_abs (mode, op0, target, unsignedp);
5449 /* Unsigned abs is simply the operand. Testing here means we don't
5450 risk generating incorrect code below. */
5451 if (TREE_UNSIGNED (type))
5454 return expand_abs (mode, op0, target, unsignedp,
5455 safe_from_p (target, TREE_OPERAND (exp, 0)));
5459 target = original_target;
5460 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5461 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5462 || GET_MODE (target) != mode
5463 || (GET_CODE (target) == REG
5464 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5465 target = gen_reg_rtx (mode);
5466 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5467 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5469 /* First try to do it with a special MIN or MAX instruction.
5470 If that does not win, use a conditional jump to select the proper value. */
5472 this_optab = (TREE_UNSIGNED (type)
5473 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5474 : (code == MIN_EXPR ? smin_optab : smax_optab));
5476 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5481 /* At this point, a MEM target is no longer useful; we will get better code without it. */
5484 if (GET_CODE (target) == MEM)
5485 target = gen_reg_rtx (mode);
5488 emit_move_insn (target, op0);
5490 op0 = gen_label_rtx ();
5492 /* If this mode is an integer too wide to compare properly,
5493 compare word by word. Rely on cse to optimize constant cases. */
5494 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5496 if (code == MAX_EXPR)
5497 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5498 target, op1, NULL_RTX, op0);
5500 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5501 op1, target, NULL_RTX, op0);
5502 emit_move_insn (target, op1);
5506 if (code == MAX_EXPR)
5507 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5508 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5509 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5511 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5512 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5513 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5514 if (temp == const0_rtx)
5515 emit_move_insn (target, op1);
5516 else if (temp != const_true_rtx)
5518 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5519 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5522 emit_move_insn (target, op1);
5529 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5530 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5536 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5537 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5542 /* ??? Can optimize bitwise operations with one arg constant.
5543 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5544 and (a bitwise1 b) bitwise2 b (etc)
5545 but that is probably not worthwhile. */
5547 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5548 boolean values when we want in all cases to compute both of them. In
5549 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5550 as actual zero-or-1 values and then bitwise anding. In cases where
5551 there cannot be any side effects, better code would be made by
5552 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5553 how to recognize those cases. */
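/* E.g., `f () && g ()' must be TRUTH_ANDIF_EXPR, since G must not
   run when F is false; plain TRUTH_AND_EXPR, handled here, computes
   both operands as 0-or-1 values and simply ANDs them.  */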
5555 case TRUTH_AND_EXPR:
5557 this_optab = and_optab;
5562 this_optab = ior_optab;
5565 case TRUTH_XOR_EXPR:
5567 this_optab = xor_optab;
5574 preexpand_calls (exp);
5575 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5577 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5578 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5581 /* Could determine the answer when only additive constants differ. Also,
5582 the addition of one can be handled by changing the condition. */
5589 preexpand_calls (exp);
5590 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5594 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5595 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5597 && GET_CODE (original_target) == REG
5598 && (GET_MODE (original_target)
5599 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5601 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5604 if (temp != original_target)
5605 temp = copy_to_reg (temp);
5607 op1 = gen_label_rtx ();
5608 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5609 GET_MODE (temp), unsignedp, 0);
5610 emit_jump_insn (gen_beq (op1));
5611 emit_move_insn (temp, const1_rtx);
5616 /* If no set-flag instruction, must generate a conditional
5617 store into a temporary variable. Drop through
5618 and handle this like && and ||. */
5620 case TRUTH_ANDIF_EXPR:
5621 case TRUTH_ORIF_EXPR:
5623 && (target == 0 || ! safe_from_p (target, exp)
5624 /* Make sure we don't have a hard reg (such as function's return
5625 value) live across basic blocks, if not optimizing. */
5626 || (!optimize && GET_CODE (target) == REG
5627 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5628 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5631 emit_clr_insn (target);
5633 op1 = gen_label_rtx ();
5634 jumpifnot (exp, op1);
5637 emit_0_to_1_insn (target);
5640 return ignore ? const0_rtx : target;
5642 case TRUTH_NOT_EXPR:
5643 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5644 /* The parser is careful to generate TRUTH_NOT_EXPR
5645 only with operands that are always zero or one. */
5646 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5647 target, 1, OPTAB_LIB_WIDEN);
5653 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5655 return expand_expr (TREE_OPERAND (exp, 1),
5656 (ignore ? const0_rtx : target),
5661 rtx flag = NULL_RTX;
5662 tree left_cleanups = NULL_TREE;
5663 tree right_cleanups = NULL_TREE;
5665 /* Used to save a pointer to the place to put the setting of
5666 the flag that indicates if this side of the conditional was
5667 taken. We backpatch the code, if we find out later that we
5668 have any conditional cleanups that need to be performed. */
5669 rtx dest_right_flag = NULL_RTX;
5670 rtx dest_left_flag = NULL_RTX;
5672 /* Note that COND_EXPRs whose type is a structure or union
5673 are required to be constructed to contain assignments of
5674 a temporary variable, so that we can evaluate them here
5675 for side effect only. If type is void, we must do likewise. */
5677 /* If an arm of the branch requires a cleanup,
5678 only that cleanup is performed. */
5681 tree binary_op = 0, unary_op = 0;
5682 tree old_cleanups = cleanups_this_call;
5684 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5685 convert it to our mode, if necessary. */
5686 if (integer_onep (TREE_OPERAND (exp, 1))
5687 && integer_zerop (TREE_OPERAND (exp, 2))
5688 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5692 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5697 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5698 if (GET_MODE (op0) == mode)
5702 target = gen_reg_rtx (mode);
5703 convert_move (target, op0, unsignedp);
5707 /* If we are not to produce a result, we have no target. Otherwise,
5708 if a target was specified use it; it will not be used as an
5709 intermediate target unless it is safe. If no target, use a temporary. */
5714 else if (original_target
5715 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5716 && GET_MODE (original_target) == mode
5717 && ! (GET_CODE (original_target) == MEM
5718 && MEM_VOLATILE_P (original_target)))
5719 temp = original_target;
5720 else if (mode == BLKmode)
5722 if (TYPE_SIZE (type) == 0
5723 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5726 temp = assign_stack_temp (BLKmode,
5727 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5728 + BITS_PER_UNIT - 1)
5729 / BITS_PER_UNIT, 0);
5730 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5733 temp = gen_reg_rtx (mode);
5735 /* Check for X ? A + B : A. If we have this, we can copy
5736 A to the output and conditionally add B. Similarly for unary
5737 operations. Don't do this if X has side-effects because
5738 those side effects might affect A or B and the "?" operation is
5739 a sequence point in ANSI. (We test for side effects later.) */
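/* E.g., `x ? a + b : a' copies A into the target and conditionally
   adds B, instead of branching between two full stores; `x ? a + 1 : a'
   can even become `a + (x != 0)' via do_store_flag below.  */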
5741 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5742 && operand_equal_p (TREE_OPERAND (exp, 2),
5743 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5744 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5745 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5746 && operand_equal_p (TREE_OPERAND (exp, 1),
5747 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5748 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5749 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5750 && operand_equal_p (TREE_OPERAND (exp, 2),
5751 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5752 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5753 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5754 && operand_equal_p (TREE_OPERAND (exp, 1),
5755 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5756 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5758 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5759 operation, do this as A + (X != 0). Similarly for other simple
5760 binary operators. */
5761 if (temp && singleton && binary_op
5762 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5763 && (TREE_CODE (binary_op) == PLUS_EXPR
5764 || TREE_CODE (binary_op) == MINUS_EXPR
5765 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5766 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5767 && integer_onep (TREE_OPERAND (binary_op, 1))
5768 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5771 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5772 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5773 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5776 /* If we had X ? A : A + 1, do this as A + (X == 0).
5778 We have to invert the truth value here and then put it
5779 back later if do_store_flag fails. We cannot simply copy
5780 TREE_OPERAND (exp, 0) to another variable and modify that
5781 because invert_truthvalue can modify the tree pointed to by its argument. */
5783 if (singleton == TREE_OPERAND (exp, 1))
5784 TREE_OPERAND (exp, 0)
5785 = invert_truthvalue (TREE_OPERAND (exp, 0));
5787 result = do_store_flag (TREE_OPERAND (exp, 0),
5788 (safe_from_p (temp, singleton)
5790 mode, BRANCH_COST <= 1);
5794 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5795 return expand_binop (mode, boptab, op1, result, temp,
5796 unsignedp, OPTAB_LIB_WIDEN);
5798 else if (singleton == TREE_OPERAND (exp, 1))
5799 TREE_OPERAND (exp, 0)
5800 = invert_truthvalue (TREE_OPERAND (exp, 0));
5804 op0 = gen_label_rtx ();
5806 flag = gen_reg_rtx (word_mode);
5807 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5811 /* If the target conflicts with the other operand of the
5812 binary op, we can't use it. Also, we can't use the target
5813 if it is a hard register, because evaluating the condition
5814 might clobber it. */
5816 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5817 || (GET_CODE (temp) == REG
5818 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5819 temp = gen_reg_rtx (mode);
5820 store_expr (singleton, temp, 0);
5823 expand_expr (singleton,
5824 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5825 dest_left_flag = get_last_insn ();
5826 if (singleton == TREE_OPERAND (exp, 1))
5827 jumpif (TREE_OPERAND (exp, 0), op0);
5829 jumpifnot (TREE_OPERAND (exp, 0), op0);
5831 /* Allows cleanups up to here. */
5832 old_cleanups = cleanups_this_call;
5833 if (binary_op && temp == 0)
5834 /* Just touch the other operand. */
5835 expand_expr (TREE_OPERAND (binary_op, 1),
5836 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5838 store_expr (build (TREE_CODE (binary_op), type,
5839 make_tree (type, temp),
5840 TREE_OPERAND (binary_op, 1)),
5843 store_expr (build1 (TREE_CODE (unary_op), type,
5844 make_tree (type, temp)),
5847 dest_right_flag = get_last_insn ();
5850 /* This is now done in jump.c and is better done there because it
5851 produces shorter register lifetimes. */
5853 /* Check for both possibilities either constants or variables
5854 in registers (but not the same as the target!). If so, can
5855 save branches by assigning one, branching, and assigning the other. */
5857 else if (temp && GET_MODE (temp) != BLKmode
5858 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5859 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5860 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5861 && DECL_RTL (TREE_OPERAND (exp, 1))
5862 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5863 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5864 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5865 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5866 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5867 && DECL_RTL (TREE_OPERAND (exp, 2))
5868 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5869 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5871 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5872 temp = gen_reg_rtx (mode);
5873 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5874 dest_left_flag = get_last_insn ();
5875 jumpifnot (TREE_OPERAND (exp, 0), op0);
5877 /* Allows cleanups up to here. */
5878 old_cleanups = cleanups_this_call;
5879 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5881 dest_right_flag = get_last_insn ();
5884 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5885 comparison operator. If we have one of these cases, set the
5886 output to A, branch on A (cse will merge these two references),
5887 then set the output to FOO. */
5889 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5890 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5891 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5892 TREE_OPERAND (exp, 1), 0)
5893 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5894 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5896 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5897 temp = gen_reg_rtx (mode);
5898 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5899 dest_left_flag = get_last_insn ();
5900 jumpif (TREE_OPERAND (exp, 0), op0);
5902 /* Allows cleanups up to here. */
5903 old_cleanups = cleanups_this_call;
5904 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5906 dest_right_flag = get_last_insn ();
5909 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5910 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5911 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5912 TREE_OPERAND (exp, 2), 0)
5913 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5914 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5916 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5917 temp = gen_reg_rtx (mode);
5918 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5919 dest_left_flag = get_last_insn ();
5920 jumpifnot (TREE_OPERAND (exp, 0), op0);
5922 /* Allows cleanups up to here. */
5923 old_cleanups = cleanups_this_call;
5924 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5926 dest_right_flag = get_last_insn ();
5930 op1 = gen_label_rtx ();
5931 jumpifnot (TREE_OPERAND (exp, 0), op0);
5933 /* Allows cleanups up to here. */
5934 old_cleanups = cleanups_this_call;
5936 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5938 expand_expr (TREE_OPERAND (exp, 1),
5939 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5940 dest_left_flag = get_last_insn ();
5942 /* Handle conditional cleanups, if any. */
5943 left_cleanups = defer_cleanups_to (old_cleanups);
5946 emit_jump_insn (gen_jump (op1));
5950 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5952 expand_expr (TREE_OPERAND (exp, 2),
5953 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5954 dest_right_flag = get_last_insn ();
5957 /* Handle conditional cleanups, if any. */
5958 right_cleanups = defer_cleanups_to (old_cleanups);
5964 /* Add back in any conditional cleanups. */
5965 if (left_cleanups || right_cleanups)
5971 /* Now that we know that a flag is needed, go back and add in the
5972 setting of the flag. */
5974 /* Do the left side flag. */
5975 last = get_last_insn ();
5976 /* Flag left cleanups as needed. */
5977 emit_move_insn (flag, const1_rtx);
5978 /* ??? deprecated, use sequences instead. */
5979 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5981 /* Do the right side flag. */
5982 last = get_last_insn ();
5983 /* Flag right cleanups as needed. */
5984 emit_move_insn (flag, const0_rtx);
5985 /* ??? deprecated, use sequences instead. */
5986 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
5988 /* Convert FLAG, which is an rtx, into a tree. */
5989 cond = make_node (RTL_EXPR);
5990 TREE_TYPE (cond) = integer_type_node;
5991 RTL_EXPR_RTL (cond) = flag;
5992 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5994 if (! left_cleanups)
5995 left_cleanups = integer_zero_node;
5996 if (! right_cleanups)
5997 right_cleanups = integer_zero_node;
5998 new_cleanups = build (COND_EXPR, void_type_node,
5999 truthvalue_conversion (cond),
6000 left_cleanups, right_cleanups);
6001 new_cleanups = fold (new_cleanups);
6003 /* Now add in the conditionalized cleanups. */
6005 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6006 (*interim_eh_hook) (NULL_TREE);
6013 int need_exception_region = 0;
6014 /* Something needs to be initialized, but we didn't know
6015 where that thing was when building the tree. For example,
6016 it could be the return value of a function, or a parameter
6017 to a function which is laid down on the stack, or a temporary
6018 variable which must be passed by reference.
6020 We guarantee that the expression will either be constructed
6021 or copied into our original target. */
6023 tree slot = TREE_OPERAND (exp, 0);
6027 if (TREE_CODE (slot) != VAR_DECL)
6032 if (DECL_RTL (slot) != 0)
6034 target = DECL_RTL (slot);
6035 /* We have already expanded the slot, so don't do it again. */
6037 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6042 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6043 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6044 /* All temp slots at this level must not conflict. */
6045 preserve_temp_slots (target);
6046 DECL_RTL (slot) = target;
6048 /* Since SLOT is not known to the called function
6049 to belong to its stack frame, we must build an explicit
6050 cleanup. This case occurs when we must build up a reference
6051 to pass the reference as an argument. In this case,
6052 it is very likely that such a reference need not be built here. */
6055 if (TREE_OPERAND (exp, 2) == 0)
6056 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6057 if (TREE_OPERAND (exp, 2))
6059 cleanups_this_call = tree_cons (NULL_TREE,
6060 TREE_OPERAND (exp, 2),
6061 cleanups_this_call);
6062 need_exception_region = 1;
6068 /* This case does occur when expanding a parameter which
6069 needs to be constructed on the stack. The target
6070 is the actual stack address that we want to initialize.
6071 The function we call will perform the cleanup in this case. */
6073 /* If we have already assigned it space, use that space,
6074 not the target that we were passed in, as our target
6075 parameter is only a hint. */
6076 if (DECL_RTL (slot) != 0)
6078 target = DECL_RTL (slot);
6079 /* We have already expanded the slot, so don't do it again. */
6081 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6085 DECL_RTL (slot) = target;
6088 exp1 = TREE_OPERAND (exp, 1);
6089 /* Mark it as expanded. */
6090 TREE_OPERAND (exp, 1) = NULL_TREE;
6092 temp = expand_expr (exp1, target, tmode, modifier);
6094 if (need_exception_region)
6095 (*interim_eh_hook) (NULL_TREE);
6102 tree lhs = TREE_OPERAND (exp, 0);
6103 tree rhs = TREE_OPERAND (exp, 1);
6104 tree noncopied_parts = 0;
6105 tree lhs_type = TREE_TYPE (lhs);
6107 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6108 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6109 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6110 TYPE_NONCOPIED_PARTS (lhs_type));
6111 while (noncopied_parts != 0)
6113 expand_assignment (TREE_VALUE (noncopied_parts),
6114 TREE_PURPOSE (noncopied_parts), 0, 0);
6115 noncopied_parts = TREE_CHAIN (noncopied_parts);
6122 /* If lhs is complex, expand calls in rhs before computing it.
6123 That's so we don't compute a pointer and save it over a call.
6124 If lhs is simple, compute it first so we can give it as a
6125 target if the rhs is just a call. This avoids an extra temp and copy
6126 and prevents a partial subsumption that makes bad code.
6127 Actually we could treat component_ref's of vars like vars. */
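      /* Added illustration (hypothetical `a', `i', `v', `g'):

	     a[i].x = g ();   lhs is complex: g's call is expanded first,
			      so &a[i].x is not kept live across the call;
	     v = g ();        lhs is simple: v is computed first and
			      offered to g as the target for its value.  */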
6129 tree lhs = TREE_OPERAND (exp, 0);
6130 tree rhs = TREE_OPERAND (exp, 1);
6131 tree noncopied_parts = 0;
6132 tree lhs_type = TREE_TYPE (lhs);
6136 if (TREE_CODE (lhs) != VAR_DECL
6137 && TREE_CODE (lhs) != RESULT_DECL
6138 && TREE_CODE (lhs) != PARM_DECL)
6139 preexpand_calls (exp);
6141 /* Check for |= or &= of a bitfield of size 1 into another bitfield
6142 of size 1. In this case, (unless we need the result of the
6143 assignment) we can do this more efficiently with a
6144 test followed by an assignment, if necessary.
6146 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6147 things change so we do, this code should be enhanced to support it. */
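      /* Added example (hypothetical struct; not in the original source):

	     struct { unsigned a : 1, b : 1; } x;
	     x.a |= x.b;   becomes, in effect,   if (x.b) x.a = 1;
	     x.a &= x.b;   becomes, in effect,   if (! x.b) x.a = 0;

	 so the test jumps around the store when nothing needs to be
	 written.  */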
6150 && TREE_CODE (lhs) == COMPONENT_REF
6151 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6152 || TREE_CODE (rhs) == BIT_AND_EXPR)
6153 && TREE_OPERAND (rhs, 0) == lhs
6154 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6155 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6156 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6158 rtx label = gen_label_rtx ();
6160 do_jump (TREE_OPERAND (rhs, 1),
6161 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6162 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6163 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6164 (TREE_CODE (rhs) == BIT_IOR_EXPR
6166 : integer_zero_node)),
6168 do_pending_stack_adjust ();
6173 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6174 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6175 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6176 TYPE_NONCOPIED_PARTS (lhs_type));
6178 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6179 while (noncopied_parts != 0)
6181 expand_assignment (TREE_PURPOSE (noncopied_parts),
6182 TREE_VALUE (noncopied_parts), 0, 0);
6183 noncopied_parts = TREE_CHAIN (noncopied_parts);
6188 case PREINCREMENT_EXPR:
6189 case PREDECREMENT_EXPR:
6190 return expand_increment (exp, 0);
6192 case POSTINCREMENT_EXPR:
6193 case POSTDECREMENT_EXPR:
6194 /* Faster to treat as pre-increment if result is not used. */
6195 return expand_increment (exp, ! ignore);
6198 /* If nonzero, TEMP will be set to the address of something that might
6199 be a MEM corresponding to a stack slot. */
6202 /* Are we taking the address of a nested function? */
6203 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6204 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6206 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6207 op0 = force_operand (op0, target);
6209 /* If we are taking the address of something erroneous, just return zero. */
6211 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6215 /* We make sure to pass const0_rtx down if we came in with
6216 ignore set, to avoid doing the cleanups twice for the same thing. */
6217 op0 = expand_expr (TREE_OPERAND (exp, 0),
6218 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6219 (modifier == EXPAND_INITIALIZER
6220 ? modifier : EXPAND_CONST_ADDRESS));
6222 /* If we are going to ignore the result, OP0 will have been set
6223 to const0_rtx, so just return it. Don't get confused and
6224 think we are taking the address of the constant. */
6228 /* We would like the object in memory. If it is a constant,
6229 we can have it be statically allocated into memory. For
6230 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6231 memory and store the value into it. */
6233 if (CONSTANT_P (op0))
6234 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6236 else if (GET_CODE (op0) == MEM)
6238 mark_temp_addr_taken (op0);
6239 temp = XEXP (op0, 0);
6242 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6243 || GET_CODE (op0) == CONCAT)
6245 /* If this object is in a register, it is not addressable; copy it into a freshly allocated stack temporary. */
6247 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6248 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6250 rtx memloc = assign_stack_temp (inner_mode,
6251 int_size_in_bytes (inner_type), 1);
6252 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6254 mark_temp_addr_taken (memloc);
6255 emit_move_insn (memloc, op0);
op0 = memloc;
6259 if (GET_CODE (op0) != MEM)
6262 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6264 temp = XEXP (op0, 0);
6265 #ifdef POINTERS_EXTEND_UNSIGNED
6266 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6267 && mode == ptr_mode)
6268 temp = convert_modes (ptr_mode, Pmode, temp,
6269 POINTERS_EXTEND_UNSIGNED);
6274 op0 = force_operand (XEXP (op0, 0), target);
6277 if (flag_force_addr && GET_CODE (op0) != REG)
6278 op0 = force_reg (Pmode, op0);
6280 if (GET_CODE (op0) == REG)
6281 mark_reg_pointer (op0);
6283 /* If we might have had a temp slot, add an equivalent address for it. */
6286 update_temp_slot_address (temp, op0);
6288 #ifdef POINTERS_EXTEND_UNSIGNED
6289 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6290 && mode == ptr_mode)
6291 op0 = convert_modes (ptr_mode, Pmode, op0, POINTERS_EXTEND_UNSIGNED);
6296 case ENTRY_VALUE_EXPR:
6299 /* COMPLEX type for Extended Pascal & Fortran */
6302 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6305 /* Get the rtx code of the operands. */
6306 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6307 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6310 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6314 /* Move the real (op0) and imaginary (op1) parts to their location. */
6315 emit_move_insn (gen_realpart (mode, target), op0);
6316 emit_move_insn (gen_imagpart (mode, target), op1);
6318 insns = get_insns ();
6321 /* Complex construction should appear as a single unit. */
6322 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6323 each with a separate pseudo as destination.
6324 It's not correct for flow to treat them as a unit. */
6325 if (GET_CODE (target) != CONCAT)
6326 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6334 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6335 return gen_realpart (mode, op0);
6338 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6339 return gen_imagpart (mode, op0);
6343 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6347 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6350 target = gen_reg_rtx (mode);
6354 /* Store the realpart and the negated imagpart to target. */
6355 emit_move_insn (gen_realpart (partmode, target),
6356 gen_realpart (partmode, op0));
6358 imag_t = gen_imagpart (partmode, target);
6359 temp = expand_unop (partmode, neg_optab,
6360 gen_imagpart (partmode, op0), imag_t, 0);
6362 emit_move_insn (imag_t, temp);
6364 insns = get_insns ();
6367 /* Conjugate should appear as a single unit.
6368 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6369 each with a separate pseudo as destination.
6370 It's not correct for flow to treat them as a unit. */
6371 if (GET_CODE (target) != CONCAT)
6372 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6380 op0 = CONST0_RTX (tmode);
6386 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6389 /* Here to do an ordinary binary operator, generating an instruction
6390 from the optab already placed in `this_optab'. */
6392 preexpand_calls (exp);
6393 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
6395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6396 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6398 temp = expand_binop (mode, this_optab, op0, op1, target,
6399 unsignedp, OPTAB_LIB_WIDEN);
6406 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6408 bc_expand_expr (exp)
6411 enum tree_code code;
6414 struct binary_operator *binoptab;
6415 struct unary_operator *unoptab;
6416 struct increment_operator *incroptab;
6417 struct bc_label *lab, *lab1;
6418 enum bytecode_opcode opcode;
6421 code = TREE_CODE (exp);
6427 if (DECL_RTL (exp) == 0)
6429 error_with_decl (exp, "prior parameter's size depends on `%s'");
6433 bc_load_parmaddr (DECL_RTL (exp));
6434 bc_load_memory (TREE_TYPE (exp), exp);
6440 if (DECL_RTL (exp) == 0)
6444 if (BYTECODE_LABEL (DECL_RTL (exp)))
6445 bc_load_externaddr (DECL_RTL (exp));
6447 bc_load_localaddr (DECL_RTL (exp));
6449 if (TREE_PUBLIC (exp))
6450 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6451 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6453 bc_load_localaddr (DECL_RTL (exp));
6455 bc_load_memory (TREE_TYPE (exp), exp);
6460 #ifdef DEBUG_PRINT_CODE
6461 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6463 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6465 : TYPE_MODE (TREE_TYPE (exp)))],
6466 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6472 #ifdef DEBUG_PRINT_CODE
6473 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
6475 /* FIX THIS: find a better way to pass real_cst's. -bson */
6476 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6477 (double) TREE_REAL_CST (exp));
6486 /* We build a call description vector describing the type of
6487 the return value and of the arguments; this call vector,
6488 together with a pointer to a location for the return value
6489 and the base of the argument list, is passed to the low
6490 level machine dependent call subroutine, which is responsible
6491 for putting the arguments wherever real functions expect
6492 them, as well as getting the return value back. */
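	/* Added sketch of the layout (hedged; the type codes and sizes are
	   illustrative, the real values come from bc_runtime_type_code and
	   size_in_bytes): for a call `f (i, d)' returning int, the finished
	   vector reads roughly

	       { 2,			   argument count
		 code (int), 4,		   return value
		 code (int), 4,		   first argument
		 code (double), 8 }	   second argument  */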
6494 tree calldesc = 0, arg;
6498 /* Push the evaluated args on the evaluation stack in reverse
6499 order. Also make an entry for each arg in the calldesc
6500 vector while we're at it. */
6502 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6504 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6507 bc_expand_expr (TREE_VALUE (arg));
6509 calldesc = tree_cons ((tree) 0,
6510 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))), calldesc);
6512 calldesc = tree_cons ((tree) 0,
6513 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))), calldesc);
6517 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6519 /* Allocate a location for the return value and push its
6520 address on the evaluation stack. Also make an entry
6521 at the front of the calldesc for the return value type. */
6523 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6524 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6525 bc_load_localaddr (retval);
6527 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6528 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6530 /* Prepend the argument count. */
6531 calldesc = tree_cons ((tree) 0,
6532 build_int_2 (nargs, 0), calldesc);
6535 /* Push the address of the call description vector on the stack. */
6536 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6537 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6538 build_index_type (build_int_2 (nargs * 2, 0)));
6539 r = output_constant_def (calldesc);
6540 bc_load_externaddr (r);
6542 /* Push the address of the function to be called. */
6543 bc_expand_expr (TREE_OPERAND (exp, 0));
6545 /* Call the function, popping its address and the calldesc vector
6546 address off the evaluation stack in the process. */
6547 bc_emit_instruction (call);
6549 /* Pop the arguments off the stack. */
6550 bc_adjust_stack (nargs);
6552 /* Load the return value onto the stack. */
6553 bc_load_localaddr (retval);
6554 bc_load_memory (type, TREE_OPERAND (exp, 0));
6560 if (!SAVE_EXPR_RTL (exp))
6562 /* First time around: copy to local variable */
6563 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6564 TYPE_ALIGN (TREE_TYPE(exp)));
6565 bc_expand_expr (TREE_OPERAND (exp, 0));
6566 bc_emit_instruction (duplicate);
6568 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6569 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6573 /* Consecutive reference: use saved copy */
6574 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6575 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6580 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6581 how are they handled instead? */
6584 TREE_USED (exp) = 1;
6585 bc_expand_expr (STMT_BODY (exp));
6592 bc_expand_expr (TREE_OPERAND (exp, 0));
6593 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6598 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6603 bc_expand_address (TREE_OPERAND (exp, 0));
6608 bc_expand_expr (TREE_OPERAND (exp, 0));
6609 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6614 bc_expand_expr (bc_canonicalize_array_ref (exp));
6619 bc_expand_component_address (exp);
6621 /* If we have a bitfield, generate a proper load */
6622 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6627 bc_expand_expr (TREE_OPERAND (exp, 0));
6628 bc_emit_instruction (drop);
6629 bc_expand_expr (TREE_OPERAND (exp, 1));
6634 bc_expand_expr (TREE_OPERAND (exp, 0));
6635 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6636 lab = bc_get_bytecode_label ();
6637 bc_emit_bytecode (xjumpifnot);
6638 bc_emit_bytecode_labelref (lab);
6640 #ifdef DEBUG_PRINT_CODE
6641 fputc ('\n', stderr);
6643 bc_expand_expr (TREE_OPERAND (exp, 1));
6644 lab1 = bc_get_bytecode_label ();
6645 bc_emit_bytecode (jump);
6646 bc_emit_bytecode_labelref (lab1);
6648 #ifdef DEBUG_PRINT_CODE
6649 fputc ('\n', stderr);
6652 bc_emit_bytecode_labeldef (lab);
6653 bc_expand_expr (TREE_OPERAND (exp, 2));
6654 bc_emit_bytecode_labeldef (lab1);
6657 case TRUTH_ANDIF_EXPR:
6659 opcode = xjumpifnot;
6662 case TRUTH_ORIF_EXPR:
6669 binoptab = optab_plus_expr;
6674 binoptab = optab_minus_expr;
6679 binoptab = optab_mult_expr;
6682 case TRUNC_DIV_EXPR:
6683 case FLOOR_DIV_EXPR:
6685 case ROUND_DIV_EXPR:
6686 case EXACT_DIV_EXPR:
6688 binoptab = optab_trunc_div_expr;
6691 case TRUNC_MOD_EXPR:
6692 case FLOOR_MOD_EXPR:
6694 case ROUND_MOD_EXPR:
6696 binoptab = optab_trunc_mod_expr;
6699 case FIX_ROUND_EXPR:
6700 case FIX_FLOOR_EXPR:
6702 abort (); /* Not used for C. */
6704 case FIX_TRUNC_EXPR:
6711 abort (); /* FIXME */
6715 binoptab = optab_rdiv_expr;
6720 binoptab = optab_bit_and_expr;
6725 binoptab = optab_bit_ior_expr;
6730 binoptab = optab_bit_xor_expr;
6735 binoptab = optab_lshift_expr;
6740 binoptab = optab_rshift_expr;
6743 case TRUTH_AND_EXPR:
6745 binoptab = optab_truth_and_expr;
6750 binoptab = optab_truth_or_expr;
6755 binoptab = optab_lt_expr;
6760 binoptab = optab_le_expr;
6765 binoptab = optab_ge_expr;
6770 binoptab = optab_gt_expr;
6775 binoptab = optab_eq_expr;
6780 binoptab = optab_ne_expr;
6785 unoptab = optab_negate_expr;
6790 unoptab = optab_bit_not_expr;
6793 case TRUTH_NOT_EXPR:
6795 unoptab = optab_truth_not_expr;
6798 case PREDECREMENT_EXPR:
6800 incroptab = optab_predecrement_expr;
6803 case PREINCREMENT_EXPR:
6805 incroptab = optab_preincrement_expr;
6808 case POSTDECREMENT_EXPR:
6810 incroptab = optab_postdecrement_expr;
6813 case POSTINCREMENT_EXPR:
6815 incroptab = optab_postincrement_expr;
6820 bc_expand_constructor (exp);
6830 tree vars = TREE_OPERAND (exp, 0);
6831 int vars_need_expansion = 0;
6833 /* Need to open a binding contour here because
6834 if there are any cleanups they must be contained here. */
6835 expand_start_bindings (0);
6837 /* Mark the corresponding BLOCK for output. */
6838 if (TREE_OPERAND (exp, 2) != 0)
6839 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6841 /* If VARS have not yet been expanded, expand them now. */
6844 if (DECL_RTL (vars) == 0)
6846 vars_need_expansion = 1;
6849 expand_decl_init (vars);
6850 vars = TREE_CHAIN (vars);
6853 bc_expand_expr (TREE_OPERAND (exp, 1));
6855 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6865 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6866 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6872 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6878 bc_expand_expr (TREE_OPERAND (exp, 0));
6879 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6880 lab = bc_get_bytecode_label ();
6882 bc_emit_instruction (duplicate);
6883 bc_emit_bytecode (opcode);
6884 bc_emit_bytecode_labelref (lab);
6886 #ifdef DEBUG_PRINT_CODE
6887 fputc ('\n', stderr);
6890 bc_emit_instruction (drop);
6892 bc_expand_expr (TREE_OPERAND (exp, 1));
6893 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6894 bc_emit_bytecode_labeldef (lab);
6900 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6902 /* Push the quantum. */
6903 bc_expand_expr (TREE_OPERAND (exp, 1));
6905 /* Convert it to the lvalue's type. */
6906 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6908 /* Push the address of the lvalue */
6909 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6911 /* Perform actual increment */
6912 bc_expand_increment (incroptab, type);
6916 /* Return the alignment in bits of EXP, a pointer valued expression.
6917 But don't return more than MAX_ALIGN no matter what.
6918 The alignment returned is, by default, the alignment of the thing that
6919 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6921 Otherwise, look at the expression to see if we can do better, i.e., if the
6922 expression is actually pointing at an object whose alignment is tighter. */
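/* Added example (hedged; alignments are target-typical): given `int i;',
   calling this on the expression `&i' with MAX_ALIGN == 64 looks through
   the ADDR_EXPR, finds DECL_ALIGN (i) == 32, and returns MIN (32, 64),
   i.e. 32 bits.  */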
6925 get_pointer_alignment (exp, max_align)
6929 unsigned align, inner;
6931 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6934 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6935 align = MIN (align, max_align);
6939 switch (TREE_CODE (exp))
6943 case NON_LVALUE_EXPR:
6944 exp = TREE_OPERAND (exp, 0);
6945 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6947 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6948 align = MIN (inner, max_align);
6952 /* If sum of pointer + int, restrict our maximum alignment to that
6953 imposed by the integer. If not, we can't do any better than ALIGN. */
6955 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6958 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6963 exp = TREE_OPERAND (exp, 0);
6967 /* See what we are pointing at and look at its alignment. */
6968 exp = TREE_OPERAND (exp, 0);
6969 if (TREE_CODE (exp) == FUNCTION_DECL)
6970 align = FUNCTION_BOUNDARY;
6971 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6972 align = DECL_ALIGN (exp);
6973 #ifdef CONSTANT_ALIGNMENT
6974 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6975 align = CONSTANT_ALIGNMENT (exp, align);
6977 return MIN (align, max_align);
6985 /* Return the tree node and offset if a given argument corresponds to
6986 a string constant. */
6989 string_constant (arg, ptr_offset)
6995 if (TREE_CODE (arg) == ADDR_EXPR
6996 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6998 *ptr_offset = integer_zero_node;
6999 return TREE_OPERAND (arg, 0);
7001 else if (TREE_CODE (arg) == PLUS_EXPR)
7003 tree arg0 = TREE_OPERAND (arg, 0);
7004 tree arg1 = TREE_OPERAND (arg, 1);
7009 if (TREE_CODE (arg0) == ADDR_EXPR
7010 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7013 return TREE_OPERAND (arg0, 0);
7015 else if (TREE_CODE (arg1) == ADDR_EXPR
7016 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7019 return TREE_OPERAND (arg1, 0);
7026 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7027 way, because it could contain a zero byte in the middle.
7028 TREE_STRING_LENGTH is the size of the character array, not the string.
7030 Unfortunately, string_constant can't access the values of const char
7031 arrays with initializers, so neither can we do so here. */
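/* Added examples (hedged): c_strlen applied to `"hello" + 1' folds to 4,
   since the offset is a known constant; applied to `"foo\0bar" + i' with
   nonconstant `i' it fails (returns 0), because the internal zero byte
   makes the length depend on the unknown offset.  */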
7041 src = string_constant (src, &offset_node);
7044 max = TREE_STRING_LENGTH (src);
7045 ptr = TREE_STRING_POINTER (src);
7046 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7048 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7049 compute the offset to the following null if we don't know where to
7050 start searching for it. */
7052 for (i = 0; i < max; i++)
7055 /* We don't know the starting offset, but we do know that the string
7056 has no internal zero bytes. We can assume that the offset falls
7057 within the bounds of the string; otherwise, the programmer deserves
7058 what he gets. Subtract the offset from the length of the string, and return that. */
7060 /* This would perhaps not be valid if we were dealing with named
7061 arrays in addition to literal string constants. */
7062 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7065 /* We have a known offset into the string. Start searching there for
7066 a null character. */
7067 if (offset_node == 0)
7071 /* Did we get a long long offset? If so, punt. */
7072 if (TREE_INT_CST_HIGH (offset_node) != 0)
7074 offset = TREE_INT_CST_LOW (offset_node);
7076 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7078 if (offset < 0 || offset > max)
7080 warning ("offset outside bounds of constant string");
7083 /* Use strlen to search for the first zero byte. Since any strings
7084 constructed with build_string will have nulls appended, we win even
7085 if we get handed something like (char[4])"abcd".
7087 Since OFFSET is our starting index into the string, no further
7088 calculation is needed. */
7089 return size_int (strlen (ptr + offset));
7092 /* Expand an expression EXP that calls a built-in function,
7093 with result going to TARGET if that's convenient
7094 (and in mode MODE if that's convenient).
7095 SUBTARGET may be used as the target for computing one of EXP's operands.
7096 IGNORE is nonzero if the value is to be ignored. */
7098 #define CALLED_AS_BUILT_IN(NODE) \
7099 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7102 expand_builtin (exp, target, subtarget, mode, ignore)
7106 enum machine_mode mode;
7109 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7110 tree arglist = TREE_OPERAND (exp, 1);
7113 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7114 optab builtin_optab;
7116 switch (DECL_FUNCTION_CODE (fndecl))
7121 /* build_function_call changes these into ABS_EXPR. */
7126 /* Treat these like sqrt, but only if the user asks for them. */
7127 if (! flag_fast_math)
7129 case BUILT_IN_FSQRT:
7130 /* If not optimizing, call the library function. */
7135 /* Arg could be wrong type if user redeclared this fcn wrong. */
7136 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7139 /* Stabilize and compute the argument. */
7140 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7141 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7143 exp = copy_node (exp);
7144 arglist = copy_node (arglist);
7145 TREE_OPERAND (exp, 1) = arglist;
7146 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7148 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7150 /* Make a suitable register to place result in. */
7151 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7156 switch (DECL_FUNCTION_CODE (fndecl))
7159 builtin_optab = sin_optab; break;
7161 builtin_optab = cos_optab; break;
7162 case BUILT_IN_FSQRT:
7163 builtin_optab = sqrt_optab; break;
7168 /* Compute into TARGET.
7169 Set TARGET to wherever the result comes back. */
7170 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7171 builtin_optab, op0, target, 0);
7173 /* If we were unable to expand via the builtin, stop the
7174 sequence (without outputting the insns) and break, causing
7175 a call to the library function. */
7182 /* Check the results by default. But if flag_fast_math is turned on,
7183 then assume sqrt will always be called with valid arguments. */
7185 if (! flag_fast_math)
7187 /* Don't define the builtin FP instructions
7188 if your machine is not IEEE. */
7189 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7192 lab1 = gen_label_rtx ();
7194 /* Test the result; if it is NaN, set errno=EDOM because
7195 the argument was not in the domain. */
7196 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7197 emit_jump_insn (gen_beq (lab1));
7201 #ifdef GEN_ERRNO_RTX
7202 rtx errno_rtx = GEN_ERRNO_RTX;
7205 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7208 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7211 /* We can't set errno=EDOM directly; let the library call do it.
7212 Pop the arguments right away in case the call gets deleted. */
7214 expand_call (exp, target, 0);
7221 /* Output the entire sequence. */
7222 insns = get_insns ();
7228 /* __builtin_apply_args returns block of memory allocated on
7229 the stack into which is stored the arg pointer, structure
7230 value address, static chain, and all the registers that might
7231 possibly be used in performing a function call. The code is
7232 moved to the start of the function so the incoming values are saved. */
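       /* Added sketch of the saved block (hedged; exact offsets are
	  target-dependent):

	      offset 0:	 incoming arg pointer
	      next:	 structure value address, if struct_value_rtx
	      then:	 one slot per register in apply_args_mode[],
			 each rounded up to its mode's alignment.  */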
7234 case BUILT_IN_APPLY_ARGS:
7235 /* Don't do __builtin_apply_args more than once in a function.
7236 Save the result of the first call and reuse it. */
7237 if (apply_args_value != 0)
7238 return apply_args_value;
7240 /* When this function is called, it means that registers must be
7241 saved on entry to this function. So we migrate the
7242 call to the first insn of this function. */
7247 temp = expand_builtin_apply_args ();
7251 apply_args_value = temp;
7253 /* Put the sequence after the NOTE that starts the function.
7254 If this is inside a SEQUENCE, make the outer-level insn
7255 chain current, so the code is placed at the start of the
7257 push_topmost_sequence ();
7258 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7259 pop_topmost_sequence ();
7263 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7264 FUNCTION with a copy of the parameters described by
7265 ARGUMENTS, and ARGSIZE. It returns a block of memory
7266 allocated on the stack into which is stored all the registers
7267 that might possibly be used for returning the result of a
7268 function. ARGUMENTS is the value returned by
7269 __builtin_apply_args. ARGSIZE is the number of bytes of
7270 arguments that must be copied. ??? How should this value be
7271 computed? We'll also need a safe worst case value for varargs functions. */
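       /* Added usage sketch (hedged; `f' and the 64-byte argsize are made
	  up for illustration):

	      void *args = __builtin_apply_args ();
	      void *result = __builtin_apply ((void (*)()) f, args, 64);
	      __builtin_return (result);

	  This forwarding idiom is what these three builtins exist to
	  support.  */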
7273 case BUILT_IN_APPLY:
7275 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7276 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7277 || TREE_CHAIN (arglist) == 0
7278 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7279 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7280 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7288 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7289 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7291 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7294 /* __builtin_return (RESULT) causes the function to return the
7295 value described by RESULT. RESULT is address of the block of
7296 memory returned by __builtin_apply. */
7297 case BUILT_IN_RETURN:
7299 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7300 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7301 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7302 NULL_RTX, VOIDmode, 0));
7305 case BUILT_IN_SAVEREGS:
7306 /* Don't do __builtin_saveregs more than once in a function.
7307 Save the result of the first call and reuse it. */
7308 if (saveregs_value != 0)
7309 return saveregs_value;
7311 /* When this function is called, it means that registers must be
7312 saved on entry to this function. So we migrate the
7313 call to the first insn of this function. */
7317 /* Now really call the function. `expand_call' does not call
7318 expand_builtin, so there is no danger of infinite recursion here. */
7321 #ifdef EXPAND_BUILTIN_SAVEREGS
7322 /* Do whatever the machine needs done in this case. */
7323 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7325 /* The register where the function returns its value
7326 is likely to have something else in it, such as an argument.
7327 So preserve that register around the call. */
7329 if (value_mode != VOIDmode)
7331 rtx valreg = hard_libcall_value (value_mode);
7332 rtx saved_valreg = gen_reg_rtx (value_mode);
7334 emit_move_insn (saved_valreg, valreg);
7335 temp = expand_call (exp, target, ignore);
7336 emit_move_insn (valreg, saved_valreg);
7339 /* Generate the call, putting the value in a pseudo. */
7340 temp = expand_call (exp, target, ignore);
7346 saveregs_value = temp;
7348 /* Put the sequence after the NOTE that starts the function.
7349 If this is inside a SEQUENCE, make the outer-level insn
7350 chain current, so the code is placed at the start of the
7352 push_topmost_sequence ();
7353 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7354 pop_topmost_sequence ();
7358 /* __builtin_args_info (N) returns word N of the arg space info
7359 for the current function. The number and meanings of words
7360 are controlled by the definition of CUMULATIVE_ARGS. */
7361 case BUILT_IN_ARGS_INFO:
7363 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7365 int *word_ptr = (int *) &current_function_args_info;
7366 tree type, elts, result;
7368 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7369 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7370 __FILE__, __LINE__);
7374 tree arg = TREE_VALUE (arglist);
7375 if (TREE_CODE (arg) != INTEGER_CST)
7376 error ("argument of `__builtin_args_info' must be constant");
7379 int wordnum = TREE_INT_CST_LOW (arg);
7381 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7382 error ("argument of `__builtin_args_info' out of range");
7384 return GEN_INT (word_ptr[wordnum]);
7388 error ("missing argument in `__builtin_args_info'");
7393 for (i = 0; i < nwords; i++)
7394 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7396 type = build_array_type (integer_type_node,
7397 build_index_type (build_int_2 (nwords, 0)));
7398 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7399 TREE_CONSTANT (result) = 1;
7400 TREE_STATIC (result) = 1;
7401 result = build (INDIRECT_REF, build_pointer_type (type), result);
7402 TREE_CONSTANT (result) = 1;
7403 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7407 /* Return the address of the first anonymous stack arg. */
7408 case BUILT_IN_NEXT_ARG:
7410 tree fntype = TREE_TYPE (current_function_decl);
7412 if ((TYPE_ARG_TYPES (fntype) == 0
7413 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7415 && ! current_function_varargs)
7417 error ("`va_start' used in function with fixed args");
7423 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7424 tree arg = TREE_VALUE (arglist);
7426 /* Strip off all nops for the sake of the comparison. This
7427 is not quite the same as STRIP_NOPS. It does more. */
7428 while (TREE_CODE (arg) == NOP_EXPR
7429 || TREE_CODE (arg) == CONVERT_EXPR
7430 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7431 arg = TREE_OPERAND (arg, 0);
7432 if (arg != last_parm)
7433 warning ("second parameter of `va_start' not last named argument");
7435 else if (! current_function_varargs)
7436 /* Evidently an out of date version of <stdarg.h>; can't validate
7437 va_start's second argument, but can still work as intended. */
7438 warning ("`__builtin_next_arg' called without an argument");
7441 return expand_binop (Pmode, add_optab,
7442 current_function_internal_arg_pointer,
7443 current_function_arg_offset_rtx,
7444 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7446 case BUILT_IN_CLASSIFY_TYPE:
7449 tree type = TREE_TYPE (TREE_VALUE (arglist));
7450 enum tree_code code = TREE_CODE (type);
7451 if (code == VOID_TYPE)
7452 return GEN_INT (void_type_class);
7453 if (code == INTEGER_TYPE)
7454 return GEN_INT (integer_type_class);
7455 if (code == CHAR_TYPE)
7456 return GEN_INT (char_type_class);
7457 if (code == ENUMERAL_TYPE)
7458 return GEN_INT (enumeral_type_class);
7459 if (code == BOOLEAN_TYPE)
7460 return GEN_INT (boolean_type_class);
7461 if (code == POINTER_TYPE)
7462 return GEN_INT (pointer_type_class);
7463 if (code == REFERENCE_TYPE)
7464 return GEN_INT (reference_type_class);
7465 if (code == OFFSET_TYPE)
7466 return GEN_INT (offset_type_class);
7467 if (code == REAL_TYPE)
7468 return GEN_INT (real_type_class);
7469 if (code == COMPLEX_TYPE)
7470 return GEN_INT (complex_type_class);
7471 if (code == FUNCTION_TYPE)
7472 return GEN_INT (function_type_class);
7473 if (code == METHOD_TYPE)
7474 return GEN_INT (method_type_class);
7475 if (code == RECORD_TYPE)
7476 return GEN_INT (record_type_class);
7477 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7478 return GEN_INT (union_type_class);
7479 if (code == ARRAY_TYPE)
7481 if (TYPE_STRING_FLAG (type))
7482 return GEN_INT (string_type_class);
7484 return GEN_INT (array_type_class);
7486 if (code == SET_TYPE)
7487 return GEN_INT (set_type_class);
7488 if (code == FILE_TYPE)
7489 return GEN_INT (file_type_class);
7490 if (code == LANG_TYPE)
7491 return GEN_INT (lang_type_class);
7493 return GEN_INT (no_type_class);
7495 case BUILT_IN_CONSTANT_P:
7499 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7500 ? const1_rtx : const0_rtx);
7502 case BUILT_IN_FRAME_ADDRESS:
7503 /* The argument must be a nonnegative integer constant.
7504 It counts the number of frames to scan up the stack.
7505 The value is the address of that frame. */
7506 case BUILT_IN_RETURN_ADDRESS:
7507 /* The argument must be a nonnegative integer constant.
7508 It counts the number of frames to scan up the stack.
7509 The value is the return address saved in that frame. */
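      /* Added usage note (hedged): __builtin_return_address (0) yields the
	 return address of the current frame, while
	 __builtin_frame_address (1) yields the caller's frame address,
	 found by following the dynamic chain once as coded below.  */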
7511 /* Warning about missing arg was already issued. */
7513 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7515 error ("invalid arg to `__builtin_return_address'");
7518 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7520 error ("invalid arg to `__builtin_return_address'");
7525 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7526 rtx tem = frame_pointer_rtx;
7529 /* Some machines need special handling before we can access arbitrary
7530 frames. For example, on the sparc, we must first flush all
7531 register windows to the stack. */
7532 #ifdef SETUP_FRAME_ADDRESSES
7533 SETUP_FRAME_ADDRESSES ();
7536 /* On the sparc, the return address is not in the frame, it is
7537 in a register. There is no way to access it off of the current
7538 frame pointer, but it can be accessed off the previous frame
7539 pointer by reading the value from the register window save area. */
7541 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7542 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7546 /* Scan back COUNT frames to the specified frame. */
7547 for (i = 0; i < count; i++)
7549 /* Assume the dynamic chain pointer is in the word that
7550 the frame address points to, unless otherwise specified. */
7551 #ifdef DYNAMIC_CHAIN_ADDRESS
7552 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7554 tem = memory_address (Pmode, tem);
7555 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7558 /* For __builtin_frame_address, return what we've got. */
7559 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7562 /* For __builtin_return_address,
7563 get the return address from that frame. */
7564 #ifdef RETURN_ADDR_RTX
7565 return RETURN_ADDR_RTX (count, tem);
7567 tem = memory_address (Pmode,
7568 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7569 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7573 case BUILT_IN_ALLOCA:
7575 /* Arg could be non-integer if user redeclared this fcn wrong. */
7576 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7579 /* Compute the argument. */
7580 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7582 /* Allocate the desired space. */
7583 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7586 /* If not optimizing, call the library function. */
7587 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7591 /* Arg could be non-integer if user redeclared this fcn wrong. */
7592 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7595 /* Compute the argument. */
7596 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7597 /* Compute ffs, into TARGET if possible.
7598 Set TARGET to wherever the result comes back. */
7599 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7600 ffs_optab, op0, target, 1);
7605 case BUILT_IN_STRLEN:
7606 /* If not optimizing, call the library function. */
7607 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7611 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7612 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7616 tree src = TREE_VALUE (arglist);
7617 tree len = c_strlen (src);
7620 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7622 rtx result, src_rtx, char_rtx;
7623 enum machine_mode insn_mode = value_mode, char_mode;
7624 enum insn_code icode;
7626 /* If the length is known, just return it. */
7628 return expand_expr (len, target, mode, 0);
7630 /* If SRC is not a pointer type, don't do this operation inline. */
7634 /* Call a function if we can't compute strlen in the right mode. */
7636 while (insn_mode != VOIDmode)
7638 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7639 if (icode != CODE_FOR_nothing)
7642 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7644 if (insn_mode == VOIDmode)
7647 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
7650 && GET_CODE (result) == REG
7651 && GET_MODE (result) == insn_mode
7652 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7653 result = gen_reg_rtx (insn_mode);
7655 /* Make sure the operands are acceptable to the predicates. */
7657 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7658 result = gen_reg_rtx (insn_mode);
7660 src_rtx = memory_address (BLKmode,
7661 expand_expr (src, NULL_RTX, ptr_mode,
7663 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7664 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7666 char_rtx = const0_rtx;
7667 char_mode = insn_operand_mode[(int)icode][2];
7668 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7669 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7671 emit_insn (GEN_FCN (icode) (result,
7672 gen_rtx (MEM, BLKmode, src_rtx),
7673 char_rtx, GEN_INT (align)));
7675 /* Return the value in the proper mode for this function. */
7676 if (GET_MODE (result) == value_mode)
7678 else if (target != 0)
7680 convert_move (target, result, 0);
7684 return convert_to_mode (value_mode, result, 0);
7687 case BUILT_IN_STRCPY:
7688 /* If not optimizing, call the library function. */
7689 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7693 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7694 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7695 || TREE_CHAIN (arglist) == 0
7696 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7700 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7705 len = size_binop (PLUS_EXPR, len, integer_one_node);
7707 chainon (arglist, build_tree_list (NULL_TREE, len));
7711 case BUILT_IN_MEMCPY:
7712 /* If not optimizing, call the library function. */
7713 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7717 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7718 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7719 || TREE_CHAIN (arglist) == 0
7720 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7721 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7722 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7726 tree dest = TREE_VALUE (arglist);
7727 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7728 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7731 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7733 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7734 rtx dest_rtx, dest_mem, src_mem;
7736 /* If either SRC or DEST is not a pointer type, don't do
7737 this operation in-line. */
7738 if (src_align == 0 || dest_align == 0)
7740 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7741 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7745 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7746 dest_mem = gen_rtx (MEM, BLKmode,
7747 memory_address (BLKmode, dest_rtx));
7748 src_mem = gen_rtx (MEM, BLKmode,
7749 memory_address (BLKmode,
7750 expand_expr (src, NULL_RTX,
7754 /* Copy word part most expediently. */
7755 emit_block_move (dest_mem, src_mem,
7756 expand_expr (len, NULL_RTX, VOIDmode, 0),
7757 MIN (src_align, dest_align));
7761 /* These comparison functions need an instruction that returns an actual
7762 index. An ordinary compare that just sets the condition codes is not enough. */
7764 #ifdef HAVE_cmpstrsi
7765 case BUILT_IN_STRCMP:
7766 /* If not optimizing, call the library function. */
7767 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7771 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7772 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7773 || TREE_CHAIN (arglist) == 0
7774 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7776 else if (!HAVE_cmpstrsi)
7779 tree arg1 = TREE_VALUE (arglist);
7780 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7784 len = c_strlen (arg1);
7786 len = size_binop (PLUS_EXPR, integer_one_node, len);
7787 len2 = c_strlen (arg2);
7789 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7791 /* If we don't have a constant length for the first, use the length
7792 of the second, if we know it. We don't require a constant for
7793 this case; some cost analysis could be done if both are available
7794 but neither is constant. For now, assume they're equally cheap.
7796 If both strings have constant lengths, use the smaller. This
7797 could arise if optimization results in strcpy being called with
7798 two fixed strings, or if the code was machine-generated. We should
7799 add some code to the `memcmp' handler below to deal with such
7800 situations, someday. */
7801 if (!len || TREE_CODE (len) != INTEGER_CST)
7808 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7810 if (tree_int_cst_lt (len2, len))
7814 chainon (arglist, build_tree_list (NULL_TREE, len));
7818 case BUILT_IN_MEMCMP:
7819 /* If not optimizing, call the library function. */
7820 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7824 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7825 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7826 || TREE_CHAIN (arglist) == 0
7827 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7828 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7829 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7831 else if (!HAVE_cmpstrsi)
7834 tree arg1 = TREE_VALUE (arglist);
7835 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7836 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7840 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7842 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7843 enum machine_mode insn_mode
7844 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7846 /* If we don't have POINTER_TYPE, call the function. */
7847 if (arg1_align == 0 || arg2_align == 0)
7849 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7850 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7854 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
7857 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7858 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7859 result = gen_reg_rtx (insn_mode);
7861 emit_insn (gen_cmpstrsi (result,
7862 gen_rtx (MEM, BLKmode,
7863 expand_expr (arg1, NULL_RTX,
7866 gen_rtx (MEM, BLKmode,
7867 expand_expr (arg2, NULL_RTX,
7870 expand_expr (len, NULL_RTX, VOIDmode, 0),
7871 GEN_INT (MIN (arg1_align, arg2_align))));
7873 /* Return the value in the proper mode for this function. */
7874 mode = TYPE_MODE (TREE_TYPE (exp));
7875 if (GET_MODE (result) == mode)
7877 else if (target != 0)
7879 convert_move (target, result, 0);
7883 return convert_to_mode (mode, result, 0);
7886 case BUILT_IN_STRCMP:
7887 case BUILT_IN_MEMCMP:
7891 default: /* just do library call, if unknown builtin */
7892 error ("built-in function `%s' not currently supported",
7893 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7896 /* The switch statement above can drop through to cause the function
7897 to be called normally. */
7899 return expand_call (exp, target, ignore);
7902 /* Built-in functions to perform an untyped call and return. */
7904 /* For each register that may be used for calling a function, this
7905 gives a mode used to copy the register's value. VOIDmode indicates
7906 the register is not used for calling a function. If the machine
7907 has register windows, this gives only the outbound registers.
7908 INCOMING_REGNO gives the corresponding inbound register. */
7909 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7911 /* For each register that may be used for returning values, this gives
7912 a mode used to copy the register's value. VOIDmode indicates the
7913 register is not used for returning values. If the machine has
7914 register windows, this gives only the outbound registers.
7915 INCOMING_REGNO gives the corresponding inbound register. */
7916 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7918 /* For each register that may be used for calling a function, this
7919 gives the offset of that register into the block returned by
7920 __builtin_apply_args. 0 indicates that the register is not
7921 used for calling a function. */
7922 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7924 /* Return the offset of register REGNO into the block returned by
7925 __builtin_apply_args. This is not declared static, since it is
7926 needed in objc-act.c. */
7929 apply_args_register_offset (regno)
7934 /* Arguments are always put in outgoing registers (in the argument
7935 block) if that makes sense for the target. */
7936 #ifdef OUTGOING_REGNO
7937 regno = OUTGOING_REGNO(regno);
7939 return apply_args_reg_offset[regno];
7942 /* Return the size required for the block returned by __builtin_apply_args,
7943 and initialize apply_args_mode. */
7948 static int size = -1;
7950 enum machine_mode mode;
7952 /* The values computed by this function never change. */
7955 /* The first value is the incoming arg-pointer. */
7956 size = GET_MODE_SIZE (Pmode);
7958 /* The second value is the structure value address unless this is
7959 passed as an "invisible" first argument. */
7960 if (struct_value_rtx)
7961 size += GET_MODE_SIZE (Pmode);
7963 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7964 if (FUNCTION_ARG_REGNO_P (regno))
7966 /* Search for the proper mode for copying this register's
7967 value. I'm not sure this is right, but it works so far. */
7968 enum machine_mode best_mode = VOIDmode;
7970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7972 mode = GET_MODE_WIDER_MODE (mode))
7973 if (HARD_REGNO_MODE_OK (regno, mode)
7974 && HARD_REGNO_NREGS (regno, mode) == 1)
7977 if (best_mode == VOIDmode)
7978 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7980 mode = GET_MODE_WIDER_MODE (mode))
7981 if (HARD_REGNO_MODE_OK (regno, mode)
7982 && (mov_optab->handlers[(int) mode].insn_code
7983 != CODE_FOR_nothing))
7987 if (mode == VOIDmode)
7990 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7991 if (size % align != 0)
7992 size = CEIL (size, align) * align;
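	  /* Added worked example: with size == 10 and align == 4,
	     CEIL (10, 4) * 4 == 12, so this register's slot starts at
	     offset 12.  */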
7993 apply_args_reg_offset[regno] = size;
7994 size += GET_MODE_SIZE (mode);
7995 apply_args_mode[regno] = mode;
7999 apply_args_mode[regno] = VOIDmode;
8000 apply_args_reg_offset[regno] = 0;
8006 /* Return the size required for the block returned by __builtin_apply,
8007 and initialize apply_result_mode. */
8010 apply_result_size ()
8012 static int size = -1;
8014 enum machine_mode mode;
8016 /* The values computed by this function never change. */
8021 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8022 if (FUNCTION_VALUE_REGNO_P (regno))
8024 /* Search for the proper mode for copying this register's
8025 value. I'm not sure this is right, but it works so far. */
8026 enum machine_mode best_mode = VOIDmode;
8028 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8030 mode = GET_MODE_WIDER_MODE (mode))
8031 if (HARD_REGNO_MODE_OK (regno, mode))
8034 if (best_mode == VOIDmode)
8035 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8037 mode = GET_MODE_WIDER_MODE (mode))
8038 if (HARD_REGNO_MODE_OK (regno, mode)
8039 && (mov_optab->handlers[(int) mode].insn_code
8040 != CODE_FOR_nothing))
8044 if (mode == VOIDmode)
8047 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8048 if (size % align != 0)
8049 size = CEIL (size, align) * align;
8050 size += GET_MODE_SIZE (mode);
8051 apply_result_mode[regno] = mode;
8054 apply_result_mode[regno] = VOIDmode;
8056 /* Allow targets that use untyped_call and untyped_return to override
8057 the size so that machine-specific information can be stored here. */
8058 #ifdef APPLY_RESULT_SIZE
8059 size = APPLY_RESULT_SIZE;
8065 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8066 /* Create a vector describing the result block RESULT. If SAVEP is true,
8067 the result block is used to save the values; otherwise it is used to
8068 restore the values. */
8071 result_vector (savep, result)
8075 int regno, size, align, nelts;
8076 enum machine_mode mode;
8078 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8081 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8082 if ((mode = apply_result_mode[regno]) != VOIDmode)
8084 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8085 if (size % align != 0)
8086 size = CEIL (size, align) * align;
8087 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8088 mem = change_address (result, mode,
8089 plus_constant (XEXP (result, 0), size));
8090 savevec[nelts++] = (savep
8091 ? gen_rtx (SET, VOIDmode, mem, reg)
8092 : gen_rtx (SET, VOIDmode, reg, mem));
8093 size += GET_MODE_SIZE (mode);
8095 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8097 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8099 /* Save the state required to perform an untyped call with the same
8100 arguments as were passed to the current function. */
8103 expand_builtin_apply_args ()
8106 int size, align, regno;
8107 enum machine_mode mode;
8109 /* Create a block where the arg-pointer, structure value address,
8110 and argument registers can be saved. */
8111 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8113 /* Walk past the arg-pointer and structure value address. */
8114 size = GET_MODE_SIZE (Pmode);
8115 if (struct_value_rtx)
8116 size += GET_MODE_SIZE (Pmode);
8118 /* Save each register used in calling a function to the block. */
8119 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8120 if ((mode = apply_args_mode[regno]) != VOIDmode)
8122 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8123 if (size % align != 0)
8124 size = CEIL (size, align) * align;
8125 emit_move_insn (change_address (registers, mode,
8126 plus_constant (XEXP (registers, 0),
8128 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
8129 size += GET_MODE_SIZE (mode);
8132 /* Save the arg pointer to the block. */
8133 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8134 copy_to_reg (virtual_incoming_args_rtx));
8135 size = GET_MODE_SIZE (Pmode);
8137 /* Save the structure value address unless this is passed as an
8138 "invisible" first argument. */
8139 if (struct_value_incoming_rtx)
8141 emit_move_insn (change_address (registers, Pmode,
8142 plus_constant (XEXP (registers, 0),
8144 copy_to_reg (struct_value_incoming_rtx));
8145 size += GET_MODE_SIZE (Pmode);
8148 /* Return the address of the block. */
8149 return copy_addr_to_reg (XEXP (registers, 0));
8152 /* Perform an untyped call and save the state required to perform an
8153 untyped return of whatever value was returned by the given function. */
8156 expand_builtin_apply (function, arguments, argsize)
8157 rtx function, arguments, argsize;
8159 int size, align, regno;
8160 enum machine_mode mode;
8161 rtx incoming_args, result, reg, dest, call_insn;
8162 rtx old_stack_level = 0;
8163 rtx call_fusage = 0;
8165 /* Create a block where the return registers can be saved. */
8166 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8168 /* ??? The argsize value should be adjusted here. */
8170 /* Fetch the arg pointer from the ARGUMENTS block. */
8171 incoming_args = gen_reg_rtx (Pmode);
8172 emit_move_insn (incoming_args,
8173 gen_rtx (MEM, Pmode, arguments));
8174 #ifndef STACK_GROWS_DOWNWARD
8175 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8176 incoming_args, 0, OPTAB_LIB_WIDEN);
8179 /* Perform postincrements before actually calling the function. */
8182 /* Push a new argument block and copy the arguments. */
8183 do_pending_stack_adjust ();
8184 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8186 /* Push a block of memory onto the stack to store the memory arguments.
8187 Save the address in a register, and copy the memory arguments. ??? I
8188 haven't figured out how the calling convention macros affect this,
8189 but it's likely that the source and/or destination addresses in
8190 the block copy will need updating in machine specific ways. */
8191 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8192 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8193 gen_rtx (MEM, BLKmode, incoming_args),
8195 PARM_BOUNDARY / BITS_PER_UNIT);
8197 /* Refer to the argument block. */
8199 arguments = gen_rtx (MEM, BLKmode, arguments);
8201 /* Walk past the arg-pointer and structure value address. */
8202 size = GET_MODE_SIZE (Pmode);
8203 if (struct_value_rtx)
8204 size += GET_MODE_SIZE (Pmode);
8206 /* Restore each of the registers previously saved. Make USE insns
8207 for each of these registers for use in making the call. */
8208 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8209 if ((mode = apply_args_mode[regno]) != VOIDmode)
8211 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8212 if (size % align != 0)
8213 size = CEIL (size, align) * align;
8214 reg = gen_rtx (REG, mode, regno);
8215 emit_move_insn (reg,
8216 change_address (arguments, mode,
8217 plus_constant (XEXP (arguments, 0),
8220 use_reg (&call_fusage, reg);
8221 size += GET_MODE_SIZE (mode);
8224 /* Restore the structure value address unless this is passed as an
8225 "invisible" first argument. */
8226 size = GET_MODE_SIZE (Pmode);
8227 if (struct_value_rtx)
8229 rtx value = gen_reg_rtx (Pmode);
8230 emit_move_insn (value,
8231 change_address (arguments, Pmode,
8232 plus_constant (XEXP (arguments, 0),
8234 emit_move_insn (struct_value_rtx, value);
8235 if (GET_CODE (struct_value_rtx) == REG)
8236 use_reg (&call_fusage, struct_value_rtx);
8237 size += GET_MODE_SIZE (Pmode);
8240 /* All arguments and registers used for the call are set up by now! */
8241 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8243 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8244 and we don't want to load it into a register as an optimization,
8245 because prepare_call_address already did it if it should be done. */
8246 if (GET_CODE (function) != SYMBOL_REF)
8247 function = memory_address (FUNCTION_MODE, function);
8249 /* Generate the actual call instruction and save the return value. */
8250 #ifdef HAVE_untyped_call
8251 if (HAVE_untyped_call)
8252 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8253 result, result_vector (1, result)));
8256 #ifdef HAVE_call_value
8257 if (HAVE_call_value)
8261 /* Locate the unique return register. It is not possible to
8262 express a call that sets more than one return register using
8263 call_value; use untyped_call for that. In fact, untyped_call
8264 only needs to save the return registers in the given block. */
8265 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8266 if ((mode = apply_result_mode[regno]) != VOIDmode)
8269 abort (); /* HAVE_untyped_call required. */
8270 valreg = gen_rtx (REG, mode, regno);
8273 emit_call_insn (gen_call_value (valreg,
8274 gen_rtx (MEM, FUNCTION_MODE, function),
8275 const0_rtx, NULL_RTX, const0_rtx));
8277 emit_move_insn (change_address (result, GET_MODE (valreg),
8285 /* Find the CALL insn we just emitted. */
8286 for (call_insn = get_last_insn ();
8287 call_insn && GET_CODE (call_insn) != CALL_INSN;
8288 call_insn = PREV_INSN (call_insn))
8294 /* Put the register usage information on the CALL. If there is already
8295 some usage information, put ours at the end. */
8296 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8300 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8301 link = XEXP (link, 1))
8304 XEXP (link, 1) = call_fusage;
8307 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8309 /* Restore the stack. */
8310 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8312 /* Return the address of the result block. */
8313 return copy_addr_to_reg (XEXP (result, 0));
8316 /* Perform an untyped return. */
8319 expand_builtin_return (result)
8322 int size, align, regno;
8323 enum machine_mode mode;
8325 rtx call_fusage = 0;
8327 apply_result_size ();
8328 result = gen_rtx (MEM, BLKmode, result);
8330 #ifdef HAVE_untyped_return
8331 if (HAVE_untyped_return)
8333 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8339 /* Restore the return value and note that each value is used. */
8341 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8342 if ((mode = apply_result_mode[regno]) != VOIDmode)
8344 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8345 if (size % align != 0)
8346 size = CEIL (size, align) * align;
8347 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8348 emit_move_insn (reg,
8349 change_address (result, mode,
8350 plus_constant (XEXP (result, 0),
8353 push_to_sequence (call_fusage);
8354 emit_insn (gen_rtx (USE, VOIDmode, reg));
8355 call_fusage = get_insns ();
8357 size += GET_MODE_SIZE (mode);
8360 /* Put the USE insns before the return. */
8361 emit_insns (call_fusage);
/* Return whatever value was restored by jumping directly to the end
   of the function. */
8365 expand_null_return ();
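/* A usage sketch, not part of the compiler: the three expanders above
   implement GCC's __builtin_apply_args, __builtin_apply and
   __builtin_return, which together let C code forward an unknown
   argument list. The 64 is a caller-chosen bound on the argument
   block size and is an assumption of this example. */
#if 0
void *
forward (fn)
     void (*fn) ();
{
  void *args = __builtin_apply_args ();		/* save incoming regs */
  void *result = __builtin_apply (fn, args, 64); /* untyped call */
  __builtin_return (result);			/* untyped return */
}
#endif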
8368 /* Expand code for a post- or pre- increment or decrement
8369 and return the RTX for the result.
8370 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8373 expand_increment (exp, post)
8377 register rtx op0, op1;
8378 register rtx temp, value;
8379 register tree incremented = TREE_OPERAND (exp, 0);
8380 optab this_optab = add_optab;
8382 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8383 int op0_is_copy = 0;
8384 int single_insn = 0;
8385 /* 1 means we can't store into OP0 directly,
8386 because it is a subreg narrower than a word,
8387 and we don't dare clobber the rest of the word. */
8390 if (output_bytecode)
8392 bc_expand_expr (exp);
8396 /* Stabilize any component ref that might need to be
8397 evaluated more than once below. */
if (!post
    || TREE_CODE (incremented) == BIT_FIELD_REF
8400 || (TREE_CODE (incremented) == COMPONENT_REF
8401 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8402 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8403 incremented = stabilize_reference (incremented);
8404 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8405 ones into save exprs so that they don't accidentally get evaluated
8406 more than once by the code below. */
8407 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8408 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8409 incremented = save_expr (incremented);
8411 /* Compute the operands as RTX.
8412 Note whether OP0 is the actual lvalue or a copy of it:
8413 I believe it is a copy iff it is a register or subreg
8414 and insns were generated in computing it. */
8416 temp = get_last_insn ();
8417 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8419 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
in place but instead must do sign- or zero-extension during assignment,
8421 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
8425 shared (it was made by the expand_expr call above). */
8427 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8430 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8434 else if (GET_CODE (op0) == SUBREG
8435 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8437 /* We cannot increment this SUBREG in place. If we are
8438 post-incrementing, get a copy of the old value. Otherwise,
8439 just mark that we cannot increment in place. */
8441 op0 = copy_to_reg (op0);
8446 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8447 && temp != get_last_insn ());
8448 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8450 /* Decide whether incrementing or decrementing. */
8451 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8452 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8453 this_optab = sub_optab;
8455 /* Convert decrement by a constant into a negative increment. */
8456 if (this_optab == sub_optab
8457 && GET_CODE (op1) == CONST_INT)
8459 op1 = GEN_INT (- INTVAL (op1));
8460 this_optab = add_optab;
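/* For example, an illustration: x -= 4 is rewritten here as x += -4,
   so only the add_optab handlers need to be consulted below. */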
8463 /* For a preincrement, see if we can do this with a single instruction. */
8466 icode = (int) this_optab->handlers[(int) mode].insn_code;
8467 if (icode != (int) CODE_FOR_nothing
8468 /* Make sure that OP0 is valid for operands 0 and 1
8469 of the insn we want to queue. */
8470 && (*insn_operand_predicate[icode][0]) (op0, mode)
8471 && (*insn_operand_predicate[icode][1]) (op0, mode)
8472 && (*insn_operand_predicate[icode][2]) (op1, mode))
8476 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8477 then we cannot just increment OP0. We must therefore contrive to
8478 increment the original value. Then, for postincrement, we can return
8479 OP0 since it is a copy of the old value. For preincrement, expand here
8480 unless we can do it with a single insn.
8482 Likewise if storing directly into OP0 would clobber high bits
8483 we need to preserve (bad_subreg). */
8484 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8486 /* This is the easiest way to increment the value wherever it is.
8487 Problems with multiple evaluation of INCREMENTED are prevented
8488 because either (1) it is a component_ref or preincrement,
8489 in which case it was stabilized above, or (2) it is an array_ref
8490 with constant index in an array in a register, which is
8491 safe to reevaluate. */
8492 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8493 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8494 ? MINUS_EXPR : PLUS_EXPR),
8497 TREE_OPERAND (exp, 1));
8498 temp = expand_assignment (incremented, newexp, ! post, 0);
8499 return post ? op0 : temp;
8504 /* We have a true reference to the value in OP0.
8505 If there is an insn to add or subtract in this mode, queue it.
8506 Queueing the increment insn avoids the register shuffling
8507 that often results if we must increment now and first save
8508 the old value for subsequent use. */
8510 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8511 op0 = stabilize (op0);
8514 icode = (int) this_optab->handlers[(int) mode].insn_code;
8515 if (icode != (int) CODE_FOR_nothing
8516 /* Make sure that OP0 is valid for operands 0 and 1
8517 of the insn we want to queue. */
8518 && (*insn_operand_predicate[icode][0]) (op0, mode)
8519 && (*insn_operand_predicate[icode][1]) (op0, mode))
8521 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8522 op1 = force_reg (mode, op1);
8524 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8528 /* Preincrement, or we can't increment with one simple insn. */
if (post)
  /* Save a copy of the value before inc or dec, to return it later. */
  temp = value = copy_to_reg (op0);
else
  /* Arrange to return the incremented value. */
  /* Copy the rtx because expand_binop will protect from the queue,
     and the results of that would be invalid for us to return
     if our caller does emit_queue before using our result. */
  temp = copy_rtx (value = op0);
8539 /* Increment however we can. */
8540 op1 = expand_binop (mode, this_optab, value, op1, op0,
8541 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8542 /* Make sure the value is stored into OP0. */
8544 emit_move_insn (op0, op1);
8549 /* Expand all function calls contained within EXP, innermost ones first.
8550 But don't look within expressions that have sequence points.
8551 For each CALL_EXPR, record the rtx for its value
8552 in the CALL_EXPR_RTL field. */
8555 preexpand_calls (exp)
8558 register int nops, i;
8559 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8561 if (! do_preexpand_calls)
8564 /* Only expressions and references can contain calls. */
8566 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8569 switch (TREE_CODE (exp))
8572 /* Do nothing if already expanded. */
8573 if (CALL_EXPR_RTL (exp) != 0)
8576 /* Do nothing to built-in functions. */
8577 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8578 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8579 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8580 /* Do nothing if the call returns a variable-sized object. */
|| TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8582 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8587 case TRUTH_ANDIF_EXPR:
8588 case TRUTH_ORIF_EXPR:
8589 /* If we find one of these, then we can be sure
8590 the adjust will be done for it (since it makes jumps).
8591 Do it now, so that if this is inside an argument
8592 of a function, we don't get the stack adjustment
8593 after some other args have already been pushed. */
8594 do_pending_stack_adjust ();
8599 case WITH_CLEANUP_EXPR:
8603 if (SAVE_EXPR_RTL (exp) != 0)
8607 nops = tree_code_length[(int) TREE_CODE (exp)];
8608 for (i = 0; i < nops; i++)
8609 if (TREE_OPERAND (exp, i) != 0)
8611 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8612 if (type == 'e' || type == '<' || type == '1' || type == '2'
8614 preexpand_calls (TREE_OPERAND (exp, i));
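/* An illustration, not from the source: for z = f (x) + g (y), this
   walk expands both CALL_EXPRs, recording each value in CALL_EXPR_RTL,
   before the PLUS_EXPR itself is expanded; the later expansion of the
   sum then simply reuses the recorded rtx values. */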
8618 /* At the start of a function, record that we have no previously-pushed
8619 arguments waiting to be popped. */
8622 init_pending_stack_adjust ()
8624 pending_stack_adjust = 0;
8627 /* When exiting from function, if safe, clear out any pending stack adjust
8628 so the adjustment won't get done. */
8631 clear_pending_stack_adjust ()
8633 #ifdef EXIT_IGNORE_STACK
8634 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8635 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8636 && ! flag_inline_functions)
8637 pending_stack_adjust = 0;
8641 /* Pop any previously-pushed arguments that have not been popped yet. */
8644 do_pending_stack_adjust ()
8646 if (inhibit_defer_pop == 0)
8648 if (pending_stack_adjust != 0)
8649 adjust_stack (GEN_INT (pending_stack_adjust));
8650 pending_stack_adjust = 0;
/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8655 Returns the cleanups to be performed. */
8658 defer_cleanups_to (old_cleanups)
8661 tree new_cleanups = NULL_TREE;
8662 tree cleanups = cleanups_this_call;
8663 tree last = NULL_TREE;
8665 while (cleanups_this_call != old_cleanups)
8667 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8668 last = cleanups_this_call;
8669 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8674 /* Remove the list from the chain of cleanups. */
8675 TREE_CHAIN (last) = NULL_TREE;
/* Reverse them so that we can build them in the right order. */
8678 cleanups = nreverse (cleanups);
8683 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8684 TREE_VALUE (cleanups), new_cleanups);
8686 new_cleanups = TREE_VALUE (cleanups);
8688 cleanups = TREE_CHAIN (cleanups);
8692 return new_cleanups;
8695 /* Expand all cleanups up to OLD_CLEANUPS.
8696 Needed here, and also for language-dependent calls. */
8699 expand_cleanups_to (old_cleanups)
8702 while (cleanups_this_call != old_cleanups)
8704 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8705 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8706 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8710 /* Expand conditional expressions. */
8712 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
LABEL is an rtx of code CODE_LABEL, in this function and all the
functions here. */
8717 jumpifnot (exp, label)
8721 do_jump (exp, label, NULL_RTX);
8724 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8731 do_jump (exp, NULL_RTX, label);
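/* An illustrative sketch: for code like

     if (a && b)
       stmt;

   the front end reaches here via jumpifnot on the TRUTH_ANDIF_EXPR,
   and do_jump below turns it into two conditional jumps, with no
   boolean value ever computed:

     jump to else_label if a == 0;
     jump to else_label if b == 0;
     stmt;
   else_label:  */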
8734 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8735 the result is zero, or IF_TRUE_LABEL if the result is one.
8736 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8737 meaning fall through in that case.
8739 do_jump always does any pending stack adjust except when it does not
8740 actually perform a jump. An example where there is no jump
8741 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8743 This function is responsible for optimizing cases such as
8744 &&, || and comparison operators in EXP. */
8747 do_jump (exp, if_false_label, if_true_label)
8749 rtx if_false_label, if_true_label;
8751 register enum tree_code code = TREE_CODE (exp);
8752 /* Some cases need to create a label to jump to
8753 in order to properly fall through.
8754 These cases set DROP_THROUGH_LABEL nonzero. */
8755 rtx drop_through_label = 0;
8760 enum machine_mode mode;
8770 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8776 /* This is not true with #pragma weak */
8778 /* The address of something can never be zero. */
8780 emit_jump (if_true_label);
8785 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8786 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8787 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
/* If we are narrowing the operand, we have to do the compare in the
   narrower mode. */
8792 if ((TYPE_PRECISION (TREE_TYPE (exp))
8793 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8795 case NON_LVALUE_EXPR:
8796 case REFERENCE_EXPR:
8801 /* These cannot change zero->non-zero or vice versa. */
8802 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
/* This never takes fewer insns than evaluating the PLUS_EXPR followed
   by a test, and can be longer if the test is eliminated. */
8809 /* Reduce to minus. */
8810 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8811 TREE_OPERAND (exp, 0),
8812 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8813 TREE_OPERAND (exp, 1))));
8814 /* Process as MINUS. */
8818 /* Non-zero iff operands of minus differ. */
8819 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8820 TREE_OPERAND (exp, 0),
8821 TREE_OPERAND (exp, 1)),
8826 /* If we are AND'ing with a small constant, do this comparison in the
8827 smallest type that fits. If the machine doesn't have comparisons
8828 that small, it will be converted back to the wider comparison.
8829 This helps if we are testing the sign bit of a narrower object.
8830 combine can't do this for us because it can't know whether a
8831 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8833 if (! SLOW_BYTE_ACCESS
8834 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8835 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8836 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8837 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8838 && (type = type_for_mode (mode, 1)) != 0
8839 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8840 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8841 != CODE_FOR_nothing))
8843 do_jump (convert (type, exp), if_false_label, if_true_label);
8848 case TRUTH_NOT_EXPR:
8849 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8852 case TRUTH_ANDIF_EXPR:
8855 tree cleanups, old_cleanups;
8857 if (if_false_label == 0)
8858 if_false_label = drop_through_label = gen_label_rtx ();
8860 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8861 seq1 = get_insns ();
8864 old_cleanups = cleanups_this_call;
8866 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8867 seq2 = get_insns ();
8870 cleanups = defer_cleanups_to (old_cleanups);
8873 rtx flag = gen_reg_rtx (word_mode);
8877 /* Flag cleanups as not needed. */
8878 emit_move_insn (flag, const0_rtx);
8881 /* Flag cleanups as needed. */
8882 emit_move_insn (flag, const1_rtx);
/* Convert FLAG, which is an rtx, into a tree. */
8886 cond = make_node (RTL_EXPR);
8887 TREE_TYPE (cond) = integer_type_node;
8888 RTL_EXPR_RTL (cond) = flag;
8889 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8891 new_cleanups = build (COND_EXPR, void_type_node,
8892 truthvalue_conversion (cond),
8893 cleanups, integer_zero_node);
8894 new_cleanups = fold (new_cleanups);
8896 /* Now add in the conditionalized cleanups. */
8898 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8899 (*interim_eh_hook) (NULL_TREE);
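/* In outline, a sketch of the emitted code: for E1 && E2, where
   evaluating E2 may queue cleanups, the expansion is roughly

     flag = 0;
     if (!E1) goto if_false_label;
     flag = 1;
     ... code for E2, queueing its cleanups ...

   and each deferred cleanup is wrapped as "if (flag) cleanup", so it
   runs only when E2 was actually evaluated. */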
8909 case TRUTH_ORIF_EXPR:
8912 tree cleanups, old_cleanups;
8914 if (if_true_label == 0)
8915 if_true_label = drop_through_label = gen_label_rtx ();
8917 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8918 seq1 = get_insns ();
8921 old_cleanups = cleanups_this_call;
8923 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8924 seq2 = get_insns ();
8927 cleanups = defer_cleanups_to (old_cleanups);
8930 rtx flag = gen_reg_rtx (word_mode);
8934 /* Flag cleanups as not needed. */
8935 emit_move_insn (flag, const0_rtx);
8938 /* Flag cleanups as needed. */
8939 emit_move_insn (flag, const1_rtx);
/* Convert FLAG, which is an rtx, into a tree. */
8943 cond = make_node (RTL_EXPR);
8944 TREE_TYPE (cond) = integer_type_node;
8945 RTL_EXPR_RTL (cond) = flag;
8946 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8948 new_cleanups = build (COND_EXPR, void_type_node,
8949 truthvalue_conversion (cond),
8950 cleanups, integer_zero_node);
8951 new_cleanups = fold (new_cleanups);
8953 /* Now add in the conditionalized cleanups. */
8955 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8956 (*interim_eh_hook) (NULL_TREE);
8968 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8972 do_pending_stack_adjust ();
8973 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8980 int bitsize, bitpos, unsignedp;
8981 enum machine_mode mode;
8986 /* Get description of this reference. We don't actually care
8987 about the underlying object here. */
8988 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8989 &mode, &unsignedp, &volatilep);
8991 type = type_for_size (bitsize, unsignedp);
8992 if (! SLOW_BYTE_ACCESS
8993 && type != 0 && bitsize >= 0
8994 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8995 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8996 != CODE_FOR_nothing))
8998 do_jump (convert (type, exp), if_false_label, if_true_label);
9005 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9006 if (integer_onep (TREE_OPERAND (exp, 1))
9007 && integer_zerop (TREE_OPERAND (exp, 2)))
9008 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9010 else if (integer_zerop (TREE_OPERAND (exp, 1))
9011 && integer_onep (TREE_OPERAND (exp, 2)))
9012 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9016 register rtx label1 = gen_label_rtx ();
9017 drop_through_label = gen_label_rtx ();
9018 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9019 /* Now the THEN-expression. */
9020 do_jump (TREE_OPERAND (exp, 1),
9021 if_false_label ? if_false_label : drop_through_label,
9022 if_true_label ? if_true_label : drop_through_label);
9023 /* In case the do_jump just above never jumps. */
9024 do_pending_stack_adjust ();
9025 emit_label (label1);
9026 /* Now the ELSE-expression. */
9027 do_jump (TREE_OPERAND (exp, 2),
9028 if_false_label ? if_false_label : drop_through_label,
9029 if_true_label ? if_true_label : drop_through_label);
9034 if (integer_zerop (TREE_OPERAND (exp, 1)))
9035 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9036 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9039 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9040 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9041 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
9042 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9044 comparison = compare (exp, EQ, EQ);
9048 if (integer_zerop (TREE_OPERAND (exp, 1)))
9049 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9050 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9053 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9054 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9055 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
9056 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9058 comparison = compare (exp, NE, NE);
9062 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9064 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9065 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9067 comparison = compare (exp, LT, LTU);
9071 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9073 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9074 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9076 comparison = compare (exp, LE, LEU);
9080 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9082 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9083 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9085 comparison = compare (exp, GT, GTU);
9089 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9091 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9092 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9094 comparison = compare (exp, GE, GEU);
9099 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9101 /* This is not needed any more and causes poor code since it causes
comparisons and tests from non-SI objects to have different code
sequences. */
9104 /* Copy to register to avoid generating bad insns by cse
9105 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9106 if (!cse_not_expected && GET_CODE (temp) == MEM)
9107 temp = copy_to_reg (temp);
9109 do_pending_stack_adjust ();
9110 if (GET_CODE (temp) == CONST_INT)
9111 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9112 else if (GET_CODE (temp) == LABEL_REF)
9113 comparison = const_true_rtx;
9114 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9115 && !can_compare_p (GET_MODE (temp)))
9116 /* Note swapping the labels gives us not-equal. */
9117 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9118 else if (GET_MODE (temp) != VOIDmode)
9119 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9120 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9121 GET_MODE (temp), NULL_RTX, 0);
9126 /* Do any postincrements in the expression that was tested. */
9129 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9130 straight into a conditional jump instruction as the jump condition.
9131 Otherwise, all the work has been done already. */
9133 if (comparison == const_true_rtx)
9136 emit_jump (if_true_label);
9138 else if (comparison == const0_rtx)
9141 emit_jump (if_false_label);
9143 else if (comparison)
9144 do_jump_for_compare (comparison, if_false_label, if_true_label);
9146 if (drop_through_label)
9148 /* If do_jump produces code that might be jumped around,
9149 do any stack adjusts from that code, before the place
9150 where control merges in. */
9151 do_pending_stack_adjust ();
9152 emit_label (drop_through_label);
9156 /* Given a comparison expression EXP for values too wide to be compared
9157 with one insn, test the comparison and jump to the appropriate label.
9158 The code of EXP is ignored; we always test GT if SWAP is 0,
9159 and LT if SWAP is 1. */
9162 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9165 rtx if_false_label, if_true_label;
9167 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9168 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9169 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9170 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9171 rtx drop_through_label = 0;
9172 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9175 if (! if_true_label || ! if_false_label)
9176 drop_through_label = gen_label_rtx ();
9177 if (! if_true_label)
9178 if_true_label = drop_through_label;
9179 if (! if_false_label)
9180 if_false_label = drop_through_label;
9182 /* Compare a word at a time, high order first. */
9183 for (i = 0; i < nwords; i++)
9186 rtx op0_word, op1_word;
9188 if (WORDS_BIG_ENDIAN)
9190 op0_word = operand_subword_force (op0, i, mode);
9191 op1_word = operand_subword_force (op1, i, mode);
9195 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9196 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9199 /* All but high-order word must be compared as unsigned. */
9200 comp = compare_from_rtx (op0_word, op1_word,
9201 (unsignedp || i > 0) ? GTU : GT,
9202 unsignedp, word_mode, NULL_RTX, 0);
9203 if (comp == const_true_rtx)
9204 emit_jump (if_true_label);
9205 else if (comp != const0_rtx)
9206 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9208 /* Consider lower words only if these are equal. */
9209 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9211 if (comp == const_true_rtx)
9212 emit_jump (if_false_label);
9213 else if (comp != const0_rtx)
9214 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9218 emit_jump (if_false_label);
9219 if (drop_through_label)
9220 emit_label (drop_through_label);
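/* A sketch of the test emitted for a signed two-word A > B
   (e.g. DImode on a 32-bit host):

     if (A.high >  B.high) goto if_true_label;   signed compare
     if (A.high != B.high) goto if_false_label;
     if (A.low  >  B.low)  goto if_true_label;   unsigned compare
     goto if_false_label;  */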
9223 /* Compare OP0 with OP1, word at a time, in mode MODE.
9224 UNSIGNEDP says to do unsigned comparison.
9225 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9228 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9229 enum machine_mode mode;
9232 rtx if_false_label, if_true_label;
9234 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9235 rtx drop_through_label = 0;
9238 if (! if_true_label || ! if_false_label)
9239 drop_through_label = gen_label_rtx ();
9240 if (! if_true_label)
9241 if_true_label = drop_through_label;
9242 if (! if_false_label)
9243 if_false_label = drop_through_label;
9245 /* Compare a word at a time, high order first. */
9246 for (i = 0; i < nwords; i++)
9249 rtx op0_word, op1_word;
9251 if (WORDS_BIG_ENDIAN)
9253 op0_word = operand_subword_force (op0, i, mode);
9254 op1_word = operand_subword_force (op1, i, mode);
9258 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9259 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9262 /* All but high-order word must be compared as unsigned. */
9263 comp = compare_from_rtx (op0_word, op1_word,
9264 (unsignedp || i > 0) ? GTU : GT,
9265 unsignedp, word_mode, NULL_RTX, 0);
9266 if (comp == const_true_rtx)
9267 emit_jump (if_true_label);
9268 else if (comp != const0_rtx)
9269 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9271 /* Consider lower words only if these are equal. */
9272 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9274 if (comp == const_true_rtx)
9275 emit_jump (if_false_label);
9276 else if (comp != const0_rtx)
9277 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9281 emit_jump (if_false_label);
9282 if (drop_through_label)
9283 emit_label (drop_through_label);
9286 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9287 with one insn, test the comparison and jump to the appropriate label. */
9290 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9292 rtx if_false_label, if_true_label;
9294 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9295 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9296 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9297 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9299 rtx drop_through_label = 0;
9301 if (! if_false_label)
9302 drop_through_label = if_false_label = gen_label_rtx ();
9304 for (i = 0; i < nwords; i++)
9306 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9307 operand_subword_force (op1, i, mode),
9308 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9309 word_mode, NULL_RTX, 0);
9310 if (comp == const_true_rtx)
9311 emit_jump (if_false_label);
9312 else if (comp != const0_rtx)
9313 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9317 emit_jump (if_true_label);
9318 if (drop_through_label)
9319 emit_label (drop_through_label);
9322 /* Jump according to whether OP0 is 0.
9323 We assume that OP0 has an integer mode that is too wide
9324 for the available compare insns. */
9327 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9329 rtx if_false_label, if_true_label;
9331 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9333 rtx drop_through_label = 0;
9335 if (! if_false_label)
9336 drop_through_label = if_false_label = gen_label_rtx ();
9338 for (i = 0; i < nwords; i++)
9340 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9342 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9343 if (comp == const_true_rtx)
9344 emit_jump (if_false_label);
9345 else if (comp != const0_rtx)
9346 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9350 emit_jump (if_true_label);
9351 if (drop_through_label)
9352 emit_label (drop_through_label);
9355 /* Given a comparison expression in rtl form, output conditional branches to
9356 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9359 do_jump_for_compare (comparison, if_false_label, if_true_label)
9360 rtx comparison, if_false_label, if_true_label;
9364 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9365 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9370 emit_jump (if_false_label);
9372 else if (if_false_label)
9375 rtx prev = get_last_insn ();
9378 /* Output the branch with the opposite condition. Then try to invert
9379 what is generated. If more than one insn is a branch, or if the
9380 branch is not the last insn written, abort. If we can't invert
the branch, make a true label, redirect this jump to that,
9382 emit a jump to the false label and define the true label. */
9384 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9385 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9389 /* Here we get the first insn that was just emitted. It used to be the
9390 case that, on some machines, emitting the branch would discard
9391 the previous compare insn and emit a replacement. This isn't
9392 done anymore, but abort if we see that PREV is deleted. */
9395 insn = get_insns ();
9396 else if (INSN_DELETED_P (prev))
9399 insn = NEXT_INSN (prev);
9401 for (; insn; insn = NEXT_INSN (insn))
9402 if (GET_CODE (insn) == JUMP_INSN)
9409 if (branch != get_last_insn ())
9412 JUMP_LABEL (branch) = if_false_label;
9413 if (! invert_jump (branch, if_false_label))
9415 if_true_label = gen_label_rtx ();
9416 redirect_jump (branch, if_true_label);
9417 emit_jump (if_false_label);
9418 emit_label (if_true_label);
9423 /* Generate code for a comparison expression EXP
9424 (including code to compute the values to be compared)
9425 and set (CC0) according to the result.
9426 SIGNED_CODE should be the rtx operation for this comparison for
9427 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9429 We force a stack adjustment unless there are currently
9430 things pushed on the stack that aren't yet used. */
9433 compare (exp, signed_code, unsigned_code)
9435 enum rtx_code signed_code, unsigned_code;
9438 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9440 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9441 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9442 register enum machine_mode mode = TYPE_MODE (type);
9443 int unsignedp = TREE_UNSIGNED (type);
9444 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9446 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9448 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9449 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9452 /* Like compare but expects the values to compare as two rtx's.
9453 The decision as to signed or unsigned comparison must be made by the caller.
If MODE is BLKmode, SIZE is an RTX giving the size of the objects
being compared.
9458 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9459 size of MODE should be used. */
9462 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9463 register rtx op0, op1;
9466 enum machine_mode mode;
9472 /* If one operand is constant, make it the second one. Only do this
9473 if the other operand is not constant as well. */
9475 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9476 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9481 code = swap_condition (code);
9486 op0 = force_not_mem (op0);
9487 op1 = force_not_mem (op1);
9490 do_pending_stack_adjust ();
9492 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9493 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9497 /* There's no need to do this now that combine.c can eliminate lots of
9498 sign extensions. This can be less efficient in certain cases on other
9501 /* If this is a signed equality comparison, we can do it as an
9502 unsigned comparison since zero-extension is cheaper than sign
9503 extension and comparisons with zero are done as unsigned. This is
9504 the case even on machines that can do fast sign extension, since
9505 zero-extension is easier to combine with other operations than
9506 sign-extension is. If we are comparing against a constant, we must
9507 convert it to what it would look like unsigned. */
9508 if ((code == EQ || code == NE) && ! unsignedp
9509 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9511 if (GET_CODE (op1) == CONST_INT
9512 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9513 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
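/* For example, an illustration: a signed QImode test x == -1 becomes
   an unsigned test against GEN_INT (0xff), since -1 and 0xff denote
   the same QImode value. */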
9518 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9520 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9523 /* Generate code to calculate EXP using a store-flag instruction
9524 and return an rtx for the result. EXP is either a comparison
9525 or a TRUTH_NOT_EXPR whose operand is a comparison.
9527 If TARGET is nonzero, store the result there if convenient.
If ONLY_CHEAP is non-zero, only do this if it is likely to be very
cheap.
9532 Return zero if there is no suitable set-flag instruction
9533 available on this machine.
9535 Once expand_expr has been called on the arguments of the comparison,
9536 we are committed to doing the store flag, since it is not safe to
9537 re-evaluate the expression. We emit the store-flag insn by calling
9538 emit_store_flag, but only expand the arguments if we have a reason
9539 to believe that emit_store_flag will be successful. If we think that
9540 it will, but it isn't, we have to simulate the store-flag with a
9541 set/jump/set sequence. */
9544 do_store_flag (exp, target, mode, only_cheap)
9547 enum machine_mode mode;
9551 tree arg0, arg1, type;
9553 enum machine_mode operand_mode;
9557 enum insn_code icode;
9558 rtx subtarget = target;
9559 rtx result, label, pattern, jump_pat;
9561 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9562 result at the end. We can't simply invert the test since it would
9563 have already been inverted if it were valid. This case occurs for
9564 some floating-point comparisons. */
9566 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9567 invert = 1, exp = TREE_OPERAND (exp, 0);
9569 arg0 = TREE_OPERAND (exp, 0);
9570 arg1 = TREE_OPERAND (exp, 1);
9571 type = TREE_TYPE (arg0);
9572 operand_mode = TYPE_MODE (type);
9573 unsignedp = TREE_UNSIGNED (type);
9575 /* We won't bother with BLKmode store-flag operations because it would mean
9576 passing a lot of information to emit_store_flag. */
9577 if (operand_mode == BLKmode)
9583 /* Get the rtx comparison code to use. We know that EXP is a comparison
9584 operation of some type. Some comparisons against 1 and -1 can be
9585 converted to comparisons with zero. Do so here so that the tests
9586 below will be aware that we have a comparison with zero. These
9587 tests will not catch constants in the first operand, but constants
9588 are rarely passed as the first operand. */
9590 switch (TREE_CODE (exp))
9599 if (integer_onep (arg1))
9600 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9602 code = unsignedp ? LTU : LT;
9605 if (! unsignedp && integer_all_onesp (arg1))
9606 arg1 = integer_zero_node, code = LT;
9608 code = unsignedp ? LEU : LE;
9611 if (! unsignedp && integer_all_onesp (arg1))
9612 arg1 = integer_zero_node, code = GE;
9614 code = unsignedp ? GTU : GT;
9617 if (integer_onep (arg1))
9618 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9620 code = unsignedp ? GEU : GE;
9626 /* Put a constant second. */
9627 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9629 tem = arg0; arg0 = arg1; arg1 = tem;
9630 code = swap_condition (code);
9633 /* If this is an equality or inequality test of a single bit, we can
9634 do this by shifting the bit being tested to the low-order bit and
9635 masking the result with the constant 1. If the condition was EQ,
9636 we xor it with 1. This does not require an scc insn and is faster
9637 than an scc insn even if we have it. */
9639 if ((code == NE || code == EQ)
9640 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9641 && integer_pow2p (TREE_OPERAND (arg0, 1))
9642 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9644 tree inner = TREE_OPERAND (arg0, 0);
9645 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9646 NULL_RTX, VOIDmode, 0)));
9649 /* If INNER is a right shift of a constant and it plus BITNUM does
9650 not overflow, adjust BITNUM and INNER. */
9652 if (TREE_CODE (inner) == RSHIFT_EXPR
9653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9654 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9655 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9656 < TYPE_PRECISION (type)))
bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9659 inner = TREE_OPERAND (inner, 0);
9662 /* If we are going to be able to omit the AND below, we must do our
9663 operations as unsigned. If we must use the AND, we have a choice.
9664 Normally unsigned is faster, but for some machines signed is. */
9665 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9666 #ifdef LOAD_EXTEND_OP
9667 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9673 if (subtarget == 0 || GET_CODE (subtarget) != REG
9674 || GET_MODE (subtarget) != operand_mode
9675 || ! safe_from_p (subtarget, inner))
9678 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9681 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9682 size_int (bitnum), subtarget, ops_unsignedp);
9684 if (GET_MODE (op0) != mode)
9685 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9687 if ((code == EQ && ! invert) || (code == NE && invert))
9688 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9689 ops_unsignedp, OPTAB_LIB_WIDEN);
9691 /* Put the AND last so it can combine with more things. */
9692 if (bitnum != TYPE_PRECISION (type) - 1)
9693 op0 = expand_and (op0, const1_rtx, subtarget);
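/* Worked example, an illustration: with the assumptions above,

     (x & 0x10) != 0   expands as   (x >> 4) & 1
     (x & 0x10) == 0   expands as   ((x >> 4) & 1) ^ 1

   and when the tested bit is the sign bit, the trailing AND is
   omitted and the shift is simply done unsigned. */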
9698 /* Now see if we are likely to be able to do this. Return if not. */
9699 if (! can_compare_p (operand_mode))
9701 icode = setcc_gen_code[(int) code];
9702 if (icode == CODE_FOR_nothing
9703 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9705 /* We can only do this if it is one of the special cases that
9706 can be handled without an scc insn. */
9707 if ((code == LT && integer_zerop (arg1))
9708 || (! only_cheap && code == GE && integer_zerop (arg1)))
9710 else if (BRANCH_COST >= 0
9711 && ! only_cheap && (code == NE || code == EQ)
9712 && TREE_CODE (type) != REAL_TYPE
9713 && ((abs_optab->handlers[(int) operand_mode].insn_code
9714 != CODE_FOR_nothing)
9715 || (ffs_optab->handlers[(int) operand_mode].insn_code
9716 != CODE_FOR_nothing)))
9722 preexpand_calls (exp);
9723 if (subtarget == 0 || GET_CODE (subtarget) != REG
9724 || GET_MODE (subtarget) != operand_mode
9725 || ! safe_from_p (subtarget, arg1))
9728 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9729 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9732 target = gen_reg_rtx (mode);
9734 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9735 because, if the emit_store_flag does anything it will succeed and
9736 OP0 and OP1 will not be used subsequently. */
9738 result = emit_store_flag (target, code,
9739 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9740 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9741 operand_mode, unsignedp, 1);
9746 result = expand_binop (mode, xor_optab, result, const1_rtx,
9747 result, 0, OPTAB_LIB_WIDEN);
9751 /* If this failed, we have to do this with set/compare/jump/set code. */
9752 if (target == 0 || GET_CODE (target) != REG
9753 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9754 target = gen_reg_rtx (GET_MODE (target));
9756 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9757 result = compare_from_rtx (op0, op1, code, unsignedp,
9758 operand_mode, NULL_RTX, 0);
9759 if (GET_CODE (result) == CONST_INT)
9760 return (((result == const0_rtx && ! invert)
9761 || (result != const0_rtx && invert))
9762 ? const0_rtx : const1_rtx);
9764 label = gen_label_rtx ();
9765 if (bcc_gen_fctn[(int) code] == 0)
9768 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9769 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9775 /* Generate a tablejump instruction (used for switch statements). */
9777 #ifdef HAVE_tablejump
9779 /* INDEX is the value being switched on, with the lowest value
9780 in the table already subtracted.
9781 MODE is its expected mode (needed if INDEX is constant).
9782 RANGE is the length of the jump table.
9783 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9785 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9786 index value is out of range. */
9789 do_tablejump (index, mode, range, table_label, default_label)
9790 rtx index, range, table_label, default_label;
9791 enum machine_mode mode;
9793 register rtx temp, vector;
9795 /* Do an unsigned comparison (in the proper mode) between the index
9796 expression and the value which represents the length of the range.
9797 Since we just finished subtracting the lower bound of the range
9798 from the index expression, this comparison allows us to simultaneously
9799 check that the original index expression value is both greater than
9800 or equal to the minimum value of the range and less than or equal to
9801 the maximum value of the range. */
9803 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9804 emit_jump_insn (gen_bgtu (default_label));
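/* The familiar source-level counterpart, an illustration: for cases
   spanning [LOW, HIGH], one unsigned comparison replaces two signed
   ones, because an index below LOW wraps around to a huge value:

     if ((unsigned) (i - LOW) > (unsigned) (HIGH - LOW))
       goto default_label;  */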
9806 /* If index is in range, it must fit in Pmode.
9807 Convert to Pmode so we can index with it. */
9809 index = convert_to_mode (Pmode, index, 1);
/* Don't let a MEM slip through, because then INDEX that comes
9812 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9813 and break_out_memory_refs will go to work on it and mess it up. */
9814 #ifdef PIC_CASE_VECTOR_ADDRESS
9815 if (flag_pic && GET_CODE (index) != REG)
9816 index = copy_to_mode_reg (Pmode, index);
9819 /* If flag_force_addr were to affect this address
9820 it could interfere with the tricky assumptions made
9821 about addresses that contain label-refs,
9822 which may be valid only very near the tablejump itself. */
9823 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9824 GET_MODE_SIZE, because this indicates how large insns are. The other
9825 uses should all be Pmode, because they are addresses. This code
9826 could fail if addresses and insns are not the same size. */
9827 index = gen_rtx (PLUS, Pmode,
9828 gen_rtx (MULT, Pmode, index,
9829 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9830 gen_rtx (LABEL_REF, Pmode, table_label));
9831 #ifdef PIC_CASE_VECTOR_ADDRESS
9833 index = PIC_CASE_VECTOR_ADDRESS (index);
9836 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9837 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9838 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9839 RTX_UNCHANGING_P (vector) = 1;
9840 convert_move (temp, vector, 0);
9842 emit_jump_insn (gen_tablejump (temp, table_label));
9844 #ifndef CASE_VECTOR_PC_RELATIVE
9845 /* If we are generating PIC code or if the table is PC-relative, the
9846 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9852 #endif /* HAVE_tablejump */
9855 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9856 to that value is on the top of the stack. The resulting type is TYPE, and
9857 the source declaration is DECL. */
9860 bc_load_memory (type, decl)
9863 enum bytecode_opcode opcode;
9866 /* Bit fields are special. We only know about signed and
unsigned ints, and enums. The latter are treated as ints. */
9870 if (DECL_BIT_FIELD (decl))
9871 if (TREE_CODE (type) == ENUMERAL_TYPE
9872 || TREE_CODE (type) == INTEGER_TYPE)
9873 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9877 /* See corresponding comment in bc_store_memory(). */
9878 if (TYPE_MODE (type) == BLKmode
9879 || TYPE_MODE (type) == VOIDmode)
9882 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9884 if (opcode == neverneverland)
9887 bc_emit_bytecode (opcode);
9889 #ifdef DEBUG_PRINT_CODE
9890 fputc ('\n', stderr);
9895 /* Store the contents of the second stack slot to the address in the
9896 top stack slot. DECL is the declaration of the destination and is used
9897 to determine whether we're dealing with a bitfield. */
9900 bc_store_memory (type, decl)
9903 enum bytecode_opcode opcode;
9906 if (DECL_BIT_FIELD (decl))
9908 if (TREE_CODE (type) == ENUMERAL_TYPE
9909 || TREE_CODE (type) == INTEGER_TYPE)
9915 if (TYPE_MODE (type) == BLKmode)
9917 /* Copy structure. This expands to a block copy instruction, storeBLK.
9918 In addition to the arguments expected by the other store instructions,
9919 it also expects a type size (SImode) on top of the stack, which is the
structure size in size units (usually bytes). The first two arguments
are already on the stack, so we just put the size on level 1. For some
other languages, the size may be variable; this is why we don't encode
it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9925 bc_expand_expr (TYPE_SIZE (type));
9929 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9931 if (opcode == neverneverland)
9934 bc_emit_bytecode (opcode);
9936 #ifdef DEBUG_PRINT_CODE
9937 fputc ('\n', stderr);
9942 /* Allocate local stack space sufficient to hold a value of the given
9943 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9944 integral power of 2. A special case is locals of type VOID, which
9945 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9946 remapped into the corresponding attribute of SI. */
9949 bc_allocate_local (size, alignment)
9950 int size, alignment;
9958 /* Normalize size and alignment */
9960 size = UNITS_PER_WORD;
9962 if (alignment < BITS_PER_UNIT)
9963 byte_alignment = 1 << (INT_ALIGN - 1);
9966 byte_alignment = alignment / BITS_PER_UNIT;
9968 if (local_vars_size & (byte_alignment - 1))
9969 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9971 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9972 local_vars_size += size;
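/* Worked example, an illustration: requesting size 4 at 32-bit
   alignment when local_vars_size is 6 gives byte_alignment = 4; the
   frame is padded to 8, the returned rtx records offset 8, and
   local_vars_size becomes 12. */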
9978 /* Allocate variable-sized local array. Variable-sized arrays are
9979 actually pointers to the address in memory where they are stored. */
9982 bc_allocate_variable_array (size)
9986 const int ptralign = (1 << (PTR_ALIGN - 1));
9989 if (local_vars_size & ptralign)
9990 local_vars_size += ptralign - (local_vars_size & ptralign);
/* Note down local space needed: pointer to block; also return an rtx
   for its location. */
9995 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9996 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10001 /* Push the machine address for the given external variable offset. */
10003 bc_load_externaddr (externaddr)
10006 bc_emit_bytecode (constP);
10007 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10008 BYTECODE_BC_LABEL (externaddr)->offset);
10010 #ifdef DEBUG_PRINT_CODE
10011 fputc ('\n', stderr);
10020 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10026 /* Like above, but expects an IDENTIFIER. */
10028 bc_load_externaddr_id (id, offset)
10032 if (!IDENTIFIER_POINTER (id))
10035 bc_emit_bytecode (constP);
10036 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10038 #ifdef DEBUG_PRINT_CODE
10039 fputc ('\n', stderr);
10044 /* Push the machine address for the given local variable offset. */
10046 bc_load_localaddr (localaddr)
10049 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10053 /* Push the machine address for the given parameter offset.
10054 NOTE: offset is in bits. */
10056 bc_load_parmaddr (parmaddr)
10059 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10064 /* Convert a[i] into *(a + i). */
10066 bc_canonicalize_array_ref (exp)
10069 tree type = TREE_TYPE (exp);
10070 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10071 TREE_OPERAND (exp, 0));
10072 tree index = TREE_OPERAND (exp, 1);
10075 /* Convert the integer argument to a type the same size as a pointer
10076 so the multiply won't overflow spuriously. */
10078 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10079 index = convert (type_for_size (POINTER_SIZE, 0), index);
10081 /* The array address isn't volatile even if the array is.
10082 (Of course this isn't terribly relevant since the bytecode
10083 translator treats nearly everything as volatile anyway.) */
10084 TREE_THIS_VOLATILE (array_adr) = 0;
10086 return build1 (INDIRECT_REF, type,
10087 fold (build (PLUS_EXPR,
10088 TYPE_POINTER_TO (type),
10090 fold (build (MULT_EXPR,
10091 TYPE_POINTER_TO (type),
10093 size_in_bytes (type))))));
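/* For example, an illustration: with int a[10], this turns a[i] into
   the equivalent of

     *(int *) ((char *) a + i * sizeof (int))

   where the index has first been widened to pointer precision so the
   multiply cannot overflow spuriously. */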
10097 /* Load the address of the component referenced by the given
10098 COMPONENT_REF expression.
10100 Returns innermost lvalue. */
10103 bc_expand_component_address (exp)
10107 enum machine_mode mode;
10109 HOST_WIDE_INT SIval;
10112 tem = TREE_OPERAND (exp, 1);
10113 mode = DECL_MODE (tem);
10116 /* Compute cumulative bit offset for nested component refs
10117 and array refs, and find the ultimate containing object. */
10119 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10121 if (TREE_CODE (tem) == COMPONENT_REF)
10122 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10124 if (TREE_CODE (tem) == ARRAY_REF
10125 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10126 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10128 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10129 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10130 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10135 bc_expand_expr (tem);
10138 /* For bitfields also push their offset and size */
10139 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
bc_push_offset_and_size (bitpos,
                         TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 1))));
10142 if (SIval = bitpos / BITS_PER_UNIT)
10143 bc_emit_instruction (addconstPSI, SIval);
10145 return (TREE_OPERAND (exp, 1));
10149 /* Emit code to push two SI constants */
10151 bc_push_offset_and_size (offset, size)
10152 HOST_WIDE_INT offset, size;
10154 bc_emit_instruction (constSI, offset);
10155 bc_emit_instruction (constSI, size);
10159 /* Emit byte code to push the address of the given lvalue expression to
10160 the stack. If it's a bit field, we also push offset and size info.
10162 Returns innermost component, which allows us to determine not only
10163 its type, but also whether it's a bitfield. */
10166 bc_expand_address (exp)
10170 if (!exp || TREE_CODE (exp) == ERROR_MARK)
10174 switch (TREE_CODE (exp))
10178 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10180 case COMPONENT_REF:
10182 return (bc_expand_component_address (exp));
10186 bc_expand_expr (TREE_OPERAND (exp, 0));
10188 /* For variable-sized types: retrieve pointer. Sometimes the
10189 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
10190 also make sure we have an operand, just in case... */
10192 if (TREE_OPERAND (exp, 0)
10193 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10194 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10195 bc_emit_instruction (loadP);
10197 /* If packed, also return offset and size */
10198 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10200 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10201 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10203 return (TREE_OPERAND (exp, 0));
10205 case FUNCTION_DECL:
10207 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10208 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
10213 bc_load_parmaddr (DECL_RTL (exp));
10215 /* For variable-sized types: retrieve pointer */
10216 if (TYPE_SIZE (TREE_TYPE (exp))
10217 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10218 bc_emit_instruction (loadP);
10220 /* If packed, also return offset and size */
10221 if (DECL_BIT_FIELD (exp))
10222 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10223 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10229 bc_emit_instruction (returnP);
10235 if (BYTECODE_LABEL (DECL_RTL (exp)))
10236 bc_load_externaddr (DECL_RTL (exp));
10239 if (DECL_EXTERNAL (exp))
10240 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10241 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10243 bc_load_localaddr (DECL_RTL (exp));
10245 /* For variable-sized types: retrieve pointer */
10246 if (TYPE_SIZE (TREE_TYPE (exp))
10247 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10248 bc_emit_instruction (loadP);
10250 /* If packed, also return offset and size */
10251 if (DECL_BIT_FIELD (exp))
10252 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10253 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10261 bc_emit_bytecode (constP);
10262 r = output_constant_def (exp);
10263 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10265 #ifdef DEBUG_PRINT_CODE
10266 fputc ('\n', stderr);
10277 /* Most lvalues don't have components. */
10282 /* Emit a type code to be used by the runtime support in handling
10283 parameter passing. The type code consists of the machine mode
10284 plus the minimal alignment shifted left 8 bits. */
10287 bc_runtime_type_code (type)
10292 switch (TREE_CODE (type))
10298 case ENUMERAL_TYPE:
10302 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10314 return build_int_2 (val, 0);
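/* Worked example, an illustration with made-up mode numbering: if
   TYPE_MODE (type) encodes to 4 and TYPE_ALIGN (type) is 32, the code
   is 4 | (32 << 8) = 0x2004; the runtime recovers the mode as
   code & 0xff and the alignment as code >> 8. */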
10318 /* Generate constructor label */
10320 bc_gen_constr_label ()
10322 static int label_counter;
10323 static char label[20];
10325 sprintf (label, "*LR%d", label_counter++);
10327 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment. */

  /* Allocate space in proper segment and push pointer to space on stack.
   */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      text_section ();

      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      data_section ();

      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals. */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal. */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor. */

  /* constructor type is structure */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, 0,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else

    /* Constructor type is array */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements. */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize;
	    int bitpos;
	    int unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
		      /* * TYPE_SIZE_UNIT (elttype) */ );

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, 0,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}
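/* A sketch of the emitted sequence (illustrative): for a non-constant
   initializer such as `int a[4] = { 1, n, 2 };', only three of the four
   elements are given, so the code above duplicates the pointer, pushes
   the array size with constSI and clears the block with clearBLK; it
   then issues one bc_store_field per element, at bit positions 0, 32
   and 64 on a 32-bit target.  */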
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    {
      bc_store_bit_field (bitpos, bitsize, unsignedp);
      return;
    }
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
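/* Stack discipline of the above, assuming `over' has the usual
   stack-machine meaning of copying the second entry on top: on entry
   the structure address is on top; bc_expand_expr pushes the value,
   and `over' copies the address back above it, so the store consumes
   (value, address) while the original address survives for the next
   member store.  */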
/* Store SI/SU in bitfield */

void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}
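/* Both this function and bc_load_bit_field below rely on
   bc_push_offset_and_size having placed the (bit offset, bit size)
   pair on the stack immediately before the BI instruction executes.  */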
/* Load SI/SU from bitfield */

void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
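/* For example, a 3-bit field whose stored bits are 111 loads as -1
   through sxloadBI when signed, but as 7 through zxloadBI when
   UNSIGNEDP is set (assuming the interpreter's two's complement
   representation).  */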
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
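/* Thus bc_adjust_stack (2) emits two `drop' instructions (note the
   deliberate fall-through from case 2 into case 1), while larger
   adjustments such as bc_adjust_stack (5) emit a single adjstackSI
   whose operand is the level count.  */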