1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "insn-flags.h"
30 #include "insn-codes.h"
32 #include "insn-config.h"
35 #include "typeclass.h"
38 #include "bc-opcode.h"
39 #include "bc-typecd.h"
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
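/* For example, with 4-byte words CEIL (8, 4) is 2 and CEIL (5, 4) is 2:
   the quotient is rounded up, never down.  This is how the multiword code
   below computes word counts, e.g.
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) in convert_move.  */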
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
64 #define STACK_PUSH_CODE PRE_INC
68 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
69 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
71 /* If this is nonzero, we do not bother generating VOLATILE
72 around volatile memory references, and we are willing to
73 output indirect addresses. If cse is to follow, we reject
74 indirect addresses so a useful potential cse is generated;
75 if it is used only once, instruction combination will produce
76 the same indirect address eventually. */
77 int cse_not_expected;
79 /* Nonzero to generate code for all the subroutines within an
80 expression before generating the upper levels of the expression.
81 Nowadays this is never zero. */
82 int do_preexpand_calls = 1;
84 /* Number of units that we should eventually pop off the stack.
85 These are the arguments to function calls that have already returned. */
86 int pending_stack_adjust;
88 /* Nonzero means stack pops must not be deferred, and deferred stack
89 pops must not be output. It is nonzero inside a function call,
90 inside a conditional expression, inside a statement expression,
91 and in other cases as well. */
92 int inhibit_defer_pop;
94 /* A list of all cleanups which belong to the arguments of
95 function calls being expanded by expand_call. */
96 tree cleanups_this_call;
98 /* When temporaries are created by TARGET_EXPRs, they are created at
99 this level of temp_slot_level, so that they can remain allocated
100 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
101 of TARGET_EXPRs. */
102 int target_temp_slot_level;
104 /* Nonzero means __builtin_saveregs has already been done in this function.
105 The value is the pseudoreg containing the value __builtin_saveregs
106 returned. */
107 static rtx saveregs_value;
109 /* Similarly for __builtin_apply_args. */
110 static rtx apply_args_value;
112 /* This structure is used by move_by_pieces to describe the move to
113 be performed. */
115 struct move_by_pieces
125 int explicit_inc_from;
132 /* Used to generate bytecodes: keep track of size of local variables,
133 as well as depth of arithmetic stack. (Notice that variables are
134 stored on the machine's stack, not the arithmetic stack.) */
136 extern int local_vars_size;
137 extern int stack_depth;
138 extern int max_stack_depth;
139 extern struct obstack permanent_obstack;
142 static rtx enqueue_insn PROTO((rtx, rtx));
143 static int queued_subexp_p PROTO((rtx));
144 static void init_queue PROTO((void));
145 static void move_by_pieces PROTO((rtx, rtx, int, int));
146 static int move_by_pieces_ninsns PROTO((unsigned int, int));
147 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
148 struct move_by_pieces *));
149 static void store_constructor PROTO((tree, rtx));
150 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
151 enum machine_mode, int, int, int));
152 static int get_inner_unaligned_p PROTO((tree));
153 static tree save_noncopied_parts PROTO((tree, tree));
154 static tree init_noncopied_parts PROTO((tree, tree));
155 static int safe_from_p PROTO((rtx, tree));
156 static int fixed_type_p PROTO((tree));
157 static int get_pointer_alignment PROTO((tree, unsigned));
158 static tree string_constant PROTO((tree, tree *));
159 static tree c_strlen PROTO((tree));
160 static rtx expand_builtin PROTO((tree, rtx, rtx,
161 enum machine_mode, int));
162 static int apply_args_size PROTO((void));
163 static int apply_result_size PROTO((void));
164 static rtx result_vector PROTO((int, rtx));
165 static rtx expand_builtin_apply_args PROTO((void));
166 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
167 static void expand_builtin_return PROTO((rtx));
168 static rtx expand_increment PROTO((tree, int));
169 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
170 tree bc_runtime_type_code PROTO((tree));
171 rtx bc_allocate_local PROTO((int, int));
172 void bc_store_memory PROTO((tree, tree));
173 tree bc_expand_component_address PROTO((tree));
174 tree bc_expand_address PROTO((tree));
175 void bc_expand_constructor PROTO((tree));
176 void bc_adjust_stack PROTO((int));
177 tree bc_canonicalize_array_ref PROTO((tree));
178 void bc_load_memory PROTO((tree, tree));
179 void bc_load_externaddr PROTO((rtx));
180 void bc_load_externaddr_id PROTO((tree, int));
181 void bc_load_localaddr PROTO((rtx));
182 void bc_load_parmaddr PROTO((rtx));
183 static void preexpand_calls PROTO((tree));
184 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
185 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
186 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
187 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
188 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
189 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
190 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
191 static tree defer_cleanups_to PROTO((tree));
192 extern void (*interim_eh_hook) PROTO((tree));
194 /* Record for each mode whether we can move a register directly to or
195 from an object of that mode in memory. If we can't, we won't try
196 to use that mode directly when accessing a field of that mode. */
198 static char direct_load[NUM_MACHINE_MODES];
199 static char direct_store[NUM_MACHINE_MODES];
201 /* MOVE_RATIO is the number of move instructions that is better than
202 a block move. */
205 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
208 /* A value of around 6 would minimize code size; infinity would minimize
209 execution time. */
210 #define MOVE_RATIO 15
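/* For example, with MOVE_RATIO 15 and 4-byte words, a fully aligned
   64-byte copy would take 16 single-word moves, so emit_block_move
   prefers a movstr pattern or a library call; a 32-byte copy (8 moves)
   is done inline by move_by_pieces.  (The counts are the ones computed
   by move_by_pieces_ninsns below.)  */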
214 /* This array records the insn_code of insns to perform block moves. */
215 enum insn_code movstr_optab[NUM_MACHINE_MODES];
217 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
219 #ifndef SLOW_UNALIGNED_ACCESS
220 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
223 /* Register mappings for target machines without register windows. */
224 #ifndef INCOMING_REGNO
225 #define INCOMING_REGNO(OUT) (OUT)
227 #ifndef OUTGOING_REGNO
228 #define OUTGOING_REGNO(IN) (IN)
231 /* Maps used to convert modes to const, load, and store bytecodes. */
232 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
233 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
234 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
236 /* Initialize maps used to convert modes to const, load, and store
237 bytecodes. */
238 void
239 bc_init_mode_to_opcode_maps ()
243 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
244 mode_to_const_map[mode] =
245 mode_to_load_map[mode] =
246 mode_to_store_map[mode] = neverneverland;
248 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
249 mode_to_const_map[(int) SYM] = CONST; \
250 mode_to_load_map[(int) SYM] = LOAD; \
251 mode_to_store_map[(int) SYM] = STORE;
253 #include "modemap.def"
257 /* This is run once per compilation to set up which modes can be used
258 directly in memory and to initialize the block move optab. */
264 enum machine_mode mode;
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
268 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
269 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
272 insn = emit_insn (gen_rtx (SET, 0, 0));
273 pat = PATTERN (insn);
275 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
276 mode = (enum machine_mode) ((int) mode + 1))
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
286 /* See if there is some register that can be used in this mode and
287 directly loaded or stored from memory. */
289 if (mode != VOIDmode && mode != BLKmode)
290 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
291 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
292 regno++)
294 if (! HARD_REGNO_MODE_OK (regno, mode))
297 reg = gen_rtx (REG, mode, regno);
299 SET_SRC (pat) = mem;
300 SET_DEST (pat) = reg;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_load[(int) mode] = 1;
304 SET_SRC (pat) = mem1;
305 SET_DEST (pat) = reg;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_load[(int) mode] = 1;
309 SET_SRC (pat) = reg;
310 SET_DEST (pat) = mem;
311 if (recog (pat, insn, &num_clobbers) >= 0)
312 direct_store[(int) mode] = 1;
314 SET_SRC (pat) = reg;
315 SET_DEST (pat) = mem1;
316 if (recog (pat, insn, &num_clobbers) >= 0)
317 direct_store[(int) mode] = 1;
324 /* This is run at the start of compiling a function. */
331 pending_stack_adjust = 0;
332 inhibit_defer_pop = 0;
333 cleanups_this_call = 0;
335 apply_args_value = 0;
339 /* Save all variables describing the current status into the structure *P.
340 This is used before starting a nested function. */
346 /* Instead of saving the postincrement queue, empty it. */
349 p->pending_stack_adjust = pending_stack_adjust;
350 p->inhibit_defer_pop = inhibit_defer_pop;
351 p->cleanups_this_call = cleanups_this_call;
352 p->saveregs_value = saveregs_value;
353 p->apply_args_value = apply_args_value;
354 p->forced_labels = forced_labels;
356 pending_stack_adjust = 0;
357 inhibit_defer_pop = 0;
358 cleanups_this_call = 0;
360 apply_args_value = 0;
364 /* Restore all variables describing the current status from the structure *P.
365 This is used after a nested function. */
368 restore_expr_status (p)
371 pending_stack_adjust = p->pending_stack_adjust;
372 inhibit_defer_pop = p->inhibit_defer_pop;
373 cleanups_this_call = p->cleanups_this_call;
374 saveregs_value = p->saveregs_value;
375 apply_args_value = p->apply_args_value;
376 forced_labels = p->forced_labels;
379 /* Manage the queue of increment instructions to be output
380 for POSTINCREMENT_EXPR expressions, etc. */
382 static rtx pending_chain;
384 /* Queue up to increment (or change) VAR later. BODY says how:
385 BODY should be the same thing you would pass to emit_insn
386 to increment right away. It will go to emit_insn later on.
388 The value is a QUEUED expression to be used in place of VAR
389 where you want to guarantee the pre-incrementation value of VAR. */
392 enqueue_insn (var, body)
395 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
396 var, NULL_RTX, NULL_RTX, body, pending_chain);
397 return pending_chain;
400 /* Use protect_from_queue to convert a QUEUED expression
401 into something that you can put immediately into an instruction.
402 If the queued incrementation has not happened yet,
403 protect_from_queue returns the variable itself.
404 If the incrementation has happened, protect_from_queue returns a temp
405 that contains a copy of the old value of the variable.
407 Any time an rtx which might possibly be a QUEUED is to be put
408 into an instruction, it must be passed through protect_from_queue first.
409 QUEUED expressions are not meaningful in instructions.
411 Do not pass a value through protect_from_queue and then hold
412 on to it for a while before putting it in an instruction!
413 If the queue is flushed in between, incorrect code will result. */
416 protect_from_queue (x, modify)
420 register RTX_CODE code = GET_CODE (x);
422 #if 0 /* A QUEUED can hang around after the queue is forced out. */
423 /* Shortcut for most common case. */
424 if (pending_chain == 0)
430 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
431 use of autoincrement. Make a copy of the contents of the memory
432 location rather than a copy of the address, but not if the value is
433 of mode BLKmode. Don't modify X in place since it might be
434 shared. */
435 if (code == MEM && GET_MODE (x) != BLKmode
436 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
438 register rtx y = XEXP (x, 0);
439 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
441 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
442 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
443 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
447 register rtx temp = gen_reg_rtx (GET_MODE (new));
448 emit_insn_before (gen_move_insn (temp, new),
454 /* Otherwise, recursively protect the subexpressions of all
455 the kinds of rtx's that can contain a QUEUED. */
458 rtx tem = protect_from_queue (XEXP (x, 0), 0);
459 if (tem != XEXP (x, 0))
465 else if (code == PLUS || code == MULT)
467 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
468 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
469 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
478 /* If the increment has not happened, use the variable itself. */
479 if (QUEUED_INSN (x) == 0)
480 return QUEUED_VAR (x);
481 /* If the increment has happened and a pre-increment copy exists,
482 use that copy. */
483 if (QUEUED_COPY (x) != 0)
484 return QUEUED_COPY (x);
485 /* The increment has happened but we haven't set up a pre-increment copy.
486 Set one up now, and use it. */
487 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
488 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
490 return QUEUED_COPY (x);
493 /* Return nonzero if X contains a QUEUED expression:
494 if it contains anything that will be altered by a queued increment.
495 We handle only combinations of MEM, PLUS, MINUS and MULT operators
496 since memory addresses generally contain only those. */
502 register enum rtx_code code = GET_CODE (x);
508 return queued_subexp_p (XEXP (x, 0));
512 return queued_subexp_p (XEXP (x, 0))
513 || queued_subexp_p (XEXP (x, 1));
518 /* Perform all the pending incrementations. */
524 while (p = pending_chain)
526 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
527 pending_chain = QUEUED_NEXT (p);
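/* A minimal sketch of the intended discipline; OP0, TARGET and EXP are
   assumed to exist in the caller.  Any rtx that may contain a QUEUED is
   passed through protect_from_queue before being put into an insn, and
   emit_queue flushes the pending post-increments at the next sequence
   point.  */
#if 0
  rtx op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  op0 = protect_from_queue (op0, 0);		/* read access */
  target = protect_from_queue (target, 1);	/* write access */
  emit_move_insn (target, op0);
  emit_queue ();		/* now perform the queued increments */
#endif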
538 /* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
544 convert_move (to, from, unsignedp)
545 register rtx to, from;
548 enum machine_mode to_mode = GET_MODE (to);
549 enum machine_mode from_mode = GET_MODE (from);
550 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
551 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
555 /* rtx code for making an equivalent value. */
556 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
558 to = protect_from_queue (to, 1);
559 from = protect_from_queue (from, 0);
561 if (to_real != from_real)
562 abort ();
564 /* If FROM is a SUBREG that indicates that we have already done at least
565 the required extension, strip it. We don't handle such SUBREGs as
568 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
569 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
570 >= GET_MODE_SIZE (to_mode))
571 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
572 from = gen_lowpart (to_mode, from), from_mode = to_mode;
574 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
575 abort ();
577 if (to_mode == from_mode
578 || (from_mode == VOIDmode && CONSTANT_P (from)))
580 emit_move_insn (to, from);
588 #ifdef HAVE_extendqfhf2
589 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
591 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
595 #ifdef HAVE_extendqfsf2
596 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
598 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
602 #ifdef HAVE_extendqfdf2
603 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
605 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
609 #ifdef HAVE_extendqfxf2
610 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
612 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
616 #ifdef HAVE_extendqftf2
617 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
619 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
624 #ifdef HAVE_extendhftqf2
625 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
627 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
632 #ifdef HAVE_extendhfsf2
633 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
635 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
639 #ifdef HAVE_extendhfdf2
640 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
642 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
646 #ifdef HAVE_extendhfxf2
647 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
649 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
653 #ifdef HAVE_extendhftf2
654 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
656 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
661 #ifdef HAVE_extendsfdf2
662 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
664 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
668 #ifdef HAVE_extendsfxf2
669 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
671 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
675 #ifdef HAVE_extendsftf2
676 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
678 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
682 #ifdef HAVE_extenddfxf2
683 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
685 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
689 #ifdef HAVE_extenddftf2
690 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
692 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
697 #ifdef HAVE_trunchfqf2
698 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
700 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncsfqf2
705 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
707 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncdfqf2
712 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
714 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
718 #ifdef HAVE_truncxfqf2
719 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
721 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
725 #ifdef HAVE_trunctfqf2
726 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
728 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
733 #ifdef HAVE_trunctqfhf2
734 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
736 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
740 #ifdef HAVE_truncsfhf2
741 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
743 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
747 #ifdef HAVE_truncdfhf2
748 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
750 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
754 #ifdef HAVE_truncxfhf2
755 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
757 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
761 #ifdef HAVE_trunctfhf2
762 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
764 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
768 #ifdef HAVE_truncdfsf2
769 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
771 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
775 #ifdef HAVE_truncxfsf2
776 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
778 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
782 #ifdef HAVE_trunctfsf2
783 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
785 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
789 #ifdef HAVE_truncxfdf2
790 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
792 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
796 #ifdef HAVE_trunctfdf2
797 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
799 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
811 libcall = extendsfdf2_libfunc;
815 libcall = extendsfxf2_libfunc;
819 libcall = extendsftf2_libfunc;
828 libcall = truncdfsf2_libfunc;
832 libcall = extenddfxf2_libfunc;
836 libcall = extenddftf2_libfunc;
845 libcall = truncxfsf2_libfunc;
849 libcall = truncxfdf2_libfunc;
858 libcall = trunctfsf2_libfunc;
862 libcall = trunctfdf2_libfunc;
868 if (libcall == (rtx) 0)
869 /* This conversion is not implemented yet. */
870 abort ();
872 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
873 1, from, from_mode);
874 emit_move_insn (to, value);
878 /* Now both modes are integers. */
880 /* Handle expanding beyond a word. */
881 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
882 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
889 enum machine_mode lowpart_mode;
890 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
892 /* Try converting directly if the insn is supported. */
893 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
894 != CODE_FOR_nothing)
896 /* If FROM is a SUBREG, put it into a register. Do this
897 so that we always generate the same set of insns for
898 better cse'ing; if an intermediate assignment occurred,
899 we won't be doing the operation directly on the SUBREG. */
900 if (optimize > 0 && GET_CODE (from) == SUBREG)
901 from = force_reg (from_mode, from);
902 emit_unop_insn (code, to, from, equiv_code);
905 /* Next, try converting via full word. */
906 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
907 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
908 != CODE_FOR_nothing))
910 if (GET_CODE (to) == REG)
911 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
912 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
913 emit_unop_insn (code, to,
914 gen_lowpart (word_mode, to), equiv_code);
918 /* No special multiword conversion insn; do it by hand. */
921 /* Since we will turn this into a no conflict block, we must ensure
922 that the source does not overlap the target. */
924 if (reg_overlap_mentioned_p (to, from))
925 from = force_reg (from_mode, from);
927 /* Get a copy of FROM widened to a word, if necessary. */
928 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
929 lowpart_mode = word_mode;
931 lowpart_mode = from_mode;
933 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
935 lowpart = gen_lowpart (lowpart_mode, to);
936 emit_move_insn (lowpart, lowfrom);
938 /* Compute the value to put in each remaining word. */
940 fill_value = const0_rtx;
945 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
946 && STORE_FLAG_VALUE == -1)
948 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
950 fill_value = gen_reg_rtx (word_mode);
951 emit_insn (gen_slt (fill_value));
957 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
958 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
960 fill_value = convert_to_mode (word_mode, fill_value, 1);
964 /* Fill the remaining words. */
965 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
967 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
968 rtx subword = operand_subword (to, index, 1, to_mode);
973 if (fill_value != subword)
974 emit_move_insn (subword, fill_value);
977 insns = get_insns ();
980 emit_no_conflict_block (insns, to, from, NULL_RTX,
981 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
985 /* Truncating multi-word to a word or less. */
986 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
987 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
989 if (!((GET_CODE (from) == MEM
990 && ! MEM_VOLATILE_P (from)
991 && direct_load[(int) to_mode]
992 && ! mode_dependent_address_p (XEXP (from, 0)))
993 || GET_CODE (from) == REG
994 || GET_CODE (from) == SUBREG))
995 from = force_reg (from_mode, from);
996 convert_move (to, gen_lowpart (word_mode, from), 0);
1000 /* Handle pointer conversion */ /* SPEE 900220 */
1001 if (to_mode == PSImode)
1003 if (from_mode != SImode)
1004 from = convert_to_mode (SImode, from, unsignedp);
1006 #ifdef HAVE_truncsipsi2
1007 if (HAVE_truncsipsi2)
1009 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1012 #endif /* HAVE_truncsipsi2 */
1016 if (from_mode == PSImode)
1018 if (to_mode != SImode)
1020 from = convert_to_mode (SImode, from, unsignedp);
1025 #ifdef HAVE_extendpsisi2
1026 if (HAVE_extendpsisi2)
1028 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1031 #endif /* HAVE_extendpsisi2 */
1036 if (to_mode == PDImode)
1038 if (from_mode != DImode)
1039 from = convert_to_mode (DImode, from, unsignedp);
1041 #ifdef HAVE_truncdipdi2
1042 if (HAVE_truncdipdi2)
1044 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1047 #endif /* HAVE_truncdipdi2 */
1051 if (from_mode == PDImode)
1053 if (to_mode != DImode)
1055 from = convert_to_mode (DImode, from, unsignedp);
1060 #ifdef HAVE_extendpdidi2
1061 if (HAVE_extendpdidi2)
1063 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1066 #endif /* HAVE_extendpdidi2 */
1071 /* Now follow all the conversions between integers
1072 no more than a word long. */
1074 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1075 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1076 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1077 GET_MODE_BITSIZE (from_mode)))
1079 if (!((GET_CODE (from) == MEM
1080 && ! MEM_VOLATILE_P (from)
1081 && direct_load[(int) to_mode]
1082 && ! mode_dependent_address_p (XEXP (from, 0)))
1083 || GET_CODE (from) == REG
1084 || GET_CODE (from) == SUBREG))
1085 from = force_reg (from_mode, from);
1086 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1087 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1088 from = copy_to_reg (from);
1089 emit_move_insn (to, gen_lowpart (to_mode, from));
1093 /* Handle extension. */
1094 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1096 /* Convert directly if that works. */
1097 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1098 != CODE_FOR_nothing)
1100 emit_unop_insn (code, to, from, equiv_code);
1105 enum machine_mode intermediate;
1107 /* Search for a mode to convert via. */
1108 for (intermediate = from_mode; intermediate != VOIDmode;
1109 intermediate = GET_MODE_WIDER_MODE (intermediate))
1110 if (((can_extend_p (to_mode, intermediate, unsignedp)
1111 != CODE_FOR_nothing)
1112 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1113 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode), GET_MODE_BITSIZE (intermediate))))
1114 && (can_extend_p (intermediate, from_mode, unsignedp)
1115 != CODE_FOR_nothing))
1117 convert_move (to, convert_to_mode (intermediate, from,
1118 unsignedp), unsignedp);
1122 /* No suitable intermediate mode. */
1123 abort ();
1127 /* Support special truncate insns for certain modes. */
1129 if (from_mode == DImode && to_mode == SImode)
1131 #ifdef HAVE_truncdisi2
1132 if (HAVE_truncdisi2)
1134 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1142 if (from_mode == DImode && to_mode == HImode)
1144 #ifdef HAVE_truncdihi2
1145 if (HAVE_truncdihi2)
1147 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1155 if (from_mode == DImode && to_mode == QImode)
1157 #ifdef HAVE_truncdiqi2
1158 if (HAVE_truncdiqi2)
1160 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 if (from_mode == SImode && to_mode == HImode)
1170 #ifdef HAVE_truncsihi2
1171 if (HAVE_truncsihi2)
1173 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 if (from_mode == SImode && to_mode == QImode)
1183 #ifdef HAVE_truncsiqi2
1184 if (HAVE_truncsiqi2)
1186 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 if (from_mode == HImode && to_mode == QImode)
1196 #ifdef HAVE_trunchiqi2
1197 if (HAVE_trunchiqi2)
1199 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1207 if (from_mode == TImode && to_mode == DImode)
1209 #ifdef HAVE_trunctidi2
1210 if (HAVE_trunctidi2)
1212 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1220 if (from_mode == TImode && to_mode == SImode)
1222 #ifdef HAVE_trunctisi2
1223 if (HAVE_trunctisi2)
1225 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1233 if (from_mode == TImode && to_mode == HImode)
1235 #ifdef HAVE_trunctihi2
1236 if (HAVE_trunctihi2)
1238 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1246 if (from_mode == TImode && to_mode == QImode)
1248 #ifdef HAVE_trunctiqi2
1249 if (HAVE_trunctiqi2)
1251 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1255 convert_move (to, force_reg (from_mode, from), unsignedp);
1259 /* Handle truncation of volatile memrefs, and so on;
1260 the things that couldn't be truncated directly,
1261 and for which there was no special instruction. */
1262 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1264 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1265 emit_move_insn (to, temp);
1269 /* Mode combination is not recognized. */
1270 abort ();
1273 /* Return an rtx for a value that would result
1274 from converting X to mode MODE.
1275 Both X and MODE may be floating, or both integer.
1276 UNSIGNEDP is nonzero if X is an unsigned value.
1277 This can be done by referring to a part of X in place
1278 or by copying to a new temporary with conversion.
1280 This function *must not* call protect_from_queue
1281 except when putting X into an insn (in which case convert_move does it). */
1284 convert_to_mode (mode, x, unsignedp)
1285 enum machine_mode mode;
1289 return convert_modes (mode, VOIDmode, x, unsignedp);
1292 /* Return an rtx for a value that would result
1293 from converting X from mode OLDMODE to mode MODE.
1294 Both modes may be floating, or both integer.
1295 UNSIGNEDP is nonzero if X is an unsigned value.
1297 This can be done by referring to a part of X in place
1298 or by copying to a new temporary with conversion.
1300 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1302 This function *must not* call protect_from_queue
1303 except when putting X into an insn (in which case convert_move does it). */
1306 convert_modes (mode, oldmode, x, unsignedp)
1307 enum machine_mode mode, oldmode;
1313 /* If FROM is a SUBREG that indicates that we have already done at least
1314 the required extension, strip it. */
1316 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1317 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1318 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1319 x = gen_lowpart (mode, x);
1321 if (GET_MODE (x) != VOIDmode)
1322 oldmode = GET_MODE (x);
1324 if (mode == oldmode)
1327 /* There is one case that we must handle specially: If we are converting
1328 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1329 we are to interpret the constant as unsigned, gen_lowpart will do
1330 the wrong thing if the constant appears negative. What we want to do is
1331 make the high-order word of the constant zero, not all ones. */
1333 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1334 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1335 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1336 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1338 /* We can do this with a gen_lowpart if both desired and current modes
1339 are integer, and this is either a constant integer, a register, or a
1340 non-volatile MEM. Except for the constant case where MODE is no
1341 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1343 if ((GET_CODE (x) == CONST_INT
1344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1345 || (GET_MODE_CLASS (mode) == MODE_INT
1346 && GET_MODE_CLASS (oldmode) == MODE_INT
1347 && (GET_CODE (x) == CONST_DOUBLE
1348 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1349 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1350 && direct_load[(int) mode])
1351 || (GET_CODE (x) == REG
1352 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1353 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1355 /* ?? If we don't know OLDMODE, we have to assume here that
1356 X does not need sign- or zero-extension. This may not be
1357 the case, but it's the best we can do. */
1358 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1359 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1361 HOST_WIDE_INT val = INTVAL (x);
1362 int width = GET_MODE_BITSIZE (oldmode);
1364 /* We must sign or zero-extend in this case. Start by
1365 zero-extending, then sign extend if we need to. */
1366 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1367 if (! unsignedp
1368 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1369 val |= (HOST_WIDE_INT) (-1) << width;
1371 return GEN_INT (val);
1374 return gen_lowpart (mode, x);
1377 temp = gen_reg_rtx (mode);
1378 convert_move (temp, x, unsignedp);
1379 return temp;
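/* Illustrative sketch (not called anywhere): widening a SImode value
   FROM, first into an explicit DImode target with convert_move, then
   letting convert_to_mode choose the representation.  The third argument
   selects sign- (0) versus zero- (1) extension.  */
#if 0
  rtx widened = gen_reg_rtx (DImode);
  convert_move (widened, from, 0);		/* sign-extend FROM */
  widened = convert_to_mode (DImode, from, 1);	/* zero-extend FROM */
#endif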
1382 /* Generate several move instructions to copy LEN bytes
1383 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1384 The caller must pass FROM and TO
1385 through protect_from_queue before calling.
1386 ALIGN (in bytes) is maximum alignment we can assume. */
1389 move_by_pieces (to, from, len, align)
1393 struct move_by_pieces data;
1394 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1395 int max_size = MOVE_MAX + 1;
1398 data.to_addr = to_addr;
1399 data.from_addr = from_addr;
1403 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1404 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1406 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1407 || GET_CODE (from_addr) == POST_INC
1408 || GET_CODE (from_addr) == POST_DEC);
1410 data.explicit_inc_from = 0;
1411 data.explicit_inc_to = 0;
1413 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1414 if (data.reverse) data.offset = len;
1417 data.to_struct = MEM_IN_STRUCT_P (to);
1418 data.from_struct = MEM_IN_STRUCT_P (from);
1420 /* If copying requires more than two move insns,
1421 copy addresses to registers (to make displacements shorter)
1422 and use post-increment if available. */
1423 if (!(data.autinc_from && data.autinc_to)
1424 && move_by_pieces_ninsns (len, align) > 2)
1426 #ifdef HAVE_PRE_DECREMENT
1427 if (data.reverse && ! data.autinc_from)
1429 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1430 data.autinc_from = 1;
1431 data.explicit_inc_from = -1;
1434 #ifdef HAVE_POST_INCREMENT
1435 if (! data.autinc_from)
1437 data.from_addr = copy_addr_to_reg (from_addr);
1438 data.autinc_from = 1;
1439 data.explicit_inc_from = 1;
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 #ifdef HAVE_PRE_DECREMENT
1445 if (data.reverse && ! data.autinc_to)
1447 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1449 data.explicit_inc_to = -1;
1452 #ifdef HAVE_POST_INCREMENT
1453 if (! data.reverse && ! data.autinc_to)
1455 data.to_addr = copy_addr_to_reg (to_addr);
1457 data.explicit_inc_to = 1;
1460 if (!data.autinc_to && CONSTANT_P (to_addr))
1461 data.to_addr = copy_addr_to_reg (to_addr);
1464 if (! SLOW_UNALIGNED_ACCESS
1465 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1466 align = MOVE_MAX;
1468 /* First move what we can in the largest integer mode, then go to
1469 successively smaller modes. */
1471 while (max_size > 1)
1473 enum machine_mode mode = VOIDmode, tmode;
1474 enum insn_code icode;
1476 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1477 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1478 if (GET_MODE_SIZE (tmode) < max_size)
1479 mode = tmode;
1481 if (mode == VOIDmode)
1482 break;
1484 icode = mov_optab->handlers[(int) mode].insn_code;
1485 if (icode != CODE_FOR_nothing
1486 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1487 GET_MODE_SIZE (mode)))
1488 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490 max_size = GET_MODE_SIZE (mode);
1493 /* The code above should have handled everything. */
1498 /* Return number of insns required to move L bytes by pieces.
1499 ALIGN (in bytes) is maximum alignment we can assume. */
1502 move_by_pieces_ninsns (l, align)
1506 register int n_insns = 0;
1507 int max_size = MOVE_MAX + 1;
1509 if (! SLOW_UNALIGNED_ACCESS
1510 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1511 align = MOVE_MAX;
1513 while (max_size > 1)
1515 enum machine_mode mode = VOIDmode, tmode;
1516 enum insn_code icode;
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1521 mode = tmode;
1523 if (mode == VOIDmode)
1524 break;
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing
1528 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1529 GET_MODE_SIZE (mode)))
1530 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1532 max_size = GET_MODE_SIZE (mode);
1538 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1539 with move instructions for mode MODE. GENFUN is the gen_... function
1540 to make a move insn for that mode. DATA has all the other info. */
1543 move_by_pieces_1 (genfun, mode, data)
1545 enum machine_mode mode;
1546 struct move_by_pieces *data;
1548 register int size = GET_MODE_SIZE (mode);
1549 register rtx to1, from1;
1551 while (data->len >= size)
1553 if (data->reverse) data->offset -= size;
1555 to1 = (data->autinc_to
1556 ? gen_rtx (MEM, mode, data->to_addr)
1557 : change_address (data->to, mode,
1558 plus_constant (data->to_addr, data->offset)));
1559 MEM_IN_STRUCT_P (to1) = data->to_struct;
1562 ? gen_rtx (MEM, mode, data->from_addr)
1563 : change_address (data->from, mode,
1564 plus_constant (data->from_addr, data->offset)));
1565 MEM_IN_STRUCT_P (from1) = data->from_struct;
1567 #ifdef HAVE_PRE_DECREMENT
1568 if (data->explicit_inc_to < 0)
1569 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1570 if (data->explicit_inc_from < 0)
1571 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1574 emit_insn ((*genfun) (to1, from1));
1575 #ifdef HAVE_POST_INCREMENT
1576 if (data->explicit_inc_to > 0)
1577 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1578 if (data->explicit_inc_from > 0)
1579 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582 if (! data->reverse) data->offset += size;
1588 /* Emit code to move a block Y to a block X.
1589 This may be done with string-move instructions,
1590 with multiple scalar move instructions, or with a library call.
1592 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1594 SIZE is an rtx that says how long they are.
1595 ALIGN is the maximum alignment we can assume they have,
1596 measured in bytes. */
1599 emit_block_move (x, y, size, align)
1604 if (GET_MODE (x) != BLKmode)
1607 if (GET_MODE (y) != BLKmode)
1610 x = protect_from_queue (x, 1);
1611 y = protect_from_queue (y, 0);
1612 size = protect_from_queue (size, 0);
1614 if (GET_CODE (x) != MEM)
1616 if (GET_CODE (y) != MEM)
1621 if (GET_CODE (size) == CONST_INT
1622 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1623 move_by_pieces (x, y, INTVAL (size), align);
1626 /* Try the most limited insn first, because there's no point
1627 including more than one in the machine description unless
1628 the more limited one has some advantage. */
1630 rtx opalign = GEN_INT (align);
1631 enum machine_mode mode;
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1636 enum insn_code code = movstr_optab[(int) mode];
1638 if (code != CODE_FOR_nothing
1639 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1640 here because if SIZE is less than the mode mask, as it is
1641 returned by the macro, it will definitely be less than the
1642 actual mode mask. */
1643 && ((GET_CODE (size) == CONST_INT
1644 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1645 <= GET_MODE_MASK (mode)))
1646 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1647 && (insn_operand_predicate[(int) code][0] == 0
1648 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1649 && (insn_operand_predicate[(int) code][1] == 0
1650 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1651 && (insn_operand_predicate[(int) code][3] == 0
1652 || (*insn_operand_predicate[(int) code][3]) (opalign,
1656 rtx last = get_last_insn ();
1659 op2 = convert_to_mode (mode, size, 1);
1660 if (insn_operand_predicate[(int) code][2] != 0
1661 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1671 delete_insns_since (last);
1675 #ifdef TARGET_MEM_FUNCTIONS
1676 emit_library_call (memcpy_libfunc, 0,
1677 VOIDmode, 3, XEXP (x, 0), Pmode,
1679 convert_to_mode (TYPE_MODE (sizetype), size,
1680 TREE_UNSIGNED (sizetype)),
1681 TYPE_MODE (sizetype));
1683 emit_library_call (bcopy_libfunc, 0,
1684 VOIDmode, 3, XEXP (y, 0), Pmode,
1686 convert_to_mode (TYPE_MODE (sizetype), size,
1687 TREE_UNSIGNED (sizetype)),
1688 TYPE_MODE (sizetype));
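/* Illustrative sketch of a typical caller; DEST_MEM, SRC_MEM, LEN and
   TYPE are assumed.  Both operands are BLKmode MEMs, the length is a
   CONST_INT and the alignment is in bytes, so the call may be lowered
   to move_by_pieces, a movstr pattern, or the library call above.  */
#if 0
  emit_block_move (dest_mem, src_mem, GEN_INT (len),
		   TYPE_ALIGN (type) / BITS_PER_UNIT);
#endif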
1693 /* Copy all or part of a value X into registers starting at REGNO.
1694 The number of registers to be filled is NREGS. */
1697 move_block_to_reg (regno, x, nregs, mode)
1701 enum machine_mode mode;
1709 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1710 x = validize_mem (force_const_mem (mode, x));
1712 /* See if the machine can do this with a load multiple insn. */
1713 #ifdef HAVE_load_multiple
1714 if (HAVE_load_multiple)
1716 last = get_last_insn ();
1717 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1718 GEN_INT (nregs));
1725 delete_insns_since (last);
1729 for (i = 0; i < nregs; i++)
1730 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1731 operand_subword_force (x, i, mode));
1734 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1735 The number of registers to be filled is NREGS. SIZE indicates the number
1736 of bytes in the object X. */
1740 move_block_from_reg (regno, x, nregs, size)
1749 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1750 to the left before storing to memory. */
1751 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1753 rtx tem = operand_subword (x, 0, 1, BLKmode);
1759 shift = expand_shift (LSHIFT_EXPR, word_mode,
1760 gen_rtx (REG, word_mode, regno),
1761 build_int_2 ((UNITS_PER_WORD - size)
1762 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1763 emit_move_insn (tem, shift);
1767 /* See if the machine can do this with a store multiple insn. */
1768 #ifdef HAVE_store_multiple
1769 if (HAVE_store_multiple)
1771 last = get_last_insn ();
1772 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1773 GEN_INT (nregs));
1780 delete_insns_since (last);
1784 for (i = 0; i < nregs; i++)
1786 rtx tem = operand_subword (x, i, 1, BLKmode);
1791 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1795 /* Add a USE expression for REG to the (possibly empty) list pointed
1796 to by CALL_FUSAGE. REG must denote a hard register. */
1799 use_reg (call_fusage, reg)
1800 rtx *call_fusage, reg;
1802 if (GET_CODE (reg) != REG
1803 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
1804 abort ();
1806 *call_fusage
1807 = gen_rtx (EXPR_LIST, VOIDmode,
1808 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1811 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1812 starting at REGNO. All of these registers must be hard registers. */
1815 use_regs (call_fusage, regno, nregs)
1822 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1823 abort ();
1825 for (i = 0; i < nregs; i++)
1826 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1829 /* Write zeros through the storage of OBJECT.
1830 If OBJECT has BLKmode, SIZE is its length in bytes. */
1833 clear_storage (object, size)
1837 if (GET_MODE (object) == BLKmode)
1839 #ifdef TARGET_MEM_FUNCTIONS
1840 emit_library_call (memset_libfunc, 0,
1842 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
1843 GEN_INT (size), ptr_mode);
1845 emit_library_call (bzero_libfunc, 0,
1847 XEXP (object, 0), Pmode,
1848 GEN_INT (size), ptr_mode);
1852 emit_move_insn (object, const0_rtx);
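/* Illustrative sketch; OBJECT and TYPE are assumed.  Zeroing a BLKmode
   aggregate of known size goes through the memset/bzero call above,
   while a non-BLKmode object is simply moved from const0_rtx.  */
#if 0
  clear_storage (object, int_size_in_bytes (type));
#endif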
1855 /* Generate code to copy Y into X.
1856 Both Y and X must have the same mode, except that
1857 Y can be a constant with VOIDmode.
1858 This mode cannot be BLKmode; use emit_block_move for that.
1860 Return the last instruction emitted. */
1863 emit_move_insn (x, y)
1866 enum machine_mode mode = GET_MODE (x);
1868 x = protect_from_queue (x, 1);
1869 y = protect_from_queue (y, 0);
1871 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1874 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1875 y = force_const_mem (mode, y);
1877 /* If X or Y are memory references, verify that their addresses are valid
1879 if (GET_CODE (x) == MEM
1880 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1881 && ! push_operand (x, GET_MODE (x)))
1883 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1884 x = change_address (x, VOIDmode, XEXP (x, 0));
1886 if (GET_CODE (y) == MEM
1887 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1889 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1890 y = change_address (y, VOIDmode, XEXP (y, 0));
1892 if (mode == BLKmode)
1895 return emit_move_insn_1 (x, y);
1898 /* Low level part of emit_move_insn.
1899 Called just like emit_move_insn, but assumes X and Y
1900 are basically valid. */
1903 emit_move_insn_1 (x, y)
1906 enum machine_mode mode = GET_MODE (x);
1907 enum machine_mode submode;
1908 enum mode_class class = GET_MODE_CLASS (mode);
1911 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1913 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1915 /* Expand complex moves by moving real part and imag part, if possible. */
1916 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1917 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1919 (class == MODE_COMPLEX_INT
1920 ? MODE_INT : MODE_FLOAT),
1922 && (mov_optab->handlers[(int) submode].insn_code
1923 != CODE_FOR_nothing))
1925 /* Don't split destination if it is a stack push. */
1926 int stack = push_operand (x, GET_MODE (x));
1929 /* If this is a stack, push the highpart first, so it
1930 will be in the argument order.
1932 In that case, change_address is used only to convert
1933 the mode, not to change the address. */
1936 /* Note that the real part always precedes the imag part in memory
1937 regardless of machine's endianness. */
1938 #ifdef STACK_GROWS_DOWNWARD
1939 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1940 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1941 gen_imagpart (submode, y)));
1942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1943 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1944 gen_realpart (submode, y)));
1946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1947 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1948 gen_realpart (submode, y)));
1949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1950 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1951 gen_imagpart (submode, y)));
1956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1957 (gen_realpart (submode, x), gen_realpart (submode, y)));
1958 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1959 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
1962 return get_last_insn ();
1965 /* This will handle any multi-word mode that lacks a move_insn pattern.
1966 However, you will get better code if you define such patterns,
1967 even if they must turn into multiple assembler instructions. */
1968 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1973 #ifdef PUSH_ROUNDING
1975 /* If X is a push on the stack, do the push now and replace
1976 X with a reference to the stack pointer. */
1977 if (push_operand (x, GET_MODE (x)))
1979 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
1980 x = change_address (x, VOIDmode, stack_pointer_rtx);
1985 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1988 rtx xpart = operand_subword (x, i, 1, mode);
1989 rtx ypart = operand_subword (y, i, 1, mode);
1991 /* If we can't get a part of Y, put Y into memory if it is a
1992 constant. Otherwise, force it into a register. If we still
1993 can't get a part of Y, abort. */
1994 if (ypart == 0 && CONSTANT_P (y))
1996 y = force_const_mem (mode, y);
1997 ypart = operand_subword (y, i, 1, mode);
1999 else if (ypart == 0)
2000 ypart = operand_subword_force (y, i, mode);
2002 if (xpart == 0 || ypart == 0)
2005 last_insn = emit_move_insn (xpart, ypart);
2014 /* Pushing data onto the stack. */
2016 /* Push a block of length SIZE (perhaps variable)
2017 and return an rtx to address the beginning of the block.
2018 Note that it is not possible for the value returned to be a QUEUED.
2019 The value may be virtual_outgoing_args_rtx.
2021 EXTRA is the number of bytes of padding to push in addition to SIZE.
2022 BELOW nonzero means this padding comes at low addresses;
2023 otherwise, the padding comes at high addresses. */
2026 push_block (size, extra, below)
2032 size = convert_modes (Pmode, ptr_mode, size, 1);
2033 if (CONSTANT_P (size))
2034 anti_adjust_stack (plus_constant (size, extra));
2035 else if (GET_CODE (size) == REG && extra == 0)
2036 anti_adjust_stack (size);
2039 rtx temp = copy_to_mode_reg (Pmode, size);
2041 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2042 temp, 0, OPTAB_LIB_WIDEN);
2043 anti_adjust_stack (temp);
2046 #ifdef STACK_GROWS_DOWNWARD
2047 temp = virtual_outgoing_args_rtx;
2048 if (extra != 0 && below)
2049 temp = plus_constant (temp, extra);
2051 if (GET_CODE (size) == CONST_INT)
2052 temp = plus_constant (virtual_outgoing_args_rtx,
2053 - INTVAL (size) - (below ? 0 : extra));
2054 else if (extra != 0 && !below)
2055 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2056 negate_rtx (Pmode, plus_constant (size, extra)));
2058 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2059 negate_rtx (Pmode, size));
2062 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2068 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
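/* Illustrative sketch (the 64 is made up): reserve a 64-byte block of
   argument space with no extra padding and obtain a BLKmode MEM through
   which to store into it.  */
#if 0
  rtx addr = push_block (GEN_INT (64), 0, 0);
  rtx blk = gen_rtx (MEM, BLKmode, addr);
#endif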
2071 /* Generate code to push X onto the stack, assuming it has mode MODE and
2072 type TYPE.
2073 MODE is redundant except when X is a CONST_INT (since they don't
2074 carry mode info).
2075 SIZE is an rtx for the size of data to be copied (in bytes),
2076 needed only if X is BLKmode.
2078 ALIGN (in bytes) is maximum alignment we can assume.
2080 If PARTIAL and REG are both nonzero, then copy that many of the first
2081 words of X into registers starting with REG, and push the rest of X.
2082 The amount of space pushed is decreased by PARTIAL words,
2083 rounded *down* to a multiple of PARM_BOUNDARY.
2084 REG must be a hard register in this case.
2085 If REG is zero but PARTIAL is not, take all other actions for an
2086 argument partially in registers, but do not actually load any
2087 registers.
2089 EXTRA is the amount in bytes of extra space to leave next to this arg.
2090 This is ignored if an argument block has already been allocated.
2092 On a machine that lacks real push insns, ARGS_ADDR is the address of
2093 the bottom of the argument block for this call. We use indexing off there
2094 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2095 argument block has not been preallocated.
2097 ARGS_SO_FAR is the size of args previously pushed for this call. */
2100 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2101 args_addr, args_so_far)
2103 enum machine_mode mode;
2114 enum direction stack_direction
2115 #ifdef STACK_GROWS_DOWNWARD
2121 /* Decide where to pad the argument: `downward' for below,
2122 `upward' for above, or `none' for don't pad it.
2123 Default is below for small data on big-endian machines; else above. */
2124 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2126 /* Invert direction if stack is post-update. */
2127 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2128 if (where_pad != none)
2129 where_pad = (where_pad == downward ? upward : downward);
2131 xinner = x = protect_from_queue (x, 0);
2133 if (mode == BLKmode)
2135 /* Copy a block into the stack, entirely or partially. */
2138 int used = partial * UNITS_PER_WORD;
2139 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2147 /* USED is now the # of bytes we need not copy to the stack
2148 because registers will take care of them. */
2151 xinner = change_address (xinner, BLKmode,
2152 plus_constant (XEXP (xinner, 0), used));
2154 /* If the partial register-part of the arg counts in its stack size,
2155 skip the part of stack space corresponding to the registers.
2156 Otherwise, start copying to the beginning of the stack space,
2157 by setting SKIP to 0. */
2158 #ifndef REG_PARM_STACK_SPACE
2164 #ifdef PUSH_ROUNDING
2165 /* Do it with several push insns if that doesn't take lots of insns
2166 and if there is no difficulty with push insns that skip bytes
2167 on the stack for alignment purposes. */
2169 && GET_CODE (size) == CONST_INT
2171 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2173 /* Here we avoid the case of a structure whose weak alignment
2174 forces many pushes of a small amount of data,
2175 and such small pushes do rounding that causes trouble. */
2176 && ((! SLOW_UNALIGNED_ACCESS)
2177 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2178 || PUSH_ROUNDING (align) == align)
2179 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2181 /* Push padding now if padding above and stack grows down,
2182 or if padding below and stack grows up.
2183 But if space already allocated, this has already been done. */
2184 if (extra && args_addr == 0
2185 && where_pad != none && where_pad != stack_direction)
2186 anti_adjust_stack (GEN_INT (extra));
2188 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2189 INTVAL (size) - used, align);
2192 #endif /* PUSH_ROUNDING */
2194 /* Otherwise make space on the stack and copy the data
2195 to the address of that space. */
2197 /* Deduct words put into registers from the size we must copy. */
2200 if (GET_CODE (size) == CONST_INT)
2201 size = GEN_INT (INTVAL (size) - used);
2203 size = expand_binop (GET_MODE (size), sub_optab, size,
2204 GEN_INT (used), NULL_RTX, 0,
2208 /* Get the address of the stack space.
2209 In this case, we do not deal with EXTRA separately.
2210 A single stack adjust will do. */
2213 temp = push_block (size, extra, where_pad == downward);
2216 else if (GET_CODE (args_so_far) == CONST_INT)
2217 temp = memory_address (BLKmode,
2218 plus_constant (args_addr,
2219 skip + INTVAL (args_so_far)));
2221 temp = memory_address (BLKmode,
2222 plus_constant (gen_rtx (PLUS, Pmode,
2223 args_addr, args_so_far),
2226 /* TEMP is the address of the block. Copy the data there. */
2227 if (GET_CODE (size) == CONST_INT
2228 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2231 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2232 INTVAL (size), align);
2235 /* Try the most limited insn first, because there's no point
2236 including more than one in the machine description unless
2237 the more limited one has some advantage. */
2238 #ifdef HAVE_movstrqi
2240 && GET_CODE (size) == CONST_INT
2241 && ((unsigned) INTVAL (size)
2242 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2244 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2245 xinner, size, GEN_INT (align));
2253 #ifdef HAVE_movstrhi
2255 && GET_CODE (size) == CONST_INT
2256 && ((unsigned) INTVAL (size)
2257 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2259 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2260 xinner, size, GEN_INT (align));
2268 #ifdef HAVE_movstrsi
2271 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2272 xinner, size, GEN_INT (align));
2280 #ifdef HAVE_movstrdi
2283 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2284 xinner, size, GEN_INT (align));
2293 #ifndef ACCUMULATE_OUTGOING_ARGS
2294 /* If the source is referenced relative to the stack pointer,
2295 copy it to another register to stabilize it. We do not need
2296 to do this if we know that we won't be changing sp. */
2298 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2299 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2300 temp = copy_to_reg (temp);
2303 /* Make inhibit_defer_pop nonzero around the library call
2304 to force it to pop the bcopy-arguments right away. */
2306 #ifdef TARGET_MEM_FUNCTIONS
2307 emit_library_call (memcpy_libfunc, 0,
2308 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2309 convert_to_mode (TYPE_MODE (sizetype),
2310 size, TREE_UNSIGNED (sizetype)),
2311 TYPE_MODE (sizetype));
2313 emit_library_call (bcopy_libfunc, 0,
2314 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2315 convert_to_mode (TYPE_MODE (sizetype),
2316 size, TREE_UNSIGNED (sizetype)),
2317 TYPE_MODE (sizetype));
2322 else if (partial > 0)
2324 /* Scalar partly in registers. */
2326 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2329 /* # words of start of argument
2330 that we must make space for but need not store. */
2331 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2332 int args_offset = INTVAL (args_so_far);
2335 /* Push padding now if padding above and stack grows down,
2336 or if padding below and stack grows up.
2337 But if space already allocated, this has already been done. */
2338 if (extra && args_addr == 0
2339 && where_pad != none && where_pad != stack_direction)
2340 anti_adjust_stack (GEN_INT (extra));
2342 /* If we make space by pushing it, we might as well push
2343 the real data. Otherwise, we can leave OFFSET nonzero
2344 and leave the space uninitialized. */
2348 /* Now NOT_STACK gets the number of words that we don't need to
2349 allocate on the stack. */
2350 not_stack = partial - offset;
2352 /* If the partial register-part of the arg counts in its stack size,
2353 skip the part of stack space corresponding to the registers.
2354 Otherwise, start copying to the beginning of the stack space,
2355 by setting SKIP to 0. */
2356 #ifndef REG_PARM_STACK_SPACE
2362 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2363 x = validize_mem (force_const_mem (mode, x));
2365 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2366 SUBREGs of such registers are not allowed. */
2367 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2368 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT)
2369 x = copy_to_reg (x);
2371 /* Loop over all the words allocated on the stack for this arg. */
2372 /* We can do it by words, because any scalar bigger than a word
2373 has a size that is a multiple of a word. */
2374 #ifndef PUSH_ARGS_REVERSED
2375 for (i = not_stack; i < size; i++)
2377 for (i = size - 1; i >= not_stack; i--)
2379 if (i >= not_stack + offset)
2380 emit_push_insn (operand_subword_force (x, i, mode),
2381 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2383 GEN_INT (args_offset + ((i - not_stack + skip)
2384 * UNITS_PER_WORD)));
2390 /* Push padding now if padding above and stack grows down,
2391 or if padding below and stack grows up.
2392 But if space already allocated, this has already been done. */
2393 if (extra && args_addr == 0
2394 && where_pad != none && where_pad != stack_direction)
2395 anti_adjust_stack (GEN_INT (extra));
2397 #ifdef PUSH_ROUNDING
2399 addr = gen_push_operand ();
2402 if (GET_CODE (args_so_far) == CONST_INT)
2404 = memory_address (mode,
2405 plus_constant (args_addr, INTVAL (args_so_far)));
2407 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2410 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2414 /* If part should go in registers, copy that part
2415 into the appropriate registers. Do this now, at the end,
2416 since mem-to-mem copies above may do function calls. */
2417 if (partial > 0 && reg != 0)
2418 move_block_to_reg (REGNO (reg), x, partial, mode);
2420 if (extra && args_addr == 0 && where_pad == stack_direction)
2421 anti_adjust_stack (GEN_INT (extra));
2424 /* Expand an assignment that stores the value of FROM into TO.
2425 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2426 (This may contain a QUEUED rtx;
2427 if the value is constant, this rtx is a constant.)
2428 Otherwise, the returned value is NULL_RTX.
2430 SUGGEST_REG is no longer actually used.
2431 It used to mean, copy the value through a register
2432 and return that register, if that is possible.
2433 We now use WANT_VALUE to decide whether to do this. */
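/* Example (added for illustration): for a chained assignment such as
   `a = b = c' in C, the inner assignment is expanded with WANT_VALUE
   nonzero so that its result can feed the outer assignment; a plain
   statement like `b = c;' passes WANT_VALUE == 0 and gets NULL_RTX back.  */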
2436 expand_assignment (to, from, want_value, suggest_reg)
2441 register rtx to_rtx = 0;
2444 /* Don't crash if the lhs of the assignment was erroneous. */
2446 if (TREE_CODE (to) == ERROR_MARK)
2448 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2449 return want_value ? result : NULL_RTX;
2452 if (output_bytecode)
2454 tree dest_innermost;
2456 bc_expand_expr (from);
2457 bc_emit_instruction (duplicate);
2459 dest_innermost = bc_expand_address (to);
2461 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2462 take care of it here. */
2464 bc_store_memory (TREE_TYPE (to), dest_innermost);
2468 /* Assignment of a structure component needs special treatment
2469 if the structure component's rtx is not simply a MEM.
2470 Assignment of an array element at a constant index, and assignment of
2471 an array element in an unaligned packed structure field, has the same
2474 if (TREE_CODE (to) == COMPONENT_REF
2475 || TREE_CODE (to) == BIT_FIELD_REF
2476 || (TREE_CODE (to) == ARRAY_REF
2477 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2479 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2481 enum machine_mode mode1;
2491 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2492 &mode1, &unsignedp, &volatilep);
2494 /* If we are going to use store_bit_field and extract_bit_field,
2495 make sure to_rtx will be safe for multiple use. */
2497 if (mode1 == VOIDmode && want_value)
2498 tem = stabilize_reference (tem);
2500 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2501 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2504 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2506 if (GET_CODE (to_rtx) != MEM)
2508 to_rtx = change_address (to_rtx, VOIDmode,
2509 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2510 force_reg (ptr_mode, offset_rtx)));
2511 /* If we have a variable offset, the known alignment
2512 is only that of the innermost structure containing the field.
2513 (Actually, we could sometimes do better by using the
2514 align of an element of the innermost array, but no need.) */
2515 if (TREE_CODE (to) == COMPONENT_REF
2516 || TREE_CODE (to) == BIT_FIELD_REF)
2518 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2522 if (GET_CODE (to_rtx) == MEM)
2524 /* When the offset is zero, to_rtx is the address of the
2525 structure we are storing into, and hence may be shared.
2526 We must make a new MEM before setting the volatile bit. */
2528 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2529 MEM_VOLATILE_P (to_rtx) = 1;
2531 #if 0 /* This was turned off because, when a field is volatile
2532 in an object which is not volatile, the object may be in a register,
2533 and then we would abort over here. */
2539 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2541 /* Spurious cast makes HPUX compiler happy. */
2542 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2545 /* Required alignment of containing datum. */
2547 int_size_in_bytes (TREE_TYPE (tem)));
2548 preserve_temp_slots (result);
2552 /* If the value is meaningful, convert RESULT to the proper mode.
2553 Otherwise, return nothing. */
2554 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2555 TYPE_MODE (TREE_TYPE (from)),
2557 TREE_UNSIGNED (TREE_TYPE (to)))
2561 /* If the rhs is a function call and its value is not an aggregate,
2562 call the function before we start to compute the lhs.
2563 This is needed for correct code for cases such as
2564 val = setjmp (buf) on machines where reference to val
2565 requires loading up part of an address in a separate insn.
2567 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2568 a promoted variable where the zero- or sign- extension needs to be done.
2569 Handling this in the normal way is safe because no computation is done
2571 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2572 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2577 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2579 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2581 if (GET_MODE (to_rtx) == BLKmode)
2583 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2584 emit_block_move (to_rtx, value, expr_size (from), align);
2587 emit_move_insn (to_rtx, value);
2588 preserve_temp_slots (to_rtx);
2591 return want_value ? to_rtx : NULL_RTX;
2594 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2595 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2598 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2600 /* Don't move directly into a return register. */
2601 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2606 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2607 emit_move_insn (to_rtx, temp);
2608 preserve_temp_slots (to_rtx);
2611 return want_value ? to_rtx : NULL_RTX;
2614 /* In case we are returning the contents of an object which overlaps
2615 the place the value is being stored, use a safe function when copying
2616 a value through a pointer into a structure value return block. */
2617 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2618 && current_function_returns_struct
2619 && !current_function_returns_pcc_struct)
2624 size = expr_size (from);
2625 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2627 #ifdef TARGET_MEM_FUNCTIONS
2628 emit_library_call (memcpy_libfunc, 0,
2629 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2630 XEXP (from_rtx, 0), Pmode,
2631 convert_to_mode (TYPE_MODE (sizetype),
2632 size, TREE_UNSIGNED (sizetype)),
2633 TYPE_MODE (sizetype));
2635 emit_library_call (bcopy_libfunc, 0,
2636 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2637 XEXP (to_rtx, 0), Pmode,
2638 convert_to_mode (TYPE_MODE (sizetype),
2639 size, TREE_UNSIGNED (sizetype)),
2640 TYPE_MODE (sizetype));
2643 preserve_temp_slots (to_rtx);
2646 return want_value ? to_rtx : NULL_RTX;
2649 /* Compute FROM and store the value in the rtx we got. */
2652 result = store_expr (from, to_rtx, want_value);
2653 preserve_temp_slots (result);
2656 return want_value ? result : NULL_RTX;
2659 /* Generate code for computing expression EXP,
2660 and storing the value into TARGET.
2661 TARGET may contain a QUEUED rtx.
2663 If WANT_VALUE is nonzero, return a copy of the value
2664 not in TARGET, so that we can be sure to use the proper
2665 value in a containing expression even if TARGET has something
2666 else stored in it. If possible, we copy the value through a pseudo
2667 and return that pseudo. Or, if the value is constant, we try to
2668 return the constant. In some cases, we return a pseudo
2669 copied *from* TARGET.
2671 If the mode is BLKmode then we may return TARGET itself.
2672 It turns out that in BLKmode it doesn't cause a problem,
2673 because C has no operators that could combine two different
2674 assignments into the same BLKmode object with different values
2675 with no sequence point. Will other languages need this to
2678 If WANT_VALUE is 0, we return NULL, to make sure
2679 to catch quickly any cases where the caller uses the value
2680 and fails to set WANT_VALUE. */
2683 store_expr (exp, target, want_value)
2685 register rtx target;
2689 int dont_return_target = 0;
2691 if (TREE_CODE (exp) == COMPOUND_EXPR)
2693 /* Perform first part of compound expression, then assign from second
2695 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2697 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2699 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2701 /* For conditional expression, get safe form of the target. Then
2702 test the condition, doing the appropriate assignment on either
2703 side. This avoids the creation of unnecessary temporaries.
2704 For non-BLKmode, it is more efficient not to do this. */
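/* Example (added for illustration): for a structure-valued conditional
   such as `s = flag ? s1 : s2;' we jump on FLAG and call store_expr on
   whichever arm is selected, storing straight into S's memory rather
   than first building the result in a temporary.  */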
2706 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2709 target = protect_from_queue (target, 1);
2712 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2713 store_expr (TREE_OPERAND (exp, 1), target, 0);
2715 emit_jump_insn (gen_jump (lab2));
2718 store_expr (TREE_OPERAND (exp, 2), target, 0);
2722 return want_value ? target : NULL_RTX;
2724 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2725 && GET_MODE (target) != BLKmode)
2726 /* If target is in memory and caller wants value in a register instead,
2727 arrange that. Pass TARGET as target for expand_expr so that,
2728 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2729 We know expand_expr will not use the target in that case.
2730 Don't do this if TARGET is volatile because we are supposed
2731 to write it and then read it. */
2733 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2734 GET_MODE (target), 0);
2735 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2736 temp = copy_to_reg (temp);
2737 dont_return_target = 1;
2739 else if (queued_subexp_p (target))
2740 /* If target contains a postincrement, let's not risk
2741 using it as the place to generate the rhs. */
2743 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2745 /* Expand EXP into a new pseudo. */
2746 temp = gen_reg_rtx (GET_MODE (target));
2747 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2750 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2752 /* If target is volatile, ANSI requires accessing the value
2753 *from* the target, if it is accessed. So make that happen.
2754 In no case return the target itself. */
2755 if (! MEM_VOLATILE_P (target) && want_value)
2756 dont_return_target = 1;
2758 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2759 /* If this is a scalar in a register that is stored in a wider mode
2760 than the declared mode, compute the result into its declared mode
2761 and then convert to the wider mode. Our value is the computed
2764 /* If we don't want a value, we can do the conversion inside EXP,
2765 which will often result in some optimizations. Do the conversion
2766 in two steps: first change the signedness, if needed, then
2770 if (TREE_UNSIGNED (TREE_TYPE (exp))
2771 != SUBREG_PROMOTED_UNSIGNED_P (target))
2774 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2778 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2779 SUBREG_PROMOTED_UNSIGNED_P (target)),
2783 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2785 /* If TEMP is a volatile MEM and we want a result value, make
2786 the access now so it gets done only once. */
2787 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2788 temp = copy_to_reg (temp);
2790 /* If TEMP is a VOIDmode constant, use convert_modes to make
2791 sure that we properly convert it. */
2792 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2793 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2794 TYPE_MODE (TREE_TYPE (exp)), temp,
2795 SUBREG_PROMOTED_UNSIGNED_P (target));
2797 convert_move (SUBREG_REG (target), temp,
2798 SUBREG_PROMOTED_UNSIGNED_P (target));
2799 return want_value ? temp : NULL_RTX;
2803 temp = expand_expr (exp, target, GET_MODE (target), 0);
2804 /* Return TARGET if it's a specified hardware register.
2805 If TARGET is a volatile mem ref, either return TARGET
2806 or return a reg copied *from* TARGET; ANSI requires this.
2808 Otherwise, if TEMP is not TARGET, return TEMP
2809 if it is constant (for efficiency),
2810 or if we really want the correct value. */
2811 if (!(target && GET_CODE (target) == REG
2812 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2813 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2815 && (CONSTANT_P (temp) || want_value))
2816 dont_return_target = 1;
2819 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2820 the same as that of TARGET, adjust the constant. This is needed, for
2821 example, in case it is a CONST_DOUBLE and we want only a word-sized
2823 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2824 && TREE_CODE (exp) != ERROR_MARK
2825 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2826 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2827 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2829 /* If value was not generated in the target, store it there.
2830 Convert the value to TARGET's type first if necessary. */
2832 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2834 target = protect_from_queue (target, 1);
2835 if (GET_MODE (temp) != GET_MODE (target)
2836 && GET_MODE (temp) != VOIDmode)
2838 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2839 if (dont_return_target)
2841 /* In this case, we will return TEMP,
2842 so make sure it has the proper mode.
2843 But don't forget to store the value into TARGET. */
2844 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2845 emit_move_insn (target, temp);
2848 convert_move (target, temp, unsignedp);
2851 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2853 /* Handle copying a string constant into an array.
2854 The string constant may be shorter than the array.
2855 So copy just the string's actual length, and clear the rest. */
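/* Example (added for illustration): for `char buf[10] = "hi";' the
   string constant occupies 3 bytes (including the terminating null),
   so we copy those 3 bytes and then clear the remaining 7.  */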
2859 /* Get the size of the data type of the string,
2860 which is actually the size of the target. */
2861 size = expr_size (exp);
2862 if (GET_CODE (size) == CONST_INT
2863 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2864 emit_block_move (target, temp, size,
2865 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2868 /* Compute the size of the data to copy from the string. */
2870 = size_binop (MIN_EXPR,
2871 make_tree (sizetype, size),
2873 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2874 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2878 /* Copy that much. */
2879 emit_block_move (target, temp, copy_size_rtx,
2880 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2882 /* Figure out how much is left in TARGET that we have to clear.
2883 Do all calculations in ptr_mode. */
2885 addr = XEXP (target, 0);
2886 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2888 if (GET_CODE (copy_size_rtx) == CONST_INT)
2890 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2891 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2895 addr = force_reg (ptr_mode, addr);
2896 addr = expand_binop (ptr_mode, add_optab, addr,
2897 copy_size_rtx, NULL_RTX, 0,
2900 size = expand_binop (ptr_mode, sub_optab, size,
2901 copy_size_rtx, NULL_RTX, 0,
2904 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2905 GET_MODE (size), 0, 0);
2906 label = gen_label_rtx ();
2907 emit_jump_insn (gen_blt (label));
2910 if (size != const0_rtx)
2912 #ifdef TARGET_MEM_FUNCTIONS
2913 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2914 Pmode, const0_rtx, Pmode, size, ptr_mode);
2916 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2917 addr, Pmode, size, ptr_mode);
2925 else if (GET_MODE (temp) == BLKmode)
2926 emit_block_move (target, temp, expr_size (exp),
2927 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2929 emit_move_insn (target, temp);
2932 /* If we don't want a value, return NULL_RTX. */
2936 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2937 ??? The latter test doesn't seem to make sense. */
2938 else if (dont_return_target && GET_CODE (temp) != MEM)
2941 /* Return TARGET itself if it is a hard register. */
2942 else if (want_value && GET_MODE (target) != BLKmode
2943 && ! (GET_CODE (target) == REG
2944 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2945 return copy_to_reg (target);
2951 /* Store the value of constructor EXP into the rtx TARGET.
2952 TARGET is either a REG or a MEM. */
2955 store_constructor (exp, target)
2959 tree type = TREE_TYPE (exp);
2961 /* We know our target cannot conflict, since safe_from_p has been called. */
2963 /* Don't try copying piece by piece into a hard register
2964 since that is vulnerable to being clobbered by EXP.
2965 Instead, construct in a pseudo register and then copy it all. */
2966 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2968 rtx temp = gen_reg_rtx (GET_MODE (target));
2969 store_constructor (exp, temp);
2970 emit_move_insn (target, temp);
2975 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2976 || TREE_CODE (type) == QUAL_UNION_TYPE)
2980 /* Inform later passes that the whole union value is dead. */
2981 if (TREE_CODE (type) == UNION_TYPE
2982 || TREE_CODE (type) == QUAL_UNION_TYPE)
2983 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2985 /* If we are building a static constructor into a register,
2986 set the initial value as zero so we can fold the value into
2987 a constant. But if more than one register is involved,
2988 this probably loses. */
2989 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
2990 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
2991 emit_move_insn (target, const0_rtx);
2993 /* If the constructor has fewer fields than the structure,
2994 clear the whole structure first. */
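/* Example (added for illustration): `struct { int a, b, c; } x = { 1, 2 };'
   provides fewer elements than there are fields, so the whole of X is
   cleared before the two explicit fields are stored.  */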
2995 else if (list_length (CONSTRUCTOR_ELTS (exp))
2996 != list_length (TYPE_FIELDS (type)))
2997 clear_storage (target, int_size_in_bytes (type));
2999 /* Inform later passes that the old value is dead. */
3000 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3002 /* Store each element of the constructor into
3003 the corresponding field of TARGET. */
3005 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3007 register tree field = TREE_PURPOSE (elt);
3008 register enum machine_mode mode;
3012 tree pos, constant = 0, offset = 0;
3013 rtx to_rtx = target;
3015 /* Just ignore missing fields.
3016 We cleared the whole structure, above,
3017 if any fields are missing. */
3021 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3022 unsignedp = TREE_UNSIGNED (field);
3023 mode = DECL_MODE (field);
3024 if (DECL_BIT_FIELD (field))
3027 pos = DECL_FIELD_BITPOS (field);
3028 if (TREE_CODE (pos) == INTEGER_CST)
3030 else if (TREE_CODE (pos) == PLUS_EXPR
3031 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3032 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3037 bitpos = TREE_INT_CST_LOW (constant);
3043 if (contains_placeholder_p (offset))
3044 offset = build (WITH_RECORD_EXPR, sizetype,
3047 offset = size_binop (FLOOR_DIV_EXPR, offset,
3048 size_int (BITS_PER_UNIT));
3050 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3051 if (GET_CODE (to_rtx) != MEM)
3055 = change_address (to_rtx, VOIDmode,
3056 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3057 force_reg (ptr_mode, offset_rtx)));
3060 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3061 /* The alignment of TARGET is
3062 at least what its type requires. */
3064 TYPE_ALIGN (type) / BITS_PER_UNIT,
3065 int_size_in_bytes (type));
3068 else if (TREE_CODE (type) == ARRAY_TYPE)
3072 tree domain = TYPE_DOMAIN (type);
3073 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3074 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3075 tree elttype = TREE_TYPE (type);
3077 /* If the constructor has fewer elements than the array,
3078 clear the whole array first. Similarly if this is a
3079 static constructor of a non-BLKmode object. */
3081 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3082 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3083 clear_storage (target, int_size_in_bytes (type));
3085 /* Inform later passes that the old value is dead. */
3086 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3088 /* Store each element of the constructor into
3089 the corresponding element of TARGET, determined
3090 by counting the elements. */
3091 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3093 elt = TREE_CHAIN (elt), i++)
3095 register enum machine_mode mode;
3099 tree index = TREE_PURPOSE (elt);
3100 rtx xtarget = target;
3102 mode = TYPE_MODE (elttype);
3103 bitsize = GET_MODE_BITSIZE (mode);
3104 unsignedp = TREE_UNSIGNED (elttype);
3106 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3107 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3109 rtx pos_rtx, addr, xtarget;
3113 index = size_int (i);
3115 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3116 size_int (BITS_PER_UNIT));
3117 position = size_binop (MULT_EXPR, index, position);
3118 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3119 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3120 xtarget = change_address (target, mode, addr);
3121 store_expr (TREE_VALUE (elt), xtarget, 0);
3126 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3127 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3129 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3131 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3132 /* The alignment of TARGET is
3133 at least what its type requires. */
3135 TYPE_ALIGN (type) / BITS_PER_UNIT,
3136 int_size_in_bytes (type));
3140 /* set constructor assignments */
3141 else if (TREE_CODE (type) == SET_TYPE)
3144 rtx xtarget = XEXP (target, 0);
3145 int set_word_size = TYPE_ALIGN (type);
3146 int nbytes = int_size_in_bytes (type);
3147 tree non_const_elements;
3148 int need_to_clear_first;
3149 tree domain = TYPE_DOMAIN (type);
3150 tree domain_min, domain_max, bitlength;
3152 /* The default implementation strategy is to extract the constant
3153 parts of the constructor, use that to initialize the target,
3154 and then "or" in whatever non-constant ranges we need in addition.
3156 If a large set is all zero or all ones, it is
3157 probably better to set it using memset (if available) or bzero.
3158 Also, if a large set has just a single range, it may be
3159 better to first clear the whole set (using bzero/memset)
3160 and then set the bits we want. */
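/* Illustrative example (not from the original source): a set-typed
   aggregate covering, say, element 2 plus the range 8..15 is built here
   by storing the constant word(s) for those bits and then or-ing in any
   ranges whose bounds are not compile-time constants.  */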
3162 /* Check for all zeros. */
3163 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3165 clear_storage (target, nbytes);
3172 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3173 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3174 bitlength = size_binop (PLUS_EXPR,
3175 size_binop (MINUS_EXPR, domain_max, domain_min),
3178 /* Check for range all ones, or at most a single range.
3179 (This optimization is only a win for big sets.) */
3180 if (GET_MODE (target) == BLKmode && nbytes > 16
3181 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3183 need_to_clear_first = 1;
3184 non_const_elements = CONSTRUCTOR_ELTS (exp);
3188 int nbits = nbytes * BITS_PER_UNIT;
3189 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3190 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3191 char *bit_buffer = (char *) alloca (nbits);
3192 HOST_WIDE_INT word = 0;
3195 int offset = 0; /* In bytes from beginning of set. */
3196 non_const_elements = get_set_constructor_bits (exp,
3200 if (bit_buffer[ibit])
3202 if (BYTES_BIG_ENDIAN)
3203 word |= (1 << (set_word_size - 1 - bit_pos));
3205 word |= 1 << bit_pos;
3208 if (bit_pos >= set_word_size || ibit == nbits)
3210 rtx datum = GEN_INT (word);
3212 /* The assumption here is that it is safe to use XEXP if
3213 the set is multi-word, but not if it's single-word. */
3214 if (GET_CODE (target) == MEM)
3215 to_rtx = change_address (target, mode,
3216 plus_constant (XEXP (target, 0),
3218 else if (offset == 0)
3222 emit_move_insn (to_rtx, datum);
3227 offset += set_word_size / BITS_PER_UNIT;
3230 need_to_clear_first = 0;
3233 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3235 /* start of range of element or NULL */
3236 tree startbit = TREE_PURPOSE (elt);
3237 /* end of range of element, or element value */
3238 tree endbit = TREE_VALUE (elt);
3239 HOST_WIDE_INT startb, endb;
3240 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3242 bitlength_rtx = expand_expr (bitlength,
3243 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3245 /* handle non-range tuple element like [ expr ] */
3246 if (startbit == NULL_TREE)
3248 startbit = save_expr (endbit);
3251 startbit = convert (sizetype, startbit);
3252 endbit = convert (sizetype, endbit);
3253 if (! integer_zerop (domain_min))
3255 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3256 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3258 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3259 EXPAND_CONST_ADDRESS);
3260 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3261 EXPAND_CONST_ADDRESS);
3265 targetx = assign_stack_temp (GET_MODE (target),
3266 GET_MODE_SIZE (GET_MODE (target)),
3268 emit_move_insn (targetx, target);
3270 else if (GET_CODE (target) == MEM)
3275 #ifdef TARGET_MEM_FUNCTIONS
3276 /* Optimization: If startbit and endbit are
3277 constants divisible by BITS_PER_UNIT,
3278 call memset instead. */
3279 if (TREE_CODE (startbit) == INTEGER_CST
3280 && TREE_CODE (endbit) == INTEGER_CST
3281 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3282 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3285 if (need_to_clear_first
3286 && endb - startb != nbytes * BITS_PER_UNIT)
3287 clear_storage (target, nbytes);
3288 need_to_clear_first = 0;
3289 emit_library_call (memset_libfunc, 0,
3291 plus_constant (XEXP (targetx, 0), startb),
3294 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3300 if (need_to_clear_first)
3302 clear_storage (target, nbytes);
3303 need_to_clear_first = 0;
3305 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3306 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3307 bitlength_rtx, TYPE_MODE (sizetype),
3308 startbit_rtx, TYPE_MODE (sizetype),
3309 endbit_rtx, TYPE_MODE (sizetype));
3312 emit_move_insn (target, targetx);
3320 /* Store the value of EXP (an expression tree)
3321 into a subfield of TARGET which has mode MODE and occupies
3322 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3323 If MODE is VOIDmode, it means that we are storing into a bit-field.
3325 If VALUE_MODE is VOIDmode, return nothing in particular.
3326 UNSIGNEDP is not used in this case.
3328 Otherwise, return an rtx for the value stored. This rtx
3329 has mode VALUE_MODE if that is convenient to do.
3330 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3332 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3333 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
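/* Example (added for illustration): storing into a 3-bit bit-field that
   starts 9 bits into its containing structure arrives here with
   BITSIZE == 3, BITPOS == 9 and MODE == VOIDmode, which selects the
   store_bit_field path below.  */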
3336 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3337 unsignedp, align, total_size)
3339 int bitsize, bitpos;
3340 enum machine_mode mode;
3342 enum machine_mode value_mode;
3347 HOST_WIDE_INT width_mask = 0;
3349 if (bitsize < HOST_BITS_PER_WIDE_INT)
3350 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3352 /* If we are storing into an unaligned field of an aligned union that is
3353 in a register, we may have the mode of TARGET being an integer mode but
3354 MODE == BLKmode. In that case, get an aligned object whose size and
3355 alignment are the same as TARGET and store TARGET into it (we can avoid
3356 the store if the field being stored is the entire width of TARGET). Then
3357 call ourselves recursively to store the field into a BLKmode version of
3358 that object. Finally, load from the object into TARGET. This is not
3359 very efficient in general, but should only be slightly more expensive
3360 than the otherwise-required unaligned accesses. Perhaps this can be
3361 cleaned up later. */
3364 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3366 rtx object = assign_stack_temp (GET_MODE (target),
3367 GET_MODE_SIZE (GET_MODE (target)), 0);
3368 rtx blk_object = copy_rtx (object);
3370 MEM_IN_STRUCT_P (object) = 1;
3371 MEM_IN_STRUCT_P (blk_object) = 1;
3372 PUT_MODE (blk_object, BLKmode);
3374 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3375 emit_move_insn (object, target);
3377 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3380 /* Even though we aren't returning target, we need to
3381 give it the updated value. */
3382 emit_move_insn (target, object);
3387 /* If the structure is in a register or if the component
3388 is a bit field, we cannot use addressing to access it.
3389 Use bit-field techniques or SUBREG to store in it. */
3391 if (mode == VOIDmode
3392 || (mode != BLKmode && ! direct_store[(int) mode])
3393 || GET_CODE (target) == REG
3394 || GET_CODE (target) == SUBREG
3395 /* If the field isn't aligned enough to store as an ordinary memref,
3396 store it as a bit field. */
3397 || (SLOW_UNALIGNED_ACCESS
3398 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3399 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3401 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3403 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3405 if (mode != VOIDmode && mode != BLKmode
3406 && mode != TYPE_MODE (TREE_TYPE (exp)))
3407 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3409 /* Store the value in the bitfield. */
3410 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3411 if (value_mode != VOIDmode)
3413 /* The caller wants an rtx for the value. */
3414 /* If possible, avoid refetching from the bitfield itself. */
3416 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3419 enum machine_mode tmode;
3422 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3423 tmode = GET_MODE (temp);
3424 if (tmode == VOIDmode)
3426 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3427 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3428 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3430 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3431 NULL_RTX, value_mode, 0, align,
3438 rtx addr = XEXP (target, 0);
3441 /* If a value is wanted, it must be the lhs;
3442 so make the address stable for multiple use. */
3444 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3445 && ! CONSTANT_ADDRESS_P (addr)
3446 /* A frame-pointer reference is already stable. */
3447 && ! (GET_CODE (addr) == PLUS
3448 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3449 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3450 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3451 addr = copy_to_reg (addr);
3453 /* Now build a reference to just the desired component. */
3455 to_rtx = change_address (target, mode,
3456 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3457 MEM_IN_STRUCT_P (to_rtx) = 1;
3459 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3463 /* Return true if any object containing the innermost array is an unaligned
3464 packed structure field. */
3467 get_inner_unaligned_p (exp)
3470 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3474 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3476 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3480 else if (TREE_CODE (exp) != ARRAY_REF
3481 && TREE_CODE (exp) != NON_LVALUE_EXPR
3482 && ! ((TREE_CODE (exp) == NOP_EXPR
3483 || TREE_CODE (exp) == CONVERT_EXPR)
3484 && (TYPE_MODE (TREE_TYPE (exp))
3485 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3488 exp = TREE_OPERAND (exp, 0);
3494 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3495 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3496 ARRAY_REFs and find the ultimate containing object, which we return.
3498 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3499 bit position, and *PUNSIGNEDP to the signedness of the field.
3500 If the position of the field is variable, we store a tree
3501 giving the variable offset (in units) in *POFFSET.
3502 This offset is in addition to the bit position.
3503 If the position is not variable, we store 0 in *POFFSET.
3505 If any of the extraction expressions is volatile,
3506 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3508 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3509 is a mode that can be used to access the field. In that case, *PBITSIZE
3512 If the field describes a variable-sized object, *PMODE is set to
3513 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3514 this case, but the address of the object can be found. */
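/* Example (added for illustration): for an expression like `s.a.b' where
   both field offsets are constant, this returns the object `s' and adds
   the two offsets into *PBITPOS; an array reference with a variable index
   instead contributes a tree for the byte offset through *POFFSET.  */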
3517 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3518 punsignedp, pvolatilep)
3523 enum machine_mode *pmode;
3527 tree orig_exp = exp;
3529 enum machine_mode mode = VOIDmode;
3530 tree offset = integer_zero_node;
3532 if (TREE_CODE (exp) == COMPONENT_REF)
3534 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3535 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3536 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3537 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3539 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3541 size_tree = TREE_OPERAND (exp, 1);
3542 *punsignedp = TREE_UNSIGNED (exp);
3546 mode = TYPE_MODE (TREE_TYPE (exp));
3547 *pbitsize = GET_MODE_BITSIZE (mode);
3548 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3553 if (TREE_CODE (size_tree) != INTEGER_CST)
3554 mode = BLKmode, *pbitsize = -1;
3556 *pbitsize = TREE_INT_CST_LOW (size_tree);
3559 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3560 and find the ultimate containing object. */
3566 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3568 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3569 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3570 : TREE_OPERAND (exp, 2));
3571 tree constant = integer_zero_node, var = pos;
3573 /* If this field hasn't been filled in yet, don't go
3574 past it. This should only happen when folding expressions
3575 made during type construction. */
3579 /* Assume here that the offset is a multiple of a unit.
3580 If not, there should be an explicitly added constant. */
3581 if (TREE_CODE (pos) == PLUS_EXPR
3582 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3583 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3584 else if (TREE_CODE (pos) == INTEGER_CST)
3585 constant = pos, var = integer_zero_node;
3587 *pbitpos += TREE_INT_CST_LOW (constant);
3590 offset = size_binop (PLUS_EXPR, offset,
3591 size_binop (EXACT_DIV_EXPR, var,
3592 size_int (BITS_PER_UNIT)));
3595 else if (TREE_CODE (exp) == ARRAY_REF)
3597 /* This code is based on the code in case ARRAY_REF in expand_expr
3598 below. We assume here that the size of an array element is
3599 always an integral multiple of BITS_PER_UNIT. */
3601 tree index = TREE_OPERAND (exp, 1);
3602 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3604 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3605 tree index_type = TREE_TYPE (index);
3607 if (! integer_zerop (low_bound))
3608 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3610 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3612 index = convert (type_for_size (POINTER_SIZE, 0), index);
3613 index_type = TREE_TYPE (index);
3616 index = fold (build (MULT_EXPR, index_type, index,
3617 TYPE_SIZE (TREE_TYPE (exp))));
3619 if (TREE_CODE (index) == INTEGER_CST
3620 && TREE_INT_CST_HIGH (index) == 0)
3621 *pbitpos += TREE_INT_CST_LOW (index);
3623 offset = size_binop (PLUS_EXPR, offset,
3624 size_binop (FLOOR_DIV_EXPR, index,
3625 size_int (BITS_PER_UNIT)));
3627 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3628 && ! ((TREE_CODE (exp) == NOP_EXPR
3629 || TREE_CODE (exp) == CONVERT_EXPR)
3630 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3631 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3633 && (TYPE_MODE (TREE_TYPE (exp))
3634 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3637 /* If any reference in the chain is volatile, the effect is volatile. */
3638 if (TREE_THIS_VOLATILE (exp))
3640 exp = TREE_OPERAND (exp, 0);
3643 /* If this was a bit-field, see if there is a mode that allows direct
3644 access in case EXP is in memory. */
3645 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3647 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3648 if (mode == BLKmode)
3652 if (integer_zerop (offset))
3655 if (offset != 0 && contains_placeholder_p (offset))
3656 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3663 /* Given an rtx VALUE that may contain additions and multiplications,
3664 return an equivalent value that just refers to a register or memory.
3665 This is done by generating instructions to perform the arithmetic
3666 and returning a pseudo-register containing the value.
3668 The returned value may be a REG, SUBREG, MEM or constant. */
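/* Example (added for illustration): given VALUE == (plus (reg 70)
   (const_int 4)), force_operand emits an add and returns a pseudo (or
   TARGET, if suitable) holding the sum, so the caller ends up with a
   simple register or memory operand.  */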
3671 force_operand (value, target)
3674 register optab binoptab = 0;
3675 /* Use a temporary to force order of execution of calls to
3679 /* Use subtarget as the target for operand 0 of a binary operation. */
3680 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3682 if (GET_CODE (value) == PLUS)
3683 binoptab = add_optab;
3684 else if (GET_CODE (value) == MINUS)
3685 binoptab = sub_optab;
3686 else if (GET_CODE (value) == MULT)
3688 op2 = XEXP (value, 1);
3689 if (!CONSTANT_P (op2)
3690 && !(GET_CODE (op2) == REG && op2 != subtarget))
3692 tmp = force_operand (XEXP (value, 0), subtarget);
3693 return expand_mult (GET_MODE (value), tmp,
3694 force_operand (op2, NULL_RTX),
3700 op2 = XEXP (value, 1);
3701 if (!CONSTANT_P (op2)
3702 && !(GET_CODE (op2) == REG && op2 != subtarget))
3704 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3706 binoptab = add_optab;
3707 op2 = negate_rtx (GET_MODE (value), op2);
3710 /* Check for an addition with OP2 a constant integer and our first
3711 operand a PLUS of a virtual register and something else. In that
3712 case, we want to emit the sum of the virtual register and the
3713 constant first and then add the other value. This allows virtual
3714 register instantiation to simply modify the constant rather than
3715 creating another one around this addition. */
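/* Example (added for illustration): for
     (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8))
   we first form virtual-stack-vars + 8, which virtual register
   instantiation can fold into a single frame offset, and only then
   add in (reg 70).  */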
3716 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3717 && GET_CODE (XEXP (value, 0)) == PLUS
3718 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3719 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3720 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3722 rtx temp = expand_binop (GET_MODE (value), binoptab,
3723 XEXP (XEXP (value, 0), 0), op2,
3724 subtarget, 0, OPTAB_LIB_WIDEN);
3725 return expand_binop (GET_MODE (value), binoptab, temp,
3726 force_operand (XEXP (XEXP (value, 0), 1), 0),
3727 target, 0, OPTAB_LIB_WIDEN);
3730 tmp = force_operand (XEXP (value, 0), subtarget);
3731 return expand_binop (GET_MODE (value), binoptab, tmp,
3732 force_operand (op2, NULL_RTX),
3733 target, 0, OPTAB_LIB_WIDEN);
3734 /* We give UNSIGNEDP = 0 to expand_binop
3735 because the only operations we are expanding here are signed ones. */
3740 /* Subroutine of expand_expr:
3741 save the non-copied parts (LIST) of an expr (LHS), and return a list
3742 which can restore these values to their previous values,
3743 should something modify their storage. */
3746 save_noncopied_parts (lhs, list)
3753 for (tail = list; tail; tail = TREE_CHAIN (tail))
3754 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3755 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3758 tree part = TREE_VALUE (tail);
3759 tree part_type = TREE_TYPE (part);
3760 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3761 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3762 int_size_in_bytes (part_type), 0);
3763 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3764 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3765 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3766 parts = tree_cons (to_be_saved,
3767 build (RTL_EXPR, part_type, NULL_TREE,
3770 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3775 /* Subroutine of expand_expr:
3776 record the non-copied parts (LIST) of an expr (LHS), and return a list
3777 which specifies the initial values of these parts. */
3780 init_noncopied_parts (lhs, list)
3787 for (tail = list; tail; tail = TREE_CHAIN (tail))
3788 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3789 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3792 tree part = TREE_VALUE (tail);
3793 tree part_type = TREE_TYPE (part);
3794 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3795 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3800 /* Subroutine of expand_expr: return nonzero iff there is no way that
3801 EXP can reference X, which is being modified. */
3804 safe_from_p (x, exp)
3812 /* If EXP has varying size, we MUST use a target since we currently
3813 have no way of allocating temporaries of variable size. So we
3814 assume here that something at a higher level has prevented a
3815 clash. This is somewhat bogus, but the best we can do. */
3816 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3817 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3820 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3821 find the underlying pseudo. */
3822 if (GET_CODE (x) == SUBREG)
3825 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3829 /* If X is a location in the outgoing argument area, it is always safe. */
3830 if (GET_CODE (x) == MEM
3831 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3832 || (GET_CODE (XEXP (x, 0)) == PLUS
3833 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3836 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3839 exp_rtl = DECL_RTL (exp);
3846 if (TREE_CODE (exp) == TREE_LIST)
3847 return ((TREE_VALUE (exp) == 0
3848 || safe_from_p (x, TREE_VALUE (exp)))
3849 && (TREE_CHAIN (exp) == 0
3850 || safe_from_p (x, TREE_CHAIN (exp))));
3855 return safe_from_p (x, TREE_OPERAND (exp, 0));
3859 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3860 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3864 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3865 the expression. If it is set, we conflict iff we are that rtx or
3866 both are in memory. Otherwise, we check all operands of the
3867 expression recursively. */
3869 switch (TREE_CODE (exp))
3872 return (staticp (TREE_OPERAND (exp, 0))
3873 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3876 if (GET_CODE (x) == MEM)
3881 exp_rtl = CALL_EXPR_RTL (exp);
3884 /* Assume that the call will clobber all hard registers and
3886 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3887 || GET_CODE (x) == MEM)
3894 exp_rtl = RTL_EXPR_RTL (exp);
3896 /* We don't know what this can modify. */
3901 case WITH_CLEANUP_EXPR:
3902 exp_rtl = RTL_EXPR_RTL (exp);
3905 case CLEANUP_POINT_EXPR:
3906 return safe_from_p (x, TREE_OPERAND (exp, 0));
3909 exp_rtl = SAVE_EXPR_RTL (exp);
3913 /* The only operand we look at is operand 1. The rest aren't
3914 part of the expression. */
3915 return safe_from_p (x, TREE_OPERAND (exp, 1));
3917 case METHOD_CALL_EXPR:
3918 /* This takes an rtx argument, but shouldn't appear here. */
3922 /* If we have an rtx, we do not need to scan our operands. */
3926 nops = tree_code_length[(int) TREE_CODE (exp)];
3927 for (i = 0; i < nops; i++)
3928 if (TREE_OPERAND (exp, i) != 0
3929 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3933 /* If we have an rtl, find any enclosed object. Then see if we conflict
3937 if (GET_CODE (exp_rtl) == SUBREG)
3939 exp_rtl = SUBREG_REG (exp_rtl);
3940 if (GET_CODE (exp_rtl) == REG
3941 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3945 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3946 are memory and EXP is not readonly. */
3947 return ! (rtx_equal_p (x, exp_rtl)
3948 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3949 && ! TREE_READONLY (exp)));
3952 /* If we reach here, it is safe. */
3956 /* Subroutine of expand_expr: return nonzero iff EXP is an
3957 expression whose type is statically determinable. */
3963 if (TREE_CODE (exp) == PARM_DECL
3964 || TREE_CODE (exp) == VAR_DECL
3965 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3966 || TREE_CODE (exp) == COMPONENT_REF
3967 || TREE_CODE (exp) == ARRAY_REF)
3972 /* expand_expr: generate code for computing expression EXP.
3973 An rtx for the computed value is returned. The value is never null.
3974 In the case of a void EXP, const0_rtx is returned.
3976 The value may be stored in TARGET if TARGET is nonzero.
3977 TARGET is just a suggestion; callers must assume that
3978 the rtx returned may not be the same as TARGET.
3980 If TARGET is CONST0_RTX, it means that the value will be ignored.
3982 If TMODE is not VOIDmode, it suggests generating the
3983 result in mode TMODE. But this is done only when convenient.
3984 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3985 TMODE is just a suggestion; callers must assume that
3986 the rtx returned may not have mode TMODE.
3988 Note that TARGET may have neither TMODE nor MODE. In that case, it
3989 probably will not be used.
3991 If MODIFIER is EXPAND_SUM then when EXP is an addition
3992 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3993 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3994 products as above, or REG or MEM, or constant.
3995 Ordinarily in such cases we would output mul or add instructions
3996 and then return a pseudo reg containing the sum.
3998 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3999 it also marks a label as absolutely required (it can't be dead).
4000 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4001 This is used for outputting expressions used in initializers.
4003 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4004 with a constant address even if that address is not normally legitimate.
4005 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
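/* Example (added for illustration): expanding `p + 1', where P is a
   pointer to a 4-byte type held in pseudo 70, with EXPAND_SUM may simply
   return (plus (reg 70) (const_int 4)) for the caller (typically an
   address computation) to use, instead of emitting an add insn.  */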
4008 expand_expr (exp, target, tmode, modifier)
4011 enum machine_mode tmode;
4012 enum expand_modifier modifier;
4014 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4015 This is static so it will be accessible to our recursive callees. */
4016 static tree placeholder_list = 0;
4017 register rtx op0, op1, temp;
4018 tree type = TREE_TYPE (exp);
4019 int unsignedp = TREE_UNSIGNED (type);
4020 register enum machine_mode mode = TYPE_MODE (type);
4021 register enum tree_code code = TREE_CODE (exp);
4023 /* Use subtarget as the target for operand 0 of a binary operation. */
4024 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4025 rtx original_target = target;
4026 /* Maybe defer this until sure not doing bytecode? */
4027 int ignore = (target == const0_rtx
4028 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4029 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4030 || code == COND_EXPR)
4031 && TREE_CODE (type) == VOID_TYPE));
4035 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4037 bc_expand_expr (exp);
4041 /* Don't use hard regs as subtargets, because the combiner
4042 can only handle pseudo regs. */
4043 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4045 /* Avoid subtargets inside loops,
4046 since they hide some invariant expressions. */
4047 if (preserve_subexpressions_p ())
4050 /* If we are going to ignore this result, we need only do something
4051 if there is a side-effect somewhere in the expression. If there
4052 is, short-circuit the most common cases here. Note that we must
4053 not call expand_expr with anything but const0_rtx in case this
4054 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4058 if (! TREE_SIDE_EFFECTS (exp))
4061 /* Ensure we reference a volatile object even if value is ignored. */
4062 if (TREE_THIS_VOLATILE (exp)
4063 && TREE_CODE (exp) != FUNCTION_DECL
4064 && mode != VOIDmode && mode != BLKmode)
4066 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4067 if (GET_CODE (temp) == MEM)
4068 temp = copy_to_reg (temp);
4072 if (TREE_CODE_CLASS (code) == '1')
4073 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4074 VOIDmode, modifier);
4075 else if (TREE_CODE_CLASS (code) == '2'
4076 || TREE_CODE_CLASS (code) == '<')
4078 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4079 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4082 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4083 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4084 /* If the second operand has no side effects, just evaluate
4086 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4087 VOIDmode, modifier);
4092 /* If we will do cse, generate all results into pseudo registers
4093 since 1) that allows cse to find more things
4094 and 2) otherwise cse could produce an insn the machine
4097 if (! cse_not_expected && mode != BLKmode && target
4098 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4105 tree function = decl_function_context (exp);
4106 /* Handle using a label in a containing function. */
4107 if (function != current_function_decl && function != 0)
4109 struct function *p = find_function_data (function);
4110 /* Allocate in the memory associated with the function
4111 that the label is in. */
4112 push_obstacks (p->function_obstack,
4113 p->function_maybepermanent_obstack);
4115 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4116 label_rtx (exp), p->forced_labels);
4119 else if (modifier == EXPAND_INITIALIZER)
4120 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4121 label_rtx (exp), forced_labels);
4122 temp = gen_rtx (MEM, FUNCTION_MODE,
4123 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4124 if (function != current_function_decl && function != 0)
4125 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4130 if (DECL_RTL (exp) == 0)
4132 error_with_decl (exp, "prior parameter's size depends on `%s'");
4133 return CONST0_RTX (mode);
4136 /* ... fall through ... */
4139 /* If a static var's type was incomplete when the decl was written,
4140 but the type is complete now, lay out the decl now. */
4141 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4142 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4144 push_obstacks_nochange ();
4145 end_temporary_allocation ();
4146 layout_decl (exp, 0);
4147 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4151 /* ... fall through ... */
4155 if (DECL_RTL (exp) == 0)
4158 /* Ensure variable marked as used even if it doesn't go through
4159 a parser. If it hasn't been used yet, write out an external
4161 if (! TREE_USED (exp))
4163 assemble_external (exp);
4164 TREE_USED (exp) = 1;
4167 /* Handle variables inherited from containing functions. */
4168 context = decl_function_context (exp);
4170 /* We treat inline_function_decl as an alias for the current function
4171 because that is the inline function whose vars, types, etc.
4172 are being merged into the current function.
4173 See expand_inline_function. */
4175 if (context != 0 && context != current_function_decl
4176 && context != inline_function_decl
4177 /* If var is static, we don't need a static chain to access it. */
4178 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4179 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4183 /* Mark as non-local and addressable. */
4184 DECL_NONLOCAL (exp) = 1;
4185 mark_addressable (exp);
4186 if (GET_CODE (DECL_RTL (exp)) != MEM)
4188 addr = XEXP (DECL_RTL (exp), 0);
4189 if (GET_CODE (addr) == MEM)
4190 addr = gen_rtx (MEM, Pmode,
4191 fix_lexical_addr (XEXP (addr, 0), exp));
4193 addr = fix_lexical_addr (addr, exp);
4194 return change_address (DECL_RTL (exp), mode, addr);
4197 /* This is the case of an array whose size is to be determined
4198 from its initializer, while the initializer is still being parsed.
4201 if (GET_CODE (DECL_RTL (exp)) == MEM
4202 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4203 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4204 XEXP (DECL_RTL (exp), 0));
4206 /* If DECL_RTL is memory, we are in the normal case and either
4207 the address is not valid or it is not a register and -fforce-addr
4208 is specified, get the address into a register. */
4210 if (GET_CODE (DECL_RTL (exp)) == MEM
4211 && modifier != EXPAND_CONST_ADDRESS
4212 && modifier != EXPAND_SUM
4213 && modifier != EXPAND_INITIALIZER
4214 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4216 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4217 return change_address (DECL_RTL (exp), VOIDmode,
4218 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4220 /* If the mode of DECL_RTL does not match that of the decl, it
4221 must be a promoted value. We return a SUBREG of the wanted mode,
4222 but mark it so that we know that it was already extended. */
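/* Example (added for illustration): a `short' variable that the target
   promotes to a wider register mode lives in a pseudo of that wider mode;
   the SUBREG built below presents the narrower declared mode while
   recording that the register already holds the extended value.  */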
4224 if (GET_CODE (DECL_RTL (exp)) == REG
4225 && GET_MODE (DECL_RTL (exp)) != mode)
4227 /* Get the signedness used for this variable. Ensure we get the
4228 same mode we got when the variable was declared. */
4229 if (GET_MODE (DECL_RTL (exp))
4230 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4233 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4234 SUBREG_PROMOTED_VAR_P (temp) = 1;
4235 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4239 return DECL_RTL (exp);
4242 return immed_double_const (TREE_INT_CST_LOW (exp),
4243 TREE_INT_CST_HIGH (exp),
4247 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4250 /* If optimized, generate immediate CONST_DOUBLE
4251 which will be turned into memory by reload if necessary.
4253 We used to force a register so that loop.c could see it. But
4254 this does not allow gen_* patterns to perform optimizations with
4255 the constants. It also produces two insns in cases like "x = 1.0;".
4256 On most machines, floating-point constants are not permitted in
4257 many insns, so we'd end up copying it to a register in any case.
4259 Now, we do the copying in expand_binop, if appropriate. */
4260 return immed_real_const (exp);
4264 if (! TREE_CST_RTL (exp))
4265 output_constant_def (exp);
4267 /* TREE_CST_RTL probably contains a constant address.
4268 On RISC machines where a constant address isn't valid,
4269 make some insns to get that address into a register. */
4270 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4271 && modifier != EXPAND_CONST_ADDRESS
4272 && modifier != EXPAND_INITIALIZER
4273 && modifier != EXPAND_SUM
4274 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4276 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4277 return change_address (TREE_CST_RTL (exp), VOIDmode,
4278 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4279 return TREE_CST_RTL (exp);
4282 context = decl_function_context (exp);
4284 /* We treat inline_function_decl as an alias for the current function
4285 because that is the inline function whose vars, types, etc.
4286 are being merged into the current function.
4287 See expand_inline_function. */
4288 if (context == current_function_decl || context == inline_function_decl)
4291 /* If this is non-local, handle it. */
4294 temp = SAVE_EXPR_RTL (exp);
4295 if (temp && GET_CODE (temp) == REG)
4297 put_var_into_stack (exp);
4298 temp = SAVE_EXPR_RTL (exp);
4300 if (temp == 0 || GET_CODE (temp) != MEM)
4302 return change_address (temp, mode,
4303 fix_lexical_addr (XEXP (temp, 0), exp));
4305 if (SAVE_EXPR_RTL (exp) == 0)
4307 if (mode == BLKmode)
4310 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4311 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4313 else if (mode == VOIDmode)
4316 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4318 SAVE_EXPR_RTL (exp) = temp;
4319 if (!optimize && GET_CODE (temp) == REG)
4320 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4323 /* If the mode of TEMP does not match that of the expression, it
4324 must be a promoted value. We pass store_expr a SUBREG of the
4325 wanted mode but mark it so that we know that it was already
4326 extended.  Note that `unsignedp' was modified above in this case.  */
4329 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4331 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4332 SUBREG_PROMOTED_VAR_P (temp) = 1;
4333 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4336 if (temp == const0_rtx)
4337 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4339 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4342 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4343 must be a promoted value. We return a SUBREG of the wanted mode,
4344 but mark it so that we know that it was already extended. */
4346 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4347 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4349 /* Compute the signedness and make the proper SUBREG. */
4350 promote_mode (type, mode, &unsignedp, 0);
4351 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4352 SUBREG_PROMOTED_VAR_P (temp) = 1;
4353 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4357 return SAVE_EXPR_RTL (exp);
4359 case PLACEHOLDER_EXPR:
4360 /* If there is an object on the head of the placeholder list,
4361 see if some object in its references is of type TYPE.  For
4362 further information, see tree.def. */
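/* Informally: a PLACEHOLDER_EXPR stands for "the object of this type
   being accessed", as can occur in a self-referential field position
   or size; the WITH_RECORD_EXPR case below supplies the actual object
   to substitute.  (Sketch only; tree.def has the authoritative rules.)  */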
4363 if (placeholder_list)
4366 tree old_list = placeholder_list;
4368 for (object = TREE_PURPOSE (placeholder_list);
4369 TREE_TYPE (object) != type
4370 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4371 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4372 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4373 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4374 object = TREE_OPERAND (object, 0))
4377 if (object && TREE_TYPE (object) == type)
4379 /* Expand this object skipping the list entries before
4380 it was found in case it is also a PLACEHOLDER_EXPR.
4381 In that case, we want to translate it using subsequent entries.  */
4383 placeholder_list = TREE_CHAIN (placeholder_list);
4384 temp = expand_expr (object, original_target, tmode, modifier);
4385 placeholder_list = old_list;
4390 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4393 case WITH_RECORD_EXPR:
4394 /* Put the object on the placeholder list, expand our first operand,
4395 and pop the list. */
4396 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4398 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4400 placeholder_list = TREE_CHAIN (placeholder_list);
4404 expand_exit_loop_if_false (NULL_PTR,
4405 invert_truthvalue (TREE_OPERAND (exp, 0)));
4410 expand_start_loop (1);
4411 expand_expr_stmt (TREE_OPERAND (exp, 0));
4419 tree vars = TREE_OPERAND (exp, 0);
4420 int vars_need_expansion = 0;
4422 /* Need to open a binding contour here because
4423 if there are any cleanups they must be contained here.  */
4424 expand_start_bindings (0);
4426 /* Mark the corresponding BLOCK for output in its proper place. */
4427 if (TREE_OPERAND (exp, 2) != 0
4428 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4429 insert_block (TREE_OPERAND (exp, 2));
4431 /* If VARS have not yet been expanded, expand them now. */
4434 if (DECL_RTL (vars) == 0)
4436 vars_need_expansion = 1;
4439 expand_decl_init (vars);
4440 vars = TREE_CHAIN (vars);
4443 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4445 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4451 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4453 emit_insns (RTL_EXPR_SEQUENCE (exp));
4454 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4455 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4456 free_temps_for_rtl_expr (exp);
4457 return RTL_EXPR_RTL (exp);
4460 /* If we don't need the result, just ensure we evaluate any subexpressions.  */
4465 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4466 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4470 /* All elts simple constants => refer to a constant in memory. But
4471 if this is a non-BLKmode mode, let it store a field at a time
4472 since that should make a CONST_INT or CONST_DOUBLE when we
4473 fold. Likewise, if we have a target we can use, it is best to
4474 store directly into the target unless the type is large enough
4475 that memcpy will be used. If we are making an initializer and
4476 all operands are constant, put it in memory as well. */
4477 else if ((TREE_STATIC (exp)
4478 && ((mode == BLKmode
4479 && ! (target != 0 && safe_from_p (target, exp)))
4480 || TREE_ADDRESSABLE (exp)
4481 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4482 && (move_by_pieces_ninsns
4483 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4484 TYPE_ALIGN (type) / BITS_PER_UNIT)
4486 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4488 rtx constructor = output_constant_def (exp);
4489 if (modifier != EXPAND_CONST_ADDRESS
4490 && modifier != EXPAND_INITIALIZER
4491 && modifier != EXPAND_SUM
4492 && (! memory_address_p (GET_MODE (constructor),
4493 XEXP (constructor, 0))
4495 && GET_CODE (XEXP (constructor, 0)) != REG)))
4496 constructor = change_address (constructor, VOIDmode,
4497 XEXP (constructor, 0));
4503 if (target == 0 || ! safe_from_p (target, exp))
4505 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4506 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4510 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4511 if (AGGREGATE_TYPE_P (type))
4512 MEM_IN_STRUCT_P (target) = 1;
4515 store_constructor (exp, target);
4521 tree exp1 = TREE_OPERAND (exp, 0);
4524 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4525 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4526 This code has the same general effect as simply doing
4527 expand_expr on the save expr, except that the expression PTR
4528 is computed for use as a memory address. This means different
4529 code, suitable for indexing, may be generated. */
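/* For example, in `*p += 1' the pointer P may arrive here wrapped in a
   SAVE_EXPR; expanding its operand with EXPAND_SUM below lets the
   address be computed once, in a form that is also good for indexing.
   (Illustration of the situation described above.)  */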
4530 if (TREE_CODE (exp1) == SAVE_EXPR
4531 && SAVE_EXPR_RTL (exp1) == 0
4532 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4534 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4535 VOIDmode, EXPAND_SUM);
4536 op0 = memory_address (mode, temp);
4537 op0 = copy_all_regs (op0);
4538 SAVE_EXPR_RTL (exp1) = op0;
4542 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4543 op0 = memory_address (mode, op0);
4546 temp = gen_rtx (MEM, mode, op0);
4547 /* If address was computed by addition,
4548 mark this as an element of an aggregate. */
4549 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4550 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4551 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4552 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4553 || (TREE_CODE (exp1) == ADDR_EXPR
4554 && (exp2 = TREE_OPERAND (exp1, 0))
4555 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4556 MEM_IN_STRUCT_P (temp) = 1;
4557 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4559 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
4560 here, because, in C and C++, the fact that a location is accessed
4561 through a pointer to const does not mean that the value there can
4562 never change. Languages where it can never change should
4563 also set TREE_STATIC. */
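/* For example, a value read through `const int *p' in C may alias an
   ordinary `int' object that is modified elsewhere, so the read-only
   qualification of the reference says nothing about the storage.  */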
4564 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) | TREE_STATIC (exp);
4569 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4573 tree array = TREE_OPERAND (exp, 0);
4574 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4575 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4576 tree index = TREE_OPERAND (exp, 1);
4577 tree index_type = TREE_TYPE (index);
4580 if (TREE_CODE (low_bound) != INTEGER_CST
4581 && contains_placeholder_p (low_bound))
4582 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4584 /* Optimize the special-case of a zero lower bound.
4586 We convert the low_bound to sizetype to avoid some problems
4587 with constant folding. (E.g. suppose the lower bound is 1,
4588 and its mode is QI. Without the conversion, (ARRAY
4589 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4590 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4592 But sizetype isn't quite right either (especially if
4593 the lowbound is negative). FIXME */
4595 if (! integer_zerop (low_bound))
4596 index = fold (build (MINUS_EXPR, index_type, index,
4597 convert (sizetype, low_bound)));
4599 if ((TREE_CODE (index) != INTEGER_CST
4600 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4601 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4603 /* Nonconstant array index or nonconstant element size, and
4604 not an array in an unaligned (packed) structure field.
4605 Generate the tree for *(&array+index) and expand that,
4606 except do it in a language-independent way
4607 and don't complain about non-lvalue arrays.
4608 `mark_addressable' should already have been called
4609 for any array for which this case will be reached. */
4611 /* Don't forget the const or volatile flag from the array element.  */
4613 tree variant_type = build_type_variant (type,
4614 TREE_READONLY (exp),
4615 TREE_THIS_VOLATILE (exp));
4616 tree array_adr = build1 (ADDR_EXPR,
4617 build_pointer_type (variant_type), array);
4619 tree size = size_in_bytes (type);
4621 /* Convert the integer argument to a type the same size as a
4622 pointer so the multiply won't overflow spuriously. */
4623 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4624 index = convert (type_for_size (POINTER_SIZE, 0), index);
4626 if (TREE_CODE (size) != INTEGER_CST
4627 && contains_placeholder_p (size))
4628 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4630 /* Don't think the address has side effects
4631 just because the array does.
4632 (In some cases the address might have side effects,
4633 and we fail to record that fact here. However, it should not
4634 matter, since expand_expr should not care.) */
4635 TREE_SIDE_EFFECTS (array_adr) = 0;
4637 elt = build1 (INDIRECT_REF, type,
4638 fold (build (PLUS_EXPR,
4639 TYPE_POINTER_TO (variant_type),
4641 fold (build (MULT_EXPR,
4642 TYPE_POINTER_TO (variant_type),
4645 /* Volatility, etc., of new expression is same as old expression.  */
4647 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4648 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4649 TREE_READONLY (elt) = TREE_READONLY (exp);
4651 return expand_expr (elt, target, tmode, modifier);
4654 /* Fold an expression like: "foo"[2].
4655 This is not done in fold so it won't happen inside &.
4656 Don't fold if this is for wide characters since it's too
4657 difficult to do correctly and this is a very rare case. */
4659 if (TREE_CODE (array) == STRING_CST
4660 && TREE_CODE (index) == INTEGER_CST
4661 && !TREE_INT_CST_HIGH (index)
4662 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4663 && GET_MODE_CLASS (mode) == MODE_INT
4664 && GET_MODE_SIZE (mode) == 1)
4665 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4667 /* If this is a constant index into a constant array,
4668 just get the value from the array. Handle both the cases when
4669 we have an explicit constructor and when our operand is a variable
4670 that was declared const. */
4672 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4674 if (TREE_CODE (index) == INTEGER_CST
4675 && TREE_INT_CST_HIGH (index) == 0)
4677 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4679 i = TREE_INT_CST_LOW (index);
4681 elem = TREE_CHAIN (elem);
4683 return expand_expr (fold (TREE_VALUE (elem)), target,
4688 else if (optimize >= 1
4689 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4690 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4691 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4693 if (TREE_CODE (index) == INTEGER_CST
4694 && TREE_INT_CST_HIGH (index) == 0)
4696 tree init = DECL_INITIAL (array);
4698 i = TREE_INT_CST_LOW (index);
4699 if (TREE_CODE (init) == CONSTRUCTOR)
4701 tree elem = CONSTRUCTOR_ELTS (init);
4704 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4705 elem = TREE_CHAIN (elem);
4707 return expand_expr (fold (TREE_VALUE (elem)), target,
4710 else if (TREE_CODE (init) == STRING_CST
4711 && i < TREE_STRING_LENGTH (init))
4712 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4717 /* Treat array-ref with constant index as a component-ref. */
4721 /* If the operand is a CONSTRUCTOR, we can just extract the
4722 appropriate field if it is present. Don't do this if we have
4723 already written the data since we want to refer to that copy
4724 and varasm.c assumes that's what we'll do. */
4725 if (code != ARRAY_REF
4726 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4727 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4731 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4732 elt = TREE_CHAIN (elt))
4733 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4734 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4738 enum machine_mode mode1;
4743 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4744 &mode1, &unsignedp, &volatilep);
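/* Informally: for a reference such as a COMPONENT_REF, TEM is the
   containing object, BITSIZE/BITPOS (plus OFFSET for a variable part)
   locate the piece being accessed within it, and MODE1 is the field's
   natural mode.  (Rough summary of get_inner_reference's outputs.)  */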
4747 /* If we got back the original object, something is wrong. Perhaps
4748 we are evaluating an expression too early. In any event, don't
4749 infinitely recurse. */
4753 /* In some cases, we will be offsetting OP0's address by a constant.
4754 So get it as a sum, if possible. If we will be using it
4755 directly in an insn, we validate it. */
4756 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4758 /* If this is a constant, put it into a register if it is a
4759 legitimate constant and memory if it isn't. */
4760 if (CONSTANT_P (op0))
4762 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4763 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4764 op0 = force_reg (mode, op0);
4766 op0 = validize_mem (force_const_mem (mode, op0));
4769 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4772 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4774 if (GET_CODE (op0) != MEM)
4776 op0 = change_address (op0, VOIDmode,
4777 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4778 force_reg (ptr_mode, offset_rtx)));
4779 /* If we have a variable offset, the known alignment
4780 is only that of the innermost structure containing the field.
4781 (Actually, we could sometimes do better by using the
4782 size of an element of the innermost array, but no need.) */
4783 if (TREE_CODE (exp) == COMPONENT_REF
4784 || TREE_CODE (exp) == BIT_FIELD_REF)
4785 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4789 /* Don't forget about volatility even if this is a bitfield. */
4790 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4792 op0 = copy_rtx (op0);
4793 MEM_VOLATILE_P (op0) = 1;
4796 /* In cases where an aligned union has an unaligned object
4797 as a field, we might be extracting a BLKmode value from
4798 an integer-mode (e.g., SImode) object. Handle this case
4799 by doing the extract into an object as wide as the field
4800 (which we know to be the width of a basic mode), then
4801 storing into memory, and changing the mode to BLKmode. */
4802 if (mode1 == VOIDmode
4803 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4804 || (modifier != EXPAND_CONST_ADDRESS
4805 && modifier != EXPAND_SUM
4806 && modifier != EXPAND_INITIALIZER
4807 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4808 /* If the field isn't aligned enough to fetch as a memref,
4809 fetch it as a bit field. */
4810 || (SLOW_UNALIGNED_ACCESS
4811 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4812 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4814 enum machine_mode ext_mode = mode;
4816 if (ext_mode == BLKmode)
4817 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4819 if (ext_mode == BLKmode)
4822 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4823 unsignedp, target, ext_mode, ext_mode,
4825 int_size_in_bytes (TREE_TYPE (tem)));
4826 if (mode == BLKmode)
4828 rtx new = assign_stack_temp (ext_mode,
4829 bitsize / BITS_PER_UNIT, 0);
4831 emit_move_insn (new, op0);
4832 op0 = copy_rtx (new);
4833 PUT_MODE (op0, BLKmode);
4834 MEM_IN_STRUCT_P (op0) = 1;
4840 /* Get a reference to just this component. */
4841 if (modifier == EXPAND_CONST_ADDRESS
4842 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4843 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4844 (bitpos / BITS_PER_UNIT)));
4846 op0 = change_address (op0, mode1,
4847 plus_constant (XEXP (op0, 0),
4848 (bitpos / BITS_PER_UNIT)));
4849 MEM_IN_STRUCT_P (op0) = 1;
4850 MEM_VOLATILE_P (op0) |= volatilep;
4851 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4854 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4855 convert_move (target, op0, unsignedp);
4861 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4862 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4863 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4864 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4865 MEM_IN_STRUCT_P (temp) = 1;
4866 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4867 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4868 a location is accessed through a pointer to const does not mean
4869 that the value there can never change. */
4870 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4875 /* Intended for a reference to a buffer of a file-object in Pascal.
4876 But it's not certain that a special tree code will really be
4877 necessary for these. INDIRECT_REF might work for them. */
4883 /* Pascal set IN expression.
4886 rlo = set_low - (set_low%bits_per_word);
4887 the_word = set [ (index - rlo)/bits_per_word ];
4888 bit_index = index % bits_per_word;
4889 bitmask = 1 << bit_index;
4890 return !!(the_word & bitmask); */
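/* Worked example, following the pseudo-code above with 8-bit units and
   a set whose lower bound is 0: testing index 13 examines bit
   13 % 8 == 5 of unit 13 / 8 == 1, i.e. it computes
   !!(set[1] & (1 << 5)).  */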
4892 tree set = TREE_OPERAND (exp, 0);
4893 tree index = TREE_OPERAND (exp, 1);
4894 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4895 tree set_type = TREE_TYPE (set);
4896 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4897 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4898 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4899 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4900 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4901 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4902 rtx setaddr = XEXP (setval, 0);
4903 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4905 rtx diff, quo, rem, addr, bit, result;
4907 preexpand_calls (exp);
4909 /* If domain is empty, answer is no. Likewise if index is constant
4910 and out of bounds. */
4911 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4912 && TREE_CODE (set_low_bound) == INTEGER_CST
4913 && tree_int_cst_lt (set_high_bound, set_low_bound)
4914 || (TREE_CODE (index) == INTEGER_CST
4915 && TREE_CODE (set_low_bound) == INTEGER_CST
4916 && tree_int_cst_lt (index, set_low_bound))
4917 || (TREE_CODE (set_high_bound) == INTEGER_CST
4918 && TREE_CODE (index) == INTEGER_CST
4919 && tree_int_cst_lt (set_high_bound, index))))
4923 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4925 /* If we get here, we have to generate the code for both cases
4926 (in range and out of range). */
4928 op0 = gen_label_rtx ();
4929 op1 = gen_label_rtx ();
4931 if (! (GET_CODE (index_val) == CONST_INT
4932 && GET_CODE (lo_r) == CONST_INT))
4934 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4935 GET_MODE (index_val), iunsignedp, 0);
4936 emit_jump_insn (gen_blt (op1));
4939 if (! (GET_CODE (index_val) == CONST_INT
4940 && GET_CODE (hi_r) == CONST_INT))
4942 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4943 GET_MODE (index_val), iunsignedp, 0);
4944 emit_jump_insn (gen_bgt (op1));
4947 /* Calculate the element number of bit zero in the first word of the set.  */
4949 if (GET_CODE (lo_r) == CONST_INT)
4950 rlow = GEN_INT (INTVAL (lo_r)
4951 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4953 rlow = expand_binop (index_mode, and_optab, lo_r,
4954 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4955 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4957 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4958 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4960 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4961 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4962 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4963 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4965 addr = memory_address (byte_mode,
4966 expand_binop (index_mode, add_optab, diff,
4967 setaddr, NULL_RTX, iunsignedp,
4970 /* Extract the bit we want to examine */
4971 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4972 gen_rtx (MEM, byte_mode, addr),
4973 make_tree (TREE_TYPE (index), rem),
4975 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4976 GET_MODE (target) == byte_mode ? target : 0,
4977 1, OPTAB_LIB_WIDEN);
4979 if (result != target)
4980 convert_move (target, result, 1);
4982 /* Output the code to handle the out-of-range case. */
4985 emit_move_insn (target, const0_rtx);
4990 case WITH_CLEANUP_EXPR:
4991 if (RTL_EXPR_RTL (exp) == 0)
4994 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4996 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4997 /* That's it for this cleanup. */
4998 TREE_OPERAND (exp, 2) = 0;
4999 (*interim_eh_hook) (NULL_TREE);
5001 return RTL_EXPR_RTL (exp);
5003 case CLEANUP_POINT_EXPR:
5005 extern int temp_slot_level;
5006 tree old_cleanups = cleanups_this_call;
5007 int old_temp_level = target_temp_slot_level;
5009 target_temp_slot_level = temp_slot_level;
5010 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5011 /* If we're going to use this value, load it up now. */
5013 op0 = force_not_mem (op0);
5014 expand_cleanups_to (old_cleanups);
5015 preserve_temp_slots (op0);
5018 target_temp_slot_level = old_temp_level;
5023 /* Check for a built-in function. */
5024 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5025 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5027 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5028 return expand_builtin (exp, target, subtarget, tmode, ignore);
5030 /* If this call was expanded already by preexpand_calls,
5031 just return the result we got. */
5032 if (CALL_EXPR_RTL (exp) != 0)
5033 return CALL_EXPR_RTL (exp);
5035 return expand_call (exp, target, ignore);
5037 case NON_LVALUE_EXPR:
5040 case REFERENCE_EXPR:
5041 if (TREE_CODE (type) == UNION_TYPE)
5043 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5046 if (mode == BLKmode)
5048 if (TYPE_SIZE (type) == 0
5049 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5051 target = assign_stack_temp (BLKmode,
5052 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5053 + BITS_PER_UNIT - 1)
5054 / BITS_PER_UNIT, 0);
5055 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5058 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5061 if (GET_CODE (target) == MEM)
5062 /* Store data into beginning of memory target. */
5063 store_expr (TREE_OPERAND (exp, 0),
5064 change_address (target, TYPE_MODE (valtype), 0), 0);
5066 else if (GET_CODE (target) == REG)
5067 /* Store this field into a union of the proper type. */
5068 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5069 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5071 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5075 /* Return the entire union. */
5079 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5081 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5084 /* If the signedness of the conversion differs and OP0 is
5085 a promoted SUBREG, clear that indication since we now
5086 have to do the proper extension. */
5087 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5088 && GET_CODE (op0) == SUBREG)
5089 SUBREG_PROMOTED_VAR_P (op0) = 0;
5094 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5095 if (GET_MODE (op0) == mode)
5098 /* If OP0 is a constant, just convert it into the proper mode. */
5099 if (CONSTANT_P (op0))
5101 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5102 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5104 if (modifier == EXPAND_INITIALIZER)
5105 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5107 if (flag_force_mem && GET_CODE (op0) == MEM)
5108 op0 = copy_to_reg (op0);
5112 convert_to_mode (mode, op0,
5113 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5115 convert_move (target, op0,
5116 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5120 /* We come here from MINUS_EXPR when the second operand is a constant. */
5122 this_optab = add_optab;
5124 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5125 something else, make sure we add the register to the constant and
5126 then to the other thing. This case can occur during strength
5127 reduction and doing it this way will produce better code if the
5128 frame pointer or argument pointer is eliminated.
5130 fold-const.c will ensure that the constant is always in the inner
5131 PLUS_EXPR, so the only case we need to do anything about is if
5132 sp, ap, or fp is our second argument, in which case we must swap
5133 the innermost first argument and our second argument. */
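/* For example, (x + 4) + fp is rearranged below into (fp + 4) + x, so
   that later elimination of the frame pointer can fold the register
   and the constant into a single displacement.  (Illustration of the
   swap performed by the code that follows.)  */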
5135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5136 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5137 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5138 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5139 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5140 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5142 tree t = TREE_OPERAND (exp, 1);
5144 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5145 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5148 /* If the result is to be ptr_mode and we are adding an integer to
5149 something, we might be forming a constant. So try to use
5150 plus_constant. If it produces a sum and we can't accept it,
5151 use force_operand. This allows P = &ARR[const] to generate
5152 efficient code on machines where a SYMBOL_REF is not a valid address.
5155 If this is an EXPAND_SUM call, always return the sum.  */
5156 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5157 || mode == ptr_mode)
5159 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5160 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5161 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5163 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5165 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5166 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5167 op1 = force_operand (op1, target);
5171 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5172 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5173 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5175 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5177 if (! CONSTANT_P (op0))
5179 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5180 VOIDmode, modifier);
5181 /* Don't go to both_summands if modifier
5182 says it's not right to return a PLUS. */
5183 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5187 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5188 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5189 op0 = force_operand (op0, target);
5194 /* No sense saving up arithmetic to be done
5195 if it's all in the wrong mode to form part of an address.
5196 And force_operand won't know whether to sign-extend or zero-extend.  */
5198 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5199 || mode != ptr_mode)
5202 preexpand_calls (exp);
5203 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5206 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5207 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5210 /* Make sure any term that's a sum with a constant comes last. */
5211 if (GET_CODE (op0) == PLUS
5212 && CONSTANT_P (XEXP (op0, 1)))
5218 /* If adding to a sum including a constant,
5219 associate it to put the constant outside. */
5220 if (GET_CODE (op1) == PLUS
5221 && CONSTANT_P (XEXP (op1, 1)))
5223 rtx constant_term = const0_rtx;
5225 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5228 /* Ensure that MULT comes first if there is one. */
5229 else if (GET_CODE (op0) == MULT)
5230 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5232 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5234 /* Let's also eliminate constants from op0 if possible. */
5235 op0 = eliminate_constant_term (op0, &constant_term);
5237 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5238 their sum should be a constant. Form it into OP1, since the
5239 result we want will then be OP0 + OP1. */
5241 temp = simplify_binary_operation (PLUS, mode, constant_term,
5246 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5249 /* Put a constant term last and put a multiplication first. */
5250 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5251 temp = op1, op1 = op0, op0 = temp;
5253 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5254 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5257 /* For initializers, we are allowed to return a MINUS of two
5258 symbolic constants.  Here we handle all cases when both operands are constant.  */
5260 /* Handle difference of two symbolic constants,
5261 for the sake of an initializer. */
5262 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5263 && really_constant_p (TREE_OPERAND (exp, 0))
5264 && really_constant_p (TREE_OPERAND (exp, 1)))
5266 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5267 VOIDmode, modifier);
5268 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5269 VOIDmode, modifier);
5271 /* If the last operand is a CONST_INT, use plus_constant of
5272 the negated constant. Else make the MINUS. */
5273 if (GET_CODE (op1) == CONST_INT)
5274 return plus_constant (op0, - INTVAL (op1));
5276 return gen_rtx (MINUS, mode, op0, op1);
5278 /* Convert A - const to A + (-const). */
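/* E.g. `i - 5' is handled as `i + (-5)', so the PLUS_EXPR code above
   gets a chance to form an efficient address.  */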
5279 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5281 tree negated = fold (build1 (NEGATE_EXPR, type,
5282 TREE_OPERAND (exp, 1)));
5284 /* Deal with the case where we can't negate the constant in TYPE.  */
5286 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5288 tree newtype = signed_type (type);
5289 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5290 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5291 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5293 if (! TREE_OVERFLOW (newneg))
5294 return expand_expr (convert (type,
5295 build (PLUS_EXPR, newtype,
5297 target, tmode, modifier);
5301 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5305 this_optab = sub_optab;
5309 preexpand_calls (exp);
5310 /* If first operand is constant, swap them.
5311 Thus the following special case checks need only
5312 check the second operand. */
5313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5315 register tree t1 = TREE_OPERAND (exp, 0);
5316 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5317 TREE_OPERAND (exp, 1) = t1;
5320 /* Attempt to return something suitable for generating an
5321 indexed address, for machines that support that. */
5323 if (modifier == EXPAND_SUM && mode == ptr_mode
5324 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5325 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5329 /* Apply distributive law if OP0 is x+c. */
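/* E.g. (X + 4) * 8 becomes X*8 + 32, the reg*scale + displacement
   shape that an address calculation wants.  */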
5330 if (GET_CODE (op0) == PLUS
5331 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5332 return gen_rtx (PLUS, mode,
5333 gen_rtx (MULT, mode, XEXP (op0, 0),
5334 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5335 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5336 * INTVAL (XEXP (op0, 1))));
5338 if (GET_CODE (op0) != REG)
5339 op0 = force_operand (op0, NULL_RTX);
5340 if (GET_CODE (op0) != REG)
5341 op0 = copy_to_mode_reg (mode, op0);
5343 return gen_rtx (MULT, mode, op0,
5344 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5347 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5350 /* Check for multiplying things that have been extended
5351 from a narrower type. If this machine supports multiplying
5352 in that narrower type with a result in the desired type,
5353 do it that way, and avoid the explicit type-conversion. */
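/* For example, the product of two values widened from `short' to `int'
   can use an HImode x HImode -> SImode multiply (smul_widen_optab or
   umul_widen_optab) when the target provides one, instead of widening
   both operands first.  (Sketch; which modes qualify depends on the
   target's available patterns.)  */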
5354 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5355 && TREE_CODE (type) == INTEGER_TYPE
5356 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5357 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5358 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5359 && int_fits_type_p (TREE_OPERAND (exp, 1),
5360 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5361 /* Don't use a widening multiply if a shift will do. */
5362 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5363 > HOST_BITS_PER_WIDE_INT)
5364 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5366 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5367 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5369 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5370 /* If both operands are extended, they must either both
5371 be zero-extended or both be sign-extended. */
5372 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5374 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5376 enum machine_mode innermode
5377 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5378 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5379 ? umul_widen_optab : smul_widen_optab);
5380 if (mode == GET_MODE_WIDER_MODE (innermode)
5381 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5383 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5384 NULL_RTX, VOIDmode, 0);
5385 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5386 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5389 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5390 NULL_RTX, VOIDmode, 0);
5394 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5395 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5396 return expand_mult (mode, op0, op1, target, unsignedp);
5398 case TRUNC_DIV_EXPR:
5399 case FLOOR_DIV_EXPR:
5401 case ROUND_DIV_EXPR:
5402 case EXACT_DIV_EXPR:
5403 preexpand_calls (exp);
5404 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5406 /* Possible optimization: compute the dividend with EXPAND_SUM
5407 then if the divisor is constant can optimize the case
5408 where some terms of the dividend have coeffs divisible by it. */
5409 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5410 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5411 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5414 this_optab = flodiv_optab;
5417 case TRUNC_MOD_EXPR:
5418 case FLOOR_MOD_EXPR:
5420 case ROUND_MOD_EXPR:
5421 preexpand_calls (exp);
5422 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5424 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5425 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5426 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5428 case FIX_ROUND_EXPR:
5429 case FIX_FLOOR_EXPR:
5431 abort (); /* Not used for C. */
5433 case FIX_TRUNC_EXPR:
5434 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5436 target = gen_reg_rtx (mode);
5437 expand_fix (target, op0, unsignedp);
5441 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5443 target = gen_reg_rtx (mode);
5444 /* expand_float can't figure out what to do if FROM has VOIDmode.
5445 So give it the correct mode. With -O, cse will optimize this. */
5446 if (GET_MODE (op0) == VOIDmode)
5447 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5449 expand_float (target, op0,
5450 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5454 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5455 temp = expand_unop (mode, neg_optab, op0, target, 0);
5461 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5463 /* Handle complex values specially. */
5464 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5465 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5466 return expand_complex_abs (mode, op0, target, unsignedp);
5468 /* Unsigned abs is simply the operand. Testing here means we don't
5469 risk generating incorrect code below. */
5470 if (TREE_UNSIGNED (type))
5473 return expand_abs (mode, op0, target, unsignedp,
5474 safe_from_p (target, TREE_OPERAND (exp, 0)));
5478 target = original_target;
5479 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5480 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5481 || GET_MODE (target) != mode
5482 || (GET_CODE (target) == REG
5483 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5484 target = gen_reg_rtx (mode);
5485 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5486 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5488 /* First try to do it with a special MIN or MAX instruction.
5489 If that does not win, use a conditional jump to select the proper value.  */
5491 this_optab = (TREE_UNSIGNED (type)
5492 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5493 : (code == MIN_EXPR ? smin_optab : smax_optab));
5495 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5500 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
5503 if (GET_CODE (target) == MEM)
5504 target = gen_reg_rtx (mode);
5507 emit_move_insn (target, op0);
5509 op0 = gen_label_rtx ();
5511 /* If this mode is an integer too wide to compare properly,
5512 compare word by word. Rely on cse to optimize constant cases. */
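/* E.g. a DImode MIN/MAX on a 32-bit target typically has no single
   compare insn, so do_jump_by_parts_greater_rtx compares the operands
   one word at a time.  (Example only; can_compare_p decides whether
   this path is taken.)  */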
5513 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5515 if (code == MAX_EXPR)
5516 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5517 target, op1, NULL_RTX, op0);
5519 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5520 op1, target, NULL_RTX, op0);
5521 emit_move_insn (target, op1);
5525 if (code == MAX_EXPR)
5526 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5527 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5528 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5530 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5531 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5532 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5533 if (temp == const0_rtx)
5534 emit_move_insn (target, op1);
5535 else if (temp != const_true_rtx)
5537 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5538 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5541 emit_move_insn (target, op1);
5548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5549 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5555 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5556 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5561 /* ??? Can optimize bitwise operations with one arg constant.
5562 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5563 and (a bitwise1 b) bitwise2 b (etc)
5564 but that is probably not worth while. */
5566 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5567 boolean values when we want in all cases to compute both of them. In
5568 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5569 as actual zero-or-1 values and then bitwise anding. In cases where
5570 there cannot be any side effects, better code would be made by
5571 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5572 how to recognize those cases. */
5574 case TRUTH_AND_EXPR:
5576 this_optab = and_optab;
5581 this_optab = ior_optab;
5584 case TRUTH_XOR_EXPR:
5586 this_optab = xor_optab;
5593 preexpand_calls (exp);
5594 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5596 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5597 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5600 /* Could determine the answer when only additive constants differ. Also,
5601 the addition of one can be handled by changing the condition.  */
5608 preexpand_calls (exp);
5609 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5613 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5614 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5616 && GET_CODE (original_target) == REG
5617 && (GET_MODE (original_target)
5618 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5620 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5623 if (temp != original_target)
5624 temp = copy_to_reg (temp);
5626 op1 = gen_label_rtx ();
5627 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5628 GET_MODE (temp), unsignedp, 0);
5629 emit_jump_insn (gen_beq (op1));
5630 emit_move_insn (temp, const1_rtx);
5635 /* If no set-flag instruction, must generate a conditional
5636 store into a temporary variable. Drop through
5637 and handle this like && and ||. */
5639 case TRUTH_ANDIF_EXPR:
5640 case TRUTH_ORIF_EXPR:
5642 && (target == 0 || ! safe_from_p (target, exp)
5643 /* Make sure we don't have a hard reg (such as function's return
5644 value) live across basic blocks, if not optimizing. */
5645 || (!optimize && GET_CODE (target) == REG
5646 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5647 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5650 emit_clr_insn (target);
5652 op1 = gen_label_rtx ();
5653 jumpifnot (exp, op1);
5656 emit_0_to_1_insn (target);
5659 return ignore ? const0_rtx : target;
5661 case TRUTH_NOT_EXPR:
5662 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5663 /* The parser is careful to generate TRUTH_NOT_EXPR
5664 only with operands that are always zero or one. */
5665 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5666 target, 1, OPTAB_LIB_WIDEN);
5672 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5674 return expand_expr (TREE_OPERAND (exp, 1),
5675 (ignore ? const0_rtx : target),
5680 rtx flag = NULL_RTX;
5681 tree left_cleanups = NULL_TREE;
5682 tree right_cleanups = NULL_TREE;
5684 /* Used to save a pointer to the place to put the setting of
5685 the flag that indicates if this side of the conditional was
5686 taken. We backpatch the code, if we find out later that we
5687 have any conditional cleanups that need to be performed. */
5688 rtx dest_right_flag = NULL_RTX;
5689 rtx dest_left_flag = NULL_RTX;
5691 /* Note that COND_EXPRs whose type is a structure or union
5692 are required to be constructed to contain assignments of
5693 a temporary variable, so that we can evaluate them here
5694 for side effect only. If type is void, we must do likewise. */
5696 /* If an arm of the branch requires a cleanup,
5697 only that cleanup is performed. */
5700 tree binary_op = 0, unary_op = 0;
5701 tree old_cleanups = cleanups_this_call;
5703 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5704 convert it to our mode, if necessary. */
5705 if (integer_onep (TREE_OPERAND (exp, 1))
5706 && integer_zerop (TREE_OPERAND (exp, 2))
5707 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5711 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5716 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5717 if (GET_MODE (op0) == mode)
5721 target = gen_reg_rtx (mode);
5722 convert_move (target, op0, unsignedp);
5726 /* If we are not to produce a result, we have no target. Otherwise,
5727 if a target was specified use it; it will not be used as an
5728 intermediate target unless it is safe.  If no target, use a temporary.  */
5733 else if (original_target
5734 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5735 && GET_MODE (original_target) == mode
5736 && ! (GET_CODE (original_target) == MEM
5737 && MEM_VOLATILE_P (original_target)))
5738 temp = original_target;
5739 else if (mode == BLKmode)
5741 if (TYPE_SIZE (type) == 0
5742 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5745 temp = assign_stack_temp (BLKmode,
5746 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5747 + BITS_PER_UNIT - 1)
5748 / BITS_PER_UNIT, 0);
5749 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5752 temp = gen_reg_rtx (mode);
5754 /* Check for X ? A + B : A. If we have this, we can copy
5755 A to the output and conditionally add B. Similarly for unary
5756 operations. Don't do this if X has side-effects because
5757 those side effects might affect A or B and the "?" operation is
5758 a sequence point in ANSI. (We test for side effects later.) */
5760 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5761 && operand_equal_p (TREE_OPERAND (exp, 2),
5762 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5763 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5764 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5765 && operand_equal_p (TREE_OPERAND (exp, 1),
5766 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5767 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5768 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5769 && operand_equal_p (TREE_OPERAND (exp, 2),
5770 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5771 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5772 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5773 && operand_equal_p (TREE_OPERAND (exp, 1),
5774 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5775 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5777 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5778 operation, do this as A + (X != 0). Similarly for other simple
5779 binary operators. */
5780 if (temp && singleton && binary_op
5781 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5782 && (TREE_CODE (binary_op) == PLUS_EXPR
5783 || TREE_CODE (binary_op) == MINUS_EXPR
5784 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5785 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5786 && integer_onep (TREE_OPERAND (binary_op, 1))
5787 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5790 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5791 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5792 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5795 /* If we had X ? A : A + 1, do this as A + (X == 0).
5797 We have to invert the truth value here and then put it
5798 back later if do_store_flag fails. We cannot simply copy
5799 TREE_OPERAND (exp, 0) to another variable and modify that
5800 because invert_truthvalue can modify the tree pointed to by its argument.  */
5802 if (singleton == TREE_OPERAND (exp, 1))
5803 TREE_OPERAND (exp, 0)
5804 = invert_truthvalue (TREE_OPERAND (exp, 0));
5806 result = do_store_flag (TREE_OPERAND (exp, 0),
5807 (safe_from_p (temp, singleton)
5809 mode, BRANCH_COST <= 1);
5813 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5814 return expand_binop (mode, boptab, op1, result, temp,
5815 unsignedp, OPTAB_LIB_WIDEN);
5817 else if (singleton == TREE_OPERAND (exp, 1))
5818 TREE_OPERAND (exp, 0)
5819 = invert_truthvalue (TREE_OPERAND (exp, 0));
5823 op0 = gen_label_rtx ();
5825 flag = gen_reg_rtx (word_mode);
5826 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5830 /* If the target conflicts with the other operand of the
5831 binary op, we can't use it. Also, we can't use the target
5832 if it is a hard register, because evaluating the condition
5833 might clobber it. */
5835 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5836 || (GET_CODE (temp) == REG
5837 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5838 temp = gen_reg_rtx (mode);
5839 store_expr (singleton, temp, 0);
5842 expand_expr (singleton,
5843 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5844 dest_left_flag = get_last_insn ();
5845 if (singleton == TREE_OPERAND (exp, 1))
5846 jumpif (TREE_OPERAND (exp, 0), op0);
5848 jumpifnot (TREE_OPERAND (exp, 0), op0);
5850 /* Allows cleanups up to here. */
5851 old_cleanups = cleanups_this_call;
5852 if (binary_op && temp == 0)
5853 /* Just touch the other operand. */
5854 expand_expr (TREE_OPERAND (binary_op, 1),
5855 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5857 store_expr (build (TREE_CODE (binary_op), type,
5858 make_tree (type, temp),
5859 TREE_OPERAND (binary_op, 1)),
5862 store_expr (build1 (TREE_CODE (unary_op), type,
5863 make_tree (type, temp)),
5866 dest_right_flag = get_last_insn ();
5869 /* This is now done in jump.c and is better done there because it
5870 produces shorter register lifetimes. */
5872 /* Check for both possibilities either constants or variables
5873 in registers (but not the same as the target!). If so, can
5874 save branches by assigning one, branching, and assigning the other.  */
5876 else if (temp && GET_MODE (temp) != BLKmode
5877 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5878 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5879 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5880 && DECL_RTL (TREE_OPERAND (exp, 1))
5881 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5882 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5883 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5884 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5885 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5886 && DECL_RTL (TREE_OPERAND (exp, 2))
5887 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5888 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5890 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5891 temp = gen_reg_rtx (mode);
5892 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5893 dest_left_flag = get_last_insn ();
5894 jumpifnot (TREE_OPERAND (exp, 0), op0);
5896 /* Allows cleanups up to here. */
5897 old_cleanups = cleanups_this_call;
5898 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5900 dest_right_flag = get_last_insn ();
5903 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5904 comparison operator. If we have one of these cases, set the
5905 output to A, branch on A (cse will merge these two references),
5906 then set the output to FOO. */
5908 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5909 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5910 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5911 TREE_OPERAND (exp, 1), 0)
5912 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5913 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5915 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5916 temp = gen_reg_rtx (mode);
5917 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5918 dest_left_flag = get_last_insn ();
5919 jumpif (TREE_OPERAND (exp, 0), op0);
5921 /* Allows cleanups up to here. */
5922 old_cleanups = cleanups_this_call;
5923 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5925 dest_right_flag = get_last_insn ();
5928 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5929 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5930 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5931 TREE_OPERAND (exp, 2), 0)
5932 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5933 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5935 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5936 temp = gen_reg_rtx (mode);
5937 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5938 dest_left_flag = get_last_insn ();
5939 jumpifnot (TREE_OPERAND (exp, 0), op0);
5941 /* Allows cleanups up to here. */
5942 old_cleanups = cleanups_this_call;
5943 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5945 dest_right_flag = get_last_insn ();
5949 op1 = gen_label_rtx ();
5950 jumpifnot (TREE_OPERAND (exp, 0), op0);
5952 /* Allows cleanups up to here. */
5953 old_cleanups = cleanups_this_call;
5955 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5957 expand_expr (TREE_OPERAND (exp, 1),
5958 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5959 dest_left_flag = get_last_insn ();
5961 /* Handle conditional cleanups, if any. */
5962 left_cleanups = defer_cleanups_to (old_cleanups);
5965 emit_jump_insn (gen_jump (op1));
5969 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5971 expand_expr (TREE_OPERAND (exp, 2),
5972 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5973 dest_right_flag = get_last_insn ();
5976 /* Handle conditional cleanups, if any. */
5977 right_cleanups = defer_cleanups_to (old_cleanups);
5983 /* Add back in any conditional cleanups.  */
5984 if (left_cleanups || right_cleanups)
5990 /* Now that we know that a flag is needed, go back and add in the
5991 setting of the flag. */
5993 /* Do the left side flag. */
5994 last = get_last_insn ();
5995 /* Flag left cleanups as needed. */
5996 emit_move_insn (flag, const1_rtx);
5997 /* ??? deprecated, use sequences instead. */
5998 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6000 /* Do the right side flag. */
6001 last = get_last_insn ();
6002 /* Flag right cleanups as needed.  */
6003 emit_move_insn (flag, const0_rtx);
6004 /* ??? deprecated, use sequences instead. */
6005 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6007 /* convert flag, which is an rtx, into a tree. */
6008 cond = make_node (RTL_EXPR);
6009 TREE_TYPE (cond) = integer_type_node;
6010 RTL_EXPR_RTL (cond) = flag;
6011 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6013 if (! left_cleanups)
6014 left_cleanups = integer_zero_node;
6015 if (! right_cleanups)
6016 right_cleanups = integer_zero_node;
6017 new_cleanups = build (COND_EXPR, void_type_node,
6018 truthvalue_conversion (cond),
6019 left_cleanups, right_cleanups);
6020 new_cleanups = fold (new_cleanups);
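/* The tree built above amounts to `flag ? <left cleanups> : <right
   cleanups>', so at run time only the cleanups belonging to the arm
   that was actually taken are performed.  */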
6022 /* Now add in the conditionalized cleanups. */
6024 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6025 (*interim_eh_hook) (NULL_TREE);
6032 int need_exception_region = 0;
6033 /* Something needs to be initialized, but we didn't know
6034 where that thing was when building the tree. For example,
6035 it could be the return value of a function, or a parameter
6036 to a function which is laid down on the stack, or a temporary
6037 variable which must be passed by reference.
6039 We guarantee that the expression will either be constructed
6040 or copied into our original target. */
6042 tree slot = TREE_OPERAND (exp, 0);
6046 if (TREE_CODE (slot) != VAR_DECL)
6051 if (DECL_RTL (slot) != 0)
6053 target = DECL_RTL (slot);
6054 /* If we have already expanded the slot, don't do it again. */
6056 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6061 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6062 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6063 /* All temp slots at this level must not conflict. */
6064 preserve_temp_slots (target);
6065 DECL_RTL (slot) = target;
6067 /* Since SLOT is not known to the called function
6068 to belong to its stack frame, we must build an explicit
6069 cleanup. This case occurs when we must build up a reference
6070 to pass the reference as an argument. In this case,
6071 it is very likely that such a reference need not be built here. */
6074 if (TREE_OPERAND (exp, 2) == 0)
6075 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6076 if (TREE_OPERAND (exp, 2))
6078 cleanups_this_call = tree_cons (NULL_TREE,
6079 TREE_OPERAND (exp, 2),
6080 cleanups_this_call);
6081 need_exception_region = 1;
6087 /* This case does occur when expanding a parameter which
6088 needs to be constructed on the stack. The target
6089 is the actual stack address that we want to initialize.
6090 The function we call will perform the cleanup in this case. */
6092 /* If we have already assigned it space, use that space,
6093 not the target that we were passed in, as our target
6094 parameter is only a hint. */
6095 if (DECL_RTL (slot) != 0)
6097 target = DECL_RTL (slot);
6098 /* If we have already expanded the slot, don't do it again. */
6100 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6104 DECL_RTL (slot) = target;
6107 exp1 = TREE_OPERAND (exp, 1);
6108 /* Mark it as expanded. */
6109 TREE_OPERAND (exp, 1) = NULL_TREE;
6111 temp = expand_expr (exp1, target, tmode, modifier);
6113 if (need_exception_region)
6114 (*interim_eh_hook) (NULL_TREE);
6121 tree lhs = TREE_OPERAND (exp, 0);
6122 tree rhs = TREE_OPERAND (exp, 1);
6123 tree noncopied_parts = 0;
6124 tree lhs_type = TREE_TYPE (lhs);
6126 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6127 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6128 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6129 TYPE_NONCOPIED_PARTS (lhs_type));
6130 while (noncopied_parts != 0)
6132 expand_assignment (TREE_VALUE (noncopied_parts),
6133 TREE_PURPOSE (noncopied_parts), 0, 0);
6134 noncopied_parts = TREE_CHAIN (noncopied_parts);
6141 /* If lhs is complex, expand calls in rhs before computing it.
6142 That's so we don't compute a pointer and save it over a call.
6143 If lhs is simple, compute it first so we can give it as a
6144 target if the rhs is just a call. This avoids an extra temp and copy
6145 and that prevents a partial-subsumption which makes bad code.
6146 Actually we could treat component_ref's of vars like vars. */
6148 tree lhs = TREE_OPERAND (exp, 0);
6149 tree rhs = TREE_OPERAND (exp, 1);
6150 tree noncopied_parts = 0;
6151 tree lhs_type = TREE_TYPE (lhs);
6155 if (TREE_CODE (lhs) != VAR_DECL
6156 && TREE_CODE (lhs) != RESULT_DECL
6157 && TREE_CODE (lhs) != PARM_DECL)
6158 preexpand_calls (exp);
6160 /* Check for |= or &= of a bitfield of size one into another bitfield
6161 of size 1. In this case, (unless we need the result of the
6162 assignment) we can do this more efficiently with a
6163 test followed by an assignment, if necessary.
6165 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6166 things change so we do, this code should be enhanced to support it. */
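/* Illustrative sketch (user-level source assumed for exposition, not
   code from this file): given

       struct s { unsigned a : 1, b : 1; } x;

   the transformation below turns `x.a |= x.b' into roughly

       if (x.b)
         x.a = 1;

   and `x.a &= x.b' into roughly

       if (! x.b)
         x.a = 0;

   replacing a read-modify-write of the destination bit-field with a
   test and, only when necessary, a store of a constant.  */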
6169 && TREE_CODE (lhs) == COMPONENT_REF
6170 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6171 || TREE_CODE (rhs) == BIT_AND_EXPR)
6172 && TREE_OPERAND (rhs, 0) == lhs
6173 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6174 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6175 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6177 rtx label = gen_label_rtx ();
6179 do_jump (TREE_OPERAND (rhs, 1),
6180 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6181 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6182 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6183 (TREE_CODE (rhs) == BIT_IOR_EXPR
6185 : integer_zero_node)),
6187 do_pending_stack_adjust ();
6192 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6193 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6194 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6195 TYPE_NONCOPIED_PARTS (lhs_type));
6197 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6198 while (noncopied_parts != 0)
6200 expand_assignment (TREE_PURPOSE (noncopied_parts),
6201 TREE_VALUE (noncopied_parts), 0, 0);
6202 noncopied_parts = TREE_CHAIN (noncopied_parts);
6207 case PREINCREMENT_EXPR:
6208 case PREDECREMENT_EXPR:
6209 return expand_increment (exp, 0);
6211 case POSTINCREMENT_EXPR:
6212 case POSTDECREMENT_EXPR:
6213 /* Faster to treat as pre-increment if result is not used. */
6214 return expand_increment (exp, ! ignore);
6217 /* If nonzero, TEMP will be set to the address of something that might
6218 be a MEM corresponding to a stack slot. */
6221 /* Are we taking the address of a nested function? */
6222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6223 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6225 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6226 op0 = force_operand (op0, target);
6228 /* If we are taking the address of something erroneous, just return zero. */
6230 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6234 /* We make sure to pass const0_rtx down if we came in with
6235 ignore set, to avoid doing the cleanups twice for something. */
6236 op0 = expand_expr (TREE_OPERAND (exp, 0),
6237 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6238 (modifier == EXPAND_INITIALIZER
6239 ? modifier : EXPAND_CONST_ADDRESS));
6241 /* If we are going to ignore the result, OP0 will have been set
6242 to const0_rtx, so just return it. Don't get confused and
6243 think we are taking the address of the constant. */
6247 /* We would like the object in memory. If it is a constant,
6248 we can have it be statically allocated into memory. For
6249 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6250 memory and store the value into it. */
6252 if (CONSTANT_P (op0))
6253 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6255 else if (GET_CODE (op0) == MEM)
6257 mark_temp_addr_taken (op0);
6258 temp = XEXP (op0, 0);
6261 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6262 || GET_CODE (op0) == CONCAT)
6264 /* If this object is in a register, it must not stay there; copy it to a stack temporary so its address can be taken. */
6266 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6267 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6269 = assign_stack_temp (inner_mode,
6270 int_size_in_bytes (inner_type), 1);
6271 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6273 mark_temp_addr_taken (memloc);
6274 emit_move_insn (memloc, op0);
6278 if (GET_CODE (op0) != MEM)
6281 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6283 temp = XEXP (op0, 0);
6284 #ifdef POINTERS_EXTEND_UNSIGNED
6285 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6286 && mode == ptr_mode)
6287 temp = convert_memory_address (ptr_mode, temp);
6292 op0 = force_operand (XEXP (op0, 0), target);
6295 if (flag_force_addr && GET_CODE (op0) != REG)
6296 op0 = force_reg (Pmode, op0);
6298 if (GET_CODE (op0) == REG)
6299 mark_reg_pointer (op0);
6301 /* If we might have had a temp slot, add an equivalent address for it. */
6304 update_temp_slot_address (temp, op0);
6306 #ifdef POINTERS_EXTEND_UNSIGNED
6307 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6308 && mode == ptr_mode)
6309 op0 = convert_memory_address (ptr_mode, op0);
6314 case ENTRY_VALUE_EXPR:
6317 /* COMPLEX type for Extended Pascal & Fortran */
6320 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6323 /* Get the rtx code of the operands. */
6324 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6325 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6328 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6332 /* Move the real (op0) and imaginary (op1) parts to their location. */
6333 emit_move_insn (gen_realpart (mode, target), op0);
6334 emit_move_insn (gen_imagpart (mode, target), op1);
6336 insns = get_insns ();
6339 /* Complex construction should appear as a single unit. */
6340 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6341 each with a separate pseudo as destination.
6342 It's not correct for flow to treat them as a unit. */
6343 if (GET_CODE (target) != CONCAT)
6344 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6353 return gen_realpart (mode, op0);
6356 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6357 return gen_imagpart (mode, op0);
6361 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6365 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6368 target = gen_reg_rtx (mode);
6372 /* Store the realpart and the negated imagpart to target. */
6373 emit_move_insn (gen_realpart (partmode, target),
6374 gen_realpart (partmode, op0));
6376 imag_t = gen_imagpart (partmode, target);
6377 temp = expand_unop (partmode, neg_optab,
6378 gen_imagpart (partmode, op0), imag_t, 0);
6380 emit_move_insn (imag_t, temp);
6382 insns = get_insns ();
6385 /* Conjugate should appear as a single unit.
6386 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6387 each with a separate pseudo as destination.
6388 It's not correct for flow to treat them as a unit. */
6389 if (GET_CODE (target) != CONCAT)
6390 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6398 op0 = CONST0_RTX (tmode);
6404 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6407 /* Here to do an ordinary binary operator, generating an instruction
6408 from the optab already placed in `this_optab'. */
6410 preexpand_calls (exp);
6411 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6413 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6414 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6416 temp = expand_binop (mode, this_optab, op0, op1, target,
6417 unsignedp, OPTAB_LIB_WIDEN);
6424 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6426 bc_expand_expr (exp)
6429 enum tree_code code;
6432 struct binary_operator *binoptab;
6433 struct unary_operator *unoptab;
6434 struct increment_operator *incroptab;
6435 struct bc_label *lab, *lab1;
6436 enum bytecode_opcode opcode;
6439 code = TREE_CODE (exp);
6445 if (DECL_RTL (exp) == 0)
6447 error_with_decl (exp, "prior parameter's size depends on `%s'");
6451 bc_load_parmaddr (DECL_RTL (exp));
6452 bc_load_memory (TREE_TYPE (exp), exp);
6458 if (DECL_RTL (exp) == 0)
6462 if (BYTECODE_LABEL (DECL_RTL (exp)))
6463 bc_load_externaddr (DECL_RTL (exp));
6465 bc_load_localaddr (DECL_RTL (exp));
6467 if (TREE_PUBLIC (exp))
6468 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6469 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6471 bc_load_localaddr (DECL_RTL (exp));
6473 bc_load_memory (TREE_TYPE (exp), exp);
6478 #ifdef DEBUG_PRINT_CODE
6479 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6481 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6483 : TYPE_MODE (TREE_TYPE (exp)))],
6484 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6490 #ifdef DEBUG_PRINT_CODE
6491 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
6493 /* FIX THIS: find a better way to pass real_cst's. -bson */
6494 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6495 (double) TREE_REAL_CST (exp));
6504 /* We build a call description vector describing the type of
6505 the return value and of the arguments; this call vector,
6506 together with a pointer to a location for the return value
6507 and the base of the argument list, is passed to the low
6508 level machine dependent call subroutine, which is responsible
6509 for putting the arguments wherever real functions expect
6510 them, as well as getting the return value back. */
6512 tree calldesc = 0, arg;
6516 /* Push the evaluated args on the evaluation stack in reverse
6517 order. Also make an entry for each arg in the calldesc
6518 vector while we're at it. */
6520 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6522 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6525 bc_expand_expr (TREE_VALUE (arg));
6527 calldesc = tree_cons ((tree) 0,
6528 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6530 calldesc = tree_cons ((tree) 0,
6531 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6535 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6537 /* Allocate a location for the return value and push its
6538 address on the evaluation stack. Also make an entry
6539 at the front of the calldesc for the return value type. */
6541 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6542 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6543 bc_load_localaddr (retval);
6545 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6546 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6548 /* Prepend the argument count. */
6549 calldesc = tree_cons ((tree) 0,
6550 build_int_2 (nargs, 0),
6553 /* Push the address of the call description vector on the stack. */
6554 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6555 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6556 build_index_type (build_int_2 (nargs * 2, 0)));
6557 r = output_constant_def (calldesc);
6558 bc_load_externaddr (r);
6560 /* Push the address of the function to be called. */
6561 bc_expand_expr (TREE_OPERAND (exp, 0));
6563 /* Call the function, popping its address and the calldesc vector
6564 address off the evaluation stack in the process. */
6565 bc_emit_instruction (call);
6567 /* Pop the arguments off the stack. */
6568 bc_adjust_stack (nargs);
6570 /* Load the return value onto the stack. */
6571 bc_load_localaddr (retval);
6572 bc_load_memory (type, TREE_OPERAND (exp, 0));
6578 if (!SAVE_EXPR_RTL (exp))
6580 /* First time around: copy to local variable */
6581 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6582 TYPE_ALIGN (TREE_TYPE(exp)));
6583 bc_expand_expr (TREE_OPERAND (exp, 0));
6584 bc_emit_instruction (duplicate);
6586 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6587 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6591 /* Consecutive reference: use saved copy */
6592 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6593 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6598 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6599 how are they handled instead? */
6602 TREE_USED (exp) = 1;
6603 bc_expand_expr (STMT_BODY (exp));
6610 bc_expand_expr (TREE_OPERAND (exp, 0));
6611 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6616 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6621 bc_expand_address (TREE_OPERAND (exp, 0));
6626 bc_expand_expr (TREE_OPERAND (exp, 0));
6627 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6632 bc_expand_expr (bc_canonicalize_array_ref (exp));
6637 bc_expand_component_address (exp);
6639 /* If we have a bitfield, generate a proper load */
6640 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6645 bc_expand_expr (TREE_OPERAND (exp, 0));
6646 bc_emit_instruction (drop);
6647 bc_expand_expr (TREE_OPERAND (exp, 1));
6652 bc_expand_expr (TREE_OPERAND (exp, 0));
6653 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6654 lab = bc_get_bytecode_label ();
6655 bc_emit_bytecode (xjumpifnot);
6656 bc_emit_bytecode_labelref (lab);
6658 #ifdef DEBUG_PRINT_CODE
6659 fputc ('\n', stderr);
6661 bc_expand_expr (TREE_OPERAND (exp, 1));
6662 lab1 = bc_get_bytecode_label ();
6663 bc_emit_bytecode (jump);
6664 bc_emit_bytecode_labelref (lab1);
6666 #ifdef DEBUG_PRINT_CODE
6667 fputc ('\n', stderr);
6670 bc_emit_bytecode_labeldef (lab);
6671 bc_expand_expr (TREE_OPERAND (exp, 2));
6672 bc_emit_bytecode_labeldef (lab1);
6675 case TRUTH_ANDIF_EXPR:
6677 opcode = xjumpifnot;
6680 case TRUTH_ORIF_EXPR:
6687 binoptab = optab_plus_expr;
6692 binoptab = optab_minus_expr;
6697 binoptab = optab_mult_expr;
6700 case TRUNC_DIV_EXPR:
6701 case FLOOR_DIV_EXPR:
6703 case ROUND_DIV_EXPR:
6704 case EXACT_DIV_EXPR:
6706 binoptab = optab_trunc_div_expr;
6709 case TRUNC_MOD_EXPR:
6710 case FLOOR_MOD_EXPR:
6712 case ROUND_MOD_EXPR:
6714 binoptab = optab_trunc_mod_expr;
6717 case FIX_ROUND_EXPR:
6718 case FIX_FLOOR_EXPR:
6720 abort (); /* Not used for C. */
6722 case FIX_TRUNC_EXPR:
6729 abort (); /* FIXME */
6733 binoptab = optab_rdiv_expr;
6738 binoptab = optab_bit_and_expr;
6743 binoptab = optab_bit_ior_expr;
6748 binoptab = optab_bit_xor_expr;
6753 binoptab = optab_lshift_expr;
6758 binoptab = optab_rshift_expr;
6761 case TRUTH_AND_EXPR:
6763 binoptab = optab_truth_and_expr;
6768 binoptab = optab_truth_or_expr;
6773 binoptab = optab_lt_expr;
6778 binoptab = optab_le_expr;
6783 binoptab = optab_ge_expr;
6788 binoptab = optab_gt_expr;
6793 binoptab = optab_eq_expr;
6798 binoptab = optab_ne_expr;
6803 unoptab = optab_negate_expr;
6808 unoptab = optab_bit_not_expr;
6811 case TRUTH_NOT_EXPR:
6813 unoptab = optab_truth_not_expr;
6816 case PREDECREMENT_EXPR:
6818 incroptab = optab_predecrement_expr;
6821 case PREINCREMENT_EXPR:
6823 incroptab = optab_preincrement_expr;
6826 case POSTDECREMENT_EXPR:
6828 incroptab = optab_postdecrement_expr;
6831 case POSTINCREMENT_EXPR:
6833 incroptab = optab_postincrement_expr;
6838 bc_expand_constructor (exp);
6848 tree vars = TREE_OPERAND (exp, 0);
6849 int vars_need_expansion = 0;
6851 /* Need to open a binding contour here because
6852 if there are any cleanups they must be contained here. */
6853 expand_start_bindings (0);
6855 /* Mark the corresponding BLOCK for output. */
6856 if (TREE_OPERAND (exp, 2) != 0)
6857 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6859 /* If VARS have not yet been expanded, expand them now. */
6862 if (DECL_RTL (vars) == 0)
6864 vars_need_expansion = 1;
6867 expand_decl_init (vars);
6868 vars = TREE_CHAIN (vars);
6871 bc_expand_expr (TREE_OPERAND (exp, 1));
6873 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6883 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6884 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6890 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6896 bc_expand_expr (TREE_OPERAND (exp, 0));
6897 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6898 lab = bc_get_bytecode_label ();
6900 bc_emit_instruction (duplicate);
6901 bc_emit_bytecode (opcode);
6902 bc_emit_bytecode_labelref (lab);
6904 #ifdef DEBUG_PRINT_CODE
6905 fputc ('\n', stderr);
6908 bc_emit_instruction (drop);
6910 bc_expand_expr (TREE_OPERAND (exp, 1));
6911 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6912 bc_emit_bytecode_labeldef (lab);
6918 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6920 /* Push the quantum. */
6921 bc_expand_expr (TREE_OPERAND (exp, 1));
6923 /* Convert it to the lvalue's type. */
6924 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6926 /* Push the address of the lvalue */
6927 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6929 /* Perform actual increment */
6930 bc_expand_increment (incroptab, type);
6934 /* Return the alignment in bits of EXP, a pointer valued expression.
6935 But don't return more than MAX_ALIGN no matter what.
6936 The alignment returned is, by default, the alignment of the thing that
6937 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6939 Otherwise, look at the expression to see if we can do better, i.e., if the
6940 expression is actually pointing at an object whose alignment is tighter. */
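/* For instance, the builtin expanders later in this file use

       get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT

   to obtain a byte alignment for block moves, and treat a result of
   zero (the argument is not of pointer type) as "expand via a library
   call instead".  */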
6943 get_pointer_alignment (exp, max_align)
6947 unsigned align, inner;
6949 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6952 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6953 align = MIN (align, max_align);
6957 switch (TREE_CODE (exp))
6961 case NON_LVALUE_EXPR:
6962 exp = TREE_OPERAND (exp, 0);
6963 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6965 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6966 align = MIN (inner, max_align);
6970 /* If sum of pointer + int, restrict our maximum alignment to that
6971 imposed by the integer. If not, we can't do any better than
6973 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6976 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6981 exp = TREE_OPERAND (exp, 0);
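/* Worked example (assuming BITS_PER_UNIT is 8): for `p + 6' with
   MAX_ALIGN of 64 bits, the offset contributes 48 bits; the loop above
   halves MAX_ALIGN to 32, then to 16, where 48 & 15 == 0, so at most
   16-bit (2-byte) alignment survives the addition.  */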
6985 /* See what we are pointing at and look at its alignment. */
6986 exp = TREE_OPERAND (exp, 0);
6987 if (TREE_CODE (exp) == FUNCTION_DECL)
6988 align = FUNCTION_BOUNDARY;
6989 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6990 align = DECL_ALIGN (exp);
6991 #ifdef CONSTANT_ALIGNMENT
6992 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6993 align = CONSTANT_ALIGNMENT (exp, align);
6995 return MIN (align, max_align);
7003 /* Return the tree node and offset if a given argument corresponds to
7004 a string constant. */
7007 string_constant (arg, ptr_offset)
7013 if (TREE_CODE (arg) == ADDR_EXPR
7014 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7016 *ptr_offset = integer_zero_node;
7017 return TREE_OPERAND (arg, 0);
7019 else if (TREE_CODE (arg) == PLUS_EXPR)
7021 tree arg0 = TREE_OPERAND (arg, 0);
7022 tree arg1 = TREE_OPERAND (arg, 1);
7027 if (TREE_CODE (arg0) == ADDR_EXPR
7028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7031 return TREE_OPERAND (arg0, 0);
7033 else if (TREE_CODE (arg1) == ADDR_EXPR
7034 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7037 return TREE_OPERAND (arg1, 0);
7044 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7045 way, because it could contain a zero byte in the middle.
7046 TREE_STRING_LENGTH is the size of the character array, not the string.
7048 Unfortunately, string_constant can't access the values of const char
7049 arrays with initializers, so neither can we do so here. */
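/* Worked example (illustrative): for an argument equivalent to
   "foobar" + 2, string_constant below returns the STRING_CST "foobar"
   with an offset of 2, and the result is size_int (strlen ("obar")),
   i.e. 4.  */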
7059 src = string_constant (src, &offset_node);
7062 max = TREE_STRING_LENGTH (src);
7063 ptr = TREE_STRING_POINTER (src);
7064 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7066 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7067 compute the offset to the following null if we don't know where to
7068 start searching for it. */
7070 for (i = 0; i < max; i++)
7073 /* We don't know the starting offset, but we do know that the string
7074 has no internal zero bytes. We can assume that the offset falls
7075 within the bounds of the string; otherwise, the programmer deserves
7076 what he gets. Subtract the offset from the length of the string, and return that. */
7078 /* This would perhaps not be valid if we were dealing with named
7079 arrays in addition to literal string constants. */
7080 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7083 /* We have a known offset into the string. Start searching there for
7084 a null character. */
7085 if (offset_node == 0)
7089 /* Did we get a long long offset? If so, punt. */
7090 if (TREE_INT_CST_HIGH (offset_node) != 0)
7092 offset = TREE_INT_CST_LOW (offset_node);
7094 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7096 if (offset < 0 || offset > max)
7098 warning ("offset outside bounds of constant string");
7101 /* Use strlen to search for the first zero byte. Since any strings
7102 constructed with build_string will have nulls appended, we win even
7103 if we get handed something like (char[4])"abcd".
7105 Since OFFSET is our starting index into the string, no further
7106 calculation is needed. */
7107 return size_int (strlen (ptr + offset));
7110 /* Expand an expression EXP that calls a built-in function,
7111 with result going to TARGET if that's convenient
7112 (and in mode MODE if that's convenient).
7113 SUBTARGET may be used as the target for computing one of EXP's operands.
7114 IGNORE is nonzero if the value is to be ignored. */
7116 #define CALLED_AS_BUILT_IN(NODE) \
7117 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
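/* A call spelled `__builtin_strlen (s)' is therefore expanded inline
   even when not optimizing, while a plain `strlen (s)' that merely
   resolves to the builtin falls back to the library call in that case;
   see the `!optimize && ! CALLED_AS_BUILT_IN (fndecl)' tests below.  */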
7120 expand_builtin (exp, target, subtarget, mode, ignore)
7124 enum machine_mode mode;
7127 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7128 tree arglist = TREE_OPERAND (exp, 1);
7131 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7132 optab builtin_optab;
7134 switch (DECL_FUNCTION_CODE (fndecl))
7139 /* build_function_call changes these into ABS_EXPR. */
7144 /* Treat these like sqrt, but only if the user asks for them. */
7145 if (! flag_fast_math)
7147 case BUILT_IN_FSQRT:
7148 /* If not optimizing, call the library function. */
7153 /* Arg could be wrong type if user redeclared this fcn wrong. */
7154 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7157 /* Stabilize and compute the argument. */
7158 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7159 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7161 exp = copy_node (exp);
7162 arglist = copy_node (arglist);
7163 TREE_OPERAND (exp, 1) = arglist;
7164 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7166 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7168 /* Make a suitable register to place result in. */
7169 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7174 switch (DECL_FUNCTION_CODE (fndecl))
7177 builtin_optab = sin_optab; break;
7179 builtin_optab = cos_optab; break;
7180 case BUILT_IN_FSQRT:
7181 builtin_optab = sqrt_optab; break;
7186 /* Compute into TARGET.
7187 Set TARGET to wherever the result comes back. */
7188 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7189 builtin_optab, op0, target, 0);
7191 /* If we were unable to expand via the builtin, stop the
7192 sequence (without outputting the insns) and break, causing
7193 a call to the library function. */
7200 /* Check the results by default. But if flag_fast_math is turned on,
7201 then assume sqrt will always be called with valid arguments. */
7203 if (! flag_fast_math)
7205 /* Don't define the builtin FP instructions
7206 if your machine is not IEEE. */
7207 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7210 lab1 = gen_label_rtx ();
7212 /* Test the result; if it is NaN, set errno=EDOM because
7213 the argument was not in the domain. */
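/* Only a NaN compares unequal to itself, so the `beq' emitted below
   branches over the errno-setting code whenever the result is an
   ordinary number.  */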
7214 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7215 emit_jump_insn (gen_beq (lab1));
7219 #ifdef GEN_ERRNO_RTX
7220 rtx errno_rtx = GEN_ERRNO_RTX;
7223 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7226 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7229 /* We can't set errno=EDOM directly; let the library call do it.
7230 Pop the arguments right away in case the call gets deleted. */
7232 expand_call (exp, target, 0);
7239 /* Output the entire sequence. */
7240 insns = get_insns ();
7246 /* __builtin_apply_args returns block of memory allocated on
7247 the stack into which is stored the arg pointer, structure
7248 value address, static chain, and all the registers that might
7249 possibly be used in performing a function call. The code is
7250 moved to the start of the function so the incoming values are
7252 case BUILT_IN_APPLY_ARGS:
7253 /* Don't do __builtin_apply_args more than once in a function.
7254 Save the result of the first call and reuse it. */
7255 if (apply_args_value != 0)
7256 return apply_args_value;
7258 /* When this function is called, it means that registers must be
7259 saved on entry to this function. So we migrate the
7260 call to the first insn of this function. */
7265 temp = expand_builtin_apply_args ();
7269 apply_args_value = temp;
7271 /* Put the sequence after the NOTE that starts the function.
7272 If this is inside a SEQUENCE, make the outer-level insn
7273 chain current, so the code is placed at the start of the
7275 push_topmost_sequence ();
7276 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7277 pop_topmost_sequence ();
7281 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7282 FUNCTION with a copy of the parameters described by
7283 ARGUMENTS, and ARGSIZE. It returns a block of memory
7284 allocated on the stack into which is stored all the registers
7285 that might possibly be used for returning the result of a
7286 function. ARGUMENTS is the value returned by
7287 __builtin_apply_args. ARGSIZE is the number of bytes of
7288 arguments that must be copied. ??? How should this value be
7289 computed? We'll also need a safe worst case value for varargs functions. */
7291 case BUILT_IN_APPLY:
7293 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7294 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7295 || TREE_CHAIN (arglist) == 0
7296 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7297 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7298 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7306 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7307 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7309 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7312 /* __builtin_return (RESULT) causes the function to return the
7313 value described by RESULT. RESULT is address of the block of
7314 memory returned by __builtin_apply. */
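/* Illustrative user-level sketch (an assumption for exposition, not
   code from this file) of how the three untyped-call builtins combine
   in a forwarding function:

       void *forward (int a, int b)
       {
         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*) ()) real_fn, args, 16);
         __builtin_return (ret);
       }

   Here `real_fn' and the 16-byte argument size are made-up
   placeholders; control never falls off the end because
   __builtin_return returns directly to forward's caller.  */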
7315 case BUILT_IN_RETURN:
7317 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7318 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7319 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7320 NULL_RTX, VOIDmode, 0));
7323 case BUILT_IN_SAVEREGS:
7324 /* Don't do __builtin_saveregs more than once in a function.
7325 Save the result of the first call and reuse it. */
7326 if (saveregs_value != 0)
7327 return saveregs_value;
7329 /* When this function is called, it means that registers must be
7330 saved on entry to this function. So we migrate the
7331 call to the first insn of this function. */
7335 /* Now really call the function. `expand_call' does not call
7336 expand_builtin, so there is no danger of infinite recursion here. */
7339 #ifdef EXPAND_BUILTIN_SAVEREGS
7340 /* Do whatever the machine needs done in this case. */
7341 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7343 /* The register where the function returns its value
7344 is likely to have something else in it, such as an argument.
7345 So preserve that register around the call. */
7347 if (value_mode != VOIDmode)
7349 rtx valreg = hard_libcall_value (value_mode);
7350 rtx saved_valreg = gen_reg_rtx (value_mode);
7352 emit_move_insn (saved_valreg, valreg);
7353 temp = expand_call (exp, target, ignore);
7354 emit_move_insn (valreg, saved_valreg);
7357 /* Generate the call, putting the value in a pseudo. */
7358 temp = expand_call (exp, target, ignore);
7364 saveregs_value = temp;
7366 /* Put the sequence after the NOTE that starts the function.
7367 If this is inside a SEQUENCE, make the outer-level insn
7368 chain current, so the code is placed at the start of the
7370 push_topmost_sequence ();
7371 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7372 pop_topmost_sequence ();
7376 /* __builtin_args_info (N) returns word N of the arg space info
7377 for the current function. The number and meanings of words
7378 are controlled by the definition of CUMULATIVE_ARGS. */
7379 case BUILT_IN_ARGS_INFO:
7381 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7383 int *word_ptr = (int *) &current_function_args_info;
7384 tree type, elts, result;
7386 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7387 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7388 __FILE__, __LINE__);
7392 tree arg = TREE_VALUE (arglist);
7393 if (TREE_CODE (arg) != INTEGER_CST)
7394 error ("argument of `__builtin_args_info' must be constant");
7397 int wordnum = TREE_INT_CST_LOW (arg);
7399 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7400 error ("argument of `__builtin_args_info' out of range");
7402 return GEN_INT (word_ptr[wordnum]);
7406 error ("missing argument in `__builtin_args_info'");
7411 for (i = 0; i < nwords; i++)
7412 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7414 type = build_array_type (integer_type_node,
7415 build_index_type (build_int_2 (nwords, 0)));
7416 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7417 TREE_CONSTANT (result) = 1;
7418 TREE_STATIC (result) = 1;
7419 result = build (INDIRECT_REF, build_pointer_type (type), result);
7420 TREE_CONSTANT (result) = 1;
7421 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7425 /* Return the address of the first anonymous stack arg. */
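/* Illustrative user-level sketch (an assumption for exposition, not
   code from this file): <stdarg.h>'s va_start expands to a use of this
   builtin, so a variadic function such as

       #include <stdarg.h>
       int sum (int count, ...)
       {
         va_list ap;
         int i, s = 0;
         va_start (ap, count);
         for (i = 0; i < count; i++)
           s += va_arg (ap, int);
         va_end (ap);
         return s;
       }

   reaches the code below, which checks that `count' really is the last
   named parameter.  */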
7426 case BUILT_IN_NEXT_ARG:
7428 tree fntype = TREE_TYPE (current_function_decl);
7430 if ((TYPE_ARG_TYPES (fntype) == 0
7431 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7433 && ! current_function_varargs)
7435 error ("`va_start' used in function with fixed args");
7441 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7442 tree arg = TREE_VALUE (arglist);
7444 /* Strip off all nops for the sake of the comparison. This
7445 is not quite the same as STRIP_NOPS. It does more. */
7446 while (TREE_CODE (arg) == NOP_EXPR
7447 || TREE_CODE (arg) == CONVERT_EXPR
7448 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7449 arg = TREE_OPERAND (arg, 0);
7450 if (arg != last_parm)
7451 warning ("second parameter of `va_start' not last named argument");
7453 else if (! current_function_varargs)
7454 /* Evidently an out of date version of <stdarg.h>; can't validate
7455 va_start's second argument, but can still work as intended. */
7456 warning ("`__builtin_next_arg' called without an argument");
7459 return expand_binop (Pmode, add_optab,
7460 current_function_internal_arg_pointer,
7461 current_function_arg_offset_rtx,
7462 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7464 case BUILT_IN_CLASSIFY_TYPE:
7467 tree type = TREE_TYPE (TREE_VALUE (arglist));
7468 enum tree_code code = TREE_CODE (type);
7469 if (code == VOID_TYPE)
7470 return GEN_INT (void_type_class);
7471 if (code == INTEGER_TYPE)
7472 return GEN_INT (integer_type_class);
7473 if (code == CHAR_TYPE)
7474 return GEN_INT (char_type_class);
7475 if (code == ENUMERAL_TYPE)
7476 return GEN_INT (enumeral_type_class);
7477 if (code == BOOLEAN_TYPE)
7478 return GEN_INT (boolean_type_class);
7479 if (code == POINTER_TYPE)
7480 return GEN_INT (pointer_type_class);
7481 if (code == REFERENCE_TYPE)
7482 return GEN_INT (reference_type_class);
7483 if (code == OFFSET_TYPE)
7484 return GEN_INT (offset_type_class);
7485 if (code == REAL_TYPE)
7486 return GEN_INT (real_type_class);
7487 if (code == COMPLEX_TYPE)
7488 return GEN_INT (complex_type_class);
7489 if (code == FUNCTION_TYPE)
7490 return GEN_INT (function_type_class);
7491 if (code == METHOD_TYPE)
7492 return GEN_INT (method_type_class);
7493 if (code == RECORD_TYPE)
7494 return GEN_INT (record_type_class);
7495 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7496 return GEN_INT (union_type_class);
7497 if (code == ARRAY_TYPE)
7499 if (TYPE_STRING_FLAG (type))
7500 return GEN_INT (string_type_class);
7502 return GEN_INT (array_type_class);
7504 if (code == SET_TYPE)
7505 return GEN_INT (set_type_class);
7506 if (code == FILE_TYPE)
7507 return GEN_INT (file_type_class);
7508 if (code == LANG_TYPE)
7509 return GEN_INT (lang_type_class);
7511 return GEN_INT (no_type_class);
7513 case BUILT_IN_CONSTANT_P:
7518 tree arg = TREE_VALUE (arglist);
7521 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7522 || (TREE_CODE (arg) == ADDR_EXPR
7523 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7524 ? const1_rtx : const0_rtx);
7527 case BUILT_IN_FRAME_ADDRESS:
7528 /* The argument must be a nonnegative integer constant.
7529 It counts the number of frames to scan up the stack.
7530 The value is the address of that frame. */
7531 case BUILT_IN_RETURN_ADDRESS:
7532 /* The argument must be a nonnegative integer constant.
7533 It counts the number of frames to scan up the stack.
7534 The value is the return address saved in that frame. */
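/* Illustrative user-level sketch (an assumption for exposition, not
   code from this file):

       void *caller_pc = __builtin_return_address (0);
       void *caller_frame = __builtin_frame_address (1);

   The argument must be a constant: 0 names the current frame, 1 its
   caller, and so on, as walked by the loop below.  */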
7536 /* Warning about missing arg was already issued. */
7538 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7540 error ("invalid arg to `__builtin_return_address'");
7543 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7545 error ("invalid arg to `__builtin_return_address'");
7550 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7551 rtx tem = frame_pointer_rtx;
7554 /* Some machines need special handling before we can access arbitrary
7555 frames. For example, on the sparc, we must first flush all
7556 register windows to the stack. */
7557 #ifdef SETUP_FRAME_ADDRESSES
7558 SETUP_FRAME_ADDRESSES ();
7561 /* On the sparc, the return address is not in the frame, it is
7562 in a register. There is no way to access it off of the current
7563 frame pointer, but it can be accessed off the previous frame
7564 pointer by reading the value from the register window save area. */
7566 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7567 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7571 /* Scan back COUNT frames to the specified frame. */
7572 for (i = 0; i < count; i++)
7574 /* Assume the dynamic chain pointer is in the word that
7575 the frame address points to, unless otherwise specified. */
7576 #ifdef DYNAMIC_CHAIN_ADDRESS
7577 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7579 tem = memory_address (Pmode, tem);
7580 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7583 /* For __builtin_frame_address, return what we've got. */
7584 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7587 /* For __builtin_return_address,
7588 Get the return address from that frame. */
7589 #ifdef RETURN_ADDR_RTX
7590 return RETURN_ADDR_RTX (count, tem);
7592 tem = memory_address (Pmode,
7593 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7594 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7598 case BUILT_IN_ALLOCA:
7600 /* Arg could be non-integer if user redeclared this fcn wrong. */
7601 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7604 /* Compute the argument. */
7605 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7607 /* Allocate the desired space. */
7608 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7611 /* If not optimizing, call the library function. */
7612 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7616 /* Arg could be non-integer if user redeclared this fcn wrong. */
7617 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7620 /* Compute the argument. */
7621 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7622 /* Compute ffs, into TARGET if possible.
7623 Set TARGET to wherever the result comes back. */
7624 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7625 ffs_optab, op0, target, 1);
7630 case BUILT_IN_STRLEN:
7631 /* If not optimizing, call the library function. */
7632 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7636 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7637 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7641 tree src = TREE_VALUE (arglist);
7642 tree len = c_strlen (src);
7645 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7647 rtx result, src_rtx, char_rtx;
7648 enum machine_mode insn_mode = value_mode, char_mode;
7649 enum insn_code icode;
7651 /* If the length is known, just return it. */
7653 return expand_expr (len, target, mode, 0);
7655 /* If SRC is not a pointer type, don't do this operation inline. */
7659 /* Call a function if we can't compute strlen in the right mode. */
7661 while (insn_mode != VOIDmode)
7663 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7664 if (icode != CODE_FOR_nothing)
7667 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7669 if (insn_mode == VOIDmode)
7672 /* Make a place to write the result of the instruction. */
7675 && GET_CODE (result) == REG
7676 && GET_MODE (result) == insn_mode
7677 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7678 result = gen_reg_rtx (insn_mode);
7680 /* Make sure the operands are acceptable to the predicates. */
7682 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7683 result = gen_reg_rtx (insn_mode);
7685 src_rtx = memory_address (BLKmode,
7686 expand_expr (src, NULL_RTX, ptr_mode,
7688 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7689 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7691 char_rtx = const0_rtx;
7692 char_mode = insn_operand_mode[(int)icode][2];
7693 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7694 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7696 emit_insn (GEN_FCN (icode) (result,
7697 gen_rtx (MEM, BLKmode, src_rtx),
7698 char_rtx, GEN_INT (align)));
7700 /* Return the value in the proper mode for this function. */
7701 if (GET_MODE (result) == value_mode)
7703 else if (target != 0)
7705 convert_move (target, result, 0);
7709 return convert_to_mode (value_mode, result, 0);
7712 case BUILT_IN_STRCPY:
7713 /* If not optimizing, call the library function. */
7714 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7718 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7719 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7720 || TREE_CHAIN (arglist) == 0
7721 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7725 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7730 len = size_binop (PLUS_EXPR, len, integer_one_node);
7732 chainon (arglist, build_tree_list (NULL_TREE, len));
7736 case BUILT_IN_MEMCPY:
7737 /* If not optimizing, call the library function. */
7738 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7742 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7743 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7744 || TREE_CHAIN (arglist) == 0
7745 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7746 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7747 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7751 tree dest = TREE_VALUE (arglist);
7752 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7753 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7757 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7759 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7760 rtx dest_rtx, dest_mem, src_mem;
7762 /* If either SRC or DEST is not a pointer type, don't do
7763 this operation in-line. */
7764 if (src_align == 0 || dest_align == 0)
7766 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7767 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7771 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7772 dest_mem = gen_rtx (MEM, BLKmode,
7773 memory_address (BLKmode, dest_rtx));
7774 /* There could be a void* cast on top of the object. */
7775 if (TREE_CODE (dest) == NOP_EXPR)
7776 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (dest, 0)));
7778 type = TREE_TYPE (TREE_TYPE (dest));
7779 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7780 src_mem = gen_rtx (MEM, BLKmode,
7781 memory_address (BLKmode,
7782 expand_expr (src, NULL_RTX,
7785 /* There could be a void* cast on top of the object. */
7786 if (TREE_CODE (src) == NOP_EXPR)
7787 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (src, 0)));
7789 type = TREE_TYPE (TREE_TYPE (src));
7790 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7792 /* Copy word part most expediently. */
7793 emit_block_move (dest_mem, src_mem,
7794 expand_expr (len, NULL_RTX, VOIDmode, 0),
7795 MIN (src_align, dest_align));
7796 return force_operand (dest_rtx, NULL_RTX);
7799 /* These comparison functions need an instruction that returns an actual
7800 index. An ordinary compare that just sets the condition codes is not enough. */
7802 #ifdef HAVE_cmpstrsi
7803 case BUILT_IN_STRCMP:
7804 /* If not optimizing, call the library function. */
7805 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7809 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7810 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7811 || TREE_CHAIN (arglist) == 0
7812 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7814 else if (!HAVE_cmpstrsi)
7817 tree arg1 = TREE_VALUE (arglist);
7818 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7822 len = c_strlen (arg1);
7824 len = size_binop (PLUS_EXPR, integer_one_node, len);
7825 len2 = c_strlen (arg2);
7827 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7829 /* If we don't have a constant length for the first, use the length
7830 of the second, if we know it. We don't require a constant for
7831 this case; some cost analysis could be done if both are available
7832 but neither is constant. For now, assume they're equally cheap.
7834 If both strings have constant lengths, use the smaller. This
7835 could arise if optimization results in strcmp being called with
7836 two fixed strings, or if the code was machine-generated. We should
7837 add some code to the `memcmp' handler below to deal with such
7838 situations, someday. */
7839 if (!len || TREE_CODE (len) != INTEGER_CST)
7846 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7848 if (tree_int_cst_lt (len2, len))
7852 chainon (arglist, build_tree_list (NULL_TREE, len));
7856 case BUILT_IN_MEMCMP:
7857 /* If not optimizing, call the library function. */
7858 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7862 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7863 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7864 || TREE_CHAIN (arglist) == 0
7865 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7866 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7867 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7869 else if (!HAVE_cmpstrsi)
7872 tree arg1 = TREE_VALUE (arglist);
7873 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7874 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7878 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7880 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7881 enum machine_mode insn_mode
7882 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7884 /* If we don't have POINTER_TYPE, call the function. */
7885 if (arg1_align == 0 || arg2_align == 0)
7887 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7888 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7892 /* Make a place to write the result of the instruction. */
7895 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7896 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7897 result = gen_reg_rtx (insn_mode);
7899 emit_insn (gen_cmpstrsi (result,
7900 gen_rtx (MEM, BLKmode,
7901 expand_expr (arg1, NULL_RTX,
7904 gen_rtx (MEM, BLKmode,
7905 expand_expr (arg2, NULL_RTX,
7908 expand_expr (len, NULL_RTX, VOIDmode, 0),
7909 GEN_INT (MIN (arg1_align, arg2_align))));
7911 /* Return the value in the proper mode for this function. */
7912 mode = TYPE_MODE (TREE_TYPE (exp));
7913 if (GET_MODE (result) == mode)
7915 else if (target != 0)
7917 convert_move (target, result, 0);
7921 return convert_to_mode (mode, result, 0);
7924 case BUILT_IN_STRCMP:
7925 case BUILT_IN_MEMCMP:
7929 default: /* just do library call, if unknown builtin */
7930 error ("built-in function `%s' not currently supported",
7931 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7934 /* The switch statement above can drop through to cause the function
7935 to be called normally. */
7937 return expand_call (exp, target, ignore);
7940 /* Built-in functions to perform an untyped call and return. */
7942 /* For each register that may be used for calling a function, this
7943 gives a mode used to copy the register's value. VOIDmode indicates
7944 the register is not used for calling a function. If the machine
7945 has register windows, this gives only the outbound registers.
7946 INCOMING_REGNO gives the corresponding inbound register. */
7947 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7949 /* For each register that may be used for returning values, this gives
7950 a mode used to copy the register's value. VOIDmode indicates the
7951 register is not used for returning values. If the machine has
7952 register windows, this gives only the outbound registers.
7953 INCOMING_REGNO gives the corresponding inbound register. */
7954 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7956 /* For each register that may be used for calling a function, this
7957 gives the offset of that register into the block returned by
7958 __builtin_apply_args. 0 indicates that the register is not
7959 used for calling a function. */
7960 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7962 /* Return the offset of register REGNO into the block returned by
7963 __builtin_apply_args. This is not declared static, since it is
7964 needed in objc-act.c. */
7967 apply_args_register_offset (regno)
7972 /* Arguments are always put in outgoing registers (in the argument
7973 block) if such make sense. */
7974 #ifdef OUTGOING_REGNO
7975 regno = OUTGOING_REGNO(regno);
7977 return apply_args_reg_offset[regno];
7980 /* Return the size required for the block returned by __builtin_apply_args,
7981 and initialize apply_args_mode. */
7986 static int size = -1;
7988 enum machine_mode mode;
7990 /* The values computed by this function never change. */
7993 /* The first value is the incoming arg-pointer. */
7994 size = GET_MODE_SIZE (Pmode);
7996 /* The second value is the structure value address unless this is
7997 passed as an "invisible" first argument. */
7998 if (struct_value_rtx)
7999 size += GET_MODE_SIZE (Pmode);
8001 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8002 if (FUNCTION_ARG_REGNO_P (regno))
8004 /* Search for the proper mode for copying this register's
8005 value. I'm not sure this is right, but it works so far. */
8006 enum machine_mode best_mode = VOIDmode;
8008 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8010 mode = GET_MODE_WIDER_MODE (mode))
8011 if (HARD_REGNO_MODE_OK (regno, mode)
8012 && HARD_REGNO_NREGS (regno, mode) == 1)
8015 if (best_mode == VOIDmode)
8016 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8018 mode = GET_MODE_WIDER_MODE (mode))
8019 if (HARD_REGNO_MODE_OK (regno, mode)
8020 && (mov_optab->handlers[(int) mode].insn_code
8021 != CODE_FOR_nothing))
8025 if (mode == VOIDmode)
8028 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8029 if (size % align != 0)
8030 size = CEIL (size, align) * align;
8031 apply_args_reg_offset[regno] = size;
8032 size += GET_MODE_SIZE (mode);
8033 apply_args_mode[regno] = mode;
8037 apply_args_mode[regno] = VOIDmode;
8038 apply_args_reg_offset[regno] = 0;
8044 /* Return the size required for the block returned by __builtin_apply,
8045 and initialize apply_result_mode. */
8048 apply_result_size ()
8050 static int size = -1;
8052 enum machine_mode mode;
8054 /* The values computed by this function never change. */
8059 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8060 if (FUNCTION_VALUE_REGNO_P (regno))
8062 /* Search for the proper mode for copying this register's
8063 value. I'm not sure this is right, but it works so far. */
8064 enum machine_mode best_mode = VOIDmode;
8066 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8068 mode = GET_MODE_WIDER_MODE (mode))
8069 if (HARD_REGNO_MODE_OK (regno, mode))
8072 if (best_mode == VOIDmode)
8073 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8075 mode = GET_MODE_WIDER_MODE (mode))
8076 if (HARD_REGNO_MODE_OK (regno, mode)
8077 && (mov_optab->handlers[(int) mode].insn_code
8078 != CODE_FOR_nothing))
8082 if (mode == VOIDmode)
8085 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8086 if (size % align != 0)
8087 size = CEIL (size, align) * align;
8088 size += GET_MODE_SIZE (mode);
8089 apply_result_mode[regno] = mode;
8092 apply_result_mode[regno] = VOIDmode;
8094 /* Allow targets that use untyped_call and untyped_return to override
8095 the size so that machine-specific information can be stored here. */
8096 #ifdef APPLY_RESULT_SIZE
8097 size = APPLY_RESULT_SIZE;
8103 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8104 /* Create a vector describing the result block RESULT. If SAVEP is true,
8105 the result block is used to save the values; otherwise it is used to
8106 restore the values. */
8109 result_vector (savep, result)
8113 int regno, size, align, nelts;
8114 enum machine_mode mode;
8116 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8119 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8120 if ((mode = apply_result_mode[regno]) != VOIDmode)
8122 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8123 if (size % align != 0)
8124 size = CEIL (size, align) * align;
8125 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8126 mem = change_address (result, mode,
8127 plus_constant (XEXP (result, 0), size));
8128 savevec[nelts++] = (savep
8129 ? gen_rtx (SET, VOIDmode, mem, reg)
8130 : gen_rtx (SET, VOIDmode, reg, mem));
8131 size += GET_MODE_SIZE (mode);
8133 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8135 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8137 /* Save the state required to perform an untyped call with the same
8138 arguments as were passed to the current function. */
8141 expand_builtin_apply_args ()
8144 int size, align, regno;
8145 enum machine_mode mode;
8147 /* Create a block where the arg-pointer, structure value address,
8148 and argument registers can be saved. */
8149 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8151 /* Walk past the arg-pointer and structure value address. */
8152 size = GET_MODE_SIZE (Pmode);
8153 if (struct_value_rtx)
8154 size += GET_MODE_SIZE (Pmode);
8156 /* Save each register used in calling a function to the block.
8157 Doing this in reverse order makes for much more compact code
8158 for i386 and family. If we do this in reverse order, a simple
8159 series of pops and stores will be generated. If we do this
8160 in ascending order, the pops and stores will be littered with
8161 stack swaps as well. Since the order is largely irrelevant for
8162 all other architectures, we use the optimal order for the i386. */
8163 for (regno = FIRST_PSEUDO_REGISTER; regno--;)
8164 if ((mode = apply_args_mode[regno]) != VOIDmode)
8168 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8169 if (size % align != 0)
8170 size = CEIL (size, align) * align;
8172 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8175 /* For reg-stack.c's stack register household.
8176 Compare with a similar piece of code in function.c. */
8178 emit_insn (gen_rtx (USE, mode, tem));
8181 emit_move_insn (change_address (registers, mode,
8182 plus_constant (XEXP (registers, 0),
8185 size += GET_MODE_SIZE (mode);
8188 /* Save the arg pointer to the block. */
8189 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8190 copy_to_reg (virtual_incoming_args_rtx));
8191 size = GET_MODE_SIZE (Pmode);
8193 /* Save the structure value address unless this is passed as an
8194 "invisible" first argument. */
8195 if (struct_value_incoming_rtx)
8197 emit_move_insn (change_address (registers, Pmode,
8198 plus_constant (XEXP (registers, 0),
8200 copy_to_reg (struct_value_incoming_rtx));
8201 size += GET_MODE_SIZE (Pmode);
8204 /* Return the address of the block. */
8205 return copy_addr_to_reg (XEXP (registers, 0));
8208 /* Perform an untyped call and save the state required to perform an
8209 untyped return of whatever value was returned by the given function. */
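/* In outline, the code below: saves the stack level, pushes a fresh
   argument block and block-copies the saved arguments into it, reloads
   the argument registers and the structure-value address from the
   ARGUMENTS block, emits the call (untyped_call where available,
   otherwise call_value), and finally copies the value registers into
   the RESULT block.  */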
8212 expand_builtin_apply (function, arguments, argsize)
8213 rtx function, arguments, argsize;
8215 int size, align, regno;
8216 enum machine_mode mode;
8217 rtx incoming_args, result, reg, dest, call_insn;
8218 rtx old_stack_level = 0;
8219 rtx call_fusage = 0;
8221 /* Create a block where the return registers can be saved. */
8222 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8224 /* ??? The argsize value should be adjusted here. */
8226 /* Fetch the arg pointer from the ARGUMENTS block. */
8227 incoming_args = gen_reg_rtx (Pmode);
8228 emit_move_insn (incoming_args,
8229 gen_rtx (MEM, Pmode, arguments));
8230 #ifndef STACK_GROWS_DOWNWARD
8231 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8232 incoming_args, 0, OPTAB_LIB_WIDEN);
8235 /* Perform postincrements before actually calling the function. */
8238 /* Push a new argument block and copy the arguments. */
8239 do_pending_stack_adjust ();
8240 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8242 /* Push a block of memory onto the stack to store the memory arguments.
8243 Save the address in a register, and copy the memory arguments. ??? I
8244 haven't figured out how the calling convention macros affect this,
8245 but it's likely that the source and/or destination addresses in
8246 the block copy will need updating in machine specific ways. */
8247 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8248 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8249 gen_rtx (MEM, BLKmode, incoming_args),
8251 PARM_BOUNDARY / BITS_PER_UNIT);
8253 /* Refer to the argument block. */
8255 arguments = gen_rtx (MEM, BLKmode, arguments);
8257 /* Walk past the arg-pointer and structure value address. */
8258 size = GET_MODE_SIZE (Pmode);
8259 if (struct_value_rtx)
8260 size += GET_MODE_SIZE (Pmode);
8262 /* Restore each of the registers previously saved. Make USE insns
8263 for each of these registers for use in making the call.
8264 Doing this in reverse order makes for much more compact code
8265 for i386 and family. */
8266 for (regno = FIRST_PSEUDO_REGISTER; regno--; )
8267 if ((mode = apply_args_mode[regno]) != VOIDmode)
8269 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8270 if (size % align != 0)
8271 size = CEIL (size, align) * align;
8272 reg = gen_rtx (REG, mode, regno);
8273 emit_move_insn (reg,
8274 change_address (arguments, mode,
8275 plus_constant (XEXP (arguments, 0),
8278 use_reg (&call_fusage, reg);
8279 size += GET_MODE_SIZE (mode);
8282 /* Restore the structure value address unless this is passed as an
8283 "invisible" first argument. */
8284 size = GET_MODE_SIZE (Pmode);
8285 if (struct_value_rtx)
8287 rtx value = gen_reg_rtx (Pmode);
8288 emit_move_insn (value,
8289 change_address (arguments, Pmode,
8290 plus_constant (XEXP (arguments, 0),
8292 emit_move_insn (struct_value_rtx, value);
8293 if (GET_CODE (struct_value_rtx) == REG)
8294 use_reg (&call_fusage, struct_value_rtx);
8295 size += GET_MODE_SIZE (Pmode);
8298 /* All arguments and registers used for the call are set up by now! */
8299 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8301 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8302 and we don't want to load it into a register as an optimization,
8303 because prepare_call_address already did it if it should be done. */
8304 if (GET_CODE (function) != SYMBOL_REF)
8305 function = memory_address (FUNCTION_MODE, function);
8307 /* Generate the actual call instruction and save the return value. */
8308 #ifdef HAVE_untyped_call
8309 if (HAVE_untyped_call)
8310 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8311 result, result_vector (1, result)));
8314 #ifdef HAVE_call_value
8315 if (HAVE_call_value)
8319 /* Locate the unique return register. It is not possible to
8320 express a call that sets more than one return register using
8321 call_value; use untyped_call for that. In fact, untyped_call
8322 only needs to save the return registers in the given block. */
8323 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8324 if ((mode = apply_result_mode[regno]) != VOIDmode)
8327 abort (); /* HAVE_untyped_call required. */
8328 valreg = gen_rtx (REG, mode, regno);
8331 emit_call_insn (gen_call_value (valreg,
8332 gen_rtx (MEM, FUNCTION_MODE, function),
8333 const0_rtx, NULL_RTX, const0_rtx));
8335 emit_move_insn (change_address (result, GET_MODE (valreg),
8343 /* Find the CALL insn we just emitted. */
8344 for (call_insn = get_last_insn ();
8345 call_insn && GET_CODE (call_insn) != CALL_INSN;
8346 call_insn = PREV_INSN (call_insn))
8352 /* Put the register usage information on the CALL. If there is already
8353 some usage information, put ours at the end. */
8354 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8358 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8359 link = XEXP (link, 1))
8362 XEXP (link, 1) = call_fusage;
8365 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8367 /* Restore the stack. */
8368 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8370 /* Return the address of the result block. */
8371 return copy_addr_to_reg (XEXP (result, 0));
8374 /* Perform an untyped return. */
8377 expand_builtin_return (result)
8380 int size, align, regno;
8381 enum machine_mode mode;
8383 rtx call_fusage = 0;
8385 apply_result_size ();
8386 result = gen_rtx (MEM, BLKmode, result);
8388 #ifdef HAVE_untyped_return
8389 if (HAVE_untyped_return)
8391 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8397 /* Restore the return value and note that each value is used. */
8399 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8400 if ((mode = apply_result_mode[regno]) != VOIDmode)
8402 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8403 if (size % align != 0)
8404 size = CEIL (size, align) * align;
8405 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8406 emit_move_insn (reg,
8407 change_address (result, mode,
8408 plus_constant (XEXP (result, 0),
8411 push_to_sequence (call_fusage);
8412 emit_insn (gen_rtx (USE, VOIDmode, reg));
8413 call_fusage = get_insns ();
8415 size += GET_MODE_SIZE (mode);
8418 /* Put the USE insns before the return. */
8419 emit_insns (call_fusage);
8421 /* Return whatever values were restored by jumping directly to the end of the function.  */
8423 expand_null_return ();
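/* A sketch of the source-level idiom that expand_builtin_apply_args,
   expand_builtin_apply and expand_builtin_return implement, kept under
   #if 0 purely as illustration: forward the current function's arguments
   to another function and return whatever it returned, untyped.
   `target_function' and the 64-byte argument-size guess are made-up
   placeholders.  */
#if 0
int
forwarder (a, b)
     int a, b;
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_function, args, 64);
  __builtin_return (result);
}
#endif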
8426 /* Expand code for a post- or pre- increment or decrement
8427 and return the RTX for the result.
8428 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
8431 expand_increment (exp, post)
8435 register rtx op0, op1;
8436 register rtx temp, value;
8437 register tree incremented = TREE_OPERAND (exp, 0);
8438 optab this_optab = add_optab;
8440 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8441 int op0_is_copy = 0;
8442 int single_insn = 0;
8443 /* 1 means we can't store into OP0 directly,
8444 because it is a subreg narrower than a word,
8445 and we don't dare clobber the rest of the word. */
8448 if (output_bytecode)
8450 bc_expand_expr (exp);
8454 /* Stabilize any component ref that might need to be
8455 evaluated more than once below. */
8457 || TREE_CODE (incremented) == BIT_FIELD_REF
8458 || (TREE_CODE (incremented) == COMPONENT_REF
8459 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8460 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8461 incremented = stabilize_reference (incremented);
8462 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8463 ones into save exprs so that they don't accidentally get evaluated
8464 more than once by the code below. */
8465 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8466 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8467 incremented = save_expr (incremented);
8469 /* Compute the operands as RTX.
8470 Note whether OP0 is the actual lvalue or a copy of it:
8471 I believe it is a copy iff it is a register or subreg
8472 and insns were generated in computing it. */
8474 temp = get_last_insn ();
8475 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8477 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8478 in place but instead must do sign- or zero-extension during assignment,
8479 so we copy it into a new register and let the code below use it as
8482 Note that we can safely modify this SUBREG since it is known not to be
8483 shared (it was made by the expand_expr call above). */
8485 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8488 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8492 else if (GET_CODE (op0) == SUBREG
8493 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8495 /* We cannot increment this SUBREG in place. If we are
8496 post-incrementing, get a copy of the old value. Otherwise,
8497 just mark that we cannot increment in place. */
8499 op0 = copy_to_reg (op0);
8504 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8505 && temp != get_last_insn ());
8506 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8508 /* Decide whether incrementing or decrementing. */
8509 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8510 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8511 this_optab = sub_optab;
8513 /* Convert decrement by a constant into a negative increment. */
8514 if (this_optab == sub_optab
8515 && GET_CODE (op1) == CONST_INT)
8517 op1 = GEN_INT (- INTVAL (op1));
8518 this_optab = add_optab;
8521 /* For a preincrement, see if we can do this with a single instruction. */
8524 icode = (int) this_optab->handlers[(int) mode].insn_code;
8525 if (icode != (int) CODE_FOR_nothing
8526 /* Make sure that OP0 is valid for operands 0 and 1
8527 of the insn we want to queue. */
8528 && (*insn_operand_predicate[icode][0]) (op0, mode)
8529 && (*insn_operand_predicate[icode][1]) (op0, mode)
8530 && (*insn_operand_predicate[icode][2]) (op1, mode))
8534 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8535 then we cannot just increment OP0. We must therefore contrive to
8536 increment the original value. Then, for postincrement, we can return
8537 OP0 since it is a copy of the old value. For preincrement, expand here
8538 unless we can do it with a single insn.
8540 Likewise if storing directly into OP0 would clobber high bits
8541 we need to preserve (bad_subreg). */
8542 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8544 /* This is the easiest way to increment the value wherever it is.
8545 Problems with multiple evaluation of INCREMENTED are prevented
8546 because either (1) it is a component_ref or preincrement,
8547 in which case it was stabilized above, or (2) it is an array_ref
8548 with constant index in an array in a register, which is
8549 safe to reevaluate. */
8550 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8551 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8552 ? MINUS_EXPR : PLUS_EXPR),
8555 TREE_OPERAND (exp, 1));
8557 while (TREE_CODE (incremented) == NOP_EXPR
8558 || TREE_CODE (incremented) == CONVERT_EXPR)
8560 newexp = convert (TREE_TYPE (incremented), newexp);
8561 incremented = TREE_OPERAND (incremented, 0);
8564 temp = expand_assignment (incremented, newexp, ! post, 0);
8565 return post ? op0 : temp;
8570 /* We have a true reference to the value in OP0.
8571 If there is an insn to add or subtract in this mode, queue it.
8572 Queueing the increment insn avoids the register shuffling
8573 that often results if we must increment now and first save
8574 the old value for subsequent use. */
8576 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8577 op0 = stabilize (op0);
8580 icode = (int) this_optab->handlers[(int) mode].insn_code;
8581 if (icode != (int) CODE_FOR_nothing
8582 /* Make sure that OP0 is valid for operands 0 and 1
8583 of the insn we want to queue. */
8584 && (*insn_operand_predicate[icode][0]) (op0, mode)
8585 && (*insn_operand_predicate[icode][1]) (op0, mode))
8587 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8588 op1 = force_reg (mode, op1);
8590 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8594 /* Preincrement, or we can't increment with one simple insn. */
8596 /* Save a copy of the value before inc or dec, to return it later. */
8597 temp = value = copy_to_reg (op0);
8599 /* Arrange to return the incremented value. */
8600 /* Copy the rtx because expand_binop will protect from the queue,
8601 and the results of that would be invalid for us to return
8602 if our caller does emit_queue before using our result. */
8603 temp = copy_rtx (value = op0);
8605 /* Increment however we can. */
8606 op1 = expand_binop (mode, this_optab, value, op1, op0,
8607 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8608 /* Make sure the value is stored into OP0. */
8610 emit_move_insn (op0, op1);
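/* As an illustration, for a plain post-increment `i++' where `i' sits in a
   register and the machine has a matching add insn, the code above just
   queues an insn of roughly the form
	(set (reg:SI i) (plus:SI (reg:SI i) (const_int 1)))
   and hands back the old value of `i'; `i--' goes the same way once the
   constant has been negated and add_optab reused.  */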
8615 /* Expand all function calls contained within EXP, innermost ones first.
8616 But don't look within expressions that have sequence points.
8617 For each CALL_EXPR, record the rtx for its value
8618 in the CALL_EXPR_RTL field. */
8621 preexpand_calls (exp)
8624 register int nops, i;
8625 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8627 if (! do_preexpand_calls)
8630 /* Only expressions and references can contain calls. */
8632 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8635 switch (TREE_CODE (exp))
8638 /* Do nothing if already expanded. */
8639 if (CALL_EXPR_RTL (exp) != 0)
8642 /* Do nothing to built-in functions. */
8643 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8644 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8645 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8646 /* Do nothing if the call returns a variable-sized object. */
8647 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8648 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8653 case TRUTH_ANDIF_EXPR:
8654 case TRUTH_ORIF_EXPR:
8655 /* If we find one of these, then we can be sure
8656 the adjust will be done for it (since it makes jumps).
8657 Do it now, so that if this is inside an argument
8658 of a function, we don't get the stack adjustment
8659 after some other args have already been pushed. */
8660 do_pending_stack_adjust ();
8665 case WITH_CLEANUP_EXPR:
8669 if (SAVE_EXPR_RTL (exp) != 0)
8673 nops = tree_code_length[(int) TREE_CODE (exp)];
8674 for (i = 0; i < nops; i++)
8675 if (TREE_OPERAND (exp, i) != 0)
8677 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8678 if (type == 'e' || type == '<' || type == '1' || type == '2'
8680 preexpand_calls (TREE_OPERAND (exp, i));
8684 /* At the start of a function, record that we have no previously-pushed
8685 arguments waiting to be popped. */
8688 init_pending_stack_adjust ()
8690 pending_stack_adjust = 0;
8693 /* When exiting from a function, if safe, clear out any pending stack adjust
8694 so the adjustment won't get done. */
8697 clear_pending_stack_adjust ()
8699 #ifdef EXIT_IGNORE_STACK
8700 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8701 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8702 && ! flag_inline_functions)
8703 pending_stack_adjust = 0;
8707 /* Pop any previously-pushed arguments that have not been popped yet. */
8710 do_pending_stack_adjust ()
8712 if (inhibit_defer_pop == 0)
8714 if (pending_stack_adjust != 0)
8715 adjust_stack (GEN_INT (pending_stack_adjust));
8716 pending_stack_adjust = 0;
8720 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8721 Returns the cleanups to be performed. */
8724 defer_cleanups_to (old_cleanups)
8727 tree new_cleanups = NULL_TREE;
8728 tree cleanups = cleanups_this_call;
8729 tree last = NULL_TREE;
8731 while (cleanups_this_call != old_cleanups)
8733 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8734 last = cleanups_this_call;
8735 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8740 /* Remove the list from the chain of cleanups. */
8741 TREE_CHAIN (last) = NULL_TREE;
8743 /* Reverse them so that we can build them in the right order.  */
8744 cleanups = nreverse (cleanups);
8749 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8750 TREE_VALUE (cleanups), new_cleanups);
8752 new_cleanups = TREE_VALUE (cleanups);
8754 cleanups = TREE_CHAIN (cleanups);
8758 return new_cleanups;
8761 /* Expand all cleanups up to OLD_CLEANUPS.
8762 Needed here, and also for language-dependent calls. */
8765 expand_cleanups_to (old_cleanups)
8768 while (cleanups_this_call != old_cleanups)
8770 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8771 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8772 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8776 /* Expand conditional expressions. */
8778 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8779 LABEL is an rtx of code CODE_LABEL, in this function and all the
8783 jumpifnot (exp, label)
8787 do_jump (exp, label, NULL_RTX);
8790 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8797 do_jump (exp, NULL_RTX, label);
8800 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8801 the result is zero, or IF_TRUE_LABEL if the result is one.
8802 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8803 meaning fall through in that case.
8805 do_jump always does any pending stack adjust except when it does not
8806 actually perform a jump. An example where there is no jump
8807 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8809 This function is responsible for optimizing cases such as
8810 &&, || and comparison operators in EXP. */
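/* For example, for `a && b' (TRUTH_ANDIF_EXPR) no boolean value is ever
   computed: control jumps to IF_FALSE_LABEL as soon as `a' turns out to be
   zero, and only otherwise goes on to test `b'.  */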
8813 do_jump (exp, if_false_label, if_true_label)
8815 rtx if_false_label, if_true_label;
8817 register enum tree_code code = TREE_CODE (exp);
8818 /* Some cases need to create a label to jump to
8819 in order to properly fall through.
8820 These cases set DROP_THROUGH_LABEL nonzero. */
8821 rtx drop_through_label = 0;
8826 enum machine_mode mode;
8836 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8842 /* This is not true with #pragma weak */
8844 /* The address of something can never be zero. */
8846 emit_jump (if_true_label);
8851 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8852 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8853 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8856 /* If we are narrowing the operand, we have to do the compare in the
8858 if ((TYPE_PRECISION (TREE_TYPE (exp))
8859 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8861 case NON_LVALUE_EXPR:
8862 case REFERENCE_EXPR:
8867 /* These cannot change zero->non-zero or vice versa. */
8868 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8872 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8873 a test, and can be longer if the test is eliminated.
8875 /* Reduce to minus. */
8876 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8877 TREE_OPERAND (exp, 0),
8878 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8879 TREE_OPERAND (exp, 1))));
8880 /* Process as MINUS. */
8884 /* Non-zero iff operands of minus differ. */
8885 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8886 TREE_OPERAND (exp, 0),
8887 TREE_OPERAND (exp, 1)),
8892 /* If we are AND'ing with a small constant, do this comparison in the
8893 smallest type that fits. If the machine doesn't have comparisons
8894 that small, it will be converted back to the wider comparison.
8895 This helps if we are testing the sign bit of a narrower object.
8896 combine can't do this for us because it can't know whether a
8897 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
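/* For instance, testing `x & 0x80' on a full-word `x' gives i == 7 below,
   so mode_for_size can pick QImode and the jump can be done with a QImode
   compare when the machine provides one.  */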
8899 if (! SLOW_BYTE_ACCESS
8900 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8901 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8902 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8903 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8904 && (type = type_for_mode (mode, 1)) != 0
8905 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8906 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8907 != CODE_FOR_nothing))
8909 do_jump (convert (type, exp), if_false_label, if_true_label);
8914 case TRUTH_NOT_EXPR:
8915 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8918 case TRUTH_ANDIF_EXPR:
8921 tree cleanups, old_cleanups;
8923 if (if_false_label == 0)
8924 if_false_label = drop_through_label = gen_label_rtx ();
8926 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8927 seq1 = get_insns ();
8930 old_cleanups = cleanups_this_call;
8932 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8933 seq2 = get_insns ();
8936 cleanups = defer_cleanups_to (old_cleanups);
8939 rtx flag = gen_reg_rtx (word_mode);
8943 /* Flag cleanups as not needed. */
8944 emit_move_insn (flag, const0_rtx);
8947 /* Flag cleanups as needed. */
8948 emit_move_insn (flag, const1_rtx);
8951 /* Convert flag, which is an rtx, into a tree.  */
8952 cond = make_node (RTL_EXPR);
8953 TREE_TYPE (cond) = integer_type_node;
8954 RTL_EXPR_RTL (cond) = flag;
8955 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8957 new_cleanups = build (COND_EXPR, void_type_node,
8958 truthvalue_conversion (cond),
8959 cleanups, integer_zero_node);
8960 new_cleanups = fold (new_cleanups);
8962 /* Now add in the conditionalized cleanups. */
8964 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8965 (*interim_eh_hook) (NULL_TREE);
8975 case TRUTH_ORIF_EXPR:
8978 tree cleanups, old_cleanups;
8980 if (if_true_label == 0)
8981 if_true_label = drop_through_label = gen_label_rtx ();
8983 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8984 seq1 = get_insns ();
8987 old_cleanups = cleanups_this_call;
8989 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8990 seq2 = get_insns ();
8993 cleanups = defer_cleanups_to (old_cleanups);
8996 rtx flag = gen_reg_rtx (word_mode);
9000 /* Flag cleanups as not needed. */
9001 emit_move_insn (flag, const0_rtx);
9004 /* Flag cleanups as needed. */
9005 emit_move_insn (flag, const1_rtx);
9008 /* Convert flag, which is an rtx, into a tree.  */
9009 cond = make_node (RTL_EXPR);
9010 TREE_TYPE (cond) = integer_type_node;
9011 RTL_EXPR_RTL (cond) = flag;
9012 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9014 new_cleanups = build (COND_EXPR, void_type_node,
9015 truthvalue_conversion (cond),
9016 cleanups, integer_zero_node);
9017 new_cleanups = fold (new_cleanups);
9019 /* Now add in the conditionalized cleanups. */
9021 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9022 (*interim_eh_hook) (NULL_TREE);
9034 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9038 do_pending_stack_adjust ();
9039 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9046 int bitsize, bitpos, unsignedp;
9047 enum machine_mode mode;
9052 /* Get description of this reference. We don't actually care
9053 about the underlying object here. */
9054 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9055 &mode, &unsignedp, &volatilep);
9057 type = type_for_size (bitsize, unsignedp);
9058 if (! SLOW_BYTE_ACCESS
9059 && type != 0 && bitsize >= 0
9060 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9061 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9062 != CODE_FOR_nothing))
9064 do_jump (convert (type, exp), if_false_label, if_true_label);
9071 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9072 if (integer_onep (TREE_OPERAND (exp, 1))
9073 && integer_zerop (TREE_OPERAND (exp, 2)))
9074 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9076 else if (integer_zerop (TREE_OPERAND (exp, 1))
9077 && integer_onep (TREE_OPERAND (exp, 2)))
9078 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9082 register rtx label1 = gen_label_rtx ();
9083 drop_through_label = gen_label_rtx ();
9084 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9085 /* Now the THEN-expression. */
9086 do_jump (TREE_OPERAND (exp, 1),
9087 if_false_label ? if_false_label : drop_through_label,
9088 if_true_label ? if_true_label : drop_through_label);
9089 /* In case the do_jump just above never jumps. */
9090 do_pending_stack_adjust ();
9091 emit_label (label1);
9092 /* Now the ELSE-expression. */
9093 do_jump (TREE_OPERAND (exp, 2),
9094 if_false_label ? if_false_label : drop_through_label,
9095 if_true_label ? if_true_label : drop_through_label);
9101 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9103 if (integer_zerop (TREE_OPERAND (exp, 1)))
9104 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9105 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9106 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9109 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9110 fold (build (EQ_EXPR, TREE_TYPE (exp),
9111 fold (build1 (REALPART_EXPR,
9112 TREE_TYPE (inner_type),
9113 TREE_OPERAND (exp, 0))),
9114 fold (build1 (REALPART_EXPR,
9115 TREE_TYPE (inner_type),
9116 TREE_OPERAND (exp, 1))))),
9117 fold (build (EQ_EXPR, TREE_TYPE (exp),
9118 fold (build1 (IMAGPART_EXPR,
9119 TREE_TYPE (inner_type),
9120 TREE_OPERAND (exp, 0))),
9121 fold (build1 (IMAGPART_EXPR,
9122 TREE_TYPE (inner_type),
9123 TREE_OPERAND (exp, 1))))))),
9124 if_false_label, if_true_label);
9125 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9126 && !can_compare_p (TYPE_MODE (inner_type)))
9127 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9129 comparison = compare (exp, EQ, EQ);
9135 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9137 if (integer_zerop (TREE_OPERAND (exp, 1)))
9138 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9139 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9140 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9143 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9144 fold (build (NE_EXPR, TREE_TYPE (exp),
9145 fold (build1 (REALPART_EXPR,
9146 TREE_TYPE (inner_type),
9147 TREE_OPERAND (exp, 0))),
9148 fold (build1 (REALPART_EXPR,
9149 TREE_TYPE (inner_type),
9150 TREE_OPERAND (exp, 1))))),
9151 fold (build (NE_EXPR, TREE_TYPE (exp),
9152 fold (build1 (IMAGPART_EXPR,
9153 TREE_TYPE (inner_type),
9154 TREE_OPERAND (exp, 0))),
9155 fold (build1 (IMAGPART_EXPR,
9156 TREE_TYPE (inner_type),
9157 TREE_OPERAND (exp, 1))))))),
9158 if_false_label, if_true_label);
9159 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9160 && !can_compare_p (TYPE_MODE (inner_type)))
9161 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9163 comparison = compare (exp, NE, NE);
9168 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9170 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9171 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9173 comparison = compare (exp, LT, LTU);
9177 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9179 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9180 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9182 comparison = compare (exp, LE, LEU);
9186 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9188 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9189 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9191 comparison = compare (exp, GT, GTU);
9195 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9197 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9198 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9200 comparison = compare (exp, GE, GEU);
9205 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9207 /* This is not needed any more and causes poor code since it causes
9208 comparisons and tests from non-SI objects to have different code
9210 /* Copy to register to avoid generating bad insns by cse
9211 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9212 if (!cse_not_expected && GET_CODE (temp) == MEM)
9213 temp = copy_to_reg (temp);
9215 do_pending_stack_adjust ();
9216 if (GET_CODE (temp) == CONST_INT)
9217 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9218 else if (GET_CODE (temp) == LABEL_REF)
9219 comparison = const_true_rtx;
9220 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9221 && !can_compare_p (GET_MODE (temp)))
9222 /* Note swapping the labels gives us not-equal. */
9223 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9224 else if (GET_MODE (temp) != VOIDmode)
9225 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9226 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9227 GET_MODE (temp), NULL_RTX, 0);
9232 /* Do any postincrements in the expression that was tested. */
9235 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9236 straight into a conditional jump instruction as the jump condition.
9237 Otherwise, all the work has been done already. */
9239 if (comparison == const_true_rtx)
9242 emit_jump (if_true_label);
9244 else if (comparison == const0_rtx)
9247 emit_jump (if_false_label);
9249 else if (comparison)
9250 do_jump_for_compare (comparison, if_false_label, if_true_label);
9252 if (drop_through_label)
9254 /* If do_jump produces code that might be jumped around,
9255 do any stack adjusts from that code, before the place
9256 where control merges in. */
9257 do_pending_stack_adjust ();
9258 emit_label (drop_through_label);
9262 /* Given a comparison expression EXP for values too wide to be compared
9263 with one insn, test the comparison and jump to the appropriate label.
9264 The code of EXP is ignored; we always test GT if SWAP is 0,
9265 and LT if SWAP is 1. */
9268 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9271 rtx if_false_label, if_true_label;
9273 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9274 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9275 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9276 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9277 rtx drop_through_label = 0;
9278 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9281 if (! if_true_label || ! if_false_label)
9282 drop_through_label = gen_label_rtx ();
9283 if (! if_true_label)
9284 if_true_label = drop_through_label;
9285 if (! if_false_label)
9286 if_false_label = drop_through_label;
9288 /* Compare a word at a time, high order first. */
9289 for (i = 0; i < nwords; i++)
9292 rtx op0_word, op1_word;
9294 if (WORDS_BIG_ENDIAN)
9296 op0_word = operand_subword_force (op0, i, mode);
9297 op1_word = operand_subword_force (op1, i, mode);
9301 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9302 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9305 /* All but the high-order word must be compared as unsigned.  */
9306 comp = compare_from_rtx (op0_word, op1_word,
9307 (unsignedp || i > 0) ? GTU : GT,
9308 unsignedp, word_mode, NULL_RTX, 0);
9309 if (comp == const_true_rtx)
9310 emit_jump (if_true_label);
9311 else if (comp != const0_rtx)
9312 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9314 /* Consider lower words only if these are equal. */
9315 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9317 if (comp == const_true_rtx)
9318 emit_jump (if_false_label);
9319 else if (comp != const0_rtx)
9320 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9324 emit_jump (if_false_label);
9325 if (drop_through_label)
9326 emit_label (drop_through_label);
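/* A plain-C sketch (deliberately not compiled) of the word-at-a-time
   strategy used above: the most significant words are compared first, a
   differing word decides the result, and equal words defer to the next
   lower word.  A and B are assumed to hold NWORDS words, most significant
   first.  */
#if 0
static int
example_multiword_gtu (a, b, nwords)
     unsigned long *a, *b;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (a[i] > b[i])
	return 1;		/* A higher word is greater: A > B.  */
      if (a[i] != b[i])
	return 0;		/* A higher word is smaller: A < B.  */
      /* Equal so far; consider the lower words.  */
    }
  return 0;			/* All words equal.  */
}
#endif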
9329 /* Compare OP0 with OP1, word at a time, in mode MODE.
9330 UNSIGNEDP says to do unsigned comparison.
9331 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9334 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9335 enum machine_mode mode;
9338 rtx if_false_label, if_true_label;
9340 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9341 rtx drop_through_label = 0;
9344 if (! if_true_label || ! if_false_label)
9345 drop_through_label = gen_label_rtx ();
9346 if (! if_true_label)
9347 if_true_label = drop_through_label;
9348 if (! if_false_label)
9349 if_false_label = drop_through_label;
9351 /* Compare a word at a time, high order first. */
9352 for (i = 0; i < nwords; i++)
9355 rtx op0_word, op1_word;
9357 if (WORDS_BIG_ENDIAN)
9359 op0_word = operand_subword_force (op0, i, mode);
9360 op1_word = operand_subword_force (op1, i, mode);
9364 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9365 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9368 /* All but the high-order word must be compared as unsigned.  */
9369 comp = compare_from_rtx (op0_word, op1_word,
9370 (unsignedp || i > 0) ? GTU : GT,
9371 unsignedp, word_mode, NULL_RTX, 0);
9372 if (comp == const_true_rtx)
9373 emit_jump (if_true_label);
9374 else if (comp != const0_rtx)
9375 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9377 /* Consider lower words only if these are equal. */
9378 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9380 if (comp == const_true_rtx)
9381 emit_jump (if_false_label);
9382 else if (comp != const0_rtx)
9383 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9387 emit_jump (if_false_label);
9388 if (drop_through_label)
9389 emit_label (drop_through_label);
9392 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9393 with one insn, test the comparison and jump to the appropriate label. */
9396 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9398 rtx if_false_label, if_true_label;
9400 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9401 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9402 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9403 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9405 rtx drop_through_label = 0;
9407 if (! if_false_label)
9408 drop_through_label = if_false_label = gen_label_rtx ();
9410 for (i = 0; i < nwords; i++)
9412 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9413 operand_subword_force (op1, i, mode),
9414 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9415 word_mode, NULL_RTX, 0);
9416 if (comp == const_true_rtx)
9417 emit_jump (if_false_label);
9418 else if (comp != const0_rtx)
9419 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9423 emit_jump (if_true_label);
9424 if (drop_through_label)
9425 emit_label (drop_through_label);
9428 /* Jump according to whether OP0 is 0.
9429 We assume that OP0 has an integer mode that is too wide
9430 for the available compare insns. */
9433 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9435 rtx if_false_label, if_true_label;
9437 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9439 rtx drop_through_label = 0;
9441 if (! if_false_label)
9442 drop_through_label = if_false_label = gen_label_rtx ();
9444 for (i = 0; i < nwords; i++)
9446 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9448 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9449 if (comp == const_true_rtx)
9450 emit_jump (if_false_label);
9451 else if (comp != const0_rtx)
9452 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9456 emit_jump (if_true_label);
9457 if (drop_through_label)
9458 emit_label (drop_through_label);
9461 /* Given a comparison expression in rtl form, output conditional branches to
9462 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9465 do_jump_for_compare (comparison, if_false_label, if_true_label)
9466 rtx comparison, if_false_label, if_true_label;
9470 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9471 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9476 emit_jump (if_false_label);
9478 else if (if_false_label)
9481 rtx prev = get_last_insn ();
9484 /* Output the branch with the opposite condition. Then try to invert
9485 what is generated. If more than one insn is a branch, or if the
9486 branch is not the last insn written, abort. If we can't invert
9487 the branch, make a true label, redirect this jump to that,
9488 emit a jump to the false label and define the true label. */
9490 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9491 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9495 /* Here we get the first insn that was just emitted. It used to be the
9496 case that, on some machines, emitting the branch would discard
9497 the previous compare insn and emit a replacement. This isn't
9498 done anymore, but abort if we see that PREV is deleted. */
9501 insn = get_insns ();
9502 else if (INSN_DELETED_P (prev))
9505 insn = NEXT_INSN (prev);
9507 for (; insn; insn = NEXT_INSN (insn))
9508 if (GET_CODE (insn) == JUMP_INSN)
9515 if (branch != get_last_insn ())
9518 JUMP_LABEL (branch) = if_false_label;
9519 if (! invert_jump (branch, if_false_label))
9521 if_true_label = gen_label_rtx ();
9522 redirect_jump (branch, if_true_label);
9523 emit_jump (if_false_label);
9524 emit_label (if_true_label);
9529 /* Generate code for a comparison expression EXP
9530 (including code to compute the values to be compared)
9531 and set (CC0) according to the result.
9532 SIGNED_CODE should be the rtx operation for this comparison for
9533 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9535 We force a stack adjustment unless there are currently
9536 things pushed on the stack that aren't yet used. */
9539 compare (exp, signed_code, unsigned_code)
9541 enum rtx_code signed_code, unsigned_code;
9544 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9546 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9547 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9548 register enum machine_mode mode = TYPE_MODE (type);
9549 int unsignedp = TREE_UNSIGNED (type);
9550 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9552 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9554 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9555 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9558 /* Like compare but expects the values to compare as two rtx's.
9559 The decision as to signed or unsigned comparison must be made by the caller.
9561 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9564 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9565 size of MODE should be used. */
9568 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9569 register rtx op0, op1;
9572 enum machine_mode mode;
9578 /* If one operand is constant, make it the second one. Only do this
9579 if the other operand is not constant as well. */
9581 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9582 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9587 code = swap_condition (code);
9592 op0 = force_not_mem (op0);
9593 op1 = force_not_mem (op1);
9596 do_pending_stack_adjust ();
9598 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9599 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9603 /* There's no need to do this now that combine.c can eliminate lots of
9604 sign extensions. This can be less efficient in certain cases on other
9607 /* If this is a signed equality comparison, we can do it as an
9608 unsigned comparison since zero-extension is cheaper than sign
9609 extension and comparisons with zero are done as unsigned. This is
9610 the case even on machines that can do fast sign extension, since
9611 zero-extension is easier to combine with other operations than
9612 sign-extension is. If we are comparing against a constant, we must
9613 convert it to what it would look like unsigned. */
9614 if ((code == EQ || code == NE) && ! unsignedp
9615 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9617 if (GET_CODE (op1) == CONST_INT
9618 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9619 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9624 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9626 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
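/* The rtx built here is a comparison such as (eq (cc0) (const_int 0));
   do_jump_for_compare hands it straight to the conditional-branch
   generators in bcc_gen_fctn.  */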
9629 /* Generate code to calculate EXP using a store-flag instruction
9630 and return an rtx for the result. EXP is either a comparison
9631 or a TRUTH_NOT_EXPR whose operand is a comparison.
9633 If TARGET is nonzero, store the result there if convenient.
9635 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9638 Return zero if there is no suitable set-flag instruction
9639 available on this machine.
9641 Once expand_expr has been called on the arguments of the comparison,
9642 we are committed to doing the store flag, since it is not safe to
9643 re-evaluate the expression. We emit the store-flag insn by calling
9644 emit_store_flag, but only expand the arguments if we have a reason
9645 to believe that emit_store_flag will be successful. If we think that
9646 it will, but it isn't, we have to simulate the store-flag with a
9647 set/jump/set sequence. */
9650 do_store_flag (exp, target, mode, only_cheap)
9653 enum machine_mode mode;
9657 tree arg0, arg1, type;
9659 enum machine_mode operand_mode;
9663 enum insn_code icode;
9664 rtx subtarget = target;
9665 rtx result, label, pattern, jump_pat;
9667 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9668 result at the end. We can't simply invert the test since it would
9669 have already been inverted if it were valid. This case occurs for
9670 some floating-point comparisons. */
9672 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9673 invert = 1, exp = TREE_OPERAND (exp, 0);
9675 arg0 = TREE_OPERAND (exp, 0);
9676 arg1 = TREE_OPERAND (exp, 1);
9677 type = TREE_TYPE (arg0);
9678 operand_mode = TYPE_MODE (type);
9679 unsignedp = TREE_UNSIGNED (type);
9681 /* We won't bother with BLKmode store-flag operations because it would mean
9682 passing a lot of information to emit_store_flag. */
9683 if (operand_mode == BLKmode)
9689 /* Get the rtx comparison code to use. We know that EXP is a comparison
9690 operation of some type. Some comparisons against 1 and -1 can be
9691 converted to comparisons with zero. Do so here so that the tests
9692 below will be aware that we have a comparison with zero. These
9693 tests will not catch constants in the first operand, but constants
9694 are rarely passed as the first operand. */
9696 switch (TREE_CODE (exp))
9705 if (integer_onep (arg1))
9706 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9708 code = unsignedp ? LTU : LT;
9711 if (! unsignedp && integer_all_onesp (arg1))
9712 arg1 = integer_zero_node, code = LT;
9714 code = unsignedp ? LEU : LE;
9717 if (! unsignedp && integer_all_onesp (arg1))
9718 arg1 = integer_zero_node, code = GE;
9720 code = unsignedp ? GTU : GT;
9723 if (integer_onep (arg1))
9724 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9726 code = unsignedp ? GEU : GE;
9732 /* Put a constant second. */
9733 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9735 tem = arg0; arg0 = arg1; arg1 = tem;
9736 code = swap_condition (code);
9739 /* If this is an equality or inequality test of a single bit, we can
9740 do this by shifting the bit being tested to the low-order bit and
9741 masking the result with the constant 1. If the condition was EQ,
9742 we xor it with 1. This does not require an scc insn and is faster
9743 than an scc insn even if we have it. */
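/* For instance, `(x & 0x20) != 0' on a full-word `x' gives bitnum == 5
   below: `x' is shifted right by 5 and masked with 1 (the AND is dropped
   only when the tested bit is the sign bit), and for the EQ form the
   result is additionally xor'ed with 1.  */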
9745 if ((code == NE || code == EQ)
9746 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9747 && integer_pow2p (TREE_OPERAND (arg0, 1))
9748 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9750 tree inner = TREE_OPERAND (arg0, 0);
9751 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9752 NULL_RTX, VOIDmode, 0)));
9755 /* If INNER is a right shift of a constant and it plus BITNUM does
9756 not overflow, adjust BITNUM and INNER. */
9758 if (TREE_CODE (inner) == RSHIFT_EXPR
9759 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9760 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9761 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9762 < TYPE_PRECISION (type)))
9764 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9765 inner = TREE_OPERAND (inner, 0);
9768 /* If we are going to be able to omit the AND below, we must do our
9769 operations as unsigned. If we must use the AND, we have a choice.
9770 Normally unsigned is faster, but for some machines signed is. */
9771 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9772 #ifdef LOAD_EXTEND_OP
9773 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9779 if (subtarget == 0 || GET_CODE (subtarget) != REG
9780 || GET_MODE (subtarget) != operand_mode
9781 || ! safe_from_p (subtarget, inner))
9784 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9787 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9788 size_int (bitnum), subtarget, ops_unsignedp);
9790 if (GET_MODE (op0) != mode)
9791 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9793 if ((code == EQ && ! invert) || (code == NE && invert))
9794 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9795 ops_unsignedp, OPTAB_LIB_WIDEN);
9797 /* Put the AND last so it can combine with more things. */
9798 if (bitnum != TYPE_PRECISION (type) - 1)
9799 op0 = expand_and (op0, const1_rtx, subtarget);
9804 /* Now see if we are likely to be able to do this. Return if not. */
9805 if (! can_compare_p (operand_mode))
9807 icode = setcc_gen_code[(int) code];
9808 if (icode == CODE_FOR_nothing
9809 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9811 /* We can only do this if it is one of the special cases that
9812 can be handled without an scc insn. */
9813 if ((code == LT && integer_zerop (arg1))
9814 || (! only_cheap && code == GE && integer_zerop (arg1)))
9816 else if (BRANCH_COST >= 0
9817 && ! only_cheap && (code == NE || code == EQ)
9818 && TREE_CODE (type) != REAL_TYPE
9819 && ((abs_optab->handlers[(int) operand_mode].insn_code
9820 != CODE_FOR_nothing)
9821 || (ffs_optab->handlers[(int) operand_mode].insn_code
9822 != CODE_FOR_nothing)))
9828 preexpand_calls (exp);
9829 if (subtarget == 0 || GET_CODE (subtarget) != REG
9830 || GET_MODE (subtarget) != operand_mode
9831 || ! safe_from_p (subtarget, arg1))
9834 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9835 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9838 target = gen_reg_rtx (mode);
9840 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9841 because, if emit_store_flag does anything, it will succeed and
9842 OP0 and OP1 will not be used subsequently. */
9844 result = emit_store_flag (target, code,
9845 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9846 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9847 operand_mode, unsignedp, 1);
9852 result = expand_binop (mode, xor_optab, result, const1_rtx,
9853 result, 0, OPTAB_LIB_WIDEN);
9857 /* If this failed, we have to do this with set/compare/jump/set code. */
9858 if (target == 0 || GET_CODE (target) != REG
9859 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9860 target = gen_reg_rtx (GET_MODE (target));
9862 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9863 result = compare_from_rtx (op0, op1, code, unsignedp,
9864 operand_mode, NULL_RTX, 0);
9865 if (GET_CODE (result) == CONST_INT)
9866 return (((result == const0_rtx && ! invert)
9867 || (result != const0_rtx && invert))
9868 ? const0_rtx : const1_rtx);
9870 label = gen_label_rtx ();
9871 if (bcc_gen_fctn[(int) code] == 0)
9874 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9875 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9881 /* Generate a tablejump instruction (used for switch statements). */
9883 #ifdef HAVE_tablejump
9885 /* INDEX is the value being switched on, with the lowest value
9886 in the table already subtracted.
9887 MODE is its expected mode (needed if INDEX is constant).
9888 RANGE is the length of the jump table.
9889 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9891 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9892 index value is out of range. */
9895 do_tablejump (index, mode, range, table_label, default_label)
9896 rtx index, range, table_label, default_label;
9897 enum machine_mode mode;
9899 register rtx temp, vector;
9901 /* Do an unsigned comparison (in the proper mode) between the index
9902 expression and the value which represents the length of the range.
9903 Since we just finished subtracting the lower bound of the range
9904 from the index expression, this comparison allows us to simultaneously
9905 check that the original index expression value is both greater than
9906 or equal to the minimum value of the range and less than or equal to
9907 the maximum value of the range. */
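/* For example, a switch whose case values run from 5 to 12 arrives here
   with INDEX = value - 5 and RANGE = 7; the single unsigned test
   `INDEX > 7' rejects value 4 (which wrapped around to a huge unsigned
   number) just as it rejects value 13.  */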
9909 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9910 emit_jump_insn (gen_bgtu (default_label));
9912 /* If index is in range, it must fit in Pmode.
9913 Convert to Pmode so we can index with it. */
9915 index = convert_to_mode (Pmode, index, 1);
9917 /* Don't let a MEM slip through, because then INDEX that comes
9918 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9919 and break_out_memory_refs will go to work on it and mess it up. */
9920 #ifdef PIC_CASE_VECTOR_ADDRESS
9921 if (flag_pic && GET_CODE (index) != REG)
9922 index = copy_to_mode_reg (Pmode, index);
9925 /* If flag_force_addr were to affect this address
9926 it could interfere with the tricky assumptions made
9927 about addresses that contain label-refs,
9928 which may be valid only very near the tablejump itself. */
9929 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9930 GET_MODE_SIZE, because this indicates how large insns are. The other
9931 uses should all be Pmode, because they are addresses. This code
9932 could fail if addresses and insns are not the same size. */
9933 index = gen_rtx (PLUS, Pmode,
9934 gen_rtx (MULT, Pmode, index,
9935 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9936 gen_rtx (LABEL_REF, Pmode, table_label));
9937 #ifdef PIC_CASE_VECTOR_ADDRESS
9939 index = PIC_CASE_VECTOR_ADDRESS (index);
9942 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9943 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9944 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9945 RTX_UNCHANGING_P (vector) = 1;
9946 convert_move (temp, vector, 0);
9948 emit_jump_insn (gen_tablejump (temp, table_label));
9950 #ifndef CASE_VECTOR_PC_RELATIVE
9951 /* If we are generating PIC code or if the table is PC-relative, the
9952 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9958 #endif /* HAVE_tablejump */
9961 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9962 to that value is on the top of the stack. The resulting type is TYPE, and
9963 the source declaration is DECL. */
9966 bc_load_memory (type, decl)
9969 enum bytecode_opcode opcode;
9972 /* Bit fields are special. We only know about signed and
9973 unsigned ints, and enums. The latter are treated as ints.  */
9976 if (DECL_BIT_FIELD (decl))
9977 if (TREE_CODE (type) == ENUMERAL_TYPE
9978 || TREE_CODE (type) == INTEGER_TYPE)
9979 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9983 /* See corresponding comment in bc_store_memory(). */
9984 if (TYPE_MODE (type) == BLKmode
9985 || TYPE_MODE (type) == VOIDmode)
9988 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9990 if (opcode == neverneverland)
9993 bc_emit_bytecode (opcode);
9995 #ifdef DEBUG_PRINT_CODE
9996 fputc ('\n', stderr);
10001 /* Store the contents of the second stack slot to the address in the
10002 top stack slot. DECL is the declaration of the destination and is used
10003 to determine whether we're dealing with a bitfield. */
10006 bc_store_memory (type, decl)
10009 enum bytecode_opcode opcode;
10012 if (DECL_BIT_FIELD (decl))
10014 if (TREE_CODE (type) == ENUMERAL_TYPE
10015 || TREE_CODE (type) == INTEGER_TYPE)
10021 if (TYPE_MODE (type) == BLKmode)
10023 /* Copy structure. This expands to a block copy instruction, storeBLK.
10024 In addition to the arguments expected by the other store instructions,
10025 it also expects a type size (SImode) on top of the stack, which is the
10026 structure size in size units (usually bytes). The first two arguments
10027 are already on the stack, so we just put the size on level 1. For some
10028 other languages the size may be variable; this is why we don't encode
10029 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10031 bc_expand_expr (TYPE_SIZE (type));
10035 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10037 if (opcode == neverneverland)
10040 bc_emit_bytecode (opcode);
10042 #ifdef DEBUG_PRINT_CODE
10043 fputc ('\n', stderr);
10048 /* Allocate local stack space sufficient to hold a value of the given
10049 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10050 integral power of 2. A special case is locals of type VOID, which
10051 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
10052 remapped into the corresponding attribute of SI. */
10055 bc_allocate_local (size, alignment)
10056 int size, alignment;
10059 int byte_alignment;
10064 /* Normalize size and alignment */
10066 size = UNITS_PER_WORD;
10068 if (alignment < BITS_PER_UNIT)
10069 byte_alignment = 1 << (INT_ALIGN - 1);
10072 byte_alignment = alignment / BITS_PER_UNIT;
10074 if (local_vars_size & (byte_alignment - 1))
10075 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
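/* E.g. with local_vars_size == 6 and byte_alignment == 4 the new local
   ends up at offset 8.  */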
10077 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10078 local_vars_size += size;
10084 /* Allocate variable-sized local array. Variable-sized arrays are
10085 actually represented as pointers to the memory where they are stored. */
10088 bc_allocate_variable_array (size)
10092 const int ptralign = (1 << (PTR_ALIGN - 1));
10094 /* Align pointer */
10095 if (local_vars_size & ptralign)
10096 local_vars_size += ptralign - (local_vars_size & ptralign);
10098 /* Note down local space needed: pointer to block; also return
10101 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10102 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10107 /* Push the machine address for the given external variable offset. */
10109 bc_load_externaddr (externaddr)
10112 bc_emit_bytecode (constP);
10113 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10114 BYTECODE_BC_LABEL (externaddr)->offset);
10116 #ifdef DEBUG_PRINT_CODE
10117 fputc ('\n', stderr);
10126 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10132 /* Like above, but expects an IDENTIFIER. */
10134 bc_load_externaddr_id (id, offset)
10138 if (!IDENTIFIER_POINTER (id))
10141 bc_emit_bytecode (constP);
10142 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10144 #ifdef DEBUG_PRINT_CODE
10145 fputc ('\n', stderr);
10150 /* Push the machine address for the given local variable offset. */
10152 bc_load_localaddr (localaddr)
10155 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10159 /* Push the machine address for the given parameter offset.
10160 NOTE: offset is in bits. */
10162 bc_load_parmaddr (parmaddr)
10165 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10170 /* Convert a[i] into *(a + i). */
10172 bc_canonicalize_array_ref (exp)
10175 tree type = TREE_TYPE (exp);
10176 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10177 TREE_OPERAND (exp, 0));
10178 tree index = TREE_OPERAND (exp, 1);
10181 /* Convert the integer argument to a type the same size as a pointer
10182 so the multiply won't overflow spuriously. */
10184 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10185 index = convert (type_for_size (POINTER_SIZE, 0), index);
10187 /* The array address isn't volatile even if the array is.
10188 (Of course this isn't terribly relevant since the bytecode
10189 translator treats nearly everything as volatile anyway.) */
10190 TREE_THIS_VOLATILE (array_adr) = 0;
10192 return build1 (INDIRECT_REF, type,
10193 fold (build (PLUS_EXPR,
10194 TYPE_POINTER_TO (type),
10196 fold (build (MULT_EXPR,
10197 TYPE_POINTER_TO (type),
10199 size_in_bytes (type))))));
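/* In effect, for `int a[10]' the reference `a[i]' is rewritten as
   `*(int *) ((char *) &a + i * sizeof (int))', with `i' first widened to
   pointer width so the multiply will not overflow spuriously.  */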
10203 /* Load the address of the component referenced by the given
10204 COMPONENT_REF expression.
10206 Returns innermost lvalue. */
10209 bc_expand_component_address (exp)
10213 enum machine_mode mode;
10215 HOST_WIDE_INT SIval;
10218 tem = TREE_OPERAND (exp, 1);
10219 mode = DECL_MODE (tem);
10222 /* Compute cumulative bit offset for nested component refs
10223 and array refs, and find the ultimate containing object. */
10225 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10227 if (TREE_CODE (tem) == COMPONENT_REF)
10228 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10230 if (TREE_CODE (tem) == ARRAY_REF
10231 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10232 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10234 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10235 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10236 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10241 bc_expand_expr (tem);
10244 /* For bitfields also push their offset and size */
10245 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10246 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10248 if (SIval = bitpos / BITS_PER_UNIT)
10249 bc_emit_instruction (addconstPSI, SIval);
10251 return (TREE_OPERAND (exp, 1));
10255 /* Emit code to push two SI constants */
10257 bc_push_offset_and_size (offset, size)
10258 HOST_WIDE_INT offset, size;
10260 bc_emit_instruction (constSI, offset);
10261 bc_emit_instruction (constSI, size);
10265 /* Emit byte code to push the address of the given lvalue expression to
10266 the stack. If it's a bit field, we also push offset and size info.
10268 Returns innermost component, which allows us to determine not only
10269 its type, but also whether it's a bitfield. */
10272 bc_expand_address (exp)
10276 if (!exp || TREE_CODE (exp) == ERROR_MARK)
10280 switch (TREE_CODE (exp))
10284 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10286 case COMPONENT_REF:
10288 return (bc_expand_component_address (exp));
10292 bc_expand_expr (TREE_OPERAND (exp, 0));
10294 /* For variable-sized types: retrieve the pointer.  The TYPE_SIZE
10295 tree is sometimes NULL (it is not clear whether that is a bug), so
10296 also make sure we actually have an operand before looking at it.  */
10298 if (TREE_OPERAND (exp, 0)
10299 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10301 bc_emit_instruction (loadP);
10303 /* If packed, also return offset and size */
10304 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10306 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10307 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10309 return (TREE_OPERAND (exp, 0));
10311 case FUNCTION_DECL:
10313 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10314 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
10319 bc_load_parmaddr (DECL_RTL (exp));
10321 /* For variable-sized types: retrieve pointer */
10322 if (TYPE_SIZE (TREE_TYPE (exp))
10323 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10324 bc_emit_instruction (loadP);
10326 /* If packed, also return offset and size */
10327 if (DECL_BIT_FIELD (exp))
10328 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10329 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10335 bc_emit_instruction (returnP);
10341 if (BYTECODE_LABEL (DECL_RTL (exp)))
10342 bc_load_externaddr (DECL_RTL (exp));
10345 if (DECL_EXTERNAL (exp))
10346 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10347 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10349 bc_load_localaddr (DECL_RTL (exp));
10351 /* For variable-sized types: retrieve pointer */
10352 if (TYPE_SIZE (TREE_TYPE (exp))
10353 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10354 bc_emit_instruction (loadP);
10356 /* If packed, also return offset and size */
10357 if (DECL_BIT_FIELD (exp))
10358 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10359 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10367 bc_emit_bytecode (constP);
10368 r = output_constant_def (exp);
10369 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10371 #ifdef DEBUG_PRINT_CODE
10372 fputc ('\n', stderr);
10383 /* Most lvalues don't have components. */
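/* Summary sketch of the cases above (illustrative, not exhaustive): a
   local VAR_DECL has its frame address pushed with bc_load_localaddr
   (a `localP' instruction), a PARM_DECL with bc_load_parmaddr (`argP'),
   an external object or FUNCTION_DECL with bc_load_externaddr_id
   (`constP' plus a label reference), and for any DECL_BIT_FIELD the bit
   offset and size are pushed on top of the address.  */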
10388 /* Emit a type code to be used by the runtime support in handling
10389 parameter passing. The type code consists of the machine mode
10390 bitwise-OR'd with the minimal alignment shifted left 8 bits. */
10393 bc_runtime_type_code (type)
10398 switch (TREE_CODE (type))
10404 case ENUMERAL_TYPE:
10408 val = (int) TYPE_MODE (type) | (TYPE_ALIGN (type) << 8);
10420 return build_int_2 (val, 0);
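/* A minimal sketch, under the assumption that the machine mode always fits
   in the low 8 bits, of how runtime support code could take the type code
   built above apart again.  This helper is illustrative only and is not
   part of GNU CC.  */
#if 0
static void
bc_decode_type_code (code, mode, align)
     int code;
     enum machine_mode *mode;
     unsigned int *align;
{
  *mode = (enum machine_mode) (code & 0xff);	/* machine mode in low byte */
  *align = (unsigned int) code >> 8;		/* minimal alignment above it */
}
#endif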
10424 /* Generate constructor label */
10426 bc_gen_constr_label ()
10428 static int label_counter;
10429 static char label[20];
10431 sprintf (label, "*LR%d", label_counter++);
10433 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10437 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10438 expand the constructor data as static data, and push a pointer to it.
10439 The pointer is put in the pointer table and is retrieved by a constP
10440 bytecode instruction. We then loop and store each constructor member in
10441 the corresponding component. Finally, we return the original pointer on the stack.  */
10445 bc_expand_constructor (constr)
10449 HOST_WIDE_INT ptroffs;
10453 /* Literal constructors are handled as constants, whereas
10454 non-literals are evaluated and stored element by element
10455 into the data segment. */
10457 /* Allocate space in proper segment and push pointer to space on stack.  */
10460 l = bc_gen_constr_label ();
10462 if (TREE_CONSTANT (constr))
10466 bc_emit_const_labeldef (l);
10467 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
10473 bc_emit_data_labeldef (l);
10474 bc_output_data_constructor (constr);
10478 /* Add reference to pointer table and recall pointer to stack;
10479 this code is common for both types of constructors: literals
10480 and non-literals. */
10482 ptroffs = bc_define_pointer (l);
10483 bc_emit_instruction (constP, ptroffs);
10485 /* This is all that has to be done if it's a literal. */
10486 if (TREE_CONSTANT (constr))
10490 /* At this point, we have the pointer to the structure on top of the stack.
10491 Generate sequences of store_memory calls for the constructor. */
10493 /* Constructor type is structure */
10494 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10498 /* If the constructor has fewer fields than the structure,
10499 clear the whole structure first. */
10501 if (list_length (CONSTRUCTOR_ELTS (constr))
10502 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
10504 bc_emit_instruction (duplicate);
10505 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10506 bc_emit_instruction (clearBLK);
10509 /* Store each element of the constructor into the corresponding
10510 field of TARGET. */
10512 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
10514 register tree field = TREE_PURPOSE (elt);
10515 register enum machine_mode mode;
10520 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10521 mode = DECL_MODE (field);
10522 unsignedp = TREE_UNSIGNED (field);
10524 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
10526 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10527 /* The alignment of TARGET is
10528 at least what its type requires. */
10530 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10531 int_size_in_bytes (TREE_TYPE (constr)));
10536 /* Constructor type is array */
10537 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
10541 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10542 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10543 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10544 tree elttype = TREE_TYPE (TREE_TYPE (constr));
10546 /* If the constructor has fewer elements than the array,
10547 clear the whole array first. */
10549 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
10551 bc_emit_instruction (duplicate);
10552 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10553 bc_emit_instruction (clearBLK);
10557 /* Store each element of the constructor into the corresponding
10558 element of TARGET, determined by counting the elements. */
10560 for (elt = CONSTRUCTOR_ELTS (constr), i = 0; elt;
10562 elt = TREE_CHAIN (elt), i++)
10564 register enum machine_mode mode;
10569 mode = TYPE_MODE (elttype);
10570 bitsize = GET_MODE_BITSIZE (mode);
10571 unsignedp = TREE_UNSIGNED (elttype);
10573 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10574 /* * TYPE_SIZE_UNIT (elttype) */ );
10576 bc_store_field (elt, bitsize, bitpos, mode,
10577 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10578 /* The alignment of TARGET is
10579 at least what its type requires. */
10581 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10582 int_size_in_bytes (TREE_TYPE (constr)));
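/* Illustrative bytecode sequence (an assumption about a typical case): for
   a non-constant constructor that initializes only some fields of a small
   struct, the code above emits roughly

	constP <ptroffs>	; recall pointer to the static data area
	duplicate
	constSI <sizeof struct>
	clearBLK		; fewer initializers than fields: clear first
	...			; then one bc_store_field per initializer

   and, as described in the comment before the function, leaves the
   original pointer on the stack.  */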
10589 /* Store the value of EXP (an expression tree) into member FIELD of
10590 structure at address on stack, which has type TYPE, mode MODE and
10591 occupies BITSIZE bits, starting BITPOS bits from the beginning of the structure.
10594 ALIGN is the alignment that TARGET is known to have, measured in bytes.
10595 TOTAL_SIZE is its size in bytes, or -1 if variable. */
10598 bc_store_field (field, bitsize, bitpos, mode, exp, type,
10599 value_mode, unsignedp, align, total_size)
10600 int bitsize, bitpos;
10601 enum machine_mode mode;
10602 tree field, exp, type;
10603 enum machine_mode value_mode;
10609 /* Expand expression and copy pointer */
10610 bc_expand_expr (exp);
10611 bc_emit_instruction (over);
10614 /* If the component is a bit field, we cannot use addressing to access
10615 it. Use bit-field techniques to store in it. */
10617 if (DECL_BIT_FIELD (field))
10619 bc_store_bit_field (bitpos, bitsize, unsignedp);
10623 /* Not bit field */
10625 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10627 /* Advance pointer to the desired member */
10629 bc_emit_instruction (addconstPSI, offset);
10632 bc_store_memory (type, field);
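/* Illustration of the two paths above (assumption about concrete numbers):
   storing into an ordinary `int' member at byte offset 8 advances the
   copied pointer with `addconstPSI 8' and then uses bc_store_memory,
   whereas storing into a bit field leaves the pointer alone and instead
   pushes the field's bit offset and width before the bit-field store.  */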
10637 /* Store SI/SU in bitfield */
10639 bc_store_bit_field (offset, size, unsignedp)
10640 int offset, size, unsignedp;
10642 /* Push bitfield offset and size */
10643 bc_push_offset_and_size (offset, size);
10646 bc_emit_instruction (sstoreBI);
10650 /* Load SI/SU from bitfield */
10652 bc_load_bit_field (offset, size, unsignedp)
10653 int offset, size, unsignedp;
10655 /* Push bitfield offset and size */
10656 bc_push_offset_and_size (offset, size);
10658 /* Load: sign-extend if signed, else zero-extend */
10659 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
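/* Illustrative emitted sequence (assumption about concrete operands): for
   a signed bit field 3 bits wide starting 5 bits into the addressed word,
   bc_load_bit_field (5, 3, 0) emits

	constSI 5		; bit offset
	constSI 3		; bit width
	sxloadBI		; sign-extending load

   with zxloadBI substituted when the field is unsigned, and sstoreBI used
   by bc_store_bit_field for the corresponding store.  */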
10663 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
10664 (adjust stack pointer upwards), negative means add that number of
10665 levels (adjust the stack pointer downwards). Only positive values
10666 normally make sense. */
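/* Usage sketch (assumption, not taken from a caller in this file): code
   that wants to discard two leftover operands would call
   bc_adjust_stack (2); small counts are emitted as individual `drop'
   instructions, anything larger as a single `adjstackSI' with the level
   count as operand.  */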
10669 bc_adjust_stack (nlevels)
10678 bc_emit_instruction (drop);
10681 bc_emit_instruction (drop);
10686 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10687 stack_depth -= nlevels;
10690 #if defined (VALIDATE_STACK_FOR_BC)
10691 VALIDATE_STACK_FOR_BC ();