1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "insn-flags.h"
30 #include "insn-codes.h"
32 #include "insn-config.h"
35 #include "typeclass.h"
38 #include "bc-opcode.h"
39 #include "bc-typecd.h"
/* Integer ceiling of X/Y.  Both macro arguments are evaluated more than
   once, so operands must not have side effects.  */
44 #define CEIL(x,y) (((x) + (y) - 1) / (y))
46 /* Decide whether a function's arguments should be processed
47 from first to last or from last to first.
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
54 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
55 #define PUSH_ARGS_REVERSED /* If it's last to first */
60 #ifndef STACK_PUSH_CODE
61 #ifdef STACK_GROWS_DOWNWARD
62 #define STACK_PUSH_CODE PRE_DEC
/* NOTE(review): the #else pairing the PRE_INC case with the PRE_DEC case
   above is not visible in this excerpt -- confirm against the full file.  */
64 #define STACK_PUSH_CODE PRE_INC
68 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
69 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
71 /* If this is nonzero, we do not bother generating VOLATILE
72 around volatile memory references, and we are willing to
73 output indirect addresses. If cse is to follow, we reject
74 indirect addresses so a useful potential cse is generated;
75 if it is used only once, instruction combination will produce
76 the same indirect address eventually. */
79 /* Nonzero to generate code for all the subroutines within an
80 expression before generating the upper levels of the expression.
81 Nowadays this is never zero. */
82 int do_preexpand_calls = 1;
84 /* Number of units that we should eventually pop off the stack.
85 These are the arguments to function calls that have already returned. */
86 int pending_stack_adjust;
88 /* Nonzero means stack pops must not be deferred, and deferred stack
89 pops must not be output. It is nonzero inside a function call,
90 inside a conditional expression, inside a statement expression,
91 and in other cases as well. */
92 int inhibit_defer_pop;
94 /* A list of all cleanups which belong to the arguments of
95 function calls being expanded by expand_call. */
96 tree cleanups_this_call;
98 /* When temporaries are created by TARGET_EXPRs, they are created at
99 this level of temp_slot_level, so that they can remain allocated
100 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
102 int target_temp_slot_level;
104 /* Nonzero means __builtin_saveregs has already been done in this function.
105 The value is the pseudoreg containing the value __builtin_saveregs
107 static rtx saveregs_value;
109 /* Similarly for __builtin_apply_args. */
110 static rtx apply_args_value;
112 /* This structure is used by move_by_pieces to describe the move to
115 struct move_by_pieces
/* NOTE(review): only a fragment of struct move_by_pieces is visible in
   this excerpt; the remaining members are elided.  */
125 int explicit_inc_from;
132 /* Used to generate bytecodes: keep track of size of local variables,
133 as well as depth of arithmetic stack. (Notice that variables are
134 stored on the machine's stack, not the arithmetic stack.) */
136 extern int local_vars_size;
137 extern int stack_depth;
138 extern int max_stack_depth;
139 extern struct obstack permanent_obstack;
/* Forward declarations for the static helpers and the bytecode (bc_*)
   entry points defined later in this file.  PROTO() wraps ANSI prototypes
   so the file also compiles with pre-ANSI compilers.  */
142 static rtx enqueue_insn PROTO((rtx, rtx));
143 static int queued_subexp_p PROTO((rtx));
144 static void init_queue PROTO((void));
145 static void move_by_pieces PROTO((rtx, rtx, int, int));
146 static int move_by_pieces_ninsns PROTO((unsigned int, int));
147 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
148 struct move_by_pieces *));
149 static void store_constructor PROTO((tree, rtx));
150 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
151 enum machine_mode, int, int, int));
152 static int get_inner_unaligned_p PROTO((tree));
153 static tree save_noncopied_parts PROTO((tree, tree));
154 static tree init_noncopied_parts PROTO((tree, tree));
155 static int safe_from_p PROTO((rtx, tree));
156 static int fixed_type_p PROTO((tree));
157 static int get_pointer_alignment PROTO((tree, unsigned));
158 static tree string_constant PROTO((tree, tree *));
159 static tree c_strlen PROTO((tree));
160 static rtx expand_builtin PROTO((tree, rtx, rtx,
161 enum machine_mode, int));
162 static int apply_args_size PROTO((void));
163 static int apply_result_size PROTO((void));
164 static rtx result_vector PROTO((int, rtx));
165 static rtx expand_builtin_apply_args PROTO((void));
166 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
167 static void expand_builtin_return PROTO((rtx));
168 static rtx expand_increment PROTO((tree, int));
169 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
170 tree bc_runtime_type_code PROTO((tree));
171 rtx bc_allocate_local PROTO((int, int));
172 void bc_store_memory PROTO((tree, tree));
173 tree bc_expand_component_address PROTO((tree));
174 tree bc_expand_address PROTO((tree));
175 void bc_expand_constructor PROTO((tree));
176 void bc_adjust_stack PROTO((int));
177 tree bc_canonicalize_array_ref PROTO((tree));
178 void bc_load_memory PROTO((tree, tree));
179 void bc_load_externaddr PROTO((rtx));
180 void bc_load_externaddr_id PROTO((tree, int));
181 void bc_load_localaddr PROTO((rtx));
182 void bc_load_parmaddr PROTO((rtx));
183 static void preexpand_calls PROTO((tree));
184 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
185 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
186 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
187 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
188 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
189 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
190 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
191 static tree defer_cleanups_to PROTO((tree));
192 extern void (*interim_eh_hook) PROTO((tree));
194 /* Record for each mode whether we can move a register directly to or
195 from an object of that mode in memory. If we can't, we won't try
196 to use that mode directly when accessing a field of that mode. */
198 static char direct_load[NUM_MACHINE_MODES];
199 static char direct_store[NUM_MACHINE_MODES];
201 /* MOVE_RATIO is the number of move instructions that is better than
/* MOVE_RATIO only gets this generous default when a block-move pattern
   (movstrMM) exists; the #else/#endif arms are elided in this excerpt.  */
205 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
208 /* A value of around 6 would minimize code size; infinity would minimize
210 #define MOVE_RATIO 15
214 /* This array records the insn_code of insns to perform block moves. */
215 enum insn_code movstr_optab[NUM_MACHINE_MODES];
217 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
219 #ifndef SLOW_UNALIGNED_ACCESS
220 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
223 /* Register mappings for target machines without register windows. */
224 #ifndef INCOMING_REGNO
225 #define INCOMING_REGNO(OUT) (OUT)
227 #ifndef OUTGOING_REGNO
228 #define OUTGOING_REGNO(IN) (IN)
231 /* Maps used to convert modes to const, load, and store bytecodes. */
232 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
233 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
234 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
236 /* Initialize maps used to convert modes to const, load, and store
/* First default every mode's entry to the sentinel opcode, then let
   modemap.def override the entries for modes the bytecode interpreter
   actually supports, by expanding DEF_MODEMAP once per supported mode.  */
239 bc_init_mode_to_opcode_maps ()
243 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
244 mode_to_const_map[mode] =
245 mode_to_load_map[mode] =
246 mode_to_store_map[mode] = neverneverland;
248 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
249 mode_to_const_map[(int) SYM] = CONST; \
250 mode_to_load_map[(int) SYM] = LOAD; \
251 mode_to_store_map[(int) SYM] = STORE;
253 #include "modemap.def"
257 /* This is run once per compilation to set up which modes can be used
258 directly in memory and to initialize the block move optab. */
/* NOTE(review): the function header line itself is elided in this
   excerpt; the GCC routine carrying this comment is init_expr_once.  */
264 enum machine_mode mode;
265 /* Try indexing by frame ptr and try by stack ptr.
266 It is known that on the Convex the stack ptr isn't a valid index.
267 With luck, one or the other is valid on any machine. */
268 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
269 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
/* Build a scratch (set 0 0) insn whose operands are overwritten below,
   then ask recog() whether each candidate reg<->mem move is recognized.  */
272 insn = emit_insn (gen_rtx (SET, 0, 0));
273 pat = PATTERN (insn);
275 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
276 mode = (enum machine_mode) ((int) mode + 1))
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
286 /* See if there is some register that can be used in this mode and
287 directly loaded or stored from memory. */
289 if (mode != VOIDmode && mode != BLKmode)
290 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
291 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
294 if (! HARD_REGNO_MODE_OK (regno, mode))
297 reg = gen_rtx (REG, mode, regno);
/* Probe load from MEM (stack-ptr based); the SET_SRC (pat) = mem line
   presumably precedes this but is elided from the excerpt.  */
300 SET_DEST (pat) = reg;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_load[(int) mode] = 1;
/* Probe load from MEM1 (frame-ptr based).  */
304 SET_SRC (pat) = mem1;
305 SET_DEST (pat) = reg;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_load[(int) mode] = 1;
/* Probe stores in the same two ways.  */
310 SET_DEST (pat) = mem;
311 if (recog (pat, insn, &num_clobbers) >= 0)
312 direct_store[(int) mode] = 1;
315 SET_DEST (pat) = mem1;
316 if (recog (pat, insn, &num_clobbers) >= 0)
317 direct_store[(int) mode] = 1;
324 /* This is run at the start of compiling a function. */
/* Reset the per-function expansion state declared above.  (Function
   header and some resets, e.g. saveregs_value, are elided here.)  */
331 pending_stack_adjust = 0;
332 inhibit_defer_pop = 0;
333 cleanups_this_call = 0;
335 apply_args_value = 0;
339 /* Save all variables describing the current status into the structure *P.
340 This is used before starting a nested function. */
346 /* Instead of saving the postincrement queue, empty it. */
/* Stash the outer function's expansion state in *P ...  */
349 p->pending_stack_adjust = pending_stack_adjust;
350 p->inhibit_defer_pop = inhibit_defer_pop;
351 p->cleanups_this_call = cleanups_this_call;
352 p->saveregs_value = saveregs_value;
353 p->apply_args_value = apply_args_value;
354 p->forced_labels = forced_labels;
/* ... then reinitialize the globals for the nested function, mirroring
   the per-function init above.  */
356 pending_stack_adjust = 0;
357 inhibit_defer_pop = 0;
358 cleanups_this_call = 0;
360 apply_args_value = 0;
364 /* Restore all variables describing the current status from the structure *P.
365 This is used after a nested function. */
368 restore_expr_status (p)
/* Exact inverse of save_expr_status: copy every saved field back into
   the file-scope expansion state.  */
371 pending_stack_adjust = p->pending_stack_adjust;
372 inhibit_defer_pop = p->inhibit_defer_pop;
373 cleanups_this_call = p->cleanups_this_call;
374 saveregs_value = p->saveregs_value;
375 apply_args_value = p->apply_args_value;
376 forced_labels = p->forced_labels;
379 /* Manage the queue of increment instructions to be output
380 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the singly linked list of deferred QUEUED rtxs; the link is
   the last operand of each QUEUED (see QUEUED_NEXT use in the flush
   loop below).  */
382 static rtx pending_chain;
384 /* Queue up to increment (or change) VAR later. BODY says how:
385 BODY should be the same thing you would pass to emit_insn
386 to increment right away. It will go to emit_insn later on.
388 The value is a QUEUED expression to be used in place of VAR
389 where you want to guarantee the pre-incrementation value of VAR. */
392 enqueue_insn (var, body)
/* Push a new QUEUED node on the front of the chain; the two NULL_RTX
   slots are filled in later (insn performed / pre-increment copy).  */
395 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
396 var, NULL_RTX, NULL_RTX, body, pending_chain);
397 return pending_chain;
400 /* Use protect_from_queue to convert a QUEUED expression
401 into something that you can put immediately into an instruction.
402 If the queued incrementation has not happened yet,
403 protect_from_queue returns the variable itself.
404 If the incrementation has happened, protect_from_queue returns a temp
405 that contains a copy of the old value of the variable.
407 Any time an rtx which might possibly be a QUEUED is to be put
408 into an instruction, it must be passed through protect_from_queue first.
409 QUEUED expressions are not meaningful in instructions.
411 Do not pass a value through protect_from_queue and then hold
412 on to it for a while before putting it in an instruction!
413 If the queue is flushed in between, incorrect code will result. */
416 protect_from_queue (x, modify)
420 register RTX_CODE code = GET_CODE (x);
422 #if 0 /* A QUEUED can hang around after the queue is forced out. */
423 /* Shortcut for most common case. */
424 if (pending_chain == 0)
430 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
431 use of autoincrement. Make a copy of the contents of the memory
432 location rather than a copy of the address, but not if the value is
433 of mode BLKmode. Don't modify X in place since it might be
435 if (code == MEM && GET_MODE (x) != BLKmode
436 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
438 register rtx y = XEXP (x, 0);
439 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
/* Copy the memory attributes so the fresh MEM aliases like X did.  */
441 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
442 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
443 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
/* If the queued increment already happened, grab the old contents into
   a pseudo by emitting a move just before the queued insn.  */
447 register rtx temp = gen_reg_rtx (GET_MODE (new));
448 emit_insn_before (gen_move_insn (temp, new),
454 /* Otherwise, recursively protect the subexpressions of all
455 the kinds of rtx's that can contain a QUEUED. */
458 rtx tem = protect_from_queue (XEXP (x, 0), 0);
459 if (tem != XEXP (x, 0))
465 else if (code == PLUS || code == MULT)
467 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
468 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
469 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
/* From here on X itself is a QUEUED (the non-QUEUED early returns are
   elided from this excerpt).  */
478 /* If the increment has not happened, use the variable itself. */
479 if (QUEUED_INSN (x) == 0)
480 return QUEUED_VAR (x);
481 /* If the increment has happened and a pre-increment copy exists,
483 if (QUEUED_COPY (x) != 0)
484 return QUEUED_COPY (x);
485 /* The increment has happened but we haven't set up a pre-increment copy.
486 Set one up now, and use it. */
487 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
488 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
490 return QUEUED_COPY (x);
493 /* Return nonzero if X contains a QUEUED expression:
494 if it contains anything that will be altered by a queued increment.
495 We handle only combinations of MEM, PLUS, MINUS and MULT operators
496 since memory addresses generally contain only those. */
502 register enum rtx_code code = GET_CODE (x);
/* Unary case (MEM): recurse into the address.  The switch/case labels
   dispatching on CODE are elided from this excerpt.  */
508 return queued_subexp_p (XEXP (x, 0));
/* Binary cases (PLUS, MINUS, MULT): QUEUED may hide in either operand.  */
512 return queued_subexp_p (XEXP (x, 0))
513 || queued_subexp_p (XEXP (x, 1));
518 /* Perform all the pending incrementations. */
/* Drain pending_chain front to back.  The assignment inside the while
   condition is intentional: loop until the queue is empty, recording in
   each QUEUED node the insn that actually performed it.  */
524 while (p = pending_chain)
526 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
527 pending_chain = QUEUED_NEXT (p);
538 /* Copy data from FROM to TO, where the machine modes are not the same.
539 Both modes may be integer, or both may be floating.
540 UNSIGNEDP should be nonzero if FROM is an unsigned type.
541 This causes zero-extension instead of sign-extension. */
544 convert_move (to, from, unsignedp)
545 register rtx to, from;
548 enum machine_mode to_mode = GET_MODE (to);
549 enum machine_mode from_mode = GET_MODE (from);
550 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
551 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
555 /* rtx code for making an equivalent value. */
556 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
/* Flush any QUEUED wrappers before operands go into insns.  */
558 to = protect_from_queue (to, 1);
559 from = protect_from_queue (from, 0);
/* Mixed float/integer conversion is not handled here (the abort that
   follows this test is elided from the excerpt).  */
561 if (to_real != from_real)
564 /* If FROM is a SUBREG that indicates that we have already done at least
565 the required extension, strip it. We don't handle such SUBREGs as
568 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
569 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
570 >= GET_MODE_SIZE (to_mode))
571 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
572 from = gen_lowpart (to_mode, from), from_mode = to_mode;
574 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
/* Same-mode (or constant into a known mode) needs only a plain move.  */
577 if (to_mode == from_mode
578 || (from_mode == VOIDmode && CONSTANT_P (from)))
580 emit_move_insn (to, from);
/* Floating-point widenings via target extend patterns, when present.  */
/* NOTE(review): under #ifdef HAVE_extendqfhf2 the QF->HF case below tests
   HAVE_extendqfsf2 and emits CODE_FOR_extendqfsf2 -- this looks like a
   copy-paste bug from the QF->SF case; presumably both should say
   extendqfhf2.  Confirm against a fixed GCC release before changing.  */
588 #ifdef HAVE_extendqfhf2
589 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
591 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
595 #ifdef HAVE_extendqfsf2
596 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
598 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
602 #ifdef HAVE_extendqfdf2
603 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
605 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
609 #ifdef HAVE_extendqfxf2
610 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
612 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
616 #ifdef HAVE_extendqftf2
617 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
619 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
624 #ifdef HAVE_extendhftqf2
625 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
627 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
632 #ifdef HAVE_extendhfsf2
633 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
635 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
639 #ifdef HAVE_extendhfdf2
640 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
642 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
646 #ifdef HAVE_extendhfxf2
647 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
649 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
653 #ifdef HAVE_extendhftf2
654 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
656 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
661 #ifdef HAVE_extendsfdf2
662 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
664 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
668 #ifdef HAVE_extendsfxf2
669 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
671 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
675 #ifdef HAVE_extendsftf2
676 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
678 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
682 #ifdef HAVE_extenddfxf2
683 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
685 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
689 #ifdef HAVE_extenddftf2
690 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
692 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
/* Floating-point narrowings via target truncate patterns.  */
697 #ifdef HAVE_trunchfqf2
698 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
700 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
704 #ifdef HAVE_truncsfqf2
705 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
707 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
711 #ifdef HAVE_truncdfqf2
712 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
714 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
718 #ifdef HAVE_truncxfqf2
719 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
721 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
725 #ifdef HAVE_trunctfqf2
726 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
728 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
733 #ifdef HAVE_trunctqfhf2
734 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
736 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
740 #ifdef HAVE_truncsfhf2
741 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
743 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
747 #ifdef HAVE_truncdfhf2
748 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
750 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
754 #ifdef HAVE_truncxfhf2
755 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
757 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
761 #ifdef HAVE_trunctfhf2
762 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
764 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
768 #ifdef HAVE_truncdfsf2
769 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
771 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
775 #ifdef HAVE_truncxfsf2
776 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
778 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
782 #ifdef HAVE_trunctfsf2
783 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
785 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
789 #ifdef HAVE_truncxfdf2
790 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
792 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
796 #ifdef HAVE_trunctfdf2
797 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
799 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
/* No instruction pattern matched: fall back to a library call.  The
   switch on from_mode/to_mode selecting the libfunc is elided here;
   only the selected assignments remain visible.  */
811 libcall = extendsfdf2_libfunc;
815 libcall = extendsfxf2_libfunc;
819 libcall = extendsftf2_libfunc;
828 libcall = truncdfsf2_libfunc;
832 libcall = extenddfxf2_libfunc;
836 libcall = extenddftf2_libfunc;
845 libcall = truncxfsf2_libfunc;
849 libcall = truncxfdf2_libfunc;
858 libcall = trunctfsf2_libfunc;
862 libcall = trunctfdf2_libfunc;
868 if (libcall == (rtx) 0)
869 /* This conversion is not implemented yet. */
872 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
874 emit_move_insn (to, value);
878 /* Now both modes are integers. */
880 /* Handle expanding beyond a word. */
881 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
882 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
889 enum machine_mode lowpart_mode;
890 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
892 /* Try converting directly if the insn is supported. */
893 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
896 /* If FROM is a SUBREG, put it into a register. Do this
897 so that we always generate the same set of insns for
898 better cse'ing; if an intermediate assignment occurred,
899 we won't be doing the operation directly on the SUBREG. */
900 if (optimize > 0 && GET_CODE (from) == SUBREG)
901 from = force_reg (from_mode, from);
902 emit_unop_insn (code, to, from, equiv_code);
905 /* Next, try converting via full word. */
906 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
907 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
908 != CODE_FOR_nothing))
/* Clobber TO first so the low-part write below is not treated as a
   partial update of a live register.  */
910 if (GET_CODE (to) == REG)
911 emit_insn (gen_rtx (CLOBBER, VOIDmode, to))
912 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
913 emit_unop_insn (code, to,
914 gen_lowpart (word_mode, to), equiv_code);
918 /* No special multiword conversion insn; do it by hand. */
921 /* Since we will turn this into a no conflict block, we must ensure
922 that the source does not overlap the target. */
924 if (reg_overlap_mentioned_p (to, from))
925 from = force_reg (from_mode, from);
927 /* Get a copy of FROM widened to a word, if necessary. */
928 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
929 lowpart_mode = word_mode;
931 lowpart_mode = from_mode;
933 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
935 lowpart = gen_lowpart (lowpart_mode, to);
936 emit_move_insn (lowpart, lowfrom);
938 /* Compute the value to put in each remaining word. */
940 fill_value = const0_rtx;
/* Signed case: if the target has an slt pattern producing -1/0 in a
   word, use it to materialize the sign-fill cheaply ...  */
945 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
946 && STORE_FLAG_VALUE == -1)
948 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
950 fill_value = gen_reg_rtx (word_mode);
951 emit_insn (gen_slt (fill_value));
/* ... otherwise derive the fill by arithmetic right-shifting the low
   part by (bitsize - 1), replicating the sign bit across a word.  */
957 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
958 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
960 fill_value = convert_to_mode (word_mode, fill_value, 1);
964 /* Fill the remaining words. */
965 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
967 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
968 rtx subword = operand_subword (to, index, 1, to_mode);
973 if (fill_value != subword)
974 emit_move_insn (subword, fill_value);
977 insns = get_insns ();
/* Wrap the whole sequence so later passes see a single logical
   extension of FROM into TO.  */
980 emit_no_conflict_block (insns, to, from, NULL_RTX,
981 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
985 /* Truncating multi-word to a word or less. */
986 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
987 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
989 if (!((GET_CODE (from) == MEM
990 && ! MEM_VOLATILE_P (from)
991 && direct_load[(int) to_mode]
992 && ! mode_dependent_address_p (XEXP (from, 0)))
993 || GET_CODE (from) == REG
994 || GET_CODE (from) == SUBREG))
995 from = force_reg (from_mode, from);
996 convert_move (to, gen_lowpart (word_mode, from), 0);
1000 /* Handle pointer conversion */ /* SPEE 900220 */
1001 if (to_mode == PSImode)
/* Normalize the source to SImode first, then narrow to PSImode.  */
1003 if (from_mode != SImode)
1004 from = convert_to_mode (SImode, from, unsignedp);
1006 #ifdef HAVE_truncsipsi2
1007 if (HAVE_truncsipsi2)
1009 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1012 #endif /* HAVE_truncsipsi2 */
1016 if (from_mode == PSImode)
1018 if (to_mode != SImode)
1020 from = convert_to_mode (SImode, from, unsignedp);
1025 #ifdef HAVE_extendpsisi2
1026 if (HAVE_extendpsisi2)
1028 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1031 #endif /* HAVE_extendpsisi2 */
/* Same scheme for the 64-bit partial-pointer mode PDImode.  */
1036 if (to_mode == PDImode)
1038 if (from_mode != DImode)
1039 from = convert_to_mode (DImode, from, unsignedp);
1041 #ifdef HAVE_truncdipdi2
1042 if (HAVE_truncdipdi2)
1044 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1047 #endif /* HAVE_truncdipdi2 */
1051 if (from_mode == PDImode)
1053 if (to_mode != DImode)
1055 from = convert_to_mode (DImode, from, unsignedp);
1060 #ifdef HAVE_extendpdidi2
1061 if (HAVE_extendpdidi2)
1063 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1066 #endif /* HAVE_extendpdidi2 */
1071 /* Now follow all the conversions between integers
1072 no more than a word long. */
1074 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1075 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1076 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1077 GET_MODE_BITSIZE (from_mode)))
1079 if (!((GET_CODE (from) == MEM
1080 && ! MEM_VOLATILE_P (from)
1081 && direct_load[(int) to_mode]
1082 && ! mode_dependent_address_p (XEXP (from, 0)))
1083 || GET_CODE (from) == REG
1084 || GET_CODE (from) == SUBREG))
1085 from = force_reg (from_mode, from);
/* A hard register may not support the narrower mode; copy to a pseudo
   before taking the lowpart in that case.  */
1086 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1087 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1088 from = copy_to_reg (from);
1089 emit_move_insn (to, gen_lowpart (to_mode, from));
1093 /* Handle extension. */
1094 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1096 /* Convert directly if that works. */
1097 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1098 != CODE_FOR_nothing)
1100 emit_unop_insn (code, to, from, equiv_code);
1105 enum machine_mode intermediate;
1107 /* Search for a mode to convert via. */
1108 for (intermediate = from_mode; intermediate != VOIDmode;
1109 intermediate = GET_MODE_WIDER_MODE (intermediate))
1110 if (((can_extend_p (to_mode, intermediate, unsignedp)
1111 != CODE_FOR_nothing)
1112 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
/* NOTE(review): this passes raw modes to TRULY_NOOP_TRUNCATION, whereas
   the call at original lines 1076-1077 passes GET_MODE_BITSIZE values.
   The macro's documented arguments are bit counts, so this looks like a
   bug -- confirm against the target macro definition before fixing.  */
1113 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1114 && (can_extend_p (intermediate, from_mode, unsignedp)
1115 != CODE_FOR_nothing))
1117 convert_move (to, convert_to_mode (intermediate, from,
1118 unsignedp), unsignedp);
1122 /* No suitable intermediate mode. */
1127 /* Support special truncate insns for certain modes. */
1129 if (from_mode == DImode && to_mode == SImode)
1131 #ifdef HAVE_truncdisi2
1132 if (HAVE_truncdisi2)
1134 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1138 convert_move (to, force_reg (from_mode, from), unsignedp);
1142 if (from_mode == DImode && to_mode == HImode)
1144 #ifdef HAVE_truncdihi2
1145 if (HAVE_truncdihi2)
1147 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1151 convert_move (to, force_reg (from_mode, from), unsignedp);
1155 if (from_mode == DImode && to_mode == QImode)
1157 #ifdef HAVE_truncdiqi2
1158 if (HAVE_truncdiqi2)
1160 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 if (from_mode == SImode && to_mode == HImode)
1170 #ifdef HAVE_truncsihi2
1171 if (HAVE_truncsihi2)
1173 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 if (from_mode == SImode && to_mode == QImode)
1183 #ifdef HAVE_truncsiqi2
1184 if (HAVE_truncsiqi2)
1186 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 if (from_mode == HImode && to_mode == QImode)
1196 #ifdef HAVE_trunchiqi2
1197 if (HAVE_trunchiqi2)
1199 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1207 if (from_mode == TImode && to_mode == DImode)
1209 #ifdef HAVE_trunctidi2
1210 if (HAVE_trunctidi2)
1212 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1220 if (from_mode == TImode && to_mode == SImode)
1222 #ifdef HAVE_trunctisi2
1223 if (HAVE_trunctisi2)
1225 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1233 if (from_mode == TImode && to_mode == HImode)
1235 #ifdef HAVE_trunctihi2
1236 if (HAVE_trunctihi2)
1238 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1246 if (from_mode == TImode && to_mode == QImode)
1248 #ifdef HAVE_trunctiqi2
1249 if (HAVE_trunctiqi2)
1251 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1255 convert_move (to, force_reg (from_mode, from), unsignedp);
1259 /* Handle truncation of volatile memrefs, and so on;
1260 the things that couldn't be truncated directly,
1261 and for which there was no special instruction. */
1262 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1264 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1265 emit_move_insn (to, temp);
1269 /* Mode combination is not recognized. */
1273 /* Return an rtx for a value that would result
1274 from converting X to mode MODE.
1275 Both X and MODE may be floating, or both integer.
1276 UNSIGNEDP is nonzero if X is an unsigned value.
1277 This can be done by referring to a part of X in place
1278 or by copying to a new temporary with conversion.
1280 This function *must not* call protect_from_queue
1281 except when putting X into an insn (in which case convert_move does it). */
1284 convert_to_mode (mode, x, unsignedp)
1285 enum machine_mode mode;
/* Thin wrapper: VOIDmode tells convert_modes to take the old mode from
   X itself.  */
1289 return convert_modes (mode, VOIDmode, x, unsignedp);
1292 /* Return an rtx for a value that would result
1293 from converting X from mode OLDMODE to mode MODE.
1294 Both modes may be floating, or both integer.
1295 UNSIGNEDP is nonzero if X is an unsigned value.
1297 This can be done by referring to a part of X in place
1298 or by copying to a new temporary with conversion.
1300 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1302 This function *must not* call protect_from_queue
1303 except when putting X into an insn (in which case convert_move does it). */
1306 convert_modes (mode, oldmode, x, unsignedp)
1307 enum machine_mode mode, oldmode;
1313 /* If FROM is a SUBREG that indicates that we have already done at least
1314 the required extension, strip it. */
/* A promoted SUBREG carries a value already extended (with the right
   signedness) to at least MODE's width, so the low part is the answer.  */
1316 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1317 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1318 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1319 x = gen_lowpart (mode, x);
/* When X has a real (nonvoid) mode, trust it over the OLDMODE argument.  */
1321 if (GET_MODE (x) != VOIDmode)
1322 oldmode = GET_MODE (x);
/* Identical modes need no conversion at all.  */
1324 if (mode == oldmode)
1327 /* There is one case that we must handle specially: If we are converting
1328 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1329 we are to interpret the constant as unsigned, gen_lowpart will do
1330 the wrong if the constant appears negative. What we want to do is
1331 make the high-order word of the constant zero, not all ones. */
1333 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1334 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1335 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1336 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1338 /* We can do this with a gen_lowpart if both desired and current modes
1339 are integer, and this is either a constant integer, a register, or a
1340 non-volatile MEM. Except for the constant case where MODE is no
1341 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1343 if ((GET_CODE (x) == CONST_INT
1344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1345 || (GET_MODE_CLASS (mode) == MODE_INT
1346 && GET_MODE_CLASS (oldmode) == MODE_INT
1347 && (GET_CODE (x) == CONST_DOUBLE
1348 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1349 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1350 && direct_load[(int) mode])
1351 || (GET_CODE (x) == REG
1352 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1353 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1355 /* ?? If we don't know OLDMODE, we have to assume here that
1356 X does not need sign- or zero-extension. This may not be
1357 the case, but it's the best we can do. */
/* Widening a known-mode CONST_INT: normalize the constant so its value
   is correct for the wider MODE (CONST_INTs are not self-describing).  */
1358 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1359 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1361 HOST_WIDE_INT val = INTVAL (x);
1362 int width = GET_MODE_BITSIZE (oldmode);
1364 /* We must sign or zero-extend in this case. Start by
1365 zero-extending, then sign extend if we need to. */
1366 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1368 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1369 val |= (HOST_WIDE_INT) (-1) << width;
1371 return GEN_INT (val);
1374 return gen_lowpart (mode, x);
/* General case: allocate a fresh pseudo of MODE and let convert_move
   emit the actual conversion insns into it.  */
1377 temp = gen_reg_rtx (mode);
1378 convert_move (temp, x, unsignedp);
1382 /* Generate several move instructions to copy LEN bytes
1383 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1384 The caller must pass FROM and TO
1385 through protect_from_queue before calling.
1386 ALIGN (in bytes) is maximum alignment we can assume. */
1389 move_by_pieces (to, from, len, align)
1393 struct move_by_pieces data;
1394 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
/* MAX_SIZE bounds the widest mode considered; start just above MOVE_MAX.  */
1395 int max_size = MOVE_MAX + 1;
1398 data.to_addr = to_addr;
1399 data.from_addr = from_addr;
/* Record whether each address already uses auto-inc/dec addressing,
   in which case no explicit address arithmetic is needed.  */
1403 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1404 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1406 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1407 || GET_CODE (from_addr) == POST_INC
1408 || GET_CODE (from_addr) == POST_DEC);
1410 data.explicit_inc_from = 0;
1411 data.explicit_inc_to = 0;
/* A decrementing destination means we must copy high-to-low.  */
1413 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1414 if (data.reverse) data.offset = len;
1417 data.to_struct = MEM_IN_STRUCT_P (to);
1418 data.from_struct = MEM_IN_STRUCT_P (from);
1420 /* If copying requires more than two move insns,
1421 copy addresses to registers (to make displacements shorter)
1422 and use post-increment if available. */
1423 if (!(data.autinc_from && data.autinc_to)
1424 && move_by_pieces_ninsns (len, align) > 2)
1426 #ifdef HAVE_PRE_DECREMENT
/* Reverse copy: point past the end and pre-decrement downward.  */
1427 if (data.reverse && ! data.autinc_from)
1429 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1430 data.autinc_from = 1;
1431 data.explicit_inc_from = -1;
1434 #ifdef HAVE_POST_INCREMENT
1435 if (! data.autinc_from)
1437 data.from_addr = copy_addr_to_reg (from_addr);
1438 data.autinc_from = 1;
1439 data.explicit_inc_from = 1;
1442 if (!data.autinc_from && CONSTANT_P (from_addr))
1443 data.from_addr = copy_addr_to_reg (from_addr);
1444 #ifdef HAVE_PRE_DECREMENT
1445 if (data.reverse && ! data.autinc_to)
1447 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1449 data.explicit_inc_to = -1;
1452 #ifdef HAVE_POST_INCREMENT
1453 if (! data.reverse && ! data.autinc_to)
1455 data.to_addr = copy_addr_to_reg (to_addr);
1457 data.explicit_inc_to = 1;
1460 if (!data.autinc_to && CONSTANT_P (to_addr))
1461 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned accesses are cheap (or alignment is big enough),
   pretend the data is maximally aligned so wide modes can be used.  */
1464 if (! SLOW_UNALIGNED_ACCESS
1465 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1468 /* First move what we can in the largest integer mode, then go to
1469 successively smaller modes. */
1471 while (max_size > 1)
1473 enum machine_mode mode = VOIDmode, tmode;
1474 enum insn_code icode;
/* Scan integer modes from narrowest upward, remembering the widest
   one strictly narrower than MAX_SIZE.  */
1476 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1477 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1478 if (GET_MODE_SIZE (tmode) < max_size)
1481 if (mode == VOIDmode)
1484 icode = mov_optab->handlers[(int) mode].insn_code;
1485 if (icode != CODE_FOR_nothing
1486 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1487 GET_MODE_SIZE (mode)))
1488 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1490 max_size = GET_MODE_SIZE (mode);
1493 /* The code above should have handled everything. */
1498 /* Return number of insns required to move L bytes by pieces.
1499 ALIGN (in bytes) is maximum alignment we can assume. */
1502 move_by_pieces_ninsns (l, align)
1506 register int n_insns = 0;
1507 int max_size = MOVE_MAX + 1;
/* Same alignment relaxation as move_by_pieces, so the count matches
   the insns that function would actually emit.  */
1509 if (! SLOW_UNALIGNED_ACCESS
1510 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
/* Mirror of move_by_pieces' mode-selection loop, but only counting:
   for each usable mode, add how many full moves of that width fit.  */
1513 while (max_size > 1)
1515 enum machine_mode mode = VOIDmode, tmode;
1516 enum insn_code icode;
1518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1520 if (GET_MODE_SIZE (tmode) < max_size)
1523 if (mode == VOIDmode)
1526 icode = mov_optab->handlers[(int) mode].insn_code;
1527 if (icode != CODE_FOR_nothing
1528 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1529 GET_MODE_SIZE (mode)))
1530 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1532 max_size = GET_MODE_SIZE (mode);
1538 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1539 with move instructions for mode MODE. GENFUN is the gen_... function
1540 to make a move insn for that mode. DATA has all the other info. */
1543 move_by_pieces_1 (genfun, mode, data)
1545 enum machine_mode mode;
1546 struct move_by_pieces *data;
1548 register int size = GET_MODE_SIZE (mode);
1549 register rtx to1, from1;
/* Emit SIZE-byte moves while at least SIZE bytes remain.  */
1551 while (data->len >= size)
1553 if (data->reverse) data->offset -= size;
/* With auto-increment, the address register itself is the address;
   otherwise index off the block base by the running OFFSET.  */
1555 to1 = (data->autinc_to
1556 ? gen_rtx (MEM, mode, data->to_addr)
1557 : change_address (data->to, mode,
1558 plus_constant (data->to_addr, data->offset)));
1559 MEM_IN_STRUCT_P (to1) = data->to_struct;
1562 ? gen_rtx (MEM, mode, data->from_addr)
1563 : change_address (data->from, mode,
1564 plus_constant (data->from_addr, data->offset)));
1565 MEM_IN_STRUCT_P (from1) = data->from_struct;
1567 #ifdef HAVE_PRE_DECREMENT
/* explicit_inc_* < 0: we simulate pre-decrement with an explicit add
   of -SIZE before the move.  */
1568 if (data->explicit_inc_to < 0)
1569 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1570 if (data->explicit_inc_from < 0)
1571 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1574 emit_insn ((*genfun) (to1, from1));
1575 #ifdef HAVE_POST_INCREMENT
/* explicit_inc_* > 0: simulate post-increment with an add after.  */
1576 if (data->explicit_inc_to > 0)
1577 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1578 if (data->explicit_inc_from > 0)
1579 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1582 if (! data->reverse) data->offset += size;
1588 /* Emit code to move a block Y to a block X.
1589 This may be done with string-move instructions,
1590 with multiple scalar move instructions, or with a library call.
1592 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1594 SIZE is an rtx that says how long they are.
1595 ALIGN is the maximum alignment we can assume they have,
1596 measured in bytes. */
1599 emit_block_move (x, y, size, align)
/* Sanity checks: both operands must be BLKmode MEMs.  */
1604 if (GET_MODE (x) != BLKmode)
1607 if (GET_MODE (y) != BLKmode)
1610 x = protect_from_queue (x, 1);
1611 y = protect_from_queue (y, 0);
1612 size = protect_from_queue (size, 0);
1614 if (GET_CODE (x) != MEM)
1616 if (GET_CODE (y) != MEM)
/* Strategy 1: a small constant-size copy is cheapest as a sequence of
   scalar moves (MOVE_RATIO is the insn-count threshold).  */
1621 if (GET_CODE (size) == CONST_INT
1622 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1623 move_by_pieces (x, y, INTVAL (size), align);
/* Strategy 2: a target movstr pattern, if one accepts these operands.  */
1626 /* Try the most limited insn first, because there's no point
1627 including more than one in the machine description unless
1628 the more limited one has some advantage. */
1630 rtx opalign = GEN_INT (align);
1631 enum machine_mode mode;
1633 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1634 mode = GET_MODE_WIDER_MODE (mode))
1636 enum insn_code code = movstr_optab[(int) mode];
1638 if (code != CODE_FOR_nothing
1640 here because if SIZE is less than the mode mask, as it is
1641 returned by the macro, it will definitely be less than the
1642 actual mode mask. */
1643 && ((GET_CODE (size) == CONST_INT
1644 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1645 <= GET_MODE_MASK (mode)))
1646 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1647 && (insn_operand_predicate[(int) code][0] == 0
1648 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1649 && (insn_operand_predicate[(int) code][1] == 0
1650 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1651 && (insn_operand_predicate[(int) code][3] == 0
1652 || (*insn_operand_predicate[(int) code][3]) (opalign,
1656 rtx last = get_last_insn ();
1659 op2 = convert_to_mode (mode, size, 1);
1660 if (insn_operand_predicate[(int) code][2] != 0
1661 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1662 op2 = copy_to_mode_reg (mode, op2);
1664 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern generation failed after all: discard any insns it emitted
   and try the next wider mode.  */
1671 delete_insns_since (last);
/* Strategy 3 (fallback): call memcpy or bcopy.  Note the argument
   order differs: memcpy takes (dst, src), bcopy takes (src, dst).  */
1675 #ifdef TARGET_MEM_FUNCTIONS
1676 emit_library_call (memcpy_libfunc, 0,
1677 VOIDmode, 3, XEXP (x, 0), Pmode,
1679 convert_to_mode (TYPE_MODE (sizetype), size,
1680 TREE_UNSIGNED (sizetype)),
1681 TYPE_MODE (sizetype));
1683 emit_library_call (bcopy_libfunc, 0,
1684 VOIDmode, 3, XEXP (y, 0), Pmode,
1686 convert_to_mode (TYPE_MODE (sizetype), size,
1687 TREE_UNSIGNED (sizetype)),
1688 TYPE_MODE (sizetype));
1693 /* Copy all or part of a value X into registers starting at REGNO.
1694 The number of registers to be filled is NREGS. */
1697 move_block_to_reg (regno, x, nregs, mode)
1701 enum machine_mode mode;
/* A constant the target cannot load directly must go through memory.  */
1709 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1710 x = validize_mem (force_const_mem (mode, x));
1712 /* See if the machine can do this with a load multiple insn. */
1713 #ifdef HAVE_load_multiple
1714 if (HAVE_load_multiple)
/* Remember where we are so a failed attempt can be deleted again.  */
1716 last = get_last_insn ();
1717 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1725 delete_insns_since (last);
/* Fallback: one word-sized move per register.  */
1729 for (i = 0; i < nregs; i++)
1730 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1731 operand_subword_force (x, i, mode));
1734 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1735 The number of registers to be filled is NREGS. SIZE indicates the number
1736 of bytes in the object X. */
1740 move_block_from_reg (regno, x, nregs, size)
1749 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1750 to the left before storing to memory. */
1751 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1753 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Left-justify the sub-word value: shift it up so its bytes land at
   the low addresses of the word in memory.  */
1759 shift = expand_shift (LSHIFT_EXPR, word_mode,
1760 gen_rtx (REG, word_mode, regno),
1761 build_int_2 ((UNITS_PER_WORD - size)
1762 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1763 emit_move_insn (tem, shift);
1767 /* See if the machine can do this with a store multiple insn. */
1768 #ifdef HAVE_store_multiple
1769 if (HAVE_store_multiple)
1771 last = get_last_insn ();
1772 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* Pattern failed: remove any partial insns and fall through.  */
1780 delete_insns_since (last);
/* Fallback: store each register into the corresponding word of X.  */
1784 for (i = 0; i < nregs; i++)
1786 rtx tem = operand_subword (x, i, 1, BLKmode);
1791 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1795 /* Add a USE expression for REG to the (possibly empty) list pointed
1796 to by CALL_FUSAGE. REG must denote a hard register. */
1799 use_reg (call_fusage, reg)
1800 rtx *call_fusage, reg;
/* Only hard registers are meaningful in CALL_INSN_FUNCTION_USAGE.  */
1802 if (GET_CODE (reg) != REG
1803 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
/* Prepend a (use REG) node onto the EXPR_LIST.  */
1807 = gen_rtx (EXPR_LIST, VOIDmode,
1808 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1811 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1812 starting at REGNO. All of these registers must be hard registers. */
1815 use_regs (call_fusage, regno, nregs)
/* The whole range must stay within the hard registers.  */
1822 if (regno + nregs > FIRST_PSEUDO_REGISTER)
/* Each register is used in its natural (raw) mode.  */
1825 for (i = 0; i < nregs; i++)
1826 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1829 /* Write zeros through the storage of OBJECT.
1830 If OBJECT has BLKmode, SIZE is its length in bytes. */
1833 clear_storage (object, size)
/* BLKmode: zero via a library call; note memset takes (ptr, 0, size)
   while bzero takes (ptr, size).  */
1837 if (GET_MODE (object) == BLKmode)
1839 #ifdef TARGET_MEM_FUNCTIONS
1840 emit_library_call (memset_libfunc, 0,
1842 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
1843 GEN_INT (size), ptr_mode);
1845 emit_library_call (bzero_libfunc, 0,
1847 XEXP (object, 0), Pmode,
1848 GEN_INT (size), ptr_mode);
/* Scalar object: a plain move of zero suffices.  */
1852 emit_move_insn (object, const0_rtx);
1855 /* Generate code to copy Y into X.
1856 Both Y and X must have the same mode, except that
1857 Y can be a constant with VOIDmode.
1858 This mode cannot be BLKmode; use emit_block_move for that.
1860 Return the last instruction emitted. */
1863 emit_move_insn (x, y)
1866 enum machine_mode mode = GET_MODE (x);
1868 x = protect_from_queue (x, 1);
1869 y = protect_from_queue (y, 0);
/* Modes must agree (VOIDmode constants excepted); BLKmode is a caller bug.  */
1871 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot move directly are placed in memory.  */
1874 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1875 y = force_const_mem (mode, y);
1877 /* If X or Y are memory references, verify that their addresses are valid
1879 if (GET_CODE (x) == MEM
1880 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1881 && ! push_operand (x, GET_MODE (x)))
1883 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1884 x = change_address (x, VOIDmode, XEXP (x, 0));
1886 if (GET_CODE (y) == MEM
1887 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1889 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1890 y = change_address (y, VOIDmode, XEXP (y, 0));
1892 if (mode == BLKmode)
/* All fixing-up done; hand off to the low-level worker.  */
1895 return emit_move_insn_1 (x, y);
1898 /* Low level part of emit_move_insn.
1899 Called just like emit_move_insn, but assumes X and Y
1900 are basically valid. */
1903 emit_move_insn_1 (x, y)
1906 enum machine_mode mode = GET_MODE (x);
1907 enum machine_mode submode;
1908 enum mode_class class = GET_MODE_CLASS (mode);
/* Case 1: the target has a mov pattern for this mode -- use it.  */
1911 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1913 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1915 /* Expand complex moves by moving real part and imag part, if possible. */
/* Case 2: complex mode with no mov pattern, but the component mode
   (SUBMODE) has one -- move the two halves separately.  */
1916 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1917 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
1919 (class == MODE_COMPLEX_INT
1920 ? MODE_INT : MODE_FLOAT),
1922 && (mov_optab->handlers[(int) submode].insn_code
1923 != CODE_FOR_nothing))
1925 /* Don't split destination if it is a stack push. */
1926 int stack = push_operand (x, GET_MODE (x));
1929 /* If this is a stack, push the highpart first, so it
1930 will be in the argument order.
1932 In that case, change_address is used only to convert
1933 the mode, not to change the address. */
1936 /* Note that the real part always precedes the imag part in memory
1937 regardless of machine's endianness. */
1938 #ifdef STACK_GROWS_DOWNWARD
/* Downward stack: push imag then real, so real ends at lower address.  */
1939 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1940 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1941 gen_imagpart (submode, y)));
1942 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1943 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1944 gen_realpart (submode, y)));
/* Upward stack: opposite push order, same final memory layout.  */
1946 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1947 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1948 gen_realpart (submode, y)));
1949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1950 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1951 gen_imagpart (submode, y)));
/* Non-push destination: move real and imag parts in place.  */
1956 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1957 (gen_realpart (submode, x), gen_realpart (submode, y)));
1958 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1959 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
1962 return get_last_insn ();
1965 /* This will handle any multi-word mode that lacks a move_insn pattern.
1966 However, you will get better code if you define such patterns,
1967 even if they must turn into multiple assembler instructions. */
/* Case 3: multi-word mode with no pattern -- move word by word.  */
1968 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1973 #ifdef PUSH_ROUNDING
1975 /* If X is a push on the stack, do the push now and replace
1976 X with a reference to the stack pointer. */
1977 if (push_operand (x, GET_MODE (x)))
1979 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
1980 x = change_address (x, VOIDmode, stack_pointer_rtx);
/* Round the word count up so a trailing partial word is included.  */
1985 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1988 rtx xpart = operand_subword (x, i, 1, mode);
1989 rtx ypart = operand_subword (y, i, 1, mode);
1991 /* If we can't get a part of Y, put Y into memory if it is a
1992 constant. Otherwise, force it into a register. If we still
1993 can't get a part of Y, abort. */
1994 if (ypart == 0 && CONSTANT_P (y))
1996 y = force_const_mem (mode, y);
1997 ypart = operand_subword (y, i, 1, mode);
1999 else if (ypart == 0)
2000 ypart = operand_subword_force (y, i, mode);
2002 if (xpart == 0 || ypart == 0)
/* Recurse: each word-sized piece goes back through emit_move_insn.  */
2005 last_insn = emit_move_insn (xpart, ypart);
2014 /* Pushing data onto the stack. */
2016 /* Push a block of length SIZE (perhaps variable)
2017 and return an rtx to address the beginning of the block.
2018 Note that it is not possible for the value returned to be a QUEUED.
2019 The value may be virtual_outgoing_args_rtx.
2021 EXTRA is the number of bytes of padding to push in addition to SIZE.
2022 BELOW nonzero means this padding comes at low addresses;
2023 otherwise, the padding comes at high addresses. */
2026 push_block (size, extra, below)
/* SIZE arrives in ptr_mode; stack arithmetic below is done in Pmode.  */
2032 size = convert_modes (Pmode, ptr_mode, size, 1);
/* Grow the stack by SIZE+EXTRA; pick the cheapest way to express it.  */
2033 if (CONSTANT_P (size))
2034 anti_adjust_stack (plus_constant (size, extra));
2035 else if (GET_CODE (size) == REG && extra == 0)
2036 anti_adjust_stack (size);
2039 rtx temp = copy_to_mode_reg (Pmode, size);
2041 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2042 temp, 0, OPTAB_LIB_WIDEN);
2043 anti_adjust_stack (temp);
2046 #ifdef STACK_GROWS_DOWNWARD
/* Downward growth: the new block starts at the (new) outgoing-args
   position, plus EXTRA when padding sits below the data.  */
2047 temp = virtual_outgoing_args_rtx;
2048 if (extra != 0 && below)
2049 temp = plus_constant (temp, extra);
/* Upward growth: the block starts SIZE (and possibly EXTRA) bytes
   before the outgoing-args position.  */
2051 if (GET_CODE (size) == CONST_INT)
2052 temp = plus_constant (virtual_outgoing_args_rtx,
2053 - INTVAL (size) - (below ? 0 : extra));
2054 else if (extra != 0 && !below)
2055 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2056 negate_rtx (Pmode, plus_constant (size, extra)));
2058 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2059 negate_rtx (Pmode, size));
2062 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): the jump in embedded line numbers (2062 -> 2068) suggests
   the line below belongs to a separate helper (presumably gen_push_operand,
   returning a PRE_DEC/PRE_INC stack-pointer address) whose header is not
   visible in this excerpt -- confirm against the full file.  */
2068 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2071 /* Generate code to push X onto the stack, assuming it has mode MODE and
2073 MODE is redundant except when X is a CONST_INT (since they don't
2075 SIZE is an rtx for the size of data to be copied (in bytes),
2076 needed only if X is BLKmode.
2078 ALIGN (in bytes) is maximum alignment we can assume.
2080 If PARTIAL and REG are both nonzero, then copy that many of the first
2081 words of X into registers starting with REG, and push the rest of X.
2082 The amount of space pushed is decreased by PARTIAL words,
2083 rounded *down* to a multiple of PARM_BOUNDARY.
2084 REG must be a hard register in this case.
2085 If REG is zero but PARTIAL is not, take all the other actions for an
2086 argument partially in registers, but do not actually load any
2089 EXTRA is the amount in bytes of extra space to leave next to this arg.
2090 This is ignored if an argument block has already been allocated.
2092 On a machine that lacks real push insns, ARGS_ADDR is the address of
2093 the bottom of the argument block for this call. We use indexing off there
2094 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2095 argument block has not been preallocated.
2097 ARGS_SO_FAR is the size of args previously pushed for this call. */
2100 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2101 args_addr, args_so_far)
2103 enum machine_mode mode;
2114 enum direction stack_direction
2115 #ifdef STACK_GROWS_DOWNWARD
2121 /* Decide where to pad the argument: `downward' for below,
2122 `upward' for above, or `none' for don't pad it.
2123 Default is below for small data on big-endian machines; else above. */
2124 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2126 /* Invert direction if stack is post-update. */
2127 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2128 if (where_pad != none)
2129 where_pad = (where_pad == downward ? upward : downward);
2131 xinner = x = protect_from_queue (x, 0);
/* --- BLKmode argument: copy a block of memory onto the stack. --- */
2133 if (mode == BLKmode)
2135 /* Copy a block into the stack, entirely or partially. */
2138 int used = partial * UNITS_PER_WORD;
2139 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2147 /* USED is now the # of bytes we need not copy to the stack
2148 because registers will take care of them. */
2151 xinner = change_address (xinner, BLKmode,
2152 plus_constant (XEXP (xinner, 0), used));
2154 /* If the partial register-part of the arg counts in its stack size,
2155 skip the part of stack space corresponding to the registers.
2156 Otherwise, start copying to the beginning of the stack space,
2157 by setting SKIP to 0. */
2158 #ifndef REG_PARM_STACK_SPACE
2164 #ifdef PUSH_ROUNDING
2165 /* Do it with several push insns if that doesn't take lots of insns
2166 and if there is no difficulty with push insns that skip bytes
2167 on the stack for alignment purposes. */
2169 && GET_CODE (size) == CONST_INT
2171 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2173 /* Here we avoid the case of a structure whose weak alignment
2174 forces many pushes of a small amount of data,
2175 and such small pushes do rounding that causes trouble. */
2176 && ((! SLOW_UNALIGNED_ACCESS)
2177 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2178 || PUSH_ROUNDING (align) == align)
2179 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2181 /* Push padding now if padding above and stack grows down,
2182 or if padding below and stack grows up.
2183 But if space already allocated, this has already been done. */
2184 if (extra && args_addr == 0
2185 && where_pad != none && where_pad != stack_direction)
2186 anti_adjust_stack (GEN_INT (extra));
/* Push the block piecemeal through a push-operand MEM.  */
2188 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2189 INTVAL (size) - used, align);
2192 #endif /* PUSH_ROUNDING */
2194 /* Otherwise make space on the stack and copy the data
2195 to the address of that space. */
2197 /* Deduct words put into registers from the size we must copy. */
2200 if (GET_CODE (size) == CONST_INT)
2201 size = GEN_INT (INTVAL (size) - used);
2203 size = expand_binop (GET_MODE (size), sub_optab, size,
2204 GEN_INT (used), NULL_RTX, 0,
2208 /* Get the address of the stack space.
2209 In this case, we do not deal with EXTRA separately.
2210 A single stack adjust will do. */
2213 temp = push_block (size, extra, where_pad == downward);
/* Preallocated argument block: compute the destination address by
   indexing off ARGS_ADDR instead of adjusting the stack.  */
2216 else if (GET_CODE (args_so_far) == CONST_INT)
2217 temp = memory_address (BLKmode,
2218 plus_constant (args_addr,
2219 skip + INTVAL (args_so_far)));
2221 temp = memory_address (BLKmode,
2222 plus_constant (gen_rtx (PLUS, Pmode,
2223 args_addr, args_so_far),
2226 /* TEMP is the address of the block. Copy the data there. */
2227 if (GET_CODE (size) == CONST_INT
2228 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2231 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2232 INTVAL (size), align);
2235 /* Try the most limited insn first, because there's no point
2236 including more than one in the machine description unless
2237 the more limited one has some advantage. */
2238 #ifdef HAVE_movstrqi
/* QImode movstr: usable only when SIZE fits in a QImode operand.  */
2240 && GET_CODE (size) == CONST_INT
2241 && ((unsigned) INTVAL (size)
2242 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2244 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2245 xinner, size, GEN_INT (align));
2253 #ifdef HAVE_movstrhi
2255 && GET_CODE (size) == CONST_INT
2256 && ((unsigned) INTVAL (size)
2257 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2259 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2260 xinner, size, GEN_INT (align));
2268 #ifdef HAVE_movstrsi
2271 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2272 xinner, size, GEN_INT (align));
2280 #ifdef HAVE_movstrdi
2283 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2284 xinner, size, GEN_INT (align));
2293 #ifndef ACCUMULATE_OUTGOING_ARGS
2294 /* If the source is referenced relative to the stack pointer,
2295 copy it to another register to stabilize it. We do not need
2296 to do this if we know that we won't be changing sp. */
2298 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2299 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2300 temp = copy_to_reg (temp);
2303 /* Make inhibit_defer_pop nonzero around the library call
2304 to force it to pop the bcopy-arguments right away. */
2306 #ifdef TARGET_MEM_FUNCTIONS
2307 emit_library_call (memcpy_libfunc, 0,
2308 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2309 convert_to_mode (TYPE_MODE (sizetype),
2310 size, TREE_UNSIGNED (sizetype)),
2311 TYPE_MODE (sizetype));
2313 emit_library_call (bcopy_libfunc, 0,
2314 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2315 convert_to_mode (TYPE_MODE (sizetype),
2316 size, TREE_UNSIGNED (sizetype)),
2317 TYPE_MODE (sizetype));
/* --- Scalar argument partly passed in registers. --- */
2322 else if (partial > 0)
2324 /* Scalar partly in registers. */
2326 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2329 /* # words of start of argument
2330 that we must make space for but need not store. */
2331 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2332 int args_offset = INTVAL (args_so_far);
2335 /* Push padding now if padding above and stack grows down,
2336 or if padding below and stack grows up.
2337 But if space already allocated, this has already been done. */
2338 if (extra && args_addr == 0
2339 && where_pad != none && where_pad != stack_direction)
2340 anti_adjust_stack (GEN_INT (extra));
2342 /* If we make space by pushing it, we might as well push
2343 the real data. Otherwise, we can leave OFFSET nonzero
2344 and leave the space uninitialized. */
2348 /* Now NOT_STACK gets the number of words that we don't need to
2349 allocate on the stack. */
2350 not_stack = partial - offset;
2352 /* If the partial register-part of the arg counts in its stack size,
2353 skip the part of stack space corresponding to the registers.
2354 Otherwise, start copying to the beginning of the stack space,
2355 by setting SKIP to 0. */
2356 #ifndef REG_PARM_STACK_SPACE
2362 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2363 x = validize_mem (force_const_mem (mode, x));
2365 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2366 SUBREGs of such registers are not allowed. */
2367 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2368 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2369 x = copy_to_reg (x);
2371 /* Loop over all the words allocated on the stack for this arg. */
2372 /* We can do it by words, because any scalar bigger than a word
2373 has a size a multiple of a word. */
2374 #ifndef PUSH_ARGS_REVERSED
2375 for (i = not_stack; i < size; i++)
2377 for (i = size - 1; i >= not_stack; i--)
2379 if (i >= not_stack + offset)
/* Recursive call: each stack word of X is itself pushed as a
   word_mode argument at its computed offset.  */
2380 emit_push_insn (operand_subword_force (x, i, mode),
2381 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2383 GEN_INT (args_offset + ((i - not_stack + skip)
2384 * UNITS_PER_WORD)));
/* --- Simple scalar argument: one push or one indexed store. --- */
2390 /* Push padding now if padding above and stack grows down,
2391 or if padding below and stack grows up.
2392 But if space already allocated, this has already been done. */
2393 if (extra && args_addr == 0
2394 && where_pad != none && where_pad != stack_direction)
2395 anti_adjust_stack (GEN_INT (extra));
2397 #ifdef PUSH_ROUNDING
2399 addr = gen_push_operand ();
2402 if (GET_CODE (args_so_far) == CONST_INT)
2404 = memory_address (mode,
2405 plus_constant (args_addr, INTVAL (args_so_far)));
2407 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2410 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2414 /* If part should go in registers, copy that part
2415 into the appropriate registers. Do this now, at the end,
2416 since mem-to-mem copies above may do function calls. */
2417 if (partial > 0 && reg != 0)
2418 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Trailing padding (same side as stack growth) is adjusted last.  */
2420 if (extra && args_addr == 0 && where_pad == stack_direction)
2421 anti_adjust_stack (GEN_INT (extra));
2424 /* Expand an assignment that stores the value of FROM into TO.
2425 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2426 (This may contain a QUEUED rtx;
2427 if the value is constant, this rtx is a constant.)
2428 Otherwise, the returned value is NULL_RTX.
2430 SUGGEST_REG is no longer actually used.
2431 It used to mean, copy the value through a register
2432 and return that register, if that is possible.
2433 We now use WANT_VALUE to decide whether to do this. */
2436 expand_assignment (to, from, want_value, suggest_reg)
2441 register rtx to_rtx = 0;
2444 /* Don't crash if the lhs of the assignment was erroneous. */
2446 if (TREE_CODE (to) == ERROR_MARK)
2448 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2449 return want_value ? result : NULL_RTX;
2452 if (output_bytecode)
2454 tree dest_innermost;
2456 bc_expand_expr (from);
2457 bc_emit_instruction (duplicate);
2459 dest_innermost = bc_expand_address (to);
2461 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2462 take care of it here. */
2464 bc_store_memory (TREE_TYPE (to), dest_innermost);
2468 /* Assignment of a structure component needs special treatment
2469 if the structure component's rtx is not simply a MEM.
2470 Assignment of an array element at a constant index, and assignment of
2471 an array element in an unaligned packed structure field, has the same
2474 if (TREE_CODE (to) == COMPONENT_REF
2475 || TREE_CODE (to) == BIT_FIELD_REF
2476 || (TREE_CODE (to) == ARRAY_REF
2477 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2479 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2481 enum machine_mode mode1;
2491 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2492 &mode1, &unsignedp, &volatilep);
2494 /* If we are going to use store_bit_field and extract_bit_field,
2495 make sure to_rtx will be safe for multiple use. */
2497 if (mode1 == VOIDmode && want_value)
2498 tem = stabilize_reference (tem);
2500 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2501 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2504 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2506 if (GET_CODE (to_rtx) != MEM)
2508 to_rtx = change_address (to_rtx, VOIDmode,
2509 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2510 force_reg (ptr_mode, offset_rtx)));
2511 /* If we have a variable offset, the known alignment
2512 is only that of the innermost structure containing the field.
2513 (Actually, we could sometimes do better by using the
2514 align of an element of the innermost array, but no need.) */
2515 if (TREE_CODE (to) == COMPONENT_REF
2516 || TREE_CODE (to) == BIT_FIELD_REF)
2518 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2522 if (GET_CODE (to_rtx) == MEM)
2524 /* When the offset is zero, to_rtx is the address of the
2525 structure we are storing into, and hence may be shared.
2526 We must make a new MEM before setting the volatile bit. */
2528 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2529 MEM_VOLATILE_P (to_rtx) = 1;
2531 #if 0 /* This was turned off because, when a field is volatile
2532 in an object which is not volatile, the object may be in a register,
2533 and then we would abort over here. */
2539 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2541 /* Spurious cast makes HPUX compiler happy. */
2542 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2545 /* Required alignment of containing datum. */
2547 int_size_in_bytes (TREE_TYPE (tem)));
2548 preserve_temp_slots (result);
2552 /* If the value is meaningful, convert RESULT to the proper mode.
2553 Otherwise, return nothing. */
2554 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2555 TYPE_MODE (TREE_TYPE (from)),
2557 TREE_UNSIGNED (TREE_TYPE (to)))
2561 /* If the rhs is a function call and its value is not an aggregate,
2562 call the function before we start to compute the lhs.
2563 This is needed for correct code for cases such as
2564 val = setjmp (buf) on machines where reference to val
2565 requires loading up part of an address in a separate insn.
2567 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2568 a promoted variable where the zero- or sign- extension needs to be done.
2569 Handling this in the normal way is safe because no computation is done
2571 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2572 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2577 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2579 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2581 if (GET_MODE (to_rtx) == BLKmode)
2583 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2584 emit_block_move (to_rtx, value, expr_size (from), align);
2587 emit_move_insn (to_rtx, value);
2588 preserve_temp_slots (to_rtx);
2591 return want_value ? to_rtx : NULL_RTX;
2594 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2595 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2598 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2600 /* Don't move directly into a return register. */
2601 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2606 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2607 emit_move_insn (to_rtx, temp);
2608 preserve_temp_slots (to_rtx);
2611 return want_value ? to_rtx : NULL_RTX;
2614 /* In case we are returning the contents of an object which overlaps
2615 the place the value is being stored, use a safe function when copying
2616 a value through a pointer into a structure value return block. */
2617 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2618 && current_function_returns_struct
2619 && !current_function_returns_pcc_struct)
2624 size = expr_size (from);
2625 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2627 #ifdef TARGET_MEM_FUNCTIONS
2628 emit_library_call (memcpy_libfunc, 0,
2629 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2630 XEXP (from_rtx, 0), Pmode,
2631 convert_to_mode (TYPE_MODE (sizetype),
2632 size, TREE_UNSIGNED (sizetype)),
2633 TYPE_MODE (sizetype));
2635 emit_library_call (bcopy_libfunc, 0,
2636 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2637 XEXP (to_rtx, 0), Pmode,
2638 convert_to_mode (TYPE_MODE (sizetype),
2639 size, TREE_UNSIGNED (sizetype)),
2640 TYPE_MODE (sizetype));
2643 preserve_temp_slots (to_rtx);
2646 return want_value ? to_rtx : NULL_RTX;
2649 /* Compute FROM and store the value in the rtx we got. */
2652 result = store_expr (from, to_rtx, want_value);
2653 preserve_temp_slots (result);
2656 return want_value ? result : NULL_RTX;
2659 /* Generate code for computing expression EXP,
2660 and storing the value into TARGET.
2661 TARGET may contain a QUEUED rtx.
2663 If WANT_VALUE is nonzero, return a copy of the value
2664 not in TARGET, so that we can be sure to use the proper
2665 value in a containing expression even if TARGET has something
2666 else stored in it. If possible, we copy the value through a pseudo
2667 and return that pseudo. Or, if the value is constant, we try to
2668 return the constant. In some cases, we return a pseudo
2669 copied *from* TARGET.
2671 If the mode is BLKmode then we may return TARGET itself.
2672 It turns out that in BLKmode it doesn't cause a problem,
2673 because C has no operators that could combine two different
2674 assignments into the same BLKmode object with different values
2675 with no sequence point. Will other languages need this to
2678 If WANT_VALUE is 0, we return NULL, to make sure
2679 to catch quickly any cases where the caller uses the value
2680 and fails to set WANT_VALUE. */
2683 store_expr (exp, target, want_value)
2685 register rtx target;
2689 int dont_return_target = 0;
/* NOTE(review): this is a line-numbered listing with elided ranges (gaps in
   the embedded numbering); the visible code is kept verbatim below.  */
2691 if (TREE_CODE (exp) == COMPOUND_EXPR)
2693 /* Perform first part of compound expression, then assign from second
2695 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2697 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2699 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2701 /* For conditional expression, get safe form of the target. Then
2702 test the condition, doing the appropriate assignment on either
2703 side. This avoids the creation of unnecessary temporaries.
2704 For non-BLKmode, it is more efficient not to do this. */
2706 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2709 target = protect_from_queue (target, 1);
2712 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2713 store_expr (TREE_OPERAND (exp, 1), target, 0);
2715 emit_jump_insn (gen_jump (lab2));
2718 store_expr (TREE_OPERAND (exp, 2), target, 0);
2722 return want_value ? target : NULL_RTX;
2724 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2725 && GET_MODE (target) != BLKmode)
2726 /* If target is in memory and caller wants value in a register instead,
2727 arrange that. Pass TARGET as target for expand_expr so that,
2728 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2729 We know expand_expr will not use the target in that case.
2730 Don't do this if TARGET is volatile because we are supposed
2731 to write it and then read it. */
2733 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2734 GET_MODE (target), 0);
2735 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2736 temp = copy_to_reg (temp);
2737 dont_return_target = 1;
2739 else if (queued_subexp_p (target))
2740 /* If target contains a postincrement, let's not risk
2741 using it as the place to generate the rhs. */
2743 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2745 /* Expand EXP into a new pseudo. */
2746 temp = gen_reg_rtx (GET_MODE (target));
2747 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2750 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2752 /* If target is volatile, ANSI requires accessing the value
2753 *from* the target, if it is accessed. So make that happen.
2754 In no case return the target itself. */
2755 if (! MEM_VOLATILE_P (target) && want_value)
2756 dont_return_target = 1;
2758 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2759 /* If this is a scalar in a register that is stored in a wider mode
2760 than the declared mode, compute the result into its declared mode
2761 and then convert to the wider mode. Our value is the computed
2764 /* If we don't want a value, we can do the conversion inside EXP,
2765 which will often result in some optimizations. Do the conversion
2766 in two steps: first change the signedness, if needed, then
2770 if (TREE_UNSIGNED (TREE_TYPE (exp))
2771 != SUBREG_PROMOTED_UNSIGNED_P (target))
2774 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2778 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2779 SUBREG_PROMOTED_UNSIGNED_P (target)),
2783 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2785 /* If TEMP is a volatile MEM and we want a result value, make
2786 the access now so it gets done only once. */
2787 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2788 temp = copy_to_reg (temp);
2790 /* If TEMP is a VOIDmode constant, use convert_modes to make
2791 sure that we properly convert it. */
2792 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2793 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2794 TYPE_MODE (TREE_TYPE (exp)), temp,
2795 SUBREG_PROMOTED_UNSIGNED_P (target));
2797 convert_move (SUBREG_REG (target), temp,
2798 SUBREG_PROMOTED_UNSIGNED_P (target));
2799 return want_value ? temp : NULL_RTX;
/* General case: expand EXP with TARGET as the suggested target.  */
2803 temp = expand_expr (exp, target, GET_MODE (target), 0);
2804 /* Return TARGET if it's a specified hardware register.
2805 If TARGET is a volatile mem ref, either return TARGET
2806 or return a reg copied *from* TARGET; ANSI requires this.
2808 Otherwise, if TEMP is not TARGET, return TEMP
2809 if it is constant (for efficiency),
2810 or if we really want the correct value. */
2811 if (!(target && GET_CODE (target) == REG
2812 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2813 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2815 && (CONSTANT_P (temp) || want_value))
2816 dont_return_target = 1;
2819 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2820 the same as that of TARGET, adjust the constant. This is needed, for
2821 example, in case it is a CONST_DOUBLE and we want only a word-sized
2823 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2824 && TREE_CODE (exp) != ERROR_MARK
2825 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2826 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2827 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2829 /* If value was not generated in the target, store it there.
2830 Convert the value to TARGET's type first if necessary. */
2832 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2834 target = protect_from_queue (target, 1);
2835 if (GET_MODE (temp) != GET_MODE (target)
2836 && GET_MODE (temp) != VOIDmode)
2838 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2839 if (dont_return_target)
2841 /* In this case, we will return TEMP,
2842 so make sure it has the proper mode.
2843 But don't forget to store the value into TARGET. */
2844 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2845 emit_move_insn (target, temp);
2848 convert_move (target, temp, unsignedp);
2851 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2853 /* Handle copying a string constant into an array.
2854 The string constant may be shorter than the array.
2855 So copy just the string's actual length, and clear the rest. */
2859 /* Get the size of the data type of the string,
2860 which is actually the size of the target. */
2861 size = expr_size (exp);
2862 if (GET_CODE (size) == CONST_INT
2863 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2864 emit_block_move (target, temp, size,
2865 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2868 /* Compute the size of the data to copy from the string. */
2870 = size_binop (MIN_EXPR,
2871 make_tree (sizetype, size),
2873 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2874 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2878 /* Copy that much. */
2879 emit_block_move (target, temp, copy_size_rtx,
2880 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2882 /* Figure out how much is left in TARGET that we have to clear.
2883 Do all calculations in ptr_mode. */
2885 addr = XEXP (target, 0);
2886 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2888 if (GET_CODE (copy_size_rtx) == CONST_INT)
2890 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2891 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2895 addr = force_reg (ptr_mode, addr);
2896 addr = expand_binop (ptr_mode, add_optab, addr,
2897 copy_size_rtx, NULL_RTX, 0,
2900 size = expand_binop (ptr_mode, sub_optab, size,
2901 copy_size_rtx, NULL_RTX, 0,
2904 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2905 GET_MODE (size), 0, 0);
2906 label = gen_label_rtx ();
2907 emit_jump_insn (gen_blt (label));
2910 if (size != const0_rtx)
2912 #ifdef TARGET_MEM_FUNCTIONS
2913 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2914 Pmode, const0_rtx, Pmode, size, ptr_mode);
2916 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2917 addr, Pmode, size, ptr_mode);
2925 else if (GET_MODE (temp) == BLKmode)
2926 emit_block_move (target, temp, expr_size (exp),
2927 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2929 emit_move_insn (target, temp);
2932 /* If we don't want a value, return NULL_RTX. */
2936 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2937 ??? The latter test doesn't seem to make sense. */
2938 else if (dont_return_target && GET_CODE (temp) != MEM)
2941 /* Return TARGET itself if it is a hard register. */
2942 else if (want_value && GET_MODE (target) != BLKmode
2943 && ! (GET_CODE (target) == REG
2944 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2945 return copy_to_reg (target);
2951 /* Store the value of constructor EXP into the rtx TARGET.
2952 TARGET is either a REG or a MEM. */
2955 store_constructor (exp, target)
2959 tree type = TREE_TYPE (exp);
/* NOTE(review): numbered listing with elided lines; visible code kept
   verbatim.  Handles RECORD/UNION, ARRAY and SET constructors in turn.  */
2961 /* We know our target cannot conflict, since safe_from_p has been called. */
2963 /* Don't try copying piece by piece into a hard register
2964 since that is vulnerable to being clobbered by EXP.
2965 Instead, construct in a pseudo register and then copy it all. */
2966 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2968 rtx temp = gen_reg_rtx (GET_MODE (target));
2969 store_constructor (exp, temp);
2970 emit_move_insn (target, temp);
2975 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2976 || TREE_CODE (type) == QUAL_UNION_TYPE)
2980 /* Inform later passes that the whole union value is dead. */
2981 if (TREE_CODE (type) == UNION_TYPE
2982 || TREE_CODE (type) == QUAL_UNION_TYPE)
2983 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2985 /* If we are building a static constructor into a register,
2986 set the initial value as zero so we can fold the value into
2987 a constant. But if more than one register is involved,
2988 this probably loses. */
2989 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
2990 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
2991 emit_move_insn (target, const0_rtx);
2993 /* If the constructor has fewer fields than the structure,
2994 clear the whole structure first. */
2995 else if (list_length (CONSTRUCTOR_ELTS (exp))
2996 != list_length (TYPE_FIELDS (type)))
2997 clear_storage (target, int_size_in_bytes (type));
2999 /* Inform later passes that the old value is dead. */
3000 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3002 /* Store each element of the constructor into
3003 the corresponding field of TARGET. */
3005 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3007 register tree field = TREE_PURPOSE (elt);
3008 register enum machine_mode mode;
3012 tree pos, constant = 0, offset = 0;
3013 rtx to_rtx = target;
3015 /* Just ignore missing fields.
3016 We cleared the whole structure, above,
3017 if any fields are missing. */
3021 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3022 unsignedp = TREE_UNSIGNED (field);
3023 mode = DECL_MODE (field);
3024 if (DECL_BIT_FIELD (field))
3027 pos = DECL_FIELD_BITPOS (field);
3028 if (TREE_CODE (pos) == INTEGER_CST)
3030 else if (TREE_CODE (pos) == PLUS_EXPR
3031 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3032 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3037 bitpos = TREE_INT_CST_LOW (constant);
3043 if (contains_placeholder_p (offset))
3044 offset = build (WITH_RECORD_EXPR, sizetype,
3047 offset = size_binop (FLOOR_DIV_EXPR, offset,
3048 size_int (BITS_PER_UNIT));
3050 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3051 if (GET_CODE (to_rtx) != MEM)
3055 = change_address (to_rtx, VOIDmode,
3056 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3057 force_reg (ptr_mode, offset_rtx)));
3060 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3061 /* The alignment of TARGET is
3062 at least what its type requires. */
3064 TYPE_ALIGN (type) / BITS_PER_UNIT,
3065 int_size_in_bytes (type));
3068 else if (TREE_CODE (type) == ARRAY_TYPE)
3072 tree domain = TYPE_DOMAIN (type);
3073 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3074 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3075 tree elttype = TREE_TYPE (type);
3077 /* If the constructor has fewer fields than the structure,
3078 clear the whole structure first. Similarly if this is a
3079 static constructor of a non-BLKmode object. */
3081 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3082 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3083 clear_storage (target, int_size_in_bytes (type));
3085 /* Inform later passes that the old value is dead. */
3086 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3088 /* Store each element of the constructor into
3089 the corresponding element of TARGET, determined
3090 by counting the elements. */
3091 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3093 elt = TREE_CHAIN (elt), i++)
3095 register enum machine_mode mode;
3099 tree index = TREE_PURPOSE (elt);
3100 rtx xtarget = target;
3102 mode = TYPE_MODE (elttype);
3103 bitsize = GET_MODE_BITSIZE (mode);
3104 unsignedp = TREE_UNSIGNED (elttype);
3106 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3107 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3109 rtx pos_rtx, addr, xtarget;
3113 index = size_int (i);
3115 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3116 size_int (BITS_PER_UNIT));
3117 position = size_binop (MULT_EXPR, index, position);
3118 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3119 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3120 xtarget = change_address (target, mode, addr);
3121 store_expr (TREE_VALUE (elt), xtarget, 0);
3126 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3127 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3129 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3131 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3132 /* The alignment of TARGET is
3133 at least what its type requires. */
3135 TYPE_ALIGN (type) / BITS_PER_UNIT,
3136 int_size_in_bytes (type));
3140 /* set constructor assignments */
3141 else if (TREE_CODE (type) == SET_TYPE)
3144 rtx xtarget = XEXP (target, 0);
3145 int set_word_size = TYPE_ALIGN (type);
3146 int nbytes = int_size_in_bytes (type);
3147 tree non_const_elements;
3148 int need_to_clear_first;
3149 tree domain = TYPE_DOMAIN (type);
3150 tree domain_min, domain_max, bitlength;
3152 /* The default implementation strategy is to extract the constant
3153 parts of the constructor, use that to initialize the target,
3154 and then "or" in whatever non-constant ranges we need in addition.
3156 If a large set is all zero or all ones, it is
3157 probably better to set it using memset (if available) or bzero.
3158 Also, if a large set has just a single range, it may also be
3159 better to first clear the set (using
3160 bzero/memset), and set the bits we want. */
3162 /* Check for all zeros. */
3163 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3165 clear_storage (target, nbytes);
3172 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3173 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3174 bitlength = size_binop (PLUS_EXPR,
3175 size_binop (MINUS_EXPR, domain_max, domain_min),
3178 /* Check for range all ones, or at most a single range.
3179 (This optimization is only a win for big sets.) */
3180 if (GET_MODE (target) == BLKmode && nbytes > 16
3181 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3183 need_to_clear_first = 1;
3184 non_const_elements = CONSTRUCTOR_ELTS (exp);
3188 int nbits = nbytes * BITS_PER_UNIT;
3189 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3190 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3191 char *bit_buffer = (char*) alloca (nbits);
3192 HOST_WIDE_INT word = 0;
3195 int offset = 0; /* In bytes from beginning of set. */
3196 non_const_elements = get_set_constructor_bits (exp,
3200 if (bit_buffer[ibit])
3202 if (BYTES_BIG_ENDIAN)
3203 word |= (1 << (set_word_size - 1 - bit_pos));
3205 word |= 1 << bit_pos;
3208 if (bit_pos >= set_word_size || ibit == nbits)
3210 rtx datum = GEN_INT (word);
3212 /* The assumption here is that it is safe to use XEXP if
3213 the set is multi-word, but not if it's single-word. */
3214 if (GET_CODE (target) == MEM)
3215 to_rtx = change_address (target, mode,
3216 plus_constant (XEXP (target, 0),
3218 else if (offset == 0)
3222 emit_move_insn (to_rtx, datum);
3227 offset += set_word_size / BITS_PER_UNIT;
3230 need_to_clear_first = 0;
/* Now OR in the non-constant ranges, one tuple at a time.  */
3233 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3235 /* start of range of element or NULL */
3236 tree startbit = TREE_PURPOSE (elt);
3237 /* end of range of element, or element value */
3238 tree endbit = TREE_VALUE (elt);
3239 HOST_WIDE_INT startb, endb;
3240 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3242 bitlength_rtx = expand_expr (bitlength,
3243 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3245 /* handle non-range tuple element like [ expr ] */
3246 if (startbit == NULL_TREE)
3248 startbit = save_expr (endbit);
3251 startbit = convert (sizetype, startbit);
3252 endbit = convert (sizetype, endbit);
3253 if (! integer_zerop (domain_min))
3255 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3256 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3258 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3259 EXPAND_CONST_ADDRESS);
3260 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3261 EXPAND_CONST_ADDRESS);
3265 targetx = assign_stack_temp (GET_MODE (target),
3266 GET_MODE_SIZE (GET_MODE (target)),
3268 emit_move_insn (targetx, target);
3270 else if (GET_CODE (target) == MEM)
3275 #ifdef TARGET_MEM_FUNCTIONS
3276 /* Optimization: If startbit and endbit are
3277 constants divisible by BITS_PER_UNIT,
3278 call memset instead. */
3279 if (TREE_CODE (startbit) == INTEGER_CST
3280 && TREE_CODE (endbit) == INTEGER_CST
3281 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3282 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3285 if (need_to_clear_first
3286 && endb - startb != nbytes * BITS_PER_UNIT)
3287 clear_storage (target, nbytes);
3288 need_to_clear_first = 0;
3289 emit_library_call (memset_libfunc, 0,
3291 plus_constant (XEXP (targetx, 0), startb),
3294 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3300 if (need_to_clear_first)
3302 clear_storage (target, nbytes);
3303 need_to_clear_first = 0;
3305 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3306 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3307 bitlength_rtx, TYPE_MODE (sizetype),
3308 startbit_rtx, TYPE_MODE (sizetype),
3309 endbit_rtx, TYPE_MODE (sizetype));
3312 emit_move_insn (target, targetx);
3320 /* Store the value of EXP (an expression tree)
3321 into a subfield of TARGET which has mode MODE and occupies
3322 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3323 If MODE is VOIDmode, it means that we are storing into a bit-field.
3325 If VALUE_MODE is VOIDmode, return nothing in particular.
3326 UNSIGNEDP is not used in this case.
3328 Otherwise, return an rtx for the value stored. This rtx
3329 has mode VALUE_MODE if that is convenient to do.
3330 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3332 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3333 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3336 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3337 unsignedp, align, total_size)
3339 int bitsize, bitpos;
3340 enum machine_mode mode;
3342 enum machine_mode value_mode;
3347 HOST_WIDE_INT width_mask = 0;
/* NOTE(review): numbered listing with elided lines; visible code kept
   verbatim.  See the header comment above for the parameter contract.  */
3349 if (bitsize < HOST_BITS_PER_WIDE_INT)
3350 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3352 /* If we are storing into an unaligned field of an aligned union that is
3353 in a register, we may have the mode of TARGET being an integer mode but
3354 MODE == BLKmode. In that case, get an aligned object whose size and
3355 alignment are the same as TARGET and store TARGET into it (we can avoid
3356 the store if the field being stored is the entire width of TARGET). Then
3357 call ourselves recursively to store the field into a BLKmode version of
3358 that object. Finally, load from the object into TARGET. This is not
3359 very efficient in general, but should only be slightly more expensive
3360 than the otherwise-required unaligned accesses. Perhaps this can be
3361 cleaned up later. */
3364 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3366 rtx object = assign_stack_temp (GET_MODE (target),
3367 GET_MODE_SIZE (GET_MODE (target)), 0);
3368 rtx blk_object = copy_rtx (object);
3370 MEM_IN_STRUCT_P (object) = 1;
3371 MEM_IN_STRUCT_P (blk_object) = 1;
3372 PUT_MODE (blk_object, BLKmode);
3374 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3375 emit_move_insn (object, target);
3377 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3380 /* Even though we aren't returning target, we need to
3381 give it the updated value. */
3382 emit_move_insn (target, object);
3387 /* If the structure is in a register or if the component
3388 is a bit field, we cannot use addressing to access it.
3389 Use bit-field techniques or SUBREG to store in it. */
3391 if (mode == VOIDmode
3392 || (mode != BLKmode && ! direct_store[(int) mode])
3393 || GET_CODE (target) == REG
3394 || GET_CODE (target) == SUBREG
3395 /* If the field isn't aligned enough to store as an ordinary memref,
3396 store it as a bit field. */
3397 || (SLOW_UNALIGNED_ACCESS
3398 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3399 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3401 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3403 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3405 if (mode != VOIDmode && mode != BLKmode
3406 && mode != TYPE_MODE (TREE_TYPE (exp)))
3407 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3409 /* Store the value in the bitfield. */
3410 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3411 if (value_mode != VOIDmode)
3413 /* The caller wants an rtx for the value. */
3414 /* If possible, avoid refetching from the bitfield itself. */
3416 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3419 enum machine_mode tmode;
3422 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3423 tmode = GET_MODE (temp);
3424 if (tmode == VOIDmode)
3426 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3427 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3428 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3430 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3431 NULL_RTX, value_mode, 0, align,
/* Otherwise TARGET is addressable memory: store through an address.  */
3438 rtx addr = XEXP (target, 0);
3441 /* If a value is wanted, it must be the lhs;
3442 so make the address stable for multiple use. */
3444 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3445 && ! CONSTANT_ADDRESS_P (addr)
3446 /* A frame-pointer reference is already stable. */
3447 && ! (GET_CODE (addr) == PLUS
3448 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3449 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3450 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3451 addr = copy_to_reg (addr);
3453 /* Now build a reference to just the desired component. */
3455 to_rtx = change_address (target, mode,
3456 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3457 MEM_IN_STRUCT_P (to_rtx) = 1;
3459 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3463 /* Return true if any object containing the innermost array is an unaligned
3464 packed structure field. */
3467 get_inner_unaligned_p (exp)
/* Walk outward from EXP through COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF (and
   mode-preserving conversions), comparing each containing object's alignment
   against the alignment EXP's type needs.  NOTE(review): listing has elided
   lines; the return statements for both outcomes are not visible here.  */
3470 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3474 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3476 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3480 else if (TREE_CODE (exp) != ARRAY_REF
3481 && TREE_CODE (exp) != NON_LVALUE_EXPR
3482 && ! ((TREE_CODE (exp) == NOP_EXPR
3483 || TREE_CODE (exp) == CONVERT_EXPR)
3484 && (TYPE_MODE (TREE_TYPE (exp))
3485 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
/* Step to the containing object and keep walking.  */
3488 exp = TREE_OPERAND (exp, 0);
3494 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3495 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3496 ARRAY_REFs and find the ultimate containing object, which we return.
3498 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3499 bit position, and *PUNSIGNEDP to the signedness of the field.
3500 If the position of the field is variable, we store a tree
3501 giving the variable offset (in units) in *POFFSET.
3502 This offset is in addition to the bit position.
3503 If the position is not variable, we store 0 in *POFFSET.
3505 If any of the extraction expressions is volatile,
3506 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3508 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3509 is a mode that can be used to access the field. In that case, *PBITSIZE
3512 If the field describes a variable-sized object, *PMODE is set to
3513 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3514 this case, but the address of the object can be found. */
3517 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3518 punsignedp, pvolatilep)
3523 enum machine_mode *pmode;
3527 tree orig_exp = exp;
3529 enum machine_mode mode = VOIDmode;
3530 tree offset = integer_zero_node;
/* NOTE(review): numbered listing with elided lines; visible code kept
   verbatim.  First determine the size/mode/signedness of the outermost
   reference, then walk inward accumulating bit and byte offsets.  */
3532 if (TREE_CODE (exp) == COMPONENT_REF)
3534 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3535 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3536 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3537 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3539 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3541 size_tree = TREE_OPERAND (exp, 1);
3542 *punsignedp = TREE_UNSIGNED (exp);
3546 mode = TYPE_MODE (TREE_TYPE (exp));
3547 *pbitsize = GET_MODE_BITSIZE (mode);
3548 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3553 if (TREE_CODE (size_tree) != INTEGER_CST)
3554 mode = BLKmode, *pbitsize = -1;
3556 *pbitsize = TREE_INT_CST_LOW (size_tree);
3559 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3560 and find the ultimate containing object. */
3566 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3568 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3569 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3570 : TREE_OPERAND (exp, 2))
3571 tree constant = integer_zero_node, var = pos;
3573 /* If this field hasn't been filled in yet, don't go
3574 past it. This should only happen when folding expressions
3575 made during type construction. */
3579 /* Assume here that the offset is a multiple of a unit.
3580 If not, there should be an explicitly added constant. */
3581 if (TREE_CODE (pos) == PLUS_EXPR
3582 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3583 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3584 else if (TREE_CODE (pos) == INTEGER_CST)
3585 constant = pos, var = integer_zero_node;
3587 *pbitpos += TREE_INT_CST_LOW (constant);
3590 offset = size_binop (PLUS_EXPR, offset,
3591 size_binop (EXACT_DIV_EXPR, var,
3592 size_int (BITS_PER_UNIT)));
3595 else if (TREE_CODE (exp) == ARRAY_REF)
3597 /* This code is based on the code in case ARRAY_REF in expand_expr
3598 below. We assume here that the size of an array element is
3599 always an integral multiple of BITS_PER_UNIT. */
3601 tree index = TREE_OPERAND (exp, 1);
3602 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3604 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3605 tree index_type = TREE_TYPE (index);
3607 if (! integer_zerop (low_bound))
3608 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3610 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3612 index = convert (type_for_size (POINTER_SIZE, 0), index);
3613 index_type = TREE_TYPE (index);
3616 index = fold (build (MULT_EXPR, index_type, index,
3617 TYPE_SIZE (TREE_TYPE (exp))));
3619 if (TREE_CODE (index) == INTEGER_CST
3620 && TREE_INT_CST_HIGH (index) == 0)
3621 *pbitpos += TREE_INT_CST_LOW (index);
3623 offset = size_binop (PLUS_EXPR, offset,
3624 size_binop (FLOOR_DIV_EXPR, index,
3625 size_int (BITS_PER_UNIT)));
3627 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3628 && ! ((TREE_CODE (exp) == NOP_EXPR
3629 || TREE_CODE (exp) == CONVERT_EXPR)
3630 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3631 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3633 && (TYPE_MODE (TREE_TYPE (exp))
3634 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3637 /* If any reference in the chain is volatile, the effect is volatile. */
3638 if (TREE_THIS_VOLATILE (exp))
3640 exp = TREE_OPERAND (exp, 0);
3643 /* If this was a bit-field, see if there is a mode that allows direct
3644 access in case EXP is in memory. */
3645 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3647 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3648 if (mode == BLKmode)
3652 if (integer_zerop (offset))
3655 if (offset != 0 && contains_placeholder_p (offset))
3656 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3663 /* Given an rtx VALUE that may contain additions and multiplications,
3664 return an equivalent value that just refers to a register or memory.
3665 This is done by generating instructions to perform the arithmetic
3666 and returning a pseudo-register containing the value.
3668 The returned value may be a REG, SUBREG, MEM or constant. */
3671 force_operand (value, target)
3674 register optab binoptab = 0;
3675 /* Use a temporary to force order of execution of calls to
3679 /* Use subtarget as the target for operand 0 of a binary operation. */
3680 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
/* Map the outermost rtx code to the optab that performs it.
   NOTE(review): only PLUS, MINUS and MULT are handled in the lines
   visible here; other codes are presumably handled in elided code.  */
3682 if (GET_CODE (value) == PLUS)
3683 binoptab = add_optab;
3684 else if (GET_CODE (value) == MINUS)
3685 binoptab = sub_optab;
3686 else if (GET_CODE (value) == MULT)
3688 op2 = XEXP (value, 1);
/* If the multiplier is neither a constant nor a register other than
   SUBTARGET, force both operands and expand the multiply directly.  */
3689 if (!CONSTANT_P (op2)
3690 && !(GET_CODE (op2) == REG && op2 != subtarget))
3692 tmp = force_operand (XEXP (value, 0), subtarget);
3693 return expand_mult (GET_MODE (value), tmp,
3694 force_operand (op2, NULL_RTX),
3700 op2 = XEXP (value, 1);
3701 if (!CONSTANT_P (op2)
3702 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Rewrite subtraction of a constant as addition of its negation;
   later passes combine added constants more easily.  */
3704 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3706 binoptab = add_optab;
3707 op2 = negate_rtx (GET_MODE (value), op2);
3710 /* Check for an addition with OP2 a constant integer and our first
3711 operand a PLUS of a virtual register and something else. In that
3712 case, we want to emit the sum of the virtual register and the
3713 constant first and then add the other value. This allows virtual
3714 register instantiation to simply modify the constant rather than
3715 creating another one around this addition. */
3716 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3717 && GET_CODE (XEXP (value, 0)) == PLUS
3718 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3719 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3720 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3722 rtx temp = expand_binop (GET_MODE (value), binoptab,
3723 XEXP (XEXP (value, 0), 0), op2,
3724 subtarget, 0, OPTAB_LIB_WIDEN);
3725 return expand_binop (GET_MODE (value), binoptab, temp,
3726 force_operand (XEXP (XEXP (value, 0), 1), 0),
3727 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0 into SUBTARGET, force OP2,
   then emit the operation with library/widening fallback.  */
3730 tmp = force_operand (XEXP (value, 0), subtarget);
3731 return expand_binop (GET_MODE (value), binoptab, tmp,
3732 force_operand (op2, NULL_RTX),
3733 target, 0, OPTAB_LIB_WIDEN);
3734 /* We give UNSIGNEDP = 0 to expand_binop
3735 because the only operations we are expanding here are signed ones. */
3740 /* Subroutine of expand_expr:
3741 save the non-copied parts (LIST) of an expr (LHS), and return a list
3742 which can restore these values to their previous values,
3743 should something modify their storage. */
3746 save_noncopied_parts (lhs, list)
/* Walk LIST; a TREE_LIST element denotes a nested list of parts and is
   flattened by recursion.  */
3753 for (tail = list; tail; tail = TREE_CHAIN (tail))
3754 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3755 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3758 tree part = TREE_VALUE (tail);
3759 tree part_type = TREE_TYPE (part);
/* Build the reference LHS.PART and allocate a stack temporary big
   enough to hold its current value.  */
3760 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3761 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3762 int_size_in_bytes (part_type), 0);
3763 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
/* If the temporary's address is not valid for this mode, rebuild the
   MEM with a validated address.  */
3764 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3765 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
/* Record (field reference, RTL_EXPR holding the saved value) and copy
   the field's current value into the temporary now.  */
3766 parts = tree_cons (to_be_saved,
3767 build (RTL_EXPR, part_type, NULL_TREE,
3770 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3775 /* Subroutine of expand_expr:
3776 record the non-copied parts (LIST) of an expr (LHS), and return a list
3777 which specifies the initial values of these parts. */
3780 init_noncopied_parts (lhs, list)
/* Walk LIST, flattening nested TREE_LISTs by recursion; for each part,
   pair its initial value (the TREE_PURPOSE of the input element) with a
   COMPONENT_REF designating LHS.PART as the place to initialize.  */
3787 for (tail = list; tail; tail = TREE_CHAIN (tail))
3788 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3789 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3792 tree part = TREE_VALUE (tail);
3793 tree part_type = TREE_TYPE (part);
3794 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3795 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3800 /* Subroutine of expand_expr: return nonzero iff there is no way that
3801 EXP can reference X, which is being modified. */
3804 safe_from_p (x, exp)
3812 /* If EXP has varying size, we MUST use a target since we currently
3813 have no way of allocating temporaries of variable size. So we
3814 assume here that something at a higher level has prevented a
3815 clash. This is somewhat bogus, but the best we can do. */
3816 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3817 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3820 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3821 find the underlying pseudo. */
3822 if (GET_CODE (x) == SUBREG)
3825 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3829 /* If X is a location in the outgoing argument area, it is always safe. */
3830 if (GET_CODE (x) == MEM
3831 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3832 || (GET_CODE (XEXP (x, 0)) == PLUS
3833 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* First dispatch on the CLASS of tree code (declaration, unary,
   binary, ...); specific tree codes are handled further below.  */
3836 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3839 exp_rtl = DECL_RTL (exp);
3846 if (TREE_CODE (exp) == TREE_LIST)
3847 return ((TREE_VALUE (exp) == 0
3848 || safe_from_p (x, TREE_VALUE (exp)))
3849 && (TREE_CHAIN (exp) == 0
3850 || safe_from_p (x, TREE_CHAIN (exp))));
3855 return safe_from_p (x, TREE_OPERAND (exp, 0));
3859 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3860 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3864 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3865 the expression. If it is set, we conflict iff we are that rtx or
3866 both are in memory. Otherwise, we check all operands of the
3867 expression recursively. */
3869 switch (TREE_CODE (exp))
3872 return (staticp (TREE_OPERAND (exp, 0))
3873 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3876 if (GET_CODE (x) == MEM)
3881 exp_rtl = CALL_EXPR_RTL (exp);
3884 /* Assume that the call will clobber all hard registers and
3886 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3887 || GET_CODE (x) == MEM)
3894 exp_rtl = RTL_EXPR_RTL (exp);
3896 /* We don't know what this can modify. */
3901 case WITH_CLEANUP_EXPR:
3902 exp_rtl = RTL_EXPR_RTL (exp);
3905 case CLEANUP_POINT_EXPR:
3906 return safe_from_p (x, TREE_OPERAND (exp, 0));
3909 exp_rtl = SAVE_EXPR_RTL (exp);
3913 /* The only operand we look at is operand 1. The rest aren't
3914 part of the expression. */
3915 return safe_from_p (x, TREE_OPERAND (exp, 1));
3917 case METHOD_CALL_EXPR:
3918 /* This takes a rtx argument, but shouldn't appear here. */
3922 /* If we have an rtx, we do not need to scan our operands. */
/* Fallback: recursively check every operand of EXP.  */
3926 nops = tree_code_length[(int) TREE_CODE (exp)];
3927 for (i = 0; i < nops; i++)
3928 if (TREE_OPERAND (exp, i) != 0
3929 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3933 /* If we have an rtl, find any enclosed object. Then see if we conflict
/* Strip a SUBREG wrapper; a hard register underneath is unsafe.  */
3937 if (GET_CODE (exp_rtl) == SUBREG)
3939 exp_rtl = SUBREG_REG (exp_rtl);
3940 if (GET_CODE (exp_rtl) == REG
3941 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3945 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3946 are memory and EXP is not readonly. */
3947 return ! (rtx_equal_p (x, exp_rtl)
3948 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3949 && ! TREE_READONLY (exp)));
3952 /* If we reach here, it is safe. */
3956 /* Subroutine of expand_expr: return nonzero iff EXP is an
3957 expression whose type is statically determinable. */
/* The fixed-type cases are exactly: parameter and variable decls,
   calls, target expressions, and component/array references.  */
3963 if (TREE_CODE (exp) == PARM_DECL
3964 || TREE_CODE (exp) == VAR_DECL
3965 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3966 || TREE_CODE (exp) == COMPONENT_REF
3967 || TREE_CODE (exp) == ARRAY_REF)
3972 /* expand_expr: generate code for computing expression EXP.
3973 An rtx for the computed value is returned. The value is never null.
3974 In the case of a void EXP, const0_rtx is returned.
3976 The value may be stored in TARGET if TARGET is nonzero.
3977 TARGET is just a suggestion; callers must assume that
3978 the rtx returned may not be the same as TARGET.
3980 If TARGET is CONST0_RTX, it means that the value will be ignored.
3982 If TMODE is not VOIDmode, it suggests generating the
3983 result in mode TMODE. But this is done only when convenient.
3984 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3985 TMODE is just a suggestion; callers must assume that
3986 the rtx returned may not have mode TMODE.
3988 Note that TARGET may have neither TMODE nor MODE. In that case, it
3989 probably will not be used.
3991 If MODIFIER is EXPAND_SUM then when EXP is an addition
3992 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3993 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3994 products as above, or REG or MEM, or constant.
3995 Ordinarily in such cases we would output mul or add instructions
3996 and then return a pseudo reg containing the sum.
3998 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3999 it also marks a label as absolutely required (it can't be dead).
4000 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4001 This is used for outputting expressions used in initializers.
4003 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4004 with a constant address even if that address is not normally legitimate.
4005 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4008 expand_expr (exp, target, tmode, modifier)
4011 enum machine_mode tmode;
4012 enum expand_modifier modifier;
4014 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4015 This is static so it will be accessible to our recursive callees. */
4016 static tree placeholder_list = 0;
4017 register rtx op0, op1, temp;
4018 tree type = TREE_TYPE (exp);
4019 int unsignedp = TREE_UNSIGNED (type);
4020 register enum machine_mode mode = TYPE_MODE (type);
4021 register enum tree_code code = TREE_CODE (exp);
4023 /* Use subtarget as the target for operand 0 of a binary operation. */
4024 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4025 rtx original_target = target;
4026 /* Maybe defer this until sure not doing bytecode? */
4027 int ignore = (target == const0_rtx
4028 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4029 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4030 || code == COND_EXPR)
4031 && TREE_CODE (type) == VOID_TYPE));
4035 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4037 bc_expand_expr (exp);
4041 /* Don't use hard regs as subtargets, because the combiner
4042 can only handle pseudo regs. */
4043 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4045 /* Avoid subtargets inside loops,
4046 since they hide some invariant expressions. */
4047 if (preserve_subexpressions_p ())
4050 /* If we are going to ignore this result, we need only do something
4051 if there is a side-effect somewhere in the expression. If there
4052 is, short-circuit the most common cases here. Note that we must
4053 not call expand_expr with anything but const0_rtx in case this
4054 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4058 if (! TREE_SIDE_EFFECTS (exp))
4061 /* Ensure we reference a volatile object even if value is ignored. */
4062 if (TREE_THIS_VOLATILE (exp)
4063 && TREE_CODE (exp) != FUNCTION_DECL
4064 && mode != VOIDmode && mode != BLKmode)
4066 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4067 if (GET_CODE (temp) == MEM)
4068 temp = copy_to_reg (temp);
4072 if (TREE_CODE_CLASS (code) == '1')
4073 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4074 VOIDmode, modifier);
4075 else if (TREE_CODE_CLASS (code) == '2'
4076 || TREE_CODE_CLASS (code) == '<')
4078 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4079 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4082 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4083 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4084 /* If the second operand has no side effects, just evaluate
4086 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4087 VOIDmode, modifier);
4092 /* If will do cse, generate all results into pseudo registers
4093 since 1) that allows cse to find more things
4094 and 2) otherwise cse could produce an insn the machine
4097 if (! cse_not_expected && mode != BLKmode && target
4098 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4105 tree function = decl_function_context (exp);
4106 /* Handle using a label in a containing function. */
4107 if (function != current_function_decl && function != 0)
4109 struct function *p = find_function_data (function);
4110 /* Allocate in the memory associated with the function
4111 that the label is in. */
4112 push_obstacks (p->function_obstack,
4113 p->function_maybepermanent_obstack);
4115 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4116 label_rtx (exp), p->forced_labels);
4119 else if (modifier == EXPAND_INITIALIZER)
4120 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4121 label_rtx (exp), forced_labels);
4122 temp = gen_rtx (MEM, FUNCTION_MODE,
4123 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4124 if (function != current_function_decl && function != 0)
4125 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4130 if (DECL_RTL (exp) == 0)
4132 error_with_decl (exp, "prior parameter's size depends on `%s'");
4133 return CONST0_RTX (mode);
4136 /* ... fall through ... */
4139 /* If a static var's type was incomplete when the decl was written,
4140 but the type is complete now, lay out the decl now. */
4141 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4142 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4144 push_obstacks_nochange ();
4145 end_temporary_allocation ();
4146 layout_decl (exp, 0);
4147 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4151 /* ... fall through ... */
4155 if (DECL_RTL (exp) == 0)
4158 /* Ensure variable marked as used even if it doesn't go through
4159 a parser. If it hasn't been used yet, write out an external
4161 if (! TREE_USED (exp))
4163 assemble_external (exp);
4164 TREE_USED (exp) = 1;
4167 /* Handle variables inherited from containing functions. */
4168 context = decl_function_context (exp);
4170 /* We treat inline_function_decl as an alias for the current function
4171 because that is the inline function whose vars, types, etc.
4172 are being merged into the current function.
4173 See expand_inline_function. */
4175 if (context != 0 && context != current_function_decl
4176 && context != inline_function_decl
4177 /* If var is static, we don't need a static chain to access it. */
4178 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4179 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4183 /* Mark as non-local and addressable. */
4184 DECL_NONLOCAL (exp) = 1;
4185 mark_addressable (exp);
4186 if (GET_CODE (DECL_RTL (exp)) != MEM)
4188 addr = XEXP (DECL_RTL (exp), 0);
4189 if (GET_CODE (addr) == MEM)
4190 addr = gen_rtx (MEM, Pmode,
4191 fix_lexical_addr (XEXP (addr, 0), exp));
4193 addr = fix_lexical_addr (addr, exp);
4194 return change_address (DECL_RTL (exp), mode, addr);
4197 /* This is the case of an array whose size is to be determined
4198 from its initializer, while the initializer is still being parsed.
4201 if (GET_CODE (DECL_RTL (exp)) == MEM
4202 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4203 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4204 XEXP (DECL_RTL (exp), 0));
4206 /* If DECL_RTL is memory, we are in the normal case and either
4207 the address is not valid or it is not a register and -fforce-addr
4208 is specified, get the address into a register. */
4210 if (GET_CODE (DECL_RTL (exp)) == MEM
4211 && modifier != EXPAND_CONST_ADDRESS
4212 && modifier != EXPAND_SUM
4213 && modifier != EXPAND_INITIALIZER
4214 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4216 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4217 return change_address (DECL_RTL (exp), VOIDmode,
4218 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4220 /* If the mode of DECL_RTL does not match that of the decl, it
4221 must be a promoted value. We return a SUBREG of the wanted mode,
4222 but mark it so that we know that it was already extended. */
4224 if (GET_CODE (DECL_RTL (exp)) == REG
4225 && GET_MODE (DECL_RTL (exp)) != mode)
4227 /* Get the signedness used for this variable. Ensure we get the
4228 same mode we got when the variable was declared. */
4229 if (GET_MODE (DECL_RTL (exp))
4230 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4233 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4234 SUBREG_PROMOTED_VAR_P (temp) = 1;
4235 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4239 return DECL_RTL (exp);
4242 return immed_double_const (TREE_INT_CST_LOW (exp),
4243 TREE_INT_CST_HIGH (exp),
4247 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4250 /* If optimized, generate immediate CONST_DOUBLE
4251 which will be turned into memory by reload if necessary.
4253 We used to force a register so that loop.c could see it. But
4254 this does not allow gen_* patterns to perform optimizations with
4255 the constants. It also produces two insns in cases like "x = 1.0;".
4256 On most machines, floating-point constants are not permitted in
4257 many insns, so we'd end up copying it to a register in any case.
4259 Now, we do the copying in expand_binop, if appropriate. */
4260 return immed_real_const (exp);
4264 if (! TREE_CST_RTL (exp))
4265 output_constant_def (exp);
4267 /* TREE_CST_RTL probably contains a constant address.
4268 On RISC machines where a constant address isn't valid,
4269 make some insns to get that address into a register. */
4270 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4271 && modifier != EXPAND_CONST_ADDRESS
4272 && modifier != EXPAND_INITIALIZER
4273 && modifier != EXPAND_SUM
4274 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4276 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4277 return change_address (TREE_CST_RTL (exp), VOIDmode,
4278 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4279 return TREE_CST_RTL (exp);
4282 context = decl_function_context (exp);
4284 /* We treat inline_function_decl as an alias for the current function
4285 because that is the inline function whose vars, types, etc.
4286 are being merged into the current function.
4287 See expand_inline_function. */
4288 if (context == current_function_decl || context == inline_function_decl)
4291 /* If this is non-local, handle it. */
4294 temp = SAVE_EXPR_RTL (exp);
4295 if (temp && GET_CODE (temp) == REG)
4297 put_var_into_stack (exp);
4298 temp = SAVE_EXPR_RTL (exp);
4300 if (temp == 0 || GET_CODE (temp) != MEM)
4302 return change_address (temp, mode,
4303 fix_lexical_addr (XEXP (temp, 0), exp));
4305 if (SAVE_EXPR_RTL (exp) == 0)
4307 if (mode == BLKmode)
4310 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4311 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4313 else if (mode == VOIDmode)
4316 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4318 SAVE_EXPR_RTL (exp) = temp;
4319 if (!optimize && GET_CODE (temp) == REG)
4320 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4323 /* If the mode of TEMP does not match that of the expression, it
4324 must be a promoted value. We pass store_expr a SUBREG of the
4325 wanted mode but mark it so that we know that it was already
4326 extended. Note that `unsignedp' was modified above in
4329 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4331 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4332 SUBREG_PROMOTED_VAR_P (temp) = 1;
4333 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4336 if (temp == const0_rtx)
4337 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4339 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4342 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4343 must be a promoted value. We return a SUBREG of the wanted mode,
4344 but mark it so that we know that it was already extended. */
4346 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4347 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4349 /* Compute the signedness and make the proper SUBREG. */
4350 promote_mode (type, mode, &unsignedp, 0);
4351 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4352 SUBREG_PROMOTED_VAR_P (temp) = 1;
4353 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4357 return SAVE_EXPR_RTL (exp);
4359 case PLACEHOLDER_EXPR:
4360 /* If there is an object on the head of the placeholder list,
4361 see if some object in its references is of type TYPE. For
4362 further information, see tree.def. */
4363 if (placeholder_list)
4366 tree old_list = placeholder_list;
4368 for (object = TREE_PURPOSE (placeholder_list);
4369 TREE_TYPE (object) != type
4370 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4371 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4372 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4373 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4374 object = TREE_OPERAND (object, 0))
4377 if (object && TREE_TYPE (object) == type)
4379 /* Expand this object skipping the list entries before
4380 it was found in case it is also a PLACEHOLDER_EXPR.
4381 In that case, we want to translate it using subsequent
4383 placeholder_list = TREE_CHAIN (placeholder_list);
4384 temp = expand_expr (object, original_target, tmode, modifier);
4385 placeholder_list = old_list;
4390 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4393 case WITH_RECORD_EXPR:
4394 /* Put the object on the placeholder list, expand our first operand,
4395 and pop the list. */
4396 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4398 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4400 placeholder_list = TREE_CHAIN (placeholder_list);
4404 expand_exit_loop_if_false (NULL_PTR,
4405 invert_truthvalue (TREE_OPERAND (exp, 0)));
4410 expand_start_loop (1);
4411 expand_expr_stmt (TREE_OPERAND (exp, 0));
4419 tree vars = TREE_OPERAND (exp, 0);
4420 int vars_need_expansion = 0;
4422 /* Need to open a binding contour here because
4423 if there are any cleanups they must be contained here. */
4424 expand_start_bindings (0);
4426 /* Mark the corresponding BLOCK for output in its proper place. */
4427 if (TREE_OPERAND (exp, 2) != 0
4428 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4429 insert_block (TREE_OPERAND (exp, 2));
4431 /* If VARS have not yet been expanded, expand them now. */
4434 if (DECL_RTL (vars) == 0)
4436 vars_need_expansion = 1;
4439 expand_decl_init (vars);
4440 vars = TREE_CHAIN (vars);
4443 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4445 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4451 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4453 emit_insns (RTL_EXPR_SEQUENCE (exp));
4454 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4455 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4456 free_temps_for_rtl_expr (exp);
4457 return RTL_EXPR_RTL (exp);
4460 /* If we don't need the result, just ensure we evaluate any
4465 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4466 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4470 /* All elts simple constants => refer to a constant in memory. But
4471 if this is a non-BLKmode mode, let it store a field at a time
4472 since that should make a CONST_INT or CONST_DOUBLE when we
4473 fold. Likewise, if we have a target we can use, it is best to
4474 store directly into the target unless the type is large enough
4475 that memcpy will be used. If we are making an initializer and
4476 all operands are constant, put it in memory as well. */
4477 else if ((TREE_STATIC (exp)
4478 && ((mode == BLKmode
4479 && ! (target != 0 && safe_from_p (target, exp)))
4480 || TREE_ADDRESSABLE (exp)
4481 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4482 && (move_by_pieces_ninsns
4483 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4484 TYPE_ALIGN (type) / BITS_PER_UNIT)
4486 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4488 rtx constructor = output_constant_def (exp);
4489 if (modifier != EXPAND_CONST_ADDRESS
4490 && modifier != EXPAND_INITIALIZER
4491 && modifier != EXPAND_SUM
4492 && (! memory_address_p (GET_MODE (constructor),
4493 XEXP (constructor, 0))
4495 && GET_CODE (XEXP (constructor, 0)) != REG)))
4496 constructor = change_address (constructor, VOIDmode,
4497 XEXP (constructor, 0));
4503 if (target == 0 || ! safe_from_p (target, exp))
4505 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4506 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4510 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4511 if (AGGREGATE_TYPE_P (type))
4512 MEM_IN_STRUCT_P (target) = 1;
4515 store_constructor (exp, target);
4521 tree exp1 = TREE_OPERAND (exp, 0);
4524 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4525 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4526 This code has the same general effect as simply doing
4527 expand_expr on the save expr, except that the expression PTR
4528 is computed for use as a memory address. This means different
4529 code, suitable for indexing, may be generated. */
4530 if (TREE_CODE (exp1) == SAVE_EXPR
4531 && SAVE_EXPR_RTL (exp1) == 0
4532 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4534 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4535 VOIDmode, EXPAND_SUM);
4536 op0 = memory_address (mode, temp);
4537 op0 = copy_all_regs (op0);
4538 SAVE_EXPR_RTL (exp1) = op0;
4542 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4543 op0 = memory_address (mode, op0);
4546 temp = gen_rtx (MEM, mode, op0);
4547 /* If address was computed by addition,
4548 mark this as an element of an aggregate. */
4549 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4550 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4551 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4552 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4553 || (TREE_CODE (exp1) == ADDR_EXPR
4554 && (exp2 = TREE_OPERAND (exp1, 0))
4555 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4556 MEM_IN_STRUCT_P (temp) = 1;
4557 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4558 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4559 a location is accessed through a pointer to const does not mean
4560 that the value there can never change. */
4561 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4567 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4571 tree array = TREE_OPERAND (exp, 0);
4572 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4573 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4574 tree index = TREE_OPERAND (exp, 1);
4575 tree index_type = TREE_TYPE (index);
4578 if (TREE_CODE (low_bound) != INTEGER_CST
4579 && contains_placeholder_p (low_bound))
4580 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4582 /* Optimize the special-case of a zero lower bound.
4584 We convert the low_bound to sizetype to avoid some problems
4585 with constant folding. (E.g. suppose the lower bound is 1,
4586 and its mode is QI. Without the conversion, (ARRAY
4587 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4588 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4590 But sizetype isn't quite right either (especially if
4591 the lowbound is negative). FIXME */
4593 if (! integer_zerop (low_bound))
4594 index = fold (build (MINUS_EXPR, index_type, index,
4595 convert (sizetype, low_bound)));
4597 if ((TREE_CODE (index) != INTEGER_CST
4598 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4599 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4601 /* Nonconstant array index or nonconstant element size, and
4602 not an array in an unaligned (packed) structure field.
4603 Generate the tree for *(&array+index) and expand that,
4604 except do it in a language-independent way
4605 and don't complain about non-lvalue arrays.
4606 `mark_addressable' should already have been called
4607 for any array for which this case will be reached. */
4609 /* Don't forget the const or volatile flag from the array
4611 tree variant_type = build_type_variant (type,
4612 TREE_READONLY (exp),
4613 TREE_THIS_VOLATILE (exp));
4614 tree array_adr = build1 (ADDR_EXPR,
4615 build_pointer_type (variant_type), array);
4617 tree size = size_in_bytes (type);
4619 /* Convert the integer argument to a type the same size as a
4620 pointer so the multiply won't overflow spuriously. */
4621 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4622 index = convert (type_for_size (POINTER_SIZE, 0), index);
4624 if (TREE_CODE (size) != INTEGER_CST
4625 && contains_placeholder_p (size))
4626 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4628 /* Don't think the address has side effects
4629 just because the array does.
4630 (In some cases the address might have side effects,
4631 and we fail to record that fact here. However, it should not
4632 matter, since expand_expr should not care.) */
4633 TREE_SIDE_EFFECTS (array_adr) = 0;
4635 elt = build1 (INDIRECT_REF, type,
4636 fold (build (PLUS_EXPR,
4637 TYPE_POINTER_TO (variant_type),
4639 fold (build (MULT_EXPR,
4640 TYPE_POINTER_TO (variant_type),
4643 /* Volatility, etc., of new expression is same as old
4645 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4646 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4647 TREE_READONLY (elt) = TREE_READONLY (exp);
4649 return expand_expr (elt, target, tmode, modifier);
4652 /* Fold an expression like: "foo"[2].
4653 This is not done in fold so it won't happen inside &.
4654 Don't fold if this is for wide characters since it's too
4655 difficult to do correctly and this is a very rare case. */
4657 if (TREE_CODE (array) == STRING_CST
4658 && TREE_CODE (index) == INTEGER_CST
4659 && !TREE_INT_CST_HIGH (index)
4660 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4661 && GET_MODE_CLASS (mode) == MODE_INT
4662 && GET_MODE_SIZE (mode) == 1)
4663 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4665 /* If this is a constant index into a constant array,
4666 just get the value from the array. Handle both the cases when
4667 we have an explicit constructor and when our operand is a variable
4668 that was declared const. */
4670 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4672 if (TREE_CODE (index) == INTEGER_CST
4673 && TREE_INT_CST_HIGH (index) == 0)
4675 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4677 i = TREE_INT_CST_LOW (index);
4679 elem = TREE_CHAIN (elem);
4681 return expand_expr (fold (TREE_VALUE (elem)), target,
4686 else if (optimize >= 1
4687 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4688 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4689 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4691 if (TREE_CODE (index) == INTEGER_CST
4692 && TREE_INT_CST_HIGH (index) == 0)
4694 tree init = DECL_INITIAL (array);
4696 i = TREE_INT_CST_LOW (index);
4697 if (TREE_CODE (init) == CONSTRUCTOR)
4699 tree elem = CONSTRUCTOR_ELTS (init);
4702 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4703 elem = TREE_CHAIN (elem);
4705 return expand_expr (fold (TREE_VALUE (elem)), target,
4708 else if (TREE_CODE (init) == STRING_CST
4709 && i < TREE_STRING_LENGTH (init))
4710 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4715 /* Treat array-ref with constant index as a component-ref. */
4719 /* If the operand is a CONSTRUCTOR, we can just extract the
4720 appropriate field if it is present. Don't do this if we have
4721 already written the data since we want to refer to that copy
4722 and varasm.c assumes that's what we'll do. */
4723 if (code != ARRAY_REF
4724 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4725 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4729 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4730 elt = TREE_CHAIN (elt))
4731 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4732 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4736 enum machine_mode mode1;
4741 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4742 &mode1, &unsignedp, &volatilep);
4745 /* If we got back the original object, something is wrong. Perhaps
4746 we are evaluating an expression too early. In any event, don't
4747 infinitely recurse. */
4751 /* In some cases, we will be offsetting OP0's address by a constant.
4752 So get it as a sum, if possible. If we will be using it
4753 directly in an insn, we validate it. */
4754 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4756 /* If this is a constant, put it into a register if it is a
4757 legitimate constant and memory if it isn't. */
4758 if (CONSTANT_P (op0))
4760 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4761 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4762 op0 = force_reg (mode, op0);
4764 op0 = validize_mem (force_const_mem (mode, op0));
4767 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4770 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4772 if (GET_CODE (op0) != MEM)
4774 op0 = change_address (op0, VOIDmode,
4775 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4776 force_reg (ptr_mode, offset_rtx)));
4777 /* If we have a variable offset, the known alignment
4778 is only that of the innermost structure containing the field.
4779 (Actually, we could sometimes do better by using the
4780 size of an element of the innermost array, but no need.) */
4781 if (TREE_CODE (exp) == COMPONENT_REF
4782 || TREE_CODE (exp) == BIT_FIELD_REF)
4783 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4787 /* Don't forget about volatility even if this is a bitfield. */
4788 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4790 op0 = copy_rtx (op0);
4791 MEM_VOLATILE_P (op0) = 1;
4794 /* In cases where an aligned union has an unaligned object
4795 as a field, we might be extracting a BLKmode value from
4796 an integer-mode (e.g., SImode) object. Handle this case
4797 by doing the extract into an object as wide as the field
4798 (which we know to be the width of a basic mode), then
4799 storing into memory, and changing the mode to BLKmode. */
4800 if (mode1 == VOIDmode
4801 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4802 || (modifier != EXPAND_CONST_ADDRESS
4803 && modifier != EXPAND_SUM
4804 && modifier != EXPAND_INITIALIZER
4805 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4806 /* If the field isn't aligned enough to fetch as a memref,
4807 fetch it as a bit field. */
4808 || (SLOW_UNALIGNED_ACCESS
4809 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4810 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4812 enum machine_mode ext_mode = mode;
4814 if (ext_mode == BLKmode)
4815 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4817 if (ext_mode == BLKmode)
4820 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4821 unsignedp, target, ext_mode, ext_mode,
4823 int_size_in_bytes (TREE_TYPE (tem)));
4824 if (mode == BLKmode)
4826 rtx new = assign_stack_temp (ext_mode,
4827 bitsize / BITS_PER_UNIT, 0);
4829 emit_move_insn (new, op0);
4830 op0 = copy_rtx (new);
4831 PUT_MODE (op0, BLKmode);
4832 MEM_IN_STRUCT_P (op0) = 1;
4838 /* Get a reference to just this component. */
4839 if (modifier == EXPAND_CONST_ADDRESS
4840 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4841 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4842 (bitpos / BITS_PER_UNIT)));
4844 op0 = change_address (op0, mode1,
4845 plus_constant (XEXP (op0, 0),
4846 (bitpos / BITS_PER_UNIT)));
4847 MEM_IN_STRUCT_P (op0) = 1;
4848 MEM_VOLATILE_P (op0) |= volatilep;
4849 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4852 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4853 convert_move (target, op0, unsignedp);
4859 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4860 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4861 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4862 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4863 MEM_IN_STRUCT_P (temp) = 1;
4864 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4865 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4866 a location is accessed through a pointer to const does not mean
4867 that the value there can never change. */
4868 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4873 /* Intended for a reference to a buffer of a file-object in Pascal.
4874 But it's not certain that a special tree code will really be
4875 necessary for these. INDIRECT_REF might work for them. */
4881 /* Pascal set IN expression.
4884 rlo = set_low - (set_low%bits_per_word);
4885 the_word = set [ (index - rlo)/bits_per_word ];
4886 bit_index = index % bits_per_word;
4887 bitmask = 1 << bit_index;
4888 return !!(the_word & bitmask); */
4890 tree set = TREE_OPERAND (exp, 0);
4891 tree index = TREE_OPERAND (exp, 1);
4892 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4893 tree set_type = TREE_TYPE (set);
4894 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4895 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4896 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4897 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4898 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4899 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4900 rtx setaddr = XEXP (setval, 0);
4901 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4903 rtx diff, quo, rem, addr, bit, result;
4905 preexpand_calls (exp);
4907 /* If domain is empty, answer is no. Likewise if index is constant
4908 and out of bounds. */
4909 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4910 && TREE_CODE (set_low_bound) == INTEGER_CST
4911 && tree_int_cst_lt (set_high_bound, set_low_bound)
4912 || (TREE_CODE (index) == INTEGER_CST
4913 && TREE_CODE (set_low_bound) == INTEGER_CST
4914 && tree_int_cst_lt (index, set_low_bound))
4915 || (TREE_CODE (set_high_bound) == INTEGER_CST
4916 && TREE_CODE (index) == INTEGER_CST
4917 && tree_int_cst_lt (set_high_bound, index))))
4921 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4923 /* If we get here, we have to generate the code for both cases
4924 (in range and out of range). */
4926 op0 = gen_label_rtx ();
4927 op1 = gen_label_rtx ();
4929 if (! (GET_CODE (index_val) == CONST_INT
4930 && GET_CODE (lo_r) == CONST_INT))
4932 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4933 GET_MODE (index_val), iunsignedp, 0);
4934 emit_jump_insn (gen_blt (op1));
4937 if (! (GET_CODE (index_val) == CONST_INT
4938 && GET_CODE (hi_r) == CONST_INT))
4940 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4941 GET_MODE (index_val), iunsignedp, 0);
4942 emit_jump_insn (gen_bgt (op1));
4945 /* Calculate the element number of bit zero in the first word
4947 if (GET_CODE (lo_r) == CONST_INT)
4948 rlow = GEN_INT (INTVAL (lo_r)
4949 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4951 rlow = expand_binop (index_mode, and_optab, lo_r,
4952 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4953 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4955 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4956 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4958 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4959 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4960 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4961 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4963 addr = memory_address (byte_mode,
4964 expand_binop (index_mode, add_optab, diff,
4965 setaddr, NULL_RTX, iunsignedp,
4968 /* Extract the bit we want to examine */
4969 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4970 gen_rtx (MEM, byte_mode, addr),
4971 make_tree (TREE_TYPE (index), rem),
4973 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4974 GET_MODE (target) == byte_mode ? target : 0,
4975 1, OPTAB_LIB_WIDEN);
4977 if (result != target)
4978 convert_move (target, result, 1);
4980 /* Output the code to handle the out-of-range case. */
4983 emit_move_insn (target, const0_rtx);
4988 case WITH_CLEANUP_EXPR:
4989 if (RTL_EXPR_RTL (exp) == 0)
4992 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4994 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4995 /* That's it for this cleanup. */
4996 TREE_OPERAND (exp, 2) = 0;
4997 (*interim_eh_hook) (NULL_TREE);
4999 return RTL_EXPR_RTL (exp);
5001 case CLEANUP_POINT_EXPR:
5003 extern int temp_slot_level;
5004 tree old_cleanups = cleanups_this_call;
5005 int old_temp_level = target_temp_slot_level;
5007 target_temp_slot_level = temp_slot_level;
5008 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5009 /* If we're going to use this value, load it up now. */
5011 op0 = force_not_mem (op0);
5012 expand_cleanups_to (old_cleanups);
5013 preserve_temp_slots (op0);
5016 target_temp_slot_level = old_temp_level;
5021 /* Check for a built-in function. */
5022 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5023 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5025 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5026 return expand_builtin (exp, target, subtarget, tmode, ignore);
5028 /* If this call was expanded already by preexpand_calls,
5029 just return the result we got. */
5030 if (CALL_EXPR_RTL (exp) != 0)
5031 return CALL_EXPR_RTL (exp);
5033 return expand_call (exp, target, ignore);
5035 case NON_LVALUE_EXPR:
5038 case REFERENCE_EXPR:
5039 if (TREE_CODE (type) == UNION_TYPE)
5041 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5044 if (mode == BLKmode)
5046 if (TYPE_SIZE (type) == 0
5047 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5049 target = assign_stack_temp (BLKmode,
5050 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5051 + BITS_PER_UNIT - 1)
5052 / BITS_PER_UNIT, 0);
5053 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5056 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5059 if (GET_CODE (target) == MEM)
5060 /* Store data into beginning of memory target. */
5061 store_expr (TREE_OPERAND (exp, 0),
5062 change_address (target, TYPE_MODE (valtype), 0), 0);
5064 else if (GET_CODE (target) == REG)
5065 /* Store this field into a union of the proper type. */
5066 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5067 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5069 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5073 /* Return the entire union. */
5077 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5079 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5082 /* If the signedness of the conversion differs and OP0 is
5083 a promoted SUBREG, clear that indication since we now
5084 have to do the proper extension. */
5085 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5086 && GET_CODE (op0) == SUBREG)
5087 SUBREG_PROMOTED_VAR_P (op0) = 0;
5092 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5093 if (GET_MODE (op0) == mode)
5096 /* If OP0 is a constant, just convert it into the proper mode. */
5097 if (CONSTANT_P (op0))
5099 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5100 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5102 if (modifier == EXPAND_INITIALIZER)
5103 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5105 if (flag_force_mem && GET_CODE (op0) == MEM)
5106 op0 = copy_to_reg (op0);
5110 convert_to_mode (mode, op0,
5111 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5113 convert_move (target, op0,
5114 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5118 /* We come here from MINUS_EXPR when the second operand is a constant. */
5120 this_optab = add_optab;
5122 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5123 something else, make sure we add the register to the constant and
5124 then to the other thing. This case can occur during strength
5125 reduction and doing it this way will produce better code if the
5126 frame pointer or argument pointer is eliminated.
5128 fold-const.c will ensure that the constant is always in the inner
5129 PLUS_EXPR, so the only case we need to do anything about is if
5130 sp, ap, or fp is our second argument, in which case we must swap
5131 the innermost first argument and our second argument. */
5133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5134 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5135 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5136 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5137 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5138 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5140 tree t = TREE_OPERAND (exp, 1);
5142 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5143 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5146 /* If the result is to be ptr_mode and we are adding an integer to
5147 something, we might be forming a constant. So try to use
5148 plus_constant. If it produces a sum and we can't accept it,
5149 use force_operand. This allows P = &ARR[const] to generate
5150 efficient code on machines where a SYMBOL_REF is not a valid
5153 If this is an EXPAND_SUM call, always return the sum. */
5154 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5155 || mode == ptr_mode)
5157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5158 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5159 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5161 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5163 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5164 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5165 op1 = force_operand (op1, target);
5169 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5170 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5171 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5175 if (! CONSTANT_P (op0))
5177 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5178 VOIDmode, modifier);
5179 /* Don't go to both_summands if modifier
5180 says it's not right to return a PLUS. */
5181 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5185 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5186 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5187 op0 = force_operand (op0, target);
5192 /* No sense saving up arithmetic to be done
5193 if it's all in the wrong mode to form part of an address.
5194 And force_operand won't know whether to sign-extend or
5196 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5197 || mode != ptr_mode)
5200 preexpand_calls (exp);
5201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5204 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5205 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5208 /* Make sure any term that's a sum with a constant comes last. */
5209 if (GET_CODE (op0) == PLUS
5210 && CONSTANT_P (XEXP (op0, 1)))
5216 /* If adding to a sum including a constant,
5217 associate it to put the constant outside. */
5218 if (GET_CODE (op1) == PLUS
5219 && CONSTANT_P (XEXP (op1, 1)))
5221 rtx constant_term = const0_rtx;
5223 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5226 /* Ensure that MULT comes first if there is one. */
5227 else if (GET_CODE (op0) == MULT)
5228 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5230 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5232 /* Let's also eliminate constants from op0 if possible. */
5233 op0 = eliminate_constant_term (op0, &constant_term);
5235 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5236 their sum should be a constant. Form it into OP1, since the
5237 result we want will then be OP0 + OP1. */
5239 temp = simplify_binary_operation (PLUS, mode, constant_term,
5244 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5247 /* Put a constant term last and put a multiplication first. */
5248 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5249 temp = op1, op1 = op0, op0 = temp;
5251 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5252 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5255 /* For initializers, we are allowed to return a MINUS of two
5256 symbolic constants. Here we handle all cases when both operands
5258 /* Handle difference of two symbolic constants,
5259 for the sake of an initializer. */
5260 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5261 && really_constant_p (TREE_OPERAND (exp, 0))
5262 && really_constant_p (TREE_OPERAND (exp, 1)))
5264 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5265 VOIDmode, modifier);
5266 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5267 VOIDmode, modifier);
5269 /* If the last operand is a CONST_INT, use plus_constant of
5270 the negated constant. Else make the MINUS. */
5271 if (GET_CODE (op1) == CONST_INT)
5272 return plus_constant (op0, - INTVAL (op1));
5274 return gen_rtx (MINUS, mode, op0, op1);
5276 /* Convert A - const to A + (-const). */
5277 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5279 tree negated = fold (build1 (NEGATE_EXPR, type,
5280 TREE_OPERAND (exp, 1)));
5282 /* Deal with the case where we can't negate the constant
5284 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5286 tree newtype = signed_type (type);
5287 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5288 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5289 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5291 if (! TREE_OVERFLOW (newneg))
5292 return expand_expr (convert (type,
5293 build (PLUS_EXPR, newtype,
5295 target, tmode, modifier);
5299 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5303 this_optab = sub_optab;
5307 preexpand_calls (exp);
5308 /* If first operand is constant, swap them.
5309 Thus the following special case checks need only
5310 check the second operand. */
5311 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5313 register tree t1 = TREE_OPERAND (exp, 0);
5314 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5315 TREE_OPERAND (exp, 1) = t1;
5318 /* Attempt to return something suitable for generating an
5319 indexed address, for machines that support that. */
5321 if (modifier == EXPAND_SUM && mode == ptr_mode
5322 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5327 /* Apply distributive law if OP0 is x+c. */
5328 if (GET_CODE (op0) == PLUS
5329 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5330 return gen_rtx (PLUS, mode,
5331 gen_rtx (MULT, mode, XEXP (op0, 0),
5332 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5333 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5334 * INTVAL (XEXP (op0, 1))));
5336 if (GET_CODE (op0) != REG)
5337 op0 = force_operand (op0, NULL_RTX);
5338 if (GET_CODE (op0) != REG)
5339 op0 = copy_to_mode_reg (mode, op0);
5341 return gen_rtx (MULT, mode, op0,
5342 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5345 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5348 /* Check for multiplying things that have been extended
5349 from a narrower type. If this machine supports multiplying
5350 in that narrower type with a result in the desired type,
5351 do it that way, and avoid the explicit type-conversion. */
5352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5353 && TREE_CODE (type) == INTEGER_TYPE
5354 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5355 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5356 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5357 && int_fits_type_p (TREE_OPERAND (exp, 1),
5358 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5359 /* Don't use a widening multiply if a shift will do. */
5360 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5361 > HOST_BITS_PER_WIDE_INT)
5362 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5364 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5365 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5367 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5368 /* If both operands are extended, they must either both
5369 be zero-extended or both be sign-extended. */
5370 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5372 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5374 enum machine_mode innermode
5375 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5376 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5377 ? umul_widen_optab : smul_widen_optab);
5378 if (mode == GET_MODE_WIDER_MODE (innermode)
5379 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5381 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5382 NULL_RTX, VOIDmode, 0);
5383 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5384 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5387 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5388 NULL_RTX, VOIDmode, 0);
5392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5393 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5394 return expand_mult (mode, op0, op1, target, unsignedp);
5396 case TRUNC_DIV_EXPR:
5397 case FLOOR_DIV_EXPR:
5399 case ROUND_DIV_EXPR:
5400 case EXACT_DIV_EXPR:
5401 preexpand_calls (exp);
5402 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5404 /* Possible optimization: compute the dividend with EXPAND_SUM
5405 then if the divisor is constant can optimize the case
5406 where some terms of the dividend have coeffs divisible by it. */
5407 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5409 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5412 this_optab = flodiv_optab;
5415 case TRUNC_MOD_EXPR:
5416 case FLOOR_MOD_EXPR:
5418 case ROUND_MOD_EXPR:
5419 preexpand_calls (exp);
5420 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5422 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5423 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5424 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5426 case FIX_ROUND_EXPR:
5427 case FIX_FLOOR_EXPR:
5429 abort (); /* Not used for C. */
5431 case FIX_TRUNC_EXPR:
5432 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5434 target = gen_reg_rtx (mode);
5435 expand_fix (target, op0, unsignedp);
5439 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5441 target = gen_reg_rtx (mode);
5442 /* expand_float can't figure out what to do if FROM has VOIDmode.
5443 So give it the correct mode. With -O, cse will optimize this. */
5444 if (GET_MODE (op0) == VOIDmode)
5445 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5447 expand_float (target, op0,
5448 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5452 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5453 temp = expand_unop (mode, neg_optab, op0, target, 0);
5459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5461 /* Handle complex values specially. */
5462 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5463 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5464 return expand_complex_abs (mode, op0, target, unsignedp);
5466 /* Unsigned abs is simply the operand. Testing here means we don't
5467 risk generating incorrect code below. */
5468 if (TREE_UNSIGNED (type))
5471 return expand_abs (mode, op0, target, unsignedp,
5472 safe_from_p (target, TREE_OPERAND (exp, 0)));
5476 target = original_target;
5477 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5478 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5479 || GET_MODE (target) != mode
5480 || (GET_CODE (target) == REG
5481 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5482 target = gen_reg_rtx (mode);
5483 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5484 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5486 /* First try to do it with a special MIN or MAX instruction.
5487 If that does not win, use a conditional jump to select the proper
5489 this_optab = (TREE_UNSIGNED (type)
5490 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5491 : (code == MIN_EXPR ? smin_optab : smax_optab));
5493 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5498 /* At this point, a MEM target is no longer useful; we will get better
5501 if (GET_CODE (target) == MEM)
5502 target = gen_reg_rtx (mode);
5505 emit_move_insn (target, op0);
5507 op0 = gen_label_rtx ();
5509 /* If this mode is an integer too wide to compare properly,
5510 compare word by word. Rely on cse to optimize constant cases. */
5511 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5513 if (code == MAX_EXPR)
5514 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5515 target, op1, NULL_RTX, op0);
5517 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5518 op1, target, NULL_RTX, op0);
5519 emit_move_insn (target, op1);
5523 if (code == MAX_EXPR)
5524 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5525 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5526 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5528 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5529 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5530 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5531 if (temp == const0_rtx)
5532 emit_move_insn (target, op1);
5533 else if (temp != const_true_rtx)
5535 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5536 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5539 emit_move_insn (target, op1);
5546 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5547 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5553 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5554 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5559 /* ??? Can optimize bitwise operations with one arg constant.
5560 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5561 and (a bitwise1 b) bitwise2 b (etc)
5562 but that is probably not worth while. */
5564 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5565 boolean values when we want in all cases to compute both of them. In
5566 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5567 as actual zero-or-1 values and then bitwise anding. In cases where
5568 there cannot be any side effects, better code would be made by
5569 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5570 how to recognize those cases. */
5572 case TRUTH_AND_EXPR:
5574 this_optab = and_optab;
5579 this_optab = ior_optab;
5582 case TRUTH_XOR_EXPR:
5584 this_optab = xor_optab;
5591 preexpand_calls (exp);
5592 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5594 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5595 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5598 /* Could determine the answer when only additive constants differ. Also,
5599 the addition of one can be handled by changing the condition. */
5606 preexpand_calls (exp);
5607 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5611 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5612 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5614 && GET_CODE (original_target) == REG
5615 && (GET_MODE (original_target)
5616 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5618 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5621 if (temp != original_target)
5622 temp = copy_to_reg (temp);
5624 op1 = gen_label_rtx ();
5625 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5626 GET_MODE (temp), unsignedp, 0);
5627 emit_jump_insn (gen_beq (op1));
5628 emit_move_insn (temp, const1_rtx);
5633 /* If no set-flag instruction, must generate a conditional
5634 store into a temporary variable. Drop through
5635 and handle this like && and ||. */
5637 case TRUTH_ANDIF_EXPR:
5638 case TRUTH_ORIF_EXPR:
5640 && (target == 0 || ! safe_from_p (target, exp)
5641 /* Make sure we don't have a hard reg (such as function's return
5642 value) live across basic blocks, if not optimizing. */
5643 || (!optimize && GET_CODE (target) == REG
5644 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5645 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5648 emit_clr_insn (target);
5650 op1 = gen_label_rtx ();
5651 jumpifnot (exp, op1);
5654 emit_0_to_1_insn (target);
5657 return ignore ? const0_rtx : target;
5659 case TRUTH_NOT_EXPR:
5660 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5661 /* The parser is careful to generate TRUTH_NOT_EXPR
5662 only with operands that are always zero or one. */
5663 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5664 target, 1, OPTAB_LIB_WIDEN);
5670 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5672 return expand_expr (TREE_OPERAND (exp, 1),
5673 (ignore ? const0_rtx : target),
5678 rtx flag = NULL_RTX;
5679 tree left_cleanups = NULL_TREE;
5680 tree right_cleanups = NULL_TREE;
5682 /* Used to save a pointer to the place to put the setting of
5683 the flag that indicates if this side of the conditional was
5684 taken. We backpatch the code, if we find out later that we
5685 have any conditional cleanups that need to be performed. */
5686 rtx dest_right_flag = NULL_RTX;
5687 rtx dest_left_flag = NULL_RTX;
5689 /* Note that COND_EXPRs whose type is a structure or union
5690 are required to be constructed to contain assignments of
5691 a temporary variable, so that we can evaluate them here
5692 for side effect only. If type is void, we must do likewise. */
5694 /* If an arm of the branch requires a cleanup,
5695 only that cleanup is performed. */
5698 tree binary_op = 0, unary_op = 0;
5699 tree old_cleanups = cleanups_this_call;
5701 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5702 convert it to our mode, if necessary. */
5703 if (integer_onep (TREE_OPERAND (exp, 1))
5704 && integer_zerop (TREE_OPERAND (exp, 2))
5705 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5709 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5714 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5715 if (GET_MODE (op0) == mode)
5719 target = gen_reg_rtx (mode);
5720 convert_move (target, op0, unsignedp);
5724 /* If we are not to produce a result, we have no target. Otherwise,
5725 if a target was specified use it; it will not be used as an
5726 intermediate target unless it is safe. If no target, use a
5731 else if (original_target
5732 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5733 && GET_MODE (original_target) == mode
5734 && ! (GET_CODE (original_target) == MEM
5735 && MEM_VOLATILE_P (original_target)))
5736 temp = original_target;
5737 else if (mode == BLKmode)
5739 if (TYPE_SIZE (type) == 0
5740 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5743 temp = assign_stack_temp (BLKmode,
5744 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5745 + BITS_PER_UNIT - 1)
5746 / BITS_PER_UNIT, 0);
5747 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5750 temp = gen_reg_rtx (mode);
5752 /* Check for X ? A + B : A. If we have this, we can copy
5753 A to the output and conditionally add B. Similarly for unary
5754 operations. Don't do this if X has side-effects because
5755 those side effects might affect A or B and the "?" operation is
5756 a sequence point in ANSI. (We test for side effects later.) */
5758 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5759 && operand_equal_p (TREE_OPERAND (exp, 2),
5760 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5761 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5762 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5763 && operand_equal_p (TREE_OPERAND (exp, 1),
5764 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5765 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5766 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5767 && operand_equal_p (TREE_OPERAND (exp, 2),
5768 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5769 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5770 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5771 && operand_equal_p (TREE_OPERAND (exp, 1),
5772 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5773 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5775 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5776 operation, do this as A + (X != 0). Similarly for other simple
5777 binary operators. */
5778 if (temp && singleton && binary_op
5779 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5780 && (TREE_CODE (binary_op) == PLUS_EXPR
5781 || TREE_CODE (binary_op) == MINUS_EXPR
5782 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5783 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5784 && integer_onep (TREE_OPERAND (binary_op, 1))
5785 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5788 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5789 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5790 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5793 /* If we had X ? A : A + 1, do this as A + (X == 0).
5795 We have to invert the truth value here and then put it
5796 back later if do_store_flag fails. We cannot simply copy
5797 TREE_OPERAND (exp, 0) to another variable and modify that
5798 because invert_truthvalue can modify the tree pointed to
5800 if (singleton == TREE_OPERAND (exp, 1))
5801 TREE_OPERAND (exp, 0)
5802 = invert_truthvalue (TREE_OPERAND (exp, 0));
5804 result = do_store_flag (TREE_OPERAND (exp, 0),
5805 (safe_from_p (temp, singleton)
5807 mode, BRANCH_COST <= 1);
5811 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5812 return expand_binop (mode, boptab, op1, result, temp,
5813 unsignedp, OPTAB_LIB_WIDEN);
5815 else if (singleton == TREE_OPERAND (exp, 1))
5816 TREE_OPERAND (exp, 0)
5817 = invert_truthvalue (TREE_OPERAND (exp, 0));
5821 op0 = gen_label_rtx ();
5823 flag = gen_reg_rtx (word_mode);
5824 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5828 /* If the target conflicts with the other operand of the
5829 binary op, we can't use it. Also, we can't use the target
5830 if it is a hard register, because evaluating the condition
5831 might clobber it. */
5833 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5834 || (GET_CODE (temp) == REG
5835 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5836 temp = gen_reg_rtx (mode);
5837 store_expr (singleton, temp, 0);
5840 expand_expr (singleton,
5841 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5842 dest_left_flag = get_last_insn ();
5843 if (singleton == TREE_OPERAND (exp, 1))
5844 jumpif (TREE_OPERAND (exp, 0), op0);
5846 jumpifnot (TREE_OPERAND (exp, 0), op0);
5848 /* Allows cleanups up to here. */
5849 old_cleanups = cleanups_this_call;
5850 if (binary_op && temp == 0)
5851 /* Just touch the other operand. */
5852 expand_expr (TREE_OPERAND (binary_op, 1),
5853 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5855 store_expr (build (TREE_CODE (binary_op), type,
5856 make_tree (type, temp),
5857 TREE_OPERAND (binary_op, 1)),
5860 store_expr (build1 (TREE_CODE (unary_op), type,
5861 make_tree (type, temp)),
5864 dest_right_flag = get_last_insn ();
5867 /* This is now done in jump.c and is better done there because it
5868 produces shorter register lifetimes. */
5870 /* Check for both possibilities either constants or variables
5871 in registers (but not the same as the target!). If so, can
5872 save branches by assigning one, branching, and assigning the
5874 else if (temp && GET_MODE (temp) != BLKmode
5875 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5876 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5877 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5878 && DECL_RTL (TREE_OPERAND (exp, 1))
5879 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5880 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5881 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5882 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5883 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5884 && DECL_RTL (TREE_OPERAND (exp, 2))
5885 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5886 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5888 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5889 temp = gen_reg_rtx (mode);
5890 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5891 dest_left_flag = get_last_insn ();
5892 jumpifnot (TREE_OPERAND (exp, 0), op0);
5894 /* Allows cleanups up to here. */
5895 old_cleanups = cleanups_this_call;
5896 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5898 dest_right_flag = get_last_insn ();
5901 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5902 comparison operator. If we have one of these cases, set the
5903 output to A, branch on A (cse will merge these two references),
5904 then set the output to FOO. */
5906 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5907 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5909 TREE_OPERAND (exp, 1), 0)
5910 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5911 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5913 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5914 temp = gen_reg_rtx (mode);
5915 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5916 dest_left_flag = get_last_insn ();
5917 jumpif (TREE_OPERAND (exp, 0), op0);
5919 /* Allows cleanups up to here. */
5920 old_cleanups = cleanups_this_call;
5921 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5923 dest_right_flag = get_last_insn ();
5926 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5927 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5928 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5929 TREE_OPERAND (exp, 2), 0)
5930 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5931 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5933 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5934 temp = gen_reg_rtx (mode);
5935 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5936 dest_left_flag = get_last_insn ();
5937 jumpifnot (TREE_OPERAND (exp, 0), op0);
5939 /* Allows cleanups up to here. */
5940 old_cleanups = cleanups_this_call;
5941 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5943 dest_right_flag = get_last_insn ();
5947 op1 = gen_label_rtx ();
5948 jumpifnot (TREE_OPERAND (exp, 0), op0);
5950 /* Allows cleanups up to here. */
5951 old_cleanups = cleanups_this_call;
5953 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5955 expand_expr (TREE_OPERAND (exp, 1),
5956 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5957 dest_left_flag = get_last_insn ();
5959 /* Handle conditional cleanups, if any. */
5960 left_cleanups = defer_cleanups_to (old_cleanups);
5963 emit_jump_insn (gen_jump (op1));
5967 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5969 expand_expr (TREE_OPERAND (exp, 2),
5970 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5971 dest_right_flag = get_last_insn ();
5974 /* Handle conditional cleanups, if any. */
5975 right_cleanups = defer_cleanups_to (old_cleanups);
5981 /* Add back in, any conditional cleanups. */
5982 if (left_cleanups || right_cleanups)
5988 /* Now that we know that a flag is needed, go back and add in the
5989 setting of the flag. */
5991 /* Do the left side flag. */
5992 last = get_last_insn ();
5993 /* Flag left cleanups as needed. */
5994 emit_move_insn (flag, const1_rtx);
5995 /* ??? deprecated, use sequences instead. */
5996 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5998 /* Do the right side flag. */
5999 last = get_last_insn ();
6000 /* Flag left cleanups as needed. */
6001 emit_move_insn (flag, const0_rtx);
6002 /* ??? deprecated, use sequences instead. */
6003 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6005 /* convert flag, which is an rtx, into a tree. */
6006 cond = make_node (RTL_EXPR);
6007 TREE_TYPE (cond) = integer_type_node;
6008 RTL_EXPR_RTL (cond) = flag;
6009 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6011 if (! left_cleanups)
6012 left_cleanups = integer_zero_node;
6013 if (! right_cleanups)
6014 right_cleanups = integer_zero_node;
6015 new_cleanups = build (COND_EXPR, void_type_node,
6016 truthvalue_conversion (cond),
6017 left_cleanups, right_cleanups);
6018 new_cleanups = fold (new_cleanups);
6020 /* Now add in the conditionalized cleanups. */
6022 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6023 (*interim_eh_hook) (NULL_TREE);
6030 int need_exception_region = 0;
6031 /* Something needs to be initialized, but we didn't know
6032 where that thing was when building the tree. For example,
6033 it could be the return value of a function, or a parameter
6034 to a function which lays down in the stack, or a temporary
6035 variable which must be passed by reference.
6037 We guarantee that the expression will either be constructed
6038 or copied into our original target. */
6040 tree slot = TREE_OPERAND (exp, 0);
6044 if (TREE_CODE (slot) != VAR_DECL)
6049 if (DECL_RTL (slot) != 0)
6051 target = DECL_RTL (slot);
6052 /* If we have already expanded the slot, so don't do
6054 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6059 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6060 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6061 /* All temp slots at this level must not conflict. */
6062 preserve_temp_slots (target);
6063 DECL_RTL (slot) = target;
6065 /* Since SLOT is not known to the called function
6066 to belong to its stack frame, we must build an explicit
6067 cleanup. This case occurs when we must build up a reference
6068 to pass the reference as an argument. In this case,
6069 it is very likely that such a reference need not be
6072 if (TREE_OPERAND (exp, 2) == 0)
6073 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6074 if (TREE_OPERAND (exp, 2))
6076 cleanups_this_call = tree_cons (NULL_TREE,
6077 TREE_OPERAND (exp, 2),
6078 cleanups_this_call);
6079 need_exception_region = 1;
6085 /* This case does occur, when expanding a parameter which
6086 needs to be constructed on the stack. The target
6087 is the actual stack address that we want to initialize.
6088 The function we call will perform the cleanup in this case. */
6090 /* If we have already assigned it space, use that space,
6091 not target that we were passed in, as our target
6092 parameter is only a hint. */
6093 if (DECL_RTL (slot) != 0)
6095 target = DECL_RTL (slot);
6096 /* If we have already expanded the slot, so don't do
6098 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6102 DECL_RTL (slot) = target;
6105 exp1 = TREE_OPERAND (exp, 1);
6106 /* Mark it as expanded. */
6107 TREE_OPERAND (exp, 1) = NULL_TREE;
6109 temp = expand_expr (exp1, target, tmode, modifier);
6111 if (need_exception_region)
6112 (*interim_eh_hook) (NULL_TREE);
6119 tree lhs = TREE_OPERAND (exp, 0);
6120 tree rhs = TREE_OPERAND (exp, 1);
6121 tree noncopied_parts = 0;
6122 tree lhs_type = TREE_TYPE (lhs);
6124 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6125 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6126 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6127 TYPE_NONCOPIED_PARTS (lhs_type));
6128 while (noncopied_parts != 0)
6130 expand_assignment (TREE_VALUE (noncopied_parts),
6131 TREE_PURPOSE (noncopied_parts), 0, 0);
6132 noncopied_parts = TREE_CHAIN (noncopied_parts);
6139 /* If lhs is complex, expand calls in rhs before computing it.
6140 That's so we don't compute a pointer and save it over a call.
6141 If lhs is simple, compute it first so we can give it as a
6142 target if the rhs is just a call. This avoids an extra temp and copy
6143 and that prevents a partial-subsumption which makes bad code.
6144 Actually we could treat component_ref's of vars like vars. */
6146 tree lhs = TREE_OPERAND (exp, 0);
6147 tree rhs = TREE_OPERAND (exp, 1);
6148 tree noncopied_parts = 0;
6149 tree lhs_type = TREE_TYPE (lhs);
6153 if (TREE_CODE (lhs) != VAR_DECL
6154 && TREE_CODE (lhs) != RESULT_DECL
6155 && TREE_CODE (lhs) != PARM_DECL)
6156 preexpand_calls (exp);
6158 /* Check for |= or &= of a bitfield of size one into another bitfield
6159 of size 1. In this case, (unless we need the result of the
6160 assignment) we can do this more efficiently with a
6161 test followed by an assignment, if necessary.
6163 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6164 things change so we do, this code should be enhanced to
6167 && TREE_CODE (lhs) == COMPONENT_REF
6168 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6169 || TREE_CODE (rhs) == BIT_AND_EXPR)
6170 && TREE_OPERAND (rhs, 0) == lhs
6171 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6172 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6173 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6175 rtx label = gen_label_rtx ();
6177 do_jump (TREE_OPERAND (rhs, 1),
6178 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6179 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6180 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6181 (TREE_CODE (rhs) == BIT_IOR_EXPR
6183 : integer_zero_node)),
6185 do_pending_stack_adjust ();
6190 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6191 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6192 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6193 TYPE_NONCOPIED_PARTS (lhs_type));
6195 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6196 while (noncopied_parts != 0)
6198 expand_assignment (TREE_PURPOSE (noncopied_parts),
6199 TREE_VALUE (noncopied_parts), 0, 0);
6200 noncopied_parts = TREE_CHAIN (noncopied_parts);
6205 case PREINCREMENT_EXPR:
6206 case PREDECREMENT_EXPR:
6207 return expand_increment (exp, 0);
6209 case POSTINCREMENT_EXPR:
6210 case POSTDECREMENT_EXPR:
6211 /* Faster to treat as pre-increment if result is not used. */
6212 return expand_increment (exp, ! ignore);
6215 /* If nonzero, TEMP will be set to the address of something that might
6216 be a MEM corresponding to a stack slot. */
6219 /* Are we taking the address of a nested function? */
6220 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6221 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6223 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6224 op0 = force_operand (op0, target);
6226 /* If we are taking the address of something erroneous, just
6228 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6232 /* We make sure to pass const0_rtx down if we came in with
6233 ignore set, to avoid doing the cleanups twice for something. */
6234 op0 = expand_expr (TREE_OPERAND (exp, 0),
6235 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6236 (modifier == EXPAND_INITIALIZER
6237 ? modifier : EXPAND_CONST_ADDRESS));
6239 /* If we are going to ignore the result, OP0 will have been set
6240 to const0_rtx, so just return it. Don't get confused and
6241 think we are taking the address of the constant. */
6245 /* We would like the object in memory. If it is a constant,
6246 we can have it be statically allocated into memory. For
6247 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6248 memory and store the value into it. */
6250 if (CONSTANT_P (op0))
6251 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6253 else if (GET_CODE (op0) == MEM)
6255 mark_temp_addr_taken (op0);
6256 temp = XEXP (op0, 0);
6259 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6260 || GET_CODE (op0) == CONCAT)
6262 /* If this object is in a register, it must be not
6264 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6265 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6267 = assign_stack_temp (inner_mode,
6268 int_size_in_bytes (inner_type), 1);
6269 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6271 mark_temp_addr_taken (memloc);
6272 emit_move_insn (memloc, op0);
6276 if (GET_CODE (op0) != MEM)
6279 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6281 temp = XEXP (op0, 0);
6282 #ifdef POINTERS_EXTEND_UNSIGNED
6283 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6284 && mode == ptr_mode)
6285 temp = convert_memory_address (ptr_mode, temp);
6290 op0 = force_operand (XEXP (op0, 0), target);
6293 if (flag_force_addr && GET_CODE (op0) != REG)
6294 op0 = force_reg (Pmode, op0);
6296 if (GET_CODE (op0) == REG)
6297 mark_reg_pointer (op0);
6299 /* If we might have had a temp slot, add an equivalent address
6302 update_temp_slot_address (temp, op0);
6304 #ifdef POINTERS_EXTEND_UNSIGNED
6305 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6306 && mode == ptr_mode)
6307 op0 = convert_memory_address (ptr_mode, op0);
6312 case ENTRY_VALUE_EXPR:
6315 /* COMPLEX type for Extended Pascal & Fortran */
6318 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6321 /* Get the rtx code of the operands. */
6322 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6323 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6326 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6330 /* Move the real (op0) and imaginary (op1) parts to their location. */
6331 emit_move_insn (gen_realpart (mode, target), op0);
6332 emit_move_insn (gen_imagpart (mode, target), op1);
6334 insns = get_insns ();
6337 /* Complex construction should appear as a single unit. */
6338 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6339 each with a separate pseudo as destination.
6340 It's not correct for flow to treat them as a unit. */
6341 if (GET_CODE (target) != CONCAT)
6342 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6350 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6351 return gen_realpart (mode, op0);
6354 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6355 return gen_imagpart (mode, op0);
6359 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6363 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6366 target = gen_reg_rtx (mode);
6370 /* Store the realpart and the negated imagpart to target. */
6371 emit_move_insn (gen_realpart (partmode, target),
6372 gen_realpart (partmode, op0));
6374 imag_t = gen_imagpart (partmode, target);
6375 temp = expand_unop (partmode, neg_optab,
6376 gen_imagpart (partmode, op0), imag_t, 0);
6378 emit_move_insn (imag_t, temp);
6380 insns = get_insns ();
6383 /* Conjugate should appear as a single unit
6384 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6385 each with a separate pseudo as destination.
6386 It's not correct for flow to treat them as a unit. */
6387 if (GET_CODE (target) != CONCAT)
6388 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6396 op0 = CONST0_RTX (tmode);
6402 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6405 /* Here to do an ordinary binary operator, generating an instruction
6406 from the optab already placed in `this_optab'. */
6408 preexpand_calls (exp);
6409 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6411 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6412 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6414 temp = expand_binop (mode, this_optab, op0, op1, target,
6415 unsignedp, OPTAB_LIB_WIDEN);
6422 /* Emit bytecode to evaluate the given expression EXP to the stack. */
/* NOTE(review): this listing is elided -- the embedded line numbers jump, so
   the opening brace, most `case' labels, `break's and several declarations of
   this switch fall in the gaps.  The groupings below are inferred from the
   surviving statements; confirm against the full source before relying on
   any of them.  */
6424 bc_expand_expr (exp)
/* Dispatch-table pointers for the table-driven binary/unary/increment
   expansion at the shared tails near the bottom of the switch.  */
6427 enum tree_code code;
6430 struct binary_operator *binoptab;
6431 struct unary_operator *unoptab;
6432 struct increment_operator *incroptab;
6433 struct bc_label *lab, *lab1;
6434 enum bytecode_opcode opcode;
6437 code = TREE_CODE (exp);
/* Presumably the PARM_DECL case: a parameter whose DECL_RTL was never set
   had its size depend on an earlier parameter -- report and bail.  */
6443 if (DECL_RTL (exp) == 0)
6445 error_with_decl (exp, "prior parameter's size depends on `%s'");
6449 bc_load_parmaddr (DECL_RTL (exp));
6450 bc_load_memory (TREE_TYPE (exp), exp);
/* Presumably the VAR_DECL (and FUNCTION_DECL?) case: push the variable's
   address (external vs. local chosen by its RTL label), then load its
   value.  TODO(review): the branch structure between these alternatives is
   partly elided -- verify.  */
6456 if (DECL_RTL (exp) == 0)
6460 if (BYTECODE_LABEL (DECL_RTL (exp)))
6461 bc_load_externaddr (DECL_RTL (exp));
6463 bc_load_localaddr (DECL_RTL (exp));
6465 if (TREE_PUBLIC (exp))
6466 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6467 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6469 bc_load_localaddr (DECL_RTL (exp));
6471 bc_load_memory (TREE_TYPE (exp), exp);
/* INTEGER_CST case (inferred from TREE_INT_CST_LOW): emit a push-constant
   opcode selected by the constant's machine mode.  */
6476 #ifdef DEBUG_PRINT_CODE
6477 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6479 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6481 : TYPE_MODE (TREE_TYPE (exp)))],
6482 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
/* REAL_CST case (inferred from TREE_REAL_CST).  */
6488 #ifdef DEBUG_PRINT_CODE
6489 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6491 /* FIX THIS: find a better way to pass real_cst's. -bson */
6492 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6493 (double) TREE_REAL_CST (exp));
/* CALL_EXPR case.  */
6502 /* We build a call description vector describing the type of
6503 the return value and of the arguments; this call vector,
6504 together with a pointer to a location for the return value
6505 and the base of the argument list, is passed to the low
6506 level machine dependent call subroutine, which is responsible
6507 for putting the arguments wherever real functions expect
6508 them, as well as getting the return value back. */
6510 tree calldesc = 0, arg;
6514 /* Push the evaluated args on the evaluation stack in reverse
6515 order. Also make an entry for each arg in the calldesc
6516 vector while we're at it. */
6518 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6520 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6523 bc_expand_expr (TREE_VALUE (arg));
6525 calldesc = tree_cons ((tree) 0,
6526 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6528 calldesc = tree_cons ((tree) 0,
6529 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
/* Restore the argument list to its original order (it is part of the
   shared tree and other consumers expect source order).  */
6533 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6535 /* Allocate a location for the return value and push its
6536 address on the evaluation stack. Also make an entry
6537 at the front of the calldesc for the return value type. */
6539 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6540 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6541 bc_load_localaddr (retval);
6543 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6544 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6546 /* Prepend the argument count. */
6547 calldesc = tree_cons ((tree) 0,
6548 build_int_2 (nargs, 0),
6551 /* Push the address of the call description vector on the stack. */
6552 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6553 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6554 build_index_type (build_int_2 (nargs * 2, 0)));
6555 r = output_constant_def (calldesc);
6556 bc_load_externaddr (r);
6558 /* Push the address of the function to be called. */
6559 bc_expand_expr (TREE_OPERAND (exp, 0));
6561 /* Call the function, popping its address and the calldesc vector
6562 address off the evaluation stack in the process. */
6563 bc_emit_instruction (call);
6565 /* Pop the arguments off the stack. */
6566 bc_adjust_stack (nargs);
6568 /* Load the return value onto the stack. */
6569 bc_load_localaddr (retval);
6570 bc_load_memory (type, TREE_OPERAND (exp, 0));
/* SAVE_EXPR case (inferred from SAVE_EXPR_RTL): evaluate once, cache in a
   bytecode local, reuse the cached copy on later references.  */
6576 if (!SAVE_EXPR_RTL (exp))
6578 /* First time around: copy to local variable */
6579 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6580 TYPE_ALIGN (TREE_TYPE(exp)));
6581 bc_expand_expr (TREE_OPERAND (exp, 0));
/* `duplicate' keeps one copy on the stack as the result while the other
   is stored into the cache slot.  */
6582 bc_emit_instruction (duplicate);
6584 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6585 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6589 /* Consecutive reference: use saved copy */
6590 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6591 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6596 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6597 how are they handled instead? */
6600 TREE_USED (exp) = 1;
6601 bc_expand_expr (STMT_BODY (exp));
/* NOP/CONVERT_EXPR (inferred): evaluate operand, then emit conversion
   bytecodes from the operand's type to EXP's type.  */
6608 bc_expand_expr (TREE_OPERAND (exp, 0));
6609 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
/* MODIFY_EXPR (inferred): plain assignment, result unused here.  */
6614 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
/* ADDR_EXPR (inferred).  */
6619 bc_expand_address (TREE_OPERAND (exp, 0));
/* INDIRECT_REF (inferred): push the pointer, then load through it.  */
6624 bc_expand_expr (TREE_OPERAND (exp, 0));
6625 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* ARRAY_REF (inferred): rewrite to canonical pointer form and recurse.  */
6630 bc_expand_expr (bc_canonicalize_array_ref (exp));
/* COMPONENT_REF (inferred): push field address, then a type-aware load
   (bc_load_memory handles bitfield extraction).  */
6635 bc_expand_component_address (exp);
6637 /* If we have a bitfield, generate a proper load */
6638 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR (inferred): evaluate op0 for side effects, discard its
   value, result is op1.  */
6643 bc_expand_expr (TREE_OPERAND (exp, 0));
6644 bc_emit_instruction (drop);
6645 bc_expand_expr (TREE_OPERAND (exp, 1));
/* COND_EXPR (inferred): test, jump over the then-arm if false, jump past
   the else-arm after the then-arm.  */
6650 bc_expand_expr (TREE_OPERAND (exp, 0));
6651 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6652 lab = bc_get_bytecode_label ();
6653 bc_emit_bytecode (xjumpifnot);
6654 bc_emit_bytecode_labelref (lab);
6656 #ifdef DEBUG_PRINT_CODE
6657 fputc ('\n', stderr);
6659 bc_expand_expr (TREE_OPERAND (exp, 1));
6660 lab1 = bc_get_bytecode_label ();
6661 bc_emit_bytecode (jump);
6662 bc_emit_bytecode_labelref (lab1);
6664 #ifdef DEBUG_PRINT_CODE
6665 fputc ('\n', stderr);
6668 bc_emit_bytecode_labeldef (lab);
6669 bc_expand_expr (TREE_OPERAND (exp, 2));
6670 bc_emit_bytecode_labeldef (lab1);
/* Short-circuit &&/||: pick the conditional-jump opcode here, shared
   expansion is at the `opcode' tail further below.  */
6673 case TRUTH_ANDIF_EXPR:
6675 opcode = xjumpifnot;
6678 case TRUTH_ORIF_EXPR:
/* Table-driven binary operators: each case only selects BINOPTAB; the
   actual expansion is the shared bc_expand_binary_operation tail.  The
   `case' labels for most of these fall in the elided gaps.  */
6685 binoptab = optab_plus_expr;
6690 binoptab = optab_minus_expr;
6695 binoptab = optab_mult_expr;
6698 case TRUNC_DIV_EXPR:
6699 case FLOOR_DIV_EXPR:
6701 case ROUND_DIV_EXPR:
6702 case EXACT_DIV_EXPR:
6704 binoptab = optab_trunc_div_expr;
6707 case TRUNC_MOD_EXPR:
6708 case FLOOR_MOD_EXPR:
6710 case ROUND_MOD_EXPR:
6712 binoptab = optab_trunc_mod_expr;
6715 case FIX_ROUND_EXPR:
6716 case FIX_FLOOR_EXPR:
6718 abort (); /* Not used for C. */
6720 case FIX_TRUNC_EXPR:
6727 abort (); /* FIXME */
6731 binoptab = optab_rdiv_expr;
6736 binoptab = optab_bit_and_expr;
6741 binoptab = optab_bit_ior_expr;
6746 binoptab = optab_bit_xor_expr;
6751 binoptab = optab_lshift_expr;
6756 binoptab = optab_rshift_expr;
6759 case TRUTH_AND_EXPR:
6761 binoptab = optab_truth_and_expr;
6766 binoptab = optab_truth_or_expr;
6771 binoptab = optab_lt_expr;
6776 binoptab = optab_le_expr;
6781 binoptab = optab_ge_expr;
6786 binoptab = optab_gt_expr;
6791 binoptab = optab_eq_expr;
6796 binoptab = optab_ne_expr;
/* Unary operators: select UNOPTAB for the shared unary tail.  */
6801 unoptab = optab_negate_expr;
6806 unoptab = optab_bit_not_expr;
6809 case TRUTH_NOT_EXPR:
6811 unoptab = optab_truth_not_expr;
/* Increment/decrement: select INCROPTAB for the shared increment tail.  */
6814 case PREDECREMENT_EXPR:
6816 incroptab = optab_predecrement_expr;
6819 case PREINCREMENT_EXPR:
6821 incroptab = optab_preincrement_expr;
6824 case POSTDECREMENT_EXPR:
6826 incroptab = optab_postdecrement_expr;
6829 case POSTINCREMENT_EXPR:
6831 incroptab = optab_postincrement_expr;
/* CONSTRUCTOR (inferred).  */
6836 bc_expand_constructor (exp);
/* BIND_EXPR (inferred): open a binding contour, expand the block's
   variables and their initializers, expand the body, close the contour.  */
6846 tree vars = TREE_OPERAND (exp, 0);
6847 int vars_need_expansion = 0;
6849 /* Need to open a binding contour here because
6850 if there are any cleanups they most be contained here. */
6851 expand_start_bindings (0);
6853 /* Mark the corresponding BLOCK for output. */
6854 if (TREE_OPERAND (exp, 2) != 0)
6855 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6857 /* If VARS have not yet been expanded, expand them now. */
6860 if (DECL_RTL (vars) == 0)
6862 vars_need_expansion = 1;
6865 expand_decl_init (vars);
6866 vars = TREE_CHAIN (vars);
6869 bc_expand_expr (TREE_OPERAND (exp, 1));
6871 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
/* Shared tail: binary operation selected by BINOPTAB above.  */
6881 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6882 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
/* Shared tail: unary operation selected by UNOPTAB above.  */
6888 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* Shared tail for TRUTH_ANDIF/ORIF: evaluate op0 as a truth value,
   duplicate it, conditionally jump past op1 (short circuit), otherwise
   drop the duplicate and evaluate op1.  */
6894 bc_expand_expr (TREE_OPERAND (exp, 0));
6895 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6896 lab = bc_get_bytecode_label ();
6898 bc_emit_instruction (duplicate);
6899 bc_emit_bytecode (opcode);
6900 bc_emit_bytecode_labelref (lab);
6902 #ifdef DEBUG_PRINT_CODE
6903 fputc ('\n', stderr);
6906 bc_emit_instruction (drop);
6908 bc_expand_expr (TREE_OPERAND (exp, 1));
6909 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6910 bc_emit_bytecode_labeldef (lab);
/* Shared tail for the increment/decrement cases selected above.  */
6916 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6918 /* Push the quantum. */
6919 bc_expand_expr (TREE_OPERAND (exp, 1));
6921 /* Convert it to the lvalue's type. */
6922 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6924 /* Push the address of the lvalue */
6925 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6927 /* Perform actual increment */
6928 bc_expand_increment (incroptab, type);
6932 /* Return the alignment in bits of EXP, a pointer valued expression.
6933 But don't return more than MAX_ALIGN no matter what.
6934 The alignment returned is, by default, the alignment of the thing that
6935 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6937 Otherwise, look at the expression to see if we can do better, i.e., if the
6938 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): elided listing -- the K&R parameter declarations, several
   braces/returns and the loop wrapping this switch fall in the gaps.  */
6941 get_pointer_alignment (exp, max_align)
6945 unsigned align, inner;
/* Non-pointer input: the doc comment above says 0 is returned; the return
   itself is in an elided line -- confirm.  */
6947 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Default answer: alignment of the pointed-to type, capped at MAX_ALIGN.  */
6950 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6951 align = MIN (align, max_align);
6955 switch (TREE_CODE (exp))
/* Conversions (NOP/CONVERT/NON_LVALUE, other labels elided): strip the
   conversion and keep the tighter of the two pointed-to alignments.  */
6959 case NON_LVALUE_EXPR:
6960 exp = TREE_OPERAND (exp, 0);
6961 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6963 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6964 align = MIN (inner, max_align);
/* PLUS_EXPR case (label elided).  */
6968 /* If sum of pointer + int, restrict our maximum alignment to that
6969 imposed by the integer. If not, we can't do any better than
6971 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Shrink ALIGN while the constant offset is not a multiple of it; the
   rest of this while-condition is in an elided line -- confirm.  */
6974 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6979 exp = TREE_OPERAND (exp, 0);
/* ADDR_EXPR case (label elided): the pointed-at object is known exactly.  */
6983 /* See what we are pointing at and look at its alignment. */
6984 exp = TREE_OPERAND (exp, 0);
6985 if (TREE_CODE (exp) == FUNCTION_DECL)
6986 align = FUNCTION_BOUNDARY;
6987 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6988 align = DECL_ALIGN (exp);
6989 #ifdef CONSTANT_ALIGNMENT
6990 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6991 align = CONSTANT_ALIGNMENT (exp, align);
6993 return MIN (align, max_align);
7001 /* Return the tree node and offset if a given argument corresponds to
7002 a string constant. */
/* NOTE(review): elided listing -- parameter declarations, braces, the
   *PTR_OFFSET stores in the PLUS_EXPR arms, and the final failure return
   fall in the gaps.  Visible behavior: recognizes &"str" (offset zero) and
   &"str" + offset in either operand order of a PLUS_EXPR, returning the
   STRING_CST node.  */
7005 string_constant (arg, ptr_offset)
/* Direct &"string": offset is zero.  */
7011 if (TREE_CODE (arg) == ADDR_EXPR
7012 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7014 *ptr_offset = integer_zero_node;
7015 return TREE_OPERAND (arg, 0);
/* &"string" + offset, commutatively; presumably the elided lines strip
   conversions from arg0/arg1 and store the offset through PTR_OFFSET
   before each return -- confirm against the full source.  */
7017 else if (TREE_CODE (arg) == PLUS_EXPR)
7019 tree arg0 = TREE_OPERAND (arg, 0);
7020 tree arg1 = TREE_OPERAND (arg, 1);
7025 if (TREE_CODE (arg0) == ADDR_EXPR
7026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7029 return TREE_OPERAND (arg0, 0);
7031 else if (TREE_CODE (arg1) == ADDR_EXPR
7032 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7035 return TREE_OPERAND (arg1, 0);
/* NOTE(review): elided excerpt — the function header (name, parameters,
   local declarations) and several early-return lines are missing from
   view.  From the visible body this is c_strlen: compute, at compile
   time, the length of a C string constant SRC, or fail (return 0,
   presumably — the failure returns are elided) when it cannot.
   Code kept byte-identical.  */
7042 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7043 way, because it could contain a zero byte in the middle.
7044 TREE_STRING_LENGTH is the size of the character array, not the string.
7046 Unfortunately, string_constant can't access the values of const char
7047 arrays with initializers, so neither can we do so here. */
7057 src = string_constant (src, &offset_node);
7060 max = TREE_STRING_LENGTH (src);
7061 ptr = TREE_STRING_POINTER (src);
7062 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7064 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7065 compute the offset to the following null if we don't know where to
7066 start searching for it. */
7068 for (i = 0; i < max; i++)
7071 /* We don't know the starting offset, but we do know that the string
7072 has no internal zero bytes. We can assume that the offset falls
7073 within the bounds of the string; otherwise, the programmer deserves
7074 what he gets. Subtract the offset from the length of the string,
7076 /* This would perhaps not be valid if we were dealing with named
7077 arrays in addition to literal string constants. */
7078 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7081 /* We have a known offset into the string. Start searching there for
7082 a null character. */
7083 if (offset_node == 0)
7087 /* Did we get a long long offset? If so, punt. */
7088 if (TREE_INT_CST_HIGH (offset_node) != 0)
7090 offset = TREE_INT_CST_LOW (offset_node);
7092 /* If the offset is known to be out of bounds, warn, and call strlen at
7094 if (offset < 0 || offset > max)
7096 warning ("offset outside bounds of constant string");
7099 /* Use strlen to search for the first zero byte. Since any strings
7100 constructed with build_string will have nulls appended, we win even
7101 if we get handed something like (char[4])"abcd".
7103 Since OFFSET is our starting index into the string, no further
7104 calculation is needed. */
7105 return size_int (strlen (ptr + offset));
/* NOTE(review): this is a heavily elided excerpt of expand_builtin —
   most case labels, braces, local declarations, and fall-through
   `break`s are missing from view.  Code is kept byte-identical; only
   comments are added or corrected.  */
7108 /* Expand an expression EXP that calls a built-in function,
7109 with result going to TARGET if that's convenient
7110 (and in mode MODE if that's convenient).
7111 SUBTARGET may be used as the target for computing one of EXP's operands.
7112 IGNORE is nonzero if the value is to be ignored. */
/* True if the call was spelled with the reserved "__builtin_" prefix, in
   which case we may expand it inline even when not optimizing.  */
7114 #define CALLED_AS_BUILT_IN(NODE) \
7115 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
7118 expand_builtin (exp, target, subtarget, mode, ignore)
7122 enum machine_mode mode;
7125 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7126 tree arglist = TREE_OPERAND (exp, 1);
7129 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7130 optab builtin_optab;
7132 switch (DECL_FUNCTION_CODE (fndecl))
7137 /* build_function_call changes these into ABS_EXPR. */
7142 /* Treat these like sqrt, but only if the user asks for them. */
7143 if (! flag_fast_math)
7145 case BUILT_IN_FSQRT:
7146 /* If not optimizing, call the library function. */
7151 /* Arg could be wrong type if user redeclared this fcn wrong. */
7152 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7155 /* Stabilize and compute the argument. */
7156 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7157 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
/* Copy the call node before rewriting its argument with a SAVE_EXPR,
   so the original tree is left intact for a possible library call.  */
7159 exp = copy_node (exp);
7160 arglist = copy_node (arglist);
7161 TREE_OPERAND (exp, 1) = arglist;
7162 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7164 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7166 /* Make a suitable register to place result in. */
7167 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7172 switch (DECL_FUNCTION_CODE (fndecl))
7175 builtin_optab = sin_optab; break;
7177 builtin_optab = cos_optab; break;
7178 case BUILT_IN_FSQRT:
7179 builtin_optab = sqrt_optab; break;
7184 /* Compute into TARGET.
7185 Set TARGET to wherever the result comes back. */
7186 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7187 builtin_optab, op0, target, 0);
7189 /* If we were unable to expand via the builtin, stop the
7190 sequence (without outputting the insns) and break, causing
7191 a call to the library function. */
7198 /* Check the results by default. But if flag_fast_math is turned on,
7199 then assume sqrt will always be called with valid arguments. */
7201 if (! flag_fast_math)
7203 /* Don't define the builtin FP instructions
7204 if your machine is not IEEE. */
7205 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7208 lab1 = gen_label_rtx ();
7210 /* Test the result; if it is NaN, set errno=EDOM because
7211 the argument was not in the domain. */
/* NaN != NaN, so a failed self-equality compare detects NaN.  */
7212 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7213 emit_jump_insn (gen_beq (lab1));
7217 #ifdef GEN_ERRNO_RTX
7218 rtx errno_rtx = GEN_ERRNO_RTX;
7221 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7224 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7227 /* We can't set errno=EDOM directly; let the library call do it.
7228 Pop the arguments right away in case the call gets deleted. */
7230 expand_call (exp, target, 0);
7237 /* Output the entire sequence. */
7238 insns = get_insns ();
7244 /* __builtin_apply_args returns block of memory allocated on
7245 the stack into which is stored the arg pointer, structure
7246 value address, static chain, and all the registers that might
7247 possibly be used in performing a function call. The code is
7248 moved to the start of the function so the incoming values are
7250 case BUILT_IN_APPLY_ARGS:
7251 /* Don't do __builtin_apply_args more than once in a function.
7252 Save the result of the first call and reuse it. */
7253 if (apply_args_value != 0)
7254 return apply_args_value;
7256 /* When this function is called, it means that registers must be
7257 saved on entry to this function. So we migrate the
7258 call to the first insn of this function. */
7263 temp = expand_builtin_apply_args ();
7267 apply_args_value = temp;
7269 /* Put the sequence after the NOTE that starts the function.
7270 If this is inside a SEQUENCE, make the outer-level insn
7271 chain current, so the code is placed at the start of the
7273 push_topmost_sequence ();
7274 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7275 pop_topmost_sequence ();
7279 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7280 FUNCTION with a copy of the parameters described by
7281 ARGUMENTS, and ARGSIZE. It returns a block of memory
7282 allocated on the stack into which is stored all the registers
7283 that might possibly be used for returning the result of a
7284 function. ARGUMENTS is the value returned by
7285 __builtin_apply_args. ARGSIZE is the number of bytes of
7286 arguments that must be copied. ??? How should this value be
7287 computed? We'll also need a safe worst case value for varargs
7289 case BUILT_IN_APPLY:
7291 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7292 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7293 || TREE_CHAIN (arglist) == 0
7294 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7295 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7296 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
/* Expand the three arguments (function, arguments block, argsize).  */
7304 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7305 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7307 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7310 /* __builtin_return (RESULT) causes the function to return the
7311 value described by RESULT. RESULT is address of the block of
7312 memory returned by __builtin_apply. */
7313 case BUILT_IN_RETURN:
7315 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7316 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7317 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7318 NULL_RTX, VOIDmode, 0));
7321 case BUILT_IN_SAVEREGS:
7322 /* Don't do __builtin_saveregs more than once in a function.
7323 Save the result of the first call and reuse it. */
7324 if (saveregs_value != 0)
7325 return saveregs_value;
7327 /* When this function is called, it means that registers must be
7328 saved on entry to this function. So we migrate the
7329 call to the first insn of this function. */
7333 /* Now really call the function. `expand_call' does not call
7334 expand_builtin, so there is no danger of infinite recursion here. */
7337 #ifdef EXPAND_BUILTIN_SAVEREGS
7338 /* Do whatever the machine needs done in this case. */
7339 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7341 /* The register where the function returns its value
7342 is likely to have something else in it, such as an argument.
7343 So preserve that register around the call. */
7345 if (value_mode != VOIDmode)
7347 rtx valreg = hard_libcall_value (value_mode);
7348 rtx saved_valreg = gen_reg_rtx (value_mode);
7350 emit_move_insn (saved_valreg, valreg);
7351 temp = expand_call (exp, target, ignore);
7352 emit_move_insn (valreg, saved_valreg);
7355 /* Generate the call, putting the value in a pseudo. */
7356 temp = expand_call (exp, target, ignore);
7362 saveregs_value = temp;
7364 /* Put the sequence after the NOTE that starts the function.
7365 If this is inside a SEQUENCE, make the outer-level insn
7366 chain current, so the code is placed at the start of the
7368 push_topmost_sequence ();
7369 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7370 pop_topmost_sequence ();
7374 /* __builtin_args_info (N) returns word N of the arg space info
7375 for the current function. The number and meanings of words
7376 is controlled by the definition of CUMULATIVE_ARGS. */
7377 case BUILT_IN_ARGS_INFO:
7379 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* NOTE(review): the line below was mangled by an encoding pass —
   "&curren" in "&current_function_args_info" was treated as the HTML
   entity for the currency sign.  The original text should read
   "(int *) &current_function_args_info;".  Restore before compiling.  */
7381 int *word_ptr = (int *) ¤t_function_args_info;
7382 tree type, elts, result;
7384 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7385 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7386 __FILE__, __LINE__);
7390 tree arg = TREE_VALUE (arglist);
7391 if (TREE_CODE (arg) != INTEGER_CST)
7392 error ("argument of `__builtin_args_info' must be constant");
7395 int wordnum = TREE_INT_CST_LOW (arg);
7397 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7398 error ("argument of `__builtin_args_info' out of range");
7400 return GEN_INT (word_ptr[wordnum]);
7404 error ("missing argument in `__builtin_args_info'");
7409 for (i = 0; i < nwords; i++)
7410 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
7412 type = build_array_type (integer_type_node,
7413 build_index_type (build_int_2 (nwords, 0)));
7414 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7415 TREE_CONSTANT (result) = 1;
7416 TREE_STATIC (result) = 1;
7417 result = build (INDIRECT_REF, build_pointer_type (type), result);
7418 TREE_CONSTANT (result) = 1;
7419 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7423 /* Return the address of the first anonymous stack arg. */
7424 case BUILT_IN_NEXT_ARG:
7426 tree fntype = TREE_TYPE (current_function_decl);
7428 if ((TYPE_ARG_TYPES (fntype) == 0
7429 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7431 && ! current_function_varargs)
7433 error ("`va_start' used in function with fixed args");
7439 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7440 tree arg = TREE_VALUE (arglist);
7442 /* Strip off all nops for the sake of the comparison. This
7443 is not quite the same as STRIP_NOPS. It does more. */
7444 while (TREE_CODE (arg) == NOP_EXPR
7445 || TREE_CODE (arg) == CONVERT_EXPR
7446 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7447 arg = TREE_OPERAND (arg, 0);
7448 if (arg != last_parm)
7449 warning ("second parameter of `va_start' not last named argument");
7451 else if (! current_function_varargs)
7452 /* Evidently an out of date version of <stdarg.h>; can't validate
7453 va_start's second argument, but can still work as intended. */
7454 warning ("`__builtin_next_arg' called without an argument");
7457 return expand_binop (Pmode, add_optab,
7458 current_function_internal_arg_pointer,
7459 current_function_arg_offset_rtx,
7460 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7462 case BUILT_IN_CLASSIFY_TYPE:
7465 tree type = TREE_TYPE (TREE_VALUE (arglist));
7466 enum tree_code code = TREE_CODE (type);
7467 if (code == VOID_TYPE)
7468 return GEN_INT (void_type_class);
7469 if (code == INTEGER_TYPE)
7470 return GEN_INT (integer_type_class);
7471 if (code == CHAR_TYPE)
7472 return GEN_INT (char_type_class);
7473 if (code == ENUMERAL_TYPE)
7474 return GEN_INT (enumeral_type_class);
7475 if (code == BOOLEAN_TYPE)
7476 return GEN_INT (boolean_type_class);
7477 if (code == POINTER_TYPE)
7478 return GEN_INT (pointer_type_class);
7479 if (code == REFERENCE_TYPE)
7480 return GEN_INT (reference_type_class);
7481 if (code == OFFSET_TYPE)
7482 return GEN_INT (offset_type_class);
7483 if (code == REAL_TYPE)
7484 return GEN_INT (real_type_class);
7485 if (code == COMPLEX_TYPE)
7486 return GEN_INT (complex_type_class);
7487 if (code == FUNCTION_TYPE)
7488 return GEN_INT (function_type_class);
7489 if (code == METHOD_TYPE)
7490 return GEN_INT (method_type_class);
7491 if (code == RECORD_TYPE)
7492 return GEN_INT (record_type_class);
7493 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7494 return GEN_INT (union_type_class);
7495 if (code == ARRAY_TYPE)
7497 if (TYPE_STRING_FLAG (type))
7498 return GEN_INT (string_type_class);
7500 return GEN_INT (array_type_class);
7502 if (code == SET_TYPE)
7503 return GEN_INT (set_type_class);
7504 if (code == FILE_TYPE)
7505 return GEN_INT (file_type_class);
7506 if (code == LANG_TYPE)
7507 return GEN_INT (lang_type_class);
7509 return GEN_INT (no_type_class);
7511 case BUILT_IN_CONSTANT_P:
7516 tree arg = TREE_VALUE (arglist);
/* Constant if the tree code is a constant class ('c') or the address
   of a string literal.  */
7519 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7520 || (TREE_CODE (arg) == ADDR_EXPR
7521 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7522 ? const1_rtx : const0_rtx);
7525 case BUILT_IN_FRAME_ADDRESS:
7526 /* The argument must be a nonnegative integer constant.
7527 It counts the number of frames to scan up the stack.
7528 The value is the address of that frame. */
7529 case BUILT_IN_RETURN_ADDRESS:
7530 /* The argument must be a nonnegative integer constant.
7531 It counts the number of frames to scan up the stack.
7532 The value is the return address saved in that frame. */
7534 /* Warning about missing arg was already issued. */
7536 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7538 error ("invalid arg to `__builtin_return_address'");
7541 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7543 error ("invalid arg to `__builtin_return_address'");
7548 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7549 rtx tem = frame_pointer_rtx;
7552 /* Some machines need special handling before we can access arbitrary
7553 frames. For example, on the sparc, we must first flush all
7554 register windows to the stack. */
7555 #ifdef SETUP_FRAME_ADDRESSES
7556 SETUP_FRAME_ADDRESSES ();
7559 /* On the sparc, the return address is not in the frame, it is
7560 in a register. There is no way to access it off of the current
7561 frame pointer, but it can be accessed off the previous frame
7562 pointer by reading the value from the register window save
7564 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7565 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7569 /* Scan back COUNT frames to the specified frame. */
7570 for (i = 0; i < count; i++)
7572 /* Assume the dynamic chain pointer is in the word that
7573 the frame address points to, unless otherwise specified. */
7574 #ifdef DYNAMIC_CHAIN_ADDRESS
7575 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7577 tem = memory_address (Pmode, tem);
7578 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7581 /* For __builtin_frame_address, return what we've got. */
7582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7585 /* For __builtin_return_address,
7586 Get the return address from that frame. */
7587 #ifdef RETURN_ADDR_RTX
7588 return RETURN_ADDR_RTX (count, tem);
7590 tem = memory_address (Pmode,
7591 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7592 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7596 case BUILT_IN_ALLOCA:
7598 /* Arg could be non-integer if user redeclared this fcn wrong. */
7599 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7602 /* Compute the argument. */
7603 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7605 /* Allocate the desired space. */
7606 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7609 /* If not optimizing, call the library function. */
7610 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7614 /* Arg could be non-integer if user redeclared this fcn wrong. */
7615 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7618 /* Compute the argument. */
7619 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7620 /* Compute ffs, into TARGET if possible.
7621 Set TARGET to wherever the result comes back. */
7622 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7623 ffs_optab, op0, target, 1);
7628 case BUILT_IN_STRLEN:
7629 /* If not optimizing, call the library function. */
7630 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7634 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7635 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7639 tree src = TREE_VALUE (arglist);
7640 tree len = c_strlen (src);
7643 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7645 rtx result, src_rtx, char_rtx;
7646 enum machine_mode insn_mode = value_mode, char_mode;
7647 enum insn_code icode;
7649 /* If the length is known, just return it. */
7651 return expand_expr (len, target, mode, 0);
7653 /* If SRC is not a pointer type, don't do this operation inline. */
7657 /* Call a function if we can't compute strlen in the right mode. */
7659 while (insn_mode != VOIDmode)
7661 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7662 if (icode != CODE_FOR_nothing)
7665 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7667 if (insn_mode == VOIDmode)
7670 /* Make a place to write the result of the instruction. */
7673 && GET_CODE (result) == REG
7674 && GET_MODE (result) == insn_mode
7675 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7676 result = gen_reg_rtx (insn_mode);
7678 /* Make sure the operands are acceptable to the predicates. */
7680 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7681 result = gen_reg_rtx (insn_mode);
7683 src_rtx = memory_address (BLKmode,
7684 expand_expr (src, NULL_RTX, ptr_mode,
7686 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7687 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7689 char_rtx = const0_rtx;
7690 char_mode = insn_operand_mode[(int)icode][2];
7691 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7692 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7694 emit_insn (GEN_FCN (icode) (result,
7695 gen_rtx (MEM, BLKmode, src_rtx),
7696 char_rtx, GEN_INT (align)));
7698 /* Return the value in the proper mode for this function. */
7699 if (GET_MODE (result) == value_mode)
7701 else if (target != 0)
7703 convert_move (target, result, 0);
7707 return convert_to_mode (value_mode, result, 0);
7710 case BUILT_IN_STRCPY:
7711 /* If not optimizing, call the library function. */
7712 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7716 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7717 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7718 || TREE_CHAIN (arglist) == 0
7719 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7723 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
/* Count the trailing NUL, then fall through to the memcpy expander
   with the computed length appended as a third argument.  */
7728 len = size_binop (PLUS_EXPR, len, integer_one_node);
7730 chainon (arglist, build_tree_list (NULL_TREE, len));
7734 case BUILT_IN_MEMCPY:
7735 /* If not optimizing, call the library function. */
7736 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7740 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7741 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7742 || TREE_CHAIN (arglist) == 0
7743 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7744 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7745 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7749 tree dest = TREE_VALUE (arglist);
7750 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7751 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7755 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7757 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7758 rtx dest_rtx, dest_mem, src_mem;
7760 /* If either SRC or DEST is not a pointer type, don't do
7761 this operation in-line. */
7762 if (src_align == 0 || dest_align == 0)
/* Undo the length argument we chained on for the STRCPY case before
   falling back to a real library call.  */
7764 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7765 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7769 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7770 dest_mem = gen_rtx (MEM, BLKmode,
7771 memory_address (BLKmode, dest_rtx));
7772 /* There could be a void* cast on top of the object. */
7773 if (TREE_CODE (dest) == NOP_EXPR)
7774 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (dest, 0)));
7776 type = TREE_TYPE (TREE_TYPE (dest));
7777 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7778 src_mem = gen_rtx (MEM, BLKmode,
7779 memory_address (BLKmode,
7780 expand_expr (src, NULL_RTX,
7783 /* There could be a void* cast on top of the object. */
7784 if (TREE_CODE (src) == NOP_EXPR)
7785 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (src, 0)));
7787 type = TREE_TYPE (TREE_TYPE (src));
7788 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7790 /* Copy word part most expediently. */
7791 emit_block_move (dest_mem, src_mem,
7792 expand_expr (len, NULL_RTX, VOIDmode, 0),
7793 MIN (src_align, dest_align));
7794 return force_operand (dest_rtx, NULL_RTX);
7797 /* These comparison functions need an instruction that returns an actual
7798 index. An ordinary compare that just sets the condition codes
7800 #ifdef HAVE_cmpstrsi
7801 case BUILT_IN_STRCMP:
7802 /* If not optimizing, call the library function. */
7803 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7807 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7808 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7809 || TREE_CHAIN (arglist) == 0
7810 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7812 else if (!HAVE_cmpstrsi)
7815 tree arg1 = TREE_VALUE (arglist);
7816 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7820 len = c_strlen (arg1);
7822 len = size_binop (PLUS_EXPR, integer_one_node, len);
7823 len2 = c_strlen (arg2);
7825 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7827 /* If we don't have a constant length for the first, use the length
7828 of the second, if we know it. We don't require a constant for
7829 this case; some cost analysis could be done if both are available
7830 but neither is constant. For now, assume they're equally cheap.
7832 If both strings have constant lengths, use the smaller. This
7833 could arise if optimization results in strcpy being called with
7834 two fixed strings, or if the code was machine-generated. We should
7835 add some code to the `memcmp' handler below to deal with such
7836 situations, someday. */
7837 if (!len || TREE_CODE (len) != INTEGER_CST)
7844 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7846 if (tree_int_cst_lt (len2, len))
/* Append the chosen length and fall through to the memcmp expander.  */
7850 chainon (arglist, build_tree_list (NULL_TREE, len));
7854 case BUILT_IN_MEMCMP:
7855 /* If not optimizing, call the library function. */
7856 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7860 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7861 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7862 || TREE_CHAIN (arglist) == 0
7863 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7864 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7865 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7867 else if (!HAVE_cmpstrsi)
7870 tree arg1 = TREE_VALUE (arglist);
7871 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7872 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7876 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7878 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7879 enum machine_mode insn_mode
7880 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7882 /* If we don't have POINTER_TYPE, call the function. */
7883 if (arg1_align == 0 || arg2_align == 0)
7885 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7886 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7890 /* Make a place to write the result of the instruction. */
7893 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7894 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7895 result = gen_reg_rtx (insn_mode);
7897 emit_insn (gen_cmpstrsi (result,
7898 gen_rtx (MEM, BLKmode,
7899 expand_expr (arg1, NULL_RTX,
7902 gen_rtx (MEM, BLKmode,
7903 expand_expr (arg2, NULL_RTX,
7906 expand_expr (len, NULL_RTX, VOIDmode, 0),
7907 GEN_INT (MIN (arg1_align, arg2_align))));
7909 /* Return the value in the proper mode for this function. */
7910 mode = TYPE_MODE (TREE_TYPE (exp));
7911 if (GET_MODE (result) == mode)
7913 else if (target != 0)
7915 convert_move (target, result, 0);
7919 return convert_to_mode (mode, result, 0);
7922 case BUILT_IN_STRCMP:
7923 case BUILT_IN_MEMCMP:
7927 default: /* just do library call, if unknown builtin */
7928 error ("built-in function `%s' not currently supported",
7929 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7932 /* The switch statement above can drop through to cause the function
7933 to be called normally. */
7935 return expand_call (exp, target, ignore);
7938 /* Built-in functions to perform an untyped call and return. */
7940 /* For each register that may be used for calling a function, this
7941 gives a mode used to copy the register's value. VOIDmode indicates
7942 the register is not used for calling a function. If the machine
7943 has register windows, this gives only the outbound registers.
7944 INCOMING_REGNO gives the corresponding inbound register. */
7945 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7947 /* For each register that may be used for returning values, this gives
7948 a mode used to copy the register's value. VOIDmode indicates the
7949 register is not used for returning values. If the machine has
7950 register windows, this gives only the outbound registers.
7951 INCOMING_REGNO gives the corresponding inbound register. */
7952 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7954 /* For each register that may be used for calling a function, this
7955 gives the offset of that register into the block returned by
7956 __builtin_apply_args. 0 indicates that the register is not
7957 used for calling a function. */
7958 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* NOTE(review): elided excerpt — the return type, parameter declaration,
   and braces are missing from view.  Code kept byte-identical.  */
7960 /* Return the offset of register REGNO into the block returned by
7961 __builtin_apply_args. This is not declared static, since it is
7962 needed in objc-act.c. */
7965 apply_args_register_offset (regno)
7970 /* Arguments are always put in outgoing registers (in the argument
7971 block) if such make sense. */
7972 #ifdef OUTGOING_REGNO
7973 regno = OUTGOING_REGNO(regno);
7975 return apply_args_reg_offset[regno];
/* NOTE(review): elided excerpt — the function header line itself
   (presumably `static int apply_args_size ()` given the comment below),
   braces, and several statements are missing from view.  Code kept
   byte-identical.  */
7978 /* Return the size required for the block returned by __builtin_apply_args,
7979 and initialize apply_args_mode. */
/* Result is cached: -1 means "not yet computed".  */
7984 static int size = -1;
7986 enum machine_mode mode;
7988 /* The values computed by this function never change. */
7991 /* The first value is the incoming arg-pointer. */
7992 size = GET_MODE_SIZE (Pmode);
7994 /* The second value is the structure value address unless this is
7995 passed as an "invisible" first argument. */
7996 if (struct_value_rtx)
7997 size += GET_MODE_SIZE (Pmode);
7999 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8000 if (FUNCTION_ARG_REGNO_P (regno))
8002 /* Search for the proper mode for copying this register's
8003 value. I'm not sure this is right, but it works so far. */
8004 enum machine_mode best_mode = VOIDmode;
/* First try the integer modes, then fall back to float modes that
   have a usable move pattern.  */
8006 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8008 mode = GET_MODE_WIDER_MODE (mode))
8009 if (HARD_REGNO_MODE_OK (regno, mode)
8010 && HARD_REGNO_NREGS (regno, mode) == 1)
8013 if (best_mode == VOIDmode)
8014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8016 mode = GET_MODE_WIDER_MODE (mode))
8017 if (HARD_REGNO_MODE_OK (regno, mode)
8018 && (mov_optab->handlers[(int) mode].insn_code
8019 != CODE_FOR_nothing))
8023 if (mode == VOIDmode)
/* Round the running size up to this register's alignment, record
   its offset, then reserve space for its value.  */
8026 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8027 if (size % align != 0)
8028 size = CEIL (size, align) * align;
8029 apply_args_reg_offset[regno] = size;
8030 size += GET_MODE_SIZE (mode);
8031 apply_args_mode[regno] = mode;
8035 apply_args_mode[regno] = VOIDmode;
8036 apply_args_reg_offset[regno] = 0;
/* NOTE(review): elided excerpt — return type, braces, and several
   statements are missing from view.  Code kept byte-identical.  */
8042 /* Return the size required for the block returned by __builtin_apply,
8043 and initialize apply_result_mode. */
8046 apply_result_size ()
/* Result is cached: -1 means "not yet computed".  */
8048 static int size = -1;
8050 enum machine_mode mode;
8052 /* The values computed by this function never change. */
8057 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8058 if (FUNCTION_VALUE_REGNO_P (regno))
8060 /* Search for the proper mode for copying this register's
8061 value. I'm not sure this is right, but it works so far. */
8062 enum machine_mode best_mode = VOIDmode;
/* Same mode search as apply_args_size: integer modes first, then
   float modes with a usable move pattern.  */
8064 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8066 mode = GET_MODE_WIDER_MODE (mode))
8067 if (HARD_REGNO_MODE_OK (regno, mode))
8070 if (best_mode == VOIDmode)
8071 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8073 mode = GET_MODE_WIDER_MODE (mode))
8074 if (HARD_REGNO_MODE_OK (regno, mode)
8075 && (mov_optab->handlers[(int) mode].insn_code
8076 != CODE_FOR_nothing))
8080 if (mode == VOIDmode)
8083 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8084 if (size % align != 0)
8085 size = CEIL (size, align) * align;
8086 size += GET_MODE_SIZE (mode);
8087 apply_result_mode[regno] = mode;
8090 apply_result_mode[regno] = VOIDmode;
8092 /* Allow targets that use untyped_call and untyped_return to override
8093 the size so that machine-specific information can be stored here. */
8094 #ifdef APPLY_RESULT_SIZE
8095 size = APPLY_RESULT_SIZE;
/* NOTE(review): elided excerpt — the return type, parameter declarations,
   braces, and the initialization of `size`/`nelts` are missing from view.
   Code kept byte-identical.  */
8101 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8102 /* Create a vector describing the result block RESULT. If SAVEP is true,
8103 the result block is used to save the values; otherwise it is used to
8104 restore the values. */
8107 result_vector (savep, result)
8111 int regno, size, align, nelts;
8112 enum machine_mode mode;
8114 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
/* Walk the registers laid out by apply_result_size, emitting one SET
   per live register; direction of the SET depends on SAVEP.  */
8117 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8118 if ((mode = apply_result_mode[regno]) != VOIDmode)
8120 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8121 if (size % align != 0)
8122 size = CEIL (size, align) * align;
8123 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8124 mem = change_address (result, mode,
8125 plus_constant (XEXP (result, 0), size));
8126 savevec[nelts++] = (savep
8127 ? gen_rtx (SET, VOIDmode, mem, reg)
8128 : gen_rtx (SET, VOIDmode, reg, mem));
8129 size += GET_MODE_SIZE (mode);
8131 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8133 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8135 /* Save the state required to perform an untyped call with the same
8136 arguments as were passed to the current function. */
/* NOTE(review): several lines are elided in this extract (return type,
   braces, the REGISTERS declaration, the stores' offset arguments, and the
   reg-stack conditional) -- confirm against the full source.
   Allocates a stack block and saves into it: the incoming arg pointer, the
   structure-value address (if any), and every incoming argument register
   recorded in apply_args_mode[].  Returns the address of that block.  */
8139 expand_builtin_apply_args ()
8142 int size, align, regno;
8143 enum machine_mode mode;
8145 /* Create a block where the arg-pointer, structure value address,
8146 and argument registers can be saved. */
8147 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8149 /* Walk past the arg-pointer and structure value address. */
8150 size = GET_MODE_SIZE (Pmode);
8151 if (struct_value_rtx)
8152 size += GET_MODE_SIZE (Pmode);
8154 /* Save each register used in calling a function to the block.
8155 Doing this in reverse order makes for much more compact code
8156 for i386 and family. If we do this in reverse order, a simple
8157 series of pops and stores will be generated. If we do this
8158 in ascending order, the pops and stores will be littered with
8159 stack swaps as well. Since the order is largely irrelevant for
8160 all other architectures, we use the optimal order for the i386. */
8161 for (regno = FIRST_PSEUDO_REGISTER; regno--;)
8162 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Round the running offset up to this mode's alignment. */
8166 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8167 if (size % align != 0)
8168 size = CEIL (size, align) * align;
/* The value arrived in the INCOMING register corresponding to REGNO. */
8170 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8173 /* For reg-stack.c's stack register household.
8174 Compare with a similar piece of code in function.c. */
8176 emit_insn (gen_rtx (USE, mode, tem));
/* Store the register into its slot in the block.  (The offset argument
   to plus_constant is on an elided line here.) */
8179 emit_move_insn (change_address (registers, mode,
8180 plus_constant (XEXP (registers, 0),
8183 size += GET_MODE_SIZE (mode);
8186 /* Save the arg pointer to the block. */
8187 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8188 copy_to_reg (virtual_incoming_args_rtx));
8189 size = GET_MODE_SIZE (Pmode);
8191 /* Save the structure value address unless this is passed as an
8192 "invisible" first argument. */
8193 if (struct_value_incoming_rtx)
8195 emit_move_insn (change_address (registers, Pmode,
8196 plus_constant (XEXP (registers, 0),
8198 copy_to_reg (struct_value_incoming_rtx));
8199 size += GET_MODE_SIZE (Pmode);
8202 /* Return the address of the block. */
8203 return copy_addr_to_reg (XEXP (registers, 0));
8206 /* Perform an untyped call and save the state required to perform an
8207 untyped return of whatever value was returned by the given function. */
/* NOTE(review): elided lines include the return type, braces, the #endif of
   the STACK_GROWS_DOWNWARD conditional, emit_queue, the #else arms around
   the HAVE_untyped_call / HAVE_call_value alternatives, and several offset
   arguments -- confirm against the full source.
   FUNCTION is the function's address, ARGUMENTS the block built by
   expand_builtin_apply_args, ARGSIZE the size of the pushed arguments.
   Returns the address of a block holding the call's result registers.  */
8210 expand_builtin_apply (function, arguments, argsize)
8211 rtx function, arguments, argsize;
8213 int size, align, regno;
8214 enum machine_mode mode;
8215 rtx incoming_args, result, reg, dest, call_insn;
8216 rtx old_stack_level = 0;
8217 rtx call_fusage = 0;
8219 /* Create a block where the return registers can be saved. */
8220 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8222 /* ??? The argsize value should be adjusted here. */
8224 /* Fetch the arg pointer from the ARGUMENTS block. */
8225 incoming_args = gen_reg_rtx (Pmode);
8226 emit_move_insn (incoming_args,
8227 gen_rtx (MEM, Pmode, arguments));
8228 #ifndef STACK_GROWS_DOWNWARD
/* When the stack grows up, the saved arg pointer is past the arguments;
   step back over them to find their start. */
8229 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8230 incoming_args, 0, OPTAB_LIB_WIDEN);
8233 /* Perform postincrements before actually calling the function. */
8236 /* Push a new argument block and copy the arguments. */
8237 do_pending_stack_adjust ();
8238 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8240 /* Push a block of memory onto the stack to store the memory arguments.
8241 Save the address in a register, and copy the memory arguments. ??? I
8242 haven't figured out how the calling convention macros effect this,
8243 but it's likely that the source and/or destination addresses in
8244 the block copy will need updating in machine specific ways. */
8245 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8246 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8247 gen_rtx (MEM, BLKmode, incoming_args),
8249 PARM_BOUNDARY / BITS_PER_UNIT);
8251 /* Refer to the argument block. */
8253 arguments = gen_rtx (MEM, BLKmode, arguments);
8255 /* Walk past the arg-pointer and structure value address. */
8256 size = GET_MODE_SIZE (Pmode);
8257 if (struct_value_rtx)
8258 size += GET_MODE_SIZE (Pmode);
8260 /* Restore each of the registers previously saved. Make USE insns
8261 for each of these registers for use in making the call.
8262 Doing this in reverse order makes for much more compact code
8263 for i386 and family. */
8264 for (regno = FIRST_PSEUDO_REGISTER; regno--; )
8265 if ((mode = apply_args_mode[regno]) != VOIDmode)
/* Round the running offset up to this mode's alignment -- must mirror
   the layout computed in expand_builtin_apply_args. */
8267 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8268 if (size % align != 0)
8269 size = CEIL (size, align) * align;
8270 reg = gen_rtx (REG, mode, regno);
/* Load the saved value back into the outgoing argument register.
   (The offset argument to plus_constant is on an elided line.) */
8271 emit_move_insn (reg,
8272 change_address (arguments, mode,
8273 plus_constant (XEXP (arguments, 0),
/* Record the register as used by the call so it isn't dead-coded. */
8276 use_reg (&call_fusage, reg);
8277 size += GET_MODE_SIZE (mode);
8280 /* Restore the structure value address unless this is passed as an
8281 "invisible" first argument. */
8282 size = GET_MODE_SIZE (Pmode);
8283 if (struct_value_rtx)
8285 rtx value = gen_reg_rtx (Pmode);
8286 emit_move_insn (value,
8287 change_address (arguments, Pmode,
8288 plus_constant (XEXP (arguments, 0),
8290 emit_move_insn (struct_value_rtx, value);
8291 if (GET_CODE (struct_value_rtx) == REG)
8292 use_reg (&call_fusage, struct_value_rtx);
8293 size += GET_MODE_SIZE (Pmode);
8296 /* All arguments and registers used for the call are set up by now! */
8297 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8299 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8300 and we don't want to load it into a register as an optimization,
8301 because prepare_call_address already did it if it should be done. */
8302 if (GET_CODE (function) != SYMBOL_REF)
8303 function = memory_address (FUNCTION_MODE, function);
8305 /* Generate the actual call instruction and save the return value. */
8306 #ifdef HAVE_untyped_call
8307 if (HAVE_untyped_call)
8308 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8309 result, result_vector (1, result)));
8312 #ifdef HAVE_call_value
8313 if (HAVE_call_value)
8317 /* Locate the unique return register. It is not possible to
8318 express a call that sets more than one return register using
8319 call_value; use untyped_call for that. In fact, untyped_call
8320 only needs to save the return registers in the given block. */
8321 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8322 if ((mode = apply_result_mode[regno]) != VOIDmode)
/* More than one result register was found but we only have call_value. */
8325 abort (); /* HAVE_untyped_call required. */
8326 valreg = gen_rtx (REG, mode, regno);
8329 emit_call_insn (gen_call_value (valreg,
8330 gen_rtx (MEM, FUNCTION_MODE, function),
8331 const0_rtx, NULL_RTX, const0_rtx));
/* Copy the single return register into the result block. */
8333 emit_move_insn (change_address (result, GET_MODE (valreg),
8341 /* Find the CALL insn we just emitted. */
8342 for (call_insn = get_last_insn ();
8343 call_insn && GET_CODE (call_insn) != CALL_INSN;
8344 call_insn = PREV_INSN (call_insn))
8350 /* Put the register usage information on the CALL. If there is already
8351 some usage information, put ours at the end. */
8352 if (CALL_INSN_FUNCTION_USAGE (call_insn))
/* Walk to the end of the existing usage chain and append ours. */
8356 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8357 link = XEXP (link, 1))
8360 XEXP (link, 1) = call_fusage;
8363 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8365 /* Restore the stack. */
8366 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8368 /* Return the address of the result block. */
8369 return copy_addr_to_reg (XEXP (result, 0));
8372 /* Perform an untyped return. */
/* NOTE(review): elided lines include the return type, parameter declaration,
   braces, the #else/#endif of the HAVE_untyped_return conditional, the
   initialization of SIZE, and the offset argument of plus_constant --
   confirm against the full source.
   RESULT is the address of the block saved by expand_builtin_apply; restore
   the return registers from it and return from the current function.  */
8375 expand_builtin_return (result)
8378 int size, align, regno;
8379 enum machine_mode mode;
8381 rtx call_fusage = 0;
/* Called for its side effect of computing apply_result_mode[]. */
8383 apply_result_size ();
8384 result = gen_rtx (MEM, BLKmode, result);
8386 #ifdef HAVE_untyped_return
8387 if (HAVE_untyped_return)
/* The machine description handles the whole restore-and-return. */
8389 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8395 /* Restore the return value and note that each value is used. */
8397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8398 if ((mode = apply_result_mode[regno]) != VOIDmode)
/* Round the running offset up to this mode's alignment. */
8400 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8401 if (size % align != 0)
8402 size = CEIL (size, align) * align;
8403 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8404 emit_move_insn (reg,
8405 change_address (result, mode,
8406 plus_constant (XEXP (result, 0),
/* Accumulate USE insns on a sequence so they can be emitted together
   just before the return. */
8409 push_to_sequence (call_fusage);
8410 emit_insn (gen_rtx (USE, VOIDmode, reg));
8411 call_fusage = get_insns ();
8413 size += GET_MODE_SIZE (mode);
8416 /* Put the USE insns before the return. */
8417 emit_insns (call_fusage);
8419 /* Return whatever values was restored by jumping directly to the end
8421 expand_null_return ();
8424 /* Expand code for a post- or pre- increment or decrement
8425 and return the RTX for the result.
8426 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): elided lines include the return type, parameter
   declarations, braces, the BAD_SUBREG declaration, several returns, and
   the bodies of some conditionals -- confirm against the full source.  */
8429 expand_increment (exp, post)
8433 register rtx op0, op1;
8434 register rtx temp, value;
8435 register tree incremented = TREE_OPERAND (exp, 0);
8436 optab this_optab = add_optab;
8438 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8439 int op0_is_copy = 0;
8440 int single_insn = 0;
8441 /* 1 means we can't store into OP0 directly,
8442 because it is a subreg narrower than a word,
8443 and we don't dare clobber the rest of the word. */
/* Bytecode back end handles the whole expression itself. */
8446 if (output_bytecode)
8448 bc_expand_expr (exp);
8452 /* Stabilize any component ref that might need to be
8453 evaluated more than once below. */
8455 || TREE_CODE (incremented) == BIT_FIELD_REF
8456 || (TREE_CODE (incremented) == COMPONENT_REF
8457 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8458 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8459 incremented = stabilize_reference (incremented);
8460 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8461 ones into save exprs so that they don't accidentally get evaluated
8462 more than once by the code below. */
8463 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8464 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8465 incremented = save_expr (incremented);
8467 /* Compute the operands as RTX.
8468 Note whether OP0 is the actual lvalue or a copy of it:
8469 I believe it is a copy iff it is a register or subreg
8470 and insns were generated in computing it. */
8472 temp = get_last_insn ();
8473 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8475 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8476 in place but intead must do sign- or zero-extension during assignment,
8477 so we copy it into a new register and let the code below use it as
8480 Note that we can safely modify this SUBREG since it is know not to be
8481 shared (it was made by the expand_expr call above). */
8483 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8486 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8490 else if (GET_CODE (op0) == SUBREG
8491 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8493 /* We cannot increment this SUBREG in place. If we are
8494 post-incrementing, get a copy of the old value. Otherwise,
8495 just mark that we cannot increment in place. */
8497 op0 = copy_to_reg (op0);
8502 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8503 && temp != get_last_insn ());
8504 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8506 /* Decide whether incrementing or decrementing. */
8507 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8508 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8509 this_optab = sub_optab;
8511 /* Convert decrement by a constant into a negative increment. */
8512 if (this_optab == sub_optab
8513 && GET_CODE (op1) == CONST_INT)
8515 op1 = GEN_INT (- INTVAL (op1));
8516 this_optab = add_optab;
8519 /* For a preincrement, see if we can do this with a single instruction. */
8522 icode = (int) this_optab->handlers[(int) mode].insn_code;
8523 if (icode != (int) CODE_FOR_nothing
8524 /* Make sure that OP0 is valid for operands 0 and 1
8525 of the insn we want to queue. */
8526 && (*insn_operand_predicate[icode][0]) (op0, mode)
8527 && (*insn_operand_predicate[icode][1]) (op0, mode)
8528 && (*insn_operand_predicate[icode][2]) (op1, mode))
8532 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8533 then we cannot just increment OP0. We must therefore contrive to
8534 increment the original value. Then, for postincrement, we can return
8535 OP0 since it is a copy of the old value. For preincrement, expand here
8536 unless we can do it with a single insn.
8538 Likewise if storing directly into OP0 would clobber high bits
8539 we need to preserve (bad_subreg). */
8540 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8542 /* This is the easiest way to increment the value wherever it is.
8543 Problems with multiple evaluation of INCREMENTED are prevented
8544 because either (1) it is a component_ref or preincrement,
8545 in which case it was stabilized above, or (2) it is an array_ref
8546 with constant index in an array in a register, which is
8547 safe to reevaluate. */
8548 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8549 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8550 ? MINUS_EXPR : PLUS_EXPR),
8553 TREE_OPERAND (exp, 1));
/* Strip conversions so we assign to the underlying object, converting
   the new value to each intermediate type as we peel. */
8555 while (TREE_CODE (incremented) == NOP_EXPR
8556 || TREE_CODE (incremented) == CONVERT_EXPR)
8558 newexp = convert (TREE_TYPE (incremented), newexp);
8559 incremented = TREE_OPERAND (incremented, 0);
8562 temp = expand_assignment (incremented, newexp, ! post, 0);
8563 return post ? op0 : temp;
8568 /* We have a true reference to the value in OP0.
8569 If there is an insn to add or subtract in this mode, queue it.
8570 Queueing the increment insn avoids the register shuffling
8571 that often results if we must increment now and first save
8572 the old value for subsequent use. */
8574 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8575 op0 = stabilize (op0);
8578 icode = (int) this_optab->handlers[(int) mode].insn_code;
8579 if (icode != (int) CODE_FOR_nothing
8580 /* Make sure that OP0 is valid for operands 0 and 1
8581 of the insn we want to queue. */
8582 && (*insn_operand_predicate[icode][0]) (op0, mode)
8583 && (*insn_operand_predicate[icode][1]) (op0, mode))
8585 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8586 op1 = force_reg (mode, op1);
8588 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8592 /* Preincrement, or we can't increment with one simple insn. */
8594 /* Save a copy of the value before inc or dec, to return it later. */
8595 temp = value = copy_to_reg (op0);
8597 /* Arrange to return the incremented value. */
8598 /* Copy the rtx because expand_binop will protect from the queue,
8599 and the results of that would be invalid for us to return
8600 if our caller does emit_queue before using our result. */
8601 temp = copy_rtx (value = op0);
8603 /* Increment however we can. */
8604 op1 = expand_binop (mode, this_optab, value, op1, op0,
8605 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8606 /* Make sure the value is stored into OP0. */
8608 emit_move_insn (op0, op1);
8613 /* Expand all function calls contained within EXP, innermost ones first.
8614 But don't look within expressions that have sequence points.
8615 For each CALL_EXPR, record the rtx for its value
8616 in the CALL_EXPR_RTL field. */
/* NOTE(review): elided lines include the return type, parameter declaration,
   braces, several `return;` statements, and the case labels (CALL_EXPR,
   COMPOUND_EXPR, etc.) of the switch -- confirm against the full source.  */
8619 preexpand_calls (exp)
8622 register int nops, i;
8623 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8625 if (! do_preexpand_calls)
8628 /* Only expressions and references can contain calls. */
8630 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8633 switch (TREE_CODE (exp))
8636 /* Do nothing if already expanded. */
8637 if (CALL_EXPR_RTL (exp) != 0)
8640 /* Do nothing to built-in functions. */
8641 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8642 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8643 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8644 /* Do nothing if the call returns a variable-sized object. */
8645 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
8646 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8651 case TRUTH_ANDIF_EXPR:
8652 case TRUTH_ORIF_EXPR:
8653 /* If we find one of these, then we can be sure
8654 the adjust will be done for it (since it makes jumps).
8655 Do it now, so that if this is inside an argument
8656 of a function, we don't get the stack adjustment
8657 after some other args have already been pushed. */
8658 do_pending_stack_adjust ();
8663 case WITH_CLEANUP_EXPR:
/* An already-expanded SAVE_EXPR needs no further walking. */
8667 if (SAVE_EXPR_RTL (exp) != 0)
/* Recurse into each operand that can itself contain calls. */
8671 nops = tree_code_length[(int) TREE_CODE (exp)];
8672 for (i = 0; i < nops; i++)
8673 if (TREE_OPERAND (exp, i) != 0)
8675 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8676 if (type == 'e' || type == '<' || type == '1' || type == '2'
8678 preexpand_calls (TREE_OPERAND (exp, i));
8682 /* At the start of a function, record that we have no previously-pushed
8683 arguments waiting to be popped. */
/* NOTE(review): the return type and braces are on elided lines.  */
8686 init_pending_stack_adjust ()
8688 pending_stack_adjust = 0;
8691 /* When exiting from function, if safe, clear out any pending stack adjust
8692 so the adjustment won't get done. */
/* NOTE(review): the return type, braces, and #endif are on elided lines.
   Safe only when the exit code ignores the stack pointer and the function
   will not be inlined (inlining would splice this body into a caller where
   the adjust still matters).  */
8695 clear_pending_stack_adjust ()
8697 #ifdef EXIT_IGNORE_STACK
8698 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8699 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8700 && ! flag_inline_functions)
8701 pending_stack_adjust = 0;
8705 /* Pop any previously-pushed arguments that have not been popped yet. */
/* NOTE(review): the return type and braces are on elided lines.
   Emits the deferred stack adjustment unless pops are currently being
   deferred (inhibit_defer_pop nonzero).  */
8708 do_pending_stack_adjust ()
8710 if (inhibit_defer_pop == 0)
8712 if (pending_stack_adjust != 0)
8713 adjust_stack (GEN_INT (pending_stack_adjust));
8714 pending_stack_adjust = 0;
8718 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
8719 Returns the cleanups to be performed. */
/* NOTE(review): elided lines include the return type, parameter declaration,
   braces, and the `if (last)` / inner-loop tests guarding the list surgery
   and the COMPOUND_EXPR chaining -- confirm against the full source.
   Detaches the cleanups pushed since OLD_CLEANUPS from cleanups_this_call
   and folds them (in original push order) into a single COMPOUND_EXPR
   tree that the caller can expand later.  */
8722 defer_cleanups_to (old_cleanups)
8725 tree new_cleanups = NULL_TREE;
8726 tree cleanups = cleanups_this_call;
8727 tree last = NULL_TREE;
/* Pop entries until we reach the caller's saved high-water mark. */
8729 while (cleanups_this_call != old_cleanups)
8731 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8732 last = cleanups_this_call;
8733 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8738 /* Remove the list from the chain of cleanups. */
8739 TREE_CHAIN (last) = NULL_TREE;
8741 /* reverse them so that we can build them in the right order. */
8742 cleanups = nreverse (cleanups);
8747 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8748 TREE_VALUE (cleanups), new_cleanups);
8750 new_cleanups = TREE_VALUE (cleanups);
8752 cleanups = TREE_CHAIN (cleanups);
8756 return new_cleanups;
8759 /* Expand all cleanups up to OLD_CLEANUPS.
8760 Needed here, and also for language-dependent calls. */
/* NOTE(review): the return type, parameter declaration, and braces are on
   elided lines.  Expands (for effect) each cleanup pushed since
   OLD_CLEANUPS, popping cleanups_this_call back to that mark.  */
8763 expand_cleanups_to (old_cleanups)
8766 while (cleanups_this_call != old_cleanups)
8768 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8769 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8770 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8774 /* Expand conditional expressions. */
8776 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8777 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* NOTE(review): the return type, parameter declarations, and braces are on
   elided lines.  Thin wrapper: jump to LABEL when EXP is false.  */
8781 jumpifnot (exp, label)
8785 do_jump (exp, label, NULL_RTX);
8788 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the function header and braces are on elided lines.
   Thin wrapper: jump to LABEL when EXP is true.  */
8795 do_jump (exp, NULL_RTX, label);
8798 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8799 the result is zero, or IF_TRUE_LABEL if the result is one.
8800 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8801 meaning fall through in that case.
8803 do_jump always does any pending stack adjust except when it does not
8804 actually perform a jump. An example where there is no jump
8805 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8807 This function is responsible for optimizing cases such as
8808 &&, || and comparison operators in EXP. */
/* NOTE(review): a great many interior lines are elided in this extract
   (the switch's case labels, many braces, `break`s, returns, and local
   declarations such as COMPARISON and TEMP) -- confirm every branch
   against the full source before relying on this annotation.  */
8811 do_jump (exp, if_false_label, if_true_label)
8813 rtx if_false_label, if_true_label;
8815 register enum tree_code code = TREE_CODE (exp);
8816 /* Some cases need to create a label to jump to
8817 in order to properly fall through.
8818 These cases set DROP_THROUGH_LABEL nonzero. */
8819 rtx drop_through_label = 0;
8824 enum machine_mode mode;
/* Constant operand: jump unconditionally to the matching label (if any). */
8834 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8840 /* This is not true with #pragma weak */
8842 /* The address of something can never be zero. */
8844 emit_jump (if_true_label);
8849 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8850 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8851 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
8854 /* If we are narrowing the operand, we have to do the compare in the
8856 if ((TYPE_PRECISION (TREE_TYPE (exp))
8857 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8859 case NON_LVALUE_EXPR:
8860 case REFERENCE_EXPR:
8865 /* These cannot change zero->non-zero or vice versa. */
8866 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8870 /* This is never less insns than evaluating the PLUS_EXPR followed by
8871 a test and can be longer if the test is eliminated. */
8873 /* Reduce to minus. */
8874 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8875 TREE_OPERAND (exp, 0),
8876 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8877 TREE_OPERAND (exp, 1))));
8878 /* Process as MINUS. */
8882 /* Non-zero iff operands of minus differ. */
8883 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8884 TREE_OPERAND (exp, 0),
8885 TREE_OPERAND (exp, 1)),
8890 /* If we are AND'ing with a small constant, do this comparison in the
8891 smallest type that fits. If the machine doesn't have comparisons
8892 that small, it will be converted back to the wider comparison.
8893 This helps if we are testing the sign bit of a narrower object.
8894 combine can't do this for us because it can't know whether a
8895 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8897 if (! SLOW_BYTE_ACCESS
8898 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8899 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8900 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8901 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8902 && (type = type_for_mode (mode, 1)) != 0
8903 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8904 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8905 != CODE_FOR_nothing))
8907 do_jump (convert (type, exp), if_false_label, if_true_label);
8912 case TRUTH_NOT_EXPR:
/* Logical negation: just swap the two target labels. */
8913 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8916 case TRUTH_ANDIF_EXPR:
8919 tree cleanups, old_cleanups;
/* a && b: if no false label, fall through via a fresh one. */
8921 if (if_false_label == 0)
8922 if_false_label = drop_through_label = gen_label_rtx ();
8924 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8925 seq1 = get_insns ();
8928 old_cleanups = cleanups_this_call;
8930 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8931 seq2 = get_insns ();
/* If the second operand registered cleanups, they must only run when
   that operand was actually evaluated; guard them with a runtime flag. */
8934 cleanups = defer_cleanups_to (old_cleanups);
8937 rtx flag = gen_reg_rtx (word_mode);
8941 /* Flag cleanups as not needed. */
8942 emit_move_insn (flag, const0_rtx);
8945 /* Flag cleanups as needed. */
8946 emit_move_insn (flag, const1_rtx);
8949 /* convert flag, which is an rtx, into a tree. */
8950 cond = make_node (RTL_EXPR);
8951 TREE_TYPE (cond) = integer_type_node;
8952 RTL_EXPR_RTL (cond) = flag;
8953 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
/* Wrap the deferred cleanups so they execute only when FLAG is set. */
8955 new_cleanups = build (COND_EXPR, void_type_node,
8956 truthvalue_conversion (cond),
8957 cleanups, integer_zero_node);
8958 new_cleanups = fold (new_cleanups);
8960 /* Now add in the conditionalized cleanups. */
8962 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8963 (*interim_eh_hook) (NULL_TREE);
8973 case TRUTH_ORIF_EXPR:
8976 tree cleanups, old_cleanups;
/* a || b: if no true label, fall through via a fresh one. */
8978 if (if_true_label == 0)
8979 if_true_label = drop_through_label = gen_label_rtx ();
8981 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8982 seq1 = get_insns ();
8985 old_cleanups = cleanups_this_call;
8987 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8988 seq2 = get_insns ();
/* Same conditional-cleanup dance as TRUTH_ANDIF_EXPR above. */
8991 cleanups = defer_cleanups_to (old_cleanups);
8994 rtx flag = gen_reg_rtx (word_mode);
8998 /* Flag cleanups as not needed. */
8999 emit_move_insn (flag, const0_rtx);
9002 /* Flag cleanups as needed. */
9003 emit_move_insn (flag, const1_rtx);
9006 /* convert flag, which is an rtx, into a tree. */
9007 cond = make_node (RTL_EXPR);
9008 TREE_TYPE (cond) = integer_type_node;
9009 RTL_EXPR_RTL (cond) = flag;
9010 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9012 new_cleanups = build (COND_EXPR, void_type_node,
9013 truthvalue_conversion (cond),
9014 cleanups, integer_zero_node);
9015 new_cleanups = fold (new_cleanups);
9017 /* Now add in the conditionalized cleanups. */
9019 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9020 (*interim_eh_hook) (NULL_TREE);
/* COMPOUND_EXPR (presumably): evaluate first operand for effect only,
   then jump on the second.  TODO confirm -- the case label is elided. */
9032 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9036 do_pending_stack_adjust ();
9037 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9044 int bitsize, bitpos, unsignedp;
9045 enum machine_mode mode;
9050 /* Get description of this reference. We don't actually care
9051 about the underlying object here. */
9052 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9053 &mode, &unsignedp, &volatilep);
/* Narrow bit-field test: jump on it in the smallest workable type. */
9055 type = type_for_size (bitsize, unsignedp);
9056 if (! SLOW_BYTE_ACCESS
9057 && type != 0 && bitsize >= 0
9058 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9059 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9060 != CODE_FOR_nothing))
9062 do_jump (convert (type, exp), if_false_label, if_true_label);
9069 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9070 if (integer_onep (TREE_OPERAND (exp, 1))
9071 && integer_zerop (TREE_OPERAND (exp, 2)))
9072 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9074 else if (integer_zerop (TREE_OPERAND (exp, 1))
9075 && integer_onep (TREE_OPERAND (exp, 2)))
9076 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General COND_EXPR: branch to LABEL1 for the else-arm; both arms
   jump to the caller's labels or fall through via DROP_THROUGH_LABEL. */
9080 register rtx label1 = gen_label_rtx ();
9081 drop_through_label = gen_label_rtx ();
9082 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9083 /* Now the THEN-expression. */
9084 do_jump (TREE_OPERAND (exp, 1),
9085 if_false_label ? if_false_label : drop_through_label,
9086 if_true_label ? if_true_label : drop_through_label);
9087 /* In case the do_jump just above never jumps. */
9088 do_pending_stack_adjust ();
9089 emit_label (label1);
9090 /* Now the ELSE-expression. */
9091 do_jump (TREE_OPERAND (exp, 2),
9092 if_false_label ? if_false_label : drop_through_label,
9093 if_true_label ? if_true_label : drop_through_label);
/* EQ_EXPR: x == 0 is just the negated truth test of x.  Wide or complex
   modes are compared word-by-word. */
9098 if (integer_zerop (TREE_OPERAND (exp, 1)))
9099 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9100 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9103 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9104 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9105 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
9106 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9108 comparison = compare (exp, EQ, EQ);
/* NE_EXPR: mirror of EQ_EXPR with the labels swapped. */
9112 if (integer_zerop (TREE_OPERAND (exp, 1)))
9113 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9114 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9117 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9118 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
9119 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
9120 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9122 comparison = compare (exp, NE, NE);
/* LT: wide integers go word-by-word (swap=1 tests LT via GT machinery). */
9126 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9128 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9129 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9131 comparison = compare (exp, LT, LTU);
/* LE: implemented as NOT (a > b), so labels are swapped. */
9135 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9137 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9138 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9140 comparison = compare (exp, LE, LEU);
/* GT: direct word-by-word greater-than when no wide compare exists. */
9144 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9146 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9147 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9149 comparison = compare (exp, GT, GTU);
/* GE: implemented as NOT (a < b), so labels are swapped. */
9153 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9155 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9156 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9158 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP and compare its value against zero. */
9163 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9165 /* This is not needed any more and causes poor code since it causes
9166 comparisons and tests from non-SI objects to have different code
9168 /* Copy to register to avoid generating bad insns by cse
9169 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9170 if (!cse_not_expected && GET_CODE (temp) == MEM)
9171 temp = copy_to_reg (temp);
9173 do_pending_stack_adjust ();
9174 if (GET_CODE (temp) == CONST_INT)
9175 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9176 else if (GET_CODE (temp) == LABEL_REF)
9177 comparison = const_true_rtx;
9178 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9179 && !can_compare_p (GET_MODE (temp)))
9180 /* Note swapping the labels gives us not-equal. */
9181 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9182 else if (GET_MODE (temp) != VOIDmode)
9183 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9184 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9185 GET_MODE (temp), NULL_RTX, 0);
9190 /* Do any postincrements in the expression that was tested. */
9193 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9194 straight into a conditional jump instruction as the jump condition.
9195 Otherwise, all the work has been done already. */
9197 if (comparison == const_true_rtx)
/* Condition is known true: unconditional jump (if a label was given). */
9200 emit_jump (if_true_label);
9202 else if (comparison == const0_rtx)
/* Condition is known false. */
9205 emit_jump (if_false_label);
9207 else if (comparison)
9208 do_jump_for_compare (comparison, if_false_label, if_true_label);
9210 if (drop_through_label)
9212 /* If do_jump produces code that might be jumped around,
9213 do any stack adjusts from that code, before the place
9214 where control merges in. */
9215 do_pending_stack_adjust ();
9216 emit_label (drop_through_label);
9220 /* Given a comparison expression EXP for values too wide to be compared
9221 with one insn, test the comparison and jump to the appropriate label.
9222 The code of EXP is ignored; we always test GT if SWAP is 0,
9223 and LT if SWAP is 1. */
/* NOTE(review): elided lines include the return type, the SWAP parameter
   declaration, braces, the loop index declaration, and some closing
   punctuation -- confirm against the full source.  SWAP exchanges the two
   operands, which is how LT is expressed through a GT-style word loop.  */
9226 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9229 rtx if_false_label, if_true_label;
9231 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9232 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9233 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9234 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9235 rtx drop_through_label = 0;
9236 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Supply a local fall-through label for any caller label that is null. */
9239 if (! if_true_label || ! if_false_label)
9240 drop_through_label = gen_label_rtx ();
9241 if (! if_true_label)
9242 if_true_label = drop_through_label;
9243 if (! if_false_label)
9244 if_false_label = drop_through_label;
9246 /* Compare a word at a time, high order first. */
9247 for (i = 0; i < nwords; i++)
9250 rtx op0_word, op1_word;
/* Word 0 is most significant on big-endian targets; otherwise walk
   from the top word down. */
9252 if (WORDS_BIG_ENDIAN)
9254 op0_word = operand_subword_force (op0, i, mode);
9255 op1_word = operand_subword_force (op1, i, mode);
9259 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9260 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9263 /* All but high-order word must be compared as unsigned. */
9264 comp = compare_from_rtx (op0_word, op1_word,
9265 (unsignedp || i > 0) ? GTU : GT,
9266 unsignedp, word_mode, NULL_RTX, 0);
9267 if (comp == const_true_rtx)
9268 emit_jump (if_true_label);
9269 else if (comp != const0_rtx)
9270 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9272 /* Consider lower words only if these are equal. */
9273 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9275 if (comp == const_true_rtx)
9276 emit_jump (if_false_label);
9277 else if (comp != const0_rtx)
9278 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the strict greater-than is false. */
9282 emit_jump (if_false_label);
9283 if (drop_through_label)
9284 emit_label (drop_through_label);
9287 /* Compare OP0 with OP1, word at a time, in mode MODE.
9288 UNSIGNEDP says to do unsigned comparison.
9289 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* RTX-level twin of do_jump_by_parts_greater: same word-by-word GT loop,
   but the operands are already rtx values rather than trees. */
9292 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9293 enum machine_mode mode;
9296 rtx if_false_label, if_true_label;
9298 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9299 rtx drop_through_label = 0;
/* A null label means "fall through"; use a common drop-through label. */
9302 if (! if_true_label || ! if_false_label)
9303 drop_through_label = gen_label_rtx ();
9304 if (! if_true_label)
9305 if_true_label = drop_through_label;
9306 if (! if_false_label)
9307 if_false_label = drop_through_label;
9309 /* Compare a word at a time, high order first. */
9310 for (i = 0; i < nwords; i++)
9313 rtx op0_word, op1_word;
9315 if (WORDS_BIG_ENDIAN)
9317 op0_word = operand_subword_force (op0, i, mode);
9318 op1_word = operand_subword_force (op1, i, mode);
9322 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9323 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9326 /* All but high-order word must be compared as unsigned. */
9327 comp = compare_from_rtx (op0_word, op1_word,
9328 (unsignedp || i > 0) ? GTU : GT,
9329 unsignedp, word_mode, NULL_RTX, 0);
9330 if (comp == const_true_rtx)
9331 emit_jump (if_true_label);
9332 else if (comp != const0_rtx)
9333 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9335 /* Consider lower words only if these are equal. */
9336 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9338 if (comp == const_true_rtx)
9339 emit_jump (if_false_label);
9340 else if (comp != const0_rtx)
9341 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* Equality of all words means the strict comparison failed. */
9345 emit_jump (if_false_label);
9346 if (drop_through_label)
9347 emit_label (drop_through_label);
9350 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9351 with one insn, test the comparison and jump to the appropriate label. */
/* Jumps to IF_FALSE_LABEL as soon as any word pair differs; falls through
   to IF_TRUE_LABEL only when every word compared equal. */
9354 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9356 rtx if_false_label, if_true_label;
9358 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9359 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9360 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9361 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9363 rtx drop_through_label = 0;
/* Only the false label may be missing here; substitute a drop-through. */
9365 if (! if_false_label)
9366 drop_through_label = if_false_label = gen_label_rtx ();
9368 for (i = 0; i < nwords; i++)
/* NOTE(review): the per-word EQ below appears to be used as "jump to false
   if NOT equal"; surrounding truncation hides the exact emitted condition. */
9370 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9371 operand_subword_force (op1, i, mode),
9372 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9373 word_mode, NULL_RTX, 0);
9374 if (comp == const_true_rtx)
9375 emit_jump (if_false_label);
9376 else if (comp != const0_rtx)
9377 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* No word differed: the operands are equal. */
9381 emit_jump (if_true_label);
9382 if (drop_through_label)
9383 emit_label (drop_through_label);
9386 /* Jump according to whether OP0 is 0.
9387 We assume that OP0 has an integer mode that is too wide
9388 for the available compare insns. */
/* Word-by-word test of OP0 against zero: jump to IF_FALSE_LABEL on the
   first nonzero word, else to IF_TRUE_LABEL. */
9391 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9393 rtx if_false_label, if_true_label;
9395 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9397 rtx drop_through_label = 0;
9399 if (! if_false_label)
9400 drop_through_label = if_false_label = gen_label_rtx ();
9402 for (i = 0; i < nwords; i++)
/* Compare each word with const0_rtx, unsigned (last arg 1 = unsignedp). */
9404 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9406 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9407 if (comp == const_true_rtx)
9408 emit_jump (if_false_label);
9409 else if (comp != const0_rtx)
9410 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero. */
9414 emit_jump (if_true_label);
9415 if (drop_through_label)
9416 emit_label (drop_through_label);
9419 /* Given a comparison expression in rtl form, output conditional branches to
9420 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* Uses the target's bcc_gen_fctn table to emit a conditional branch.  When
   only a false label is given, emits the branch on the opposite sense and
   then tries to invert the generated jump; if inversion fails it redirects
   the jump to a fresh true label and emits an unconditional jump to the
   false label.  NOTE(review): several lines of this function are missing
   from this view; treat the control flow described here as approximate. */
9423 do_jump_for_compare (comparison, if_false_label, if_true_label)
9424 rtx comparison, if_false_label, if_true_label;
/* Normal case: branch to the true label on the comparison's own sense. */
9428 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9429 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9434 emit_jump (if_false_label);
9436 else if (if_false_label)
/* Remember the last insn before emitting, so the new branch can be found. */
9439 rtx prev = get_last_insn ();
9442 /* Output the branch with the opposite condition. Then try to invert
9443 what is generated. If more than one insn is a branch, or if the
9444 branch is not the last insn written, abort. If we can't invert
9445 the branch, emit make a true label, redirect this jump to that,
9446 emit a jump to the false label and define the true label. */
9448 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9449 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9453 /* Here we get the first insn that was just emitted. It used to be the
9454 case that, on some machines, emitting the branch would discard
9455 the previous compare insn and emit a replacement. This isn't
9456 done anymore, but abort if we see that PREV is deleted. */
9459 insn = get_insns ();
9460 else if (INSN_DELETED_P (prev))
9463 insn = NEXT_INSN (prev);
/* Scan forward for the JUMP_INSN just emitted. */
9465 for (; insn; insn = NEXT_INSN (insn))
9466 if (GET_CODE (insn) == JUMP_INSN)
9473 if (branch != get_last_insn ())
9476 JUMP_LABEL (branch) = if_false_label;
9477 if (! invert_jump (branch, if_false_label))
/* Inversion failed: route the taken branch around an unconditional
   jump to the false label instead. */
9479 if_true_label = gen_label_rtx ();
9480 redirect_jump (branch, if_true_label);
9481 emit_jump (if_false_label);
9482 emit_label (if_true_label);
9487 /* Generate code for a comparison expression EXP
9488 (including code to compute the values to be compared)
9489 and set (CC0) according to the result.
9490 SIGNED_CODE should be the rtx operation for this comparison for
9491 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9493 We force a stack adjustment unless there are currently
9494 things pushed on the stack that aren't yet used. */
/* Thin tree-level wrapper: expands both operands, picks the signed or
   unsigned rtx code from the operand type, and delegates to
   compare_from_rtx. */
9497 compare (exp, signed_code, unsigned_code)
9499 enum rtx_code signed_code, unsigned_code;
9502 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9504 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9505 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9506 register enum machine_mode mode = TYPE_MODE (type);
9507 int unsignedp = TREE_UNSIGNED (type);
9508 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
/* Size rtx is passed only for BLKmode operands (see compare_from_rtx);
   alignment comes from the expression's type. */
9510 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9512 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9513 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9516 /* Like compare but expects the values to compare as two rtx's.
9517 The decision as to signed or unsigned comparison must be made by the caller.
9519 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9522 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9523 size of MODE should be used. */
/* Returns either a folded constant (const_true_rtx / const0_rtx) when the
   comparison can be decided at compile time, or a CODE rtx over cc0 after
   emitting the compare insn. */
9526 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9527 register rtx op0, op1;
9530 enum machine_mode mode;
9536 /* If one operand is constant, make it the second one. Only do this
9537 if the other operand is not constant as well. */
9539 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9540 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping operands requires swapping the comparison code too. */
9545 code = swap_condition (code);
9550 op0 = force_not_mem (op0);
9551 op1 = force_not_mem (op1);
/* Flush pending pushes before emitting the compare. */
9554 do_pending_stack_adjust ();
/* Try constant folding first: two CONST_INTs may simplify outright. */
9556 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9557 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9561 /* There's no need to do this now that combine.c can eliminate lots of
9562 sign extensions. This can be less efficient in certain cases on other
9565 /* If this is a signed equality comparison, we can do it as an
9566 unsigned comparison since zero-extension is cheaper than sign
9567 extension and comparisons with zero are done as unsigned. This is
9568 the case even on machines that can do fast sign extension, since
9569 zero-extension is easier to combine with other operations than
9570 sign-extension is. If we are comparing against a constant, we must
9571 convert it to what it would look like unsigned. */
9572 if ((code == EQ || code == NE) && ! unsignedp
9573 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
/* Mask the constant down to the operand's mode so the unsigned
   comparison sees the same bit pattern. */
9575 if (GET_CODE (op1) == CONST_INT
9576 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9577 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9582 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
/* Result is the condition applied to cc0, for a later conditional jump. */
9584 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9587 /* Generate code to calculate EXP using a store-flag instruction
9588 and return an rtx for the result. EXP is either a comparison
9589 or a TRUTH_NOT_EXPR whose operand is a comparison.
9591 If TARGET is nonzero, store the result there if convenient.
9593 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9596 Return zero if there is no suitable set-flag instruction
9597 available on this machine.
9599 Once expand_expr has been called on the arguments of the comparison,
9600 we are committed to doing the store flag, since it is not safe to
9601 re-evaluate the expression. We emit the store-flag insn by calling
9602 emit_store_flag, but only expand the arguments if we have a reason
9603 to believe that emit_store_flag will be successful. If we think that
9604 it will, but it isn't, we have to simulate the store-flag with a
9605 set/jump/set sequence. */
/* NOTE(review): this view is missing many interior lines (declarations,
   braces, some else-arms); comments below describe only what is visible. */
9608 do_store_flag (exp, target, mode, only_cheap)
9611 enum machine_mode mode;
9615 tree arg0, arg1, type;
9617 enum machine_mode operand_mode;
9621 enum insn_code icode;
9622 rtx subtarget = target;
9623 rtx result, label, pattern, jump_pat;
9625 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9626 result at the end. We can't simply invert the test since it would
9627 have already been inverted if it were valid. This case occurs for
9628 some floating-point comparisons. */
9630 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9631 invert = 1, exp = TREE_OPERAND (exp, 0);
9633 arg0 = TREE_OPERAND (exp, 0);
9634 arg1 = TREE_OPERAND (exp, 1);
9635 type = TREE_TYPE (arg0);
9636 operand_mode = TYPE_MODE (type);
9637 unsignedp = TREE_UNSIGNED (type);
9639 /* We won't bother with BLKmode store-flag operations because it would mean
9640 passing a lot of information to emit_store_flag. */
9641 if (operand_mode == BLKmode)
9647 /* Get the rtx comparison code to use. We know that EXP is a comparison
9648 operation of some type. Some comparisons against 1 and -1 can be
9649 converted to comparisons with zero. Do so here so that the tests
9650 below will be aware that we have a comparison with zero. These
9651 tests will not catch constants in the first operand, but constants
9652 are rarely passed as the first operand. */
9654 switch (TREE_CODE (exp))
/* x < 1 (unsigned/signed) becomes x <= 0, etc.; each case normalizes the
   constant to zero and records the rtx code. */
9663 if (integer_onep (arg1))
9664 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9666 code = unsignedp ? LTU : LT;
9669 if (! unsignedp && integer_all_onesp (arg1))
9670 arg1 = integer_zero_node, code = LT;
9672 code = unsignedp ? LEU : LE;
9675 if (! unsignedp && integer_all_onesp (arg1))
9676 arg1 = integer_zero_node, code = GE;
9678 code = unsignedp ? GTU : GT;
9681 if (integer_onep (arg1))
9682 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9684 code = unsignedp ? GEU : GE;
9690 /* Put a constant second. */
9691 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9693 tem = arg0; arg0 = arg1; arg1 = tem;
9694 code = swap_condition (code);
9697 /* If this is an equality or inequality test of a single bit, we can
9698 do this by shifting the bit being tested to the low-order bit and
9699 masking the result with the constant 1. If the condition was EQ,
9700 we xor it with 1. This does not require an scc insn and is faster
9701 than an scc insn even if we have it. */
9703 if ((code == NE || code == EQ)
9704 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9705 && integer_pow2p (TREE_OPERAND (arg0, 1))
9706 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9708 tree inner = TREE_OPERAND (arg0, 0);
/* exact_log2 of the power-of-2 mask gives the tested bit's position. */
9709 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9710 NULL_RTX, VOIDmode, 0)));
9713 /* If INNER is a right shift of a constant and it plus BITNUM does
9714 not overflow, adjust BITNUM and INNER. */
9716 if (TREE_CODE (inner) == RSHIFT_EXPR
9717 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9718 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9719 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9720 < TYPE_PRECISION (type)))
9722 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9723 inner = TREE_OPERAND (inner, 0);
9726 /* If we are going to be able to omit the AND below, we must do our
9727 operations as unsigned. If we must use the AND, we have a choice.
9728 Normally unsigned is faster, but for some machines signed is. */
9729 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9730 #ifdef LOAD_EXTEND_OP
9731 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Reuse SUBTARGET only if it is a register of the right mode and the
   inner expression cannot clobber it. */
9737 if (subtarget == 0 || GET_CODE (subtarget) != REG
9738 || GET_MODE (subtarget) != operand_mode
9739 || ! safe_from_p (subtarget, inner))
9742 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0. */
9745 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9746 size_int (bitnum), subtarget, ops_unsignedp);
9748 if (GET_MODE (op0) != mode)
9749 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* EQ wants the inverse of the bit; XOR with 1 flips it. */
9751 if ((code == EQ && ! invert) || (code == NE && invert))
9752 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9753 ops_unsignedp, OPTAB_LIB_WIDEN);
9755 /* Put the AND last so it can combine with more things. */
9756 if (bitnum != TYPE_PRECISION (type) - 1)
9757 op0 = expand_and (op0, const1_rtx, subtarget);
9762 /* Now see if we are likely to be able to do this. Return if not. */
9763 if (! can_compare_p (operand_mode))
9765 icode = setcc_gen_code[(int) code];
9766 if (icode == CODE_FOR_nothing
9767 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9769 /* We can only do this if it is one of the special cases that
9770 can be handled without an scc insn. */
9771 if ((code == LT && integer_zerop (arg1))
9772 || (! only_cheap && code == GE && integer_zerop (arg1)))
9774 else if (BRANCH_COST >= 0
9775 && ! only_cheap && (code == NE || code == EQ)
9776 && TREE_CODE (type) != REAL_TYPE
9777 && ((abs_optab->handlers[(int) operand_mode].insn_code
9778 != CODE_FOR_nothing)
9779 || (ffs_optab->handlers[(int) operand_mode].insn_code
9780 != CODE_FOR_nothing)))
/* Expand calls first so argument expansion cannot clobber subtarget. */
9786 preexpand_calls (exp);
9787 if (subtarget == 0 || GET_CODE (subtarget) != REG
9788 || GET_MODE (subtarget) != operand_mode
9789 || ! safe_from_p (subtarget, arg1))
9792 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9793 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9796 target = gen_reg_rtx (mode);
9798 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9799 because, if the emit_store_flag does anything it will succeed and
9800 OP0 and OP1 will not be used subsequently. */
9802 result = emit_store_flag (target, code,
9803 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9804 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9805 operand_mode, unsignedp, 1);
/* Apply the deferred TRUTH_NOT inversion to the 0/1 result. */
9810 result = expand_binop (mode, xor_optab, result, const1_rtx,
9811 result, 0, OPTAB_LIB_WIDEN);
9815 /* If this failed, we have to do this with set/compare/jump/set code. */
9816 if (target == 0 || GET_CODE (target) != REG
9817 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9818 target = gen_reg_rtx (GET_MODE (target));
/* set/jump/set: preload the "true" value, branch around the "false"
   store when the comparison holds. */
9820 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9821 result = compare_from_rtx (op0, op1, code, unsignedp,
9822 operand_mode, NULL_RTX, 0);
9823 if (GET_CODE (result) == CONST_INT)
9824 return (((result == const0_rtx && ! invert)
9825 || (result != const0_rtx && invert))
9826 ? const0_rtx : const1_rtx);
9828 label = gen_label_rtx ();
9829 if (bcc_gen_fctn[(int) code] == 0)
9832 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9833 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9839 /* Generate a tablejump instruction (used for switch statements). */
9841 #ifdef HAVE_tablejump
9843 /* INDEX is the value being switched on, with the lowest value
9844 in the table already subtracted.
9845 MODE is its expected mode (needed if INDEX is constant).
9846 RANGE is the length of the jump table.
9847 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9849 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9850 index value is out of range. */
9853 do_tablejump (index, mode, range, table_label, default_label)
9854 rtx index, range, table_label, default_label;
9855 enum machine_mode mode;
9857 register rtx temp, vector;
9859 /* Do an unsigned comparison (in the proper mode) between the index
9860 expression and the value which represents the length of the range.
9861 Since we just finished subtracting the lower bound of the range
9862 from the index expression, this comparison allows us to simultaneously
9863 check that the original index expression value is both greater than
9864 or equal to the minimum value of the range and less than or equal to
9865 the maximum value of the range. */
9867 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9868 emit_jump_insn (gen_bgtu (default_label));
9870 /* If index is in range, it must fit in Pmode.
9871 Convert to Pmode so we can index with it. */
9873 index = convert_to_mode (Pmode, index, 1);
9875 /* Don't let a MEM slip thru, because then INDEX that comes
9876 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9877 and break_out_memory_refs will go to work on it and mess it up. */
9878 #ifdef PIC_CASE_VECTOR_ADDRESS
9879 if (flag_pic && GET_CODE (index) != REG)
9880 index = copy_to_mode_reg (Pmode, index);
9883 /* If flag_force_addr were to affect this address
9884 it could interfere with the tricky assumptions made
9885 about addresses that contain label-refs,
9886 which may be valid only very near the tablejump itself. */
9887 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9888 GET_MODE_SIZE, because this indicates how large insns are. The other
9889 uses should all be Pmode, because they are addresses. This code
9890 could fail if addresses and insns are not the same size. */
/* Address of the table entry: table_label + index * entry_size. */
9891 index = gen_rtx (PLUS, Pmode,
9892 gen_rtx (MULT, Pmode, index,
9893 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9894 gen_rtx (LABEL_REF, Pmode, table_label));
9895 #ifdef PIC_CASE_VECTOR_ADDRESS
9897 index = PIC_CASE_VECTOR_ADDRESS (index);
9900 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9901 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9902 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
/* The jump table is read-only; mark the load as unchanging. */
9903 RTX_UNCHANGING_P (vector) = 1;
9904 convert_move (temp, vector, 0);
9906 emit_jump_insn (gen_tablejump (temp, table_label));
9908 #ifndef CASE_VECTOR_PC_RELATIVE
9909 /* If we are generating PIC code or if the table is PC-relative, the
9910 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9916 #endif /* HAVE_tablejump */
9919 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9920 to that value is on the top of the stack. The resulting type is TYPE, and
9921 the source declaration is DECL. */
9924 bc_load_memory (type, decl)
9927 enum bytecode_opcode opcode;
9930 /* Bit fields are special. We only know about signed and
9931 unsigned ints, and enums. The latter are treated as
9934 if (DECL_BIT_FIELD (decl))
9935 if (TREE_CODE (type) == ENUMERAL_TYPE
9936 || TREE_CODE (type) == INTEGER_TYPE)
/* zxloadBI = zero-extending bit-field load, sxloadBI = sign-extending. */
9937 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9941 /* See corresponding comment in bc_store_memory(). */
9942 if (TYPE_MODE (type) == BLKmode
9943 || TYPE_MODE (type) == VOIDmode)
/* Non-bit-field scalars: pick the load opcode from the mode table. */
9946 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
/* neverneverland marks modes with no bytecode load; should not happen. */
9948 if (opcode == neverneverland)
9951 bc_emit_bytecode (opcode);
9953 #ifdef DEBUG_PRINT_CODE
9954 fputc ('\n', stderr);
9959 /* Store the contents of the second stack slot to the address in the
9960 top stack slot. DECL is the declaration of the destination and is used
9961 to determine whether we're dealing with a bitfield. */
9964 bc_store_memory (type, decl)
9967 enum bytecode_opcode opcode;
9970 if (DECL_BIT_FIELD (decl))
/* Only integer-like bit fields are supported, as in bc_load_memory. */
9972 if (TREE_CODE (type) == ENUMERAL_TYPE
9973 || TREE_CODE (type) == INTEGER_TYPE)
9979 if (TYPE_MODE (type) == BLKmode)
9981 /* Copy structure. This expands to a block copy instruction, storeBLK.
9982 In addition to the arguments expected by the other store instructions,
9983 it also expects a type size (SImode) on top of the stack, which is the
9984 structure size in size units (usually bytes). The two first arguments
9985 are already on the stack; so we just put the size on level 1. For some
9986 other languages, the size may be variable, this is why we don't encode
9987 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9989 bc_expand_expr (TYPE_SIZE (type));
/* Scalar store: pick the opcode from the per-mode store table. */
9993 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9995 if (opcode == neverneverland)
9998 bc_emit_bytecode (opcode);
10000 #ifdef DEBUG_PRINT_CODE
10001 fputc ('\n', stderr);
10006 /* Allocate local stack space sufficient to hold a value of the given
10007 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10008 integral power of 2. A special case is locals of type VOID, which
10009 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
10010 remapped into the corresponding attribute of SI. */
10013 bc_allocate_local (size, alignment)
10014 int size, alignment;
10017 int byte_alignment;
10022 /* Normalize size and alignment */
/* Void-like size: fall back to one machine word. */
10024 size = UNITS_PER_WORD;
10026 if (alignment < BITS_PER_UNIT)
10027 byte_alignment = 1 << (INT_ALIGN - 1);
10030 byte_alignment = alignment / BITS_PER_UNIT;
/* Round local_vars_size up to the requested byte alignment. */
10032 if (local_vars_size & (byte_alignment - 1))
10033 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
/* The local's "rtx" is just its offset into the locals area. */
10035 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10036 local_vars_size += size;
10042 /* Allocate variable-sized local array. Variable-sized arrays are
10043 actually pointers to the address in memory where they are stored. */
10046 bc_allocate_variable_array (size)
/* Pointer alignment in bytes, derived from PTR_ALIGN (a power of two). */
10050 const int ptralign = (1 << (PTR_ALIGN - 1));
10052 /* Align pointer */
10053 if (local_vars_size & ptralign)
10054 local_vars_size += ptralign - (local_vars_size & ptralign);
10056 /* Note down local space needed: pointer to block; also return
/* Reserve room for the pointer itself; the array data lives elsewhere. */
10059 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10060 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10065 /* Push the machine address for the given external variable offset. */
10067 bc_load_externaddr (externaddr)
/* constP pushes a pointer constant; the label reference is resolved by
   the bytecode linker via the label name and offset. */
10070 bc_emit_bytecode (constP);
10071 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10072 BYTECODE_BC_LABEL (externaddr)->offset);
10074 #ifdef DEBUG_PRINT_CODE
10075 fputc ('\n', stderr);
10084 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10090 /* Like above, but expects an IDENTIFIER. */
10092 bc_load_externaddr_id (id, offset)
/* Guard against identifiers with no name string. */
10096 if (!IDENTIFIER_POINTER (id))
10099 bc_emit_bytecode (constP);
/* The label name must be copied: the labelref keeps the string alive. */
10100 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10102 #ifdef DEBUG_PRINT_CODE
10103 fputc ('\n', stderr);
10108 /* Push the machine address for the given local variable offset. */
10110 bc_load_localaddr (localaddr)
/* localP pushes frame-pointer-relative address; operand is the offset. */
10113 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10117 /* Push the machine address for the given parameter offset.
10118 NOTE: offset is in bits. */
10120 bc_load_parmaddr (parmaddr)
/* argP pushes an argument-area-relative address. */
10123 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10128 /* Convert a[i] into *(a + i). */
10130 bc_canonicalize_array_ref (exp)
10133 tree type = TREE_TYPE (exp);
/* Take the address of the array object (operand 0 of the ARRAY_REF). */
10134 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10135 TREE_OPERAND (exp, 0));
10136 tree index = TREE_OPERAND (exp, 1);
10139 /* Convert the integer argument to a type the same size as a pointer
10140 so the multiply won't overflow spuriously. */
10142 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10143 index = convert (type_for_size (POINTER_SIZE, 0), index);
10145 /* The array address isn't volatile even if the array is.
10146 (Of course this isn't terribly relevant since the bytecode
10147 translator treats nearly everything as volatile anyway.) */
10148 TREE_THIS_VOLATILE (array_adr) = 0;
/* Build *(array_adr + index * sizeof (element)). */
10150 return build1 (INDIRECT_REF, type,
10151 fold (build (PLUS_EXPR,
10152 TYPE_POINTER_TO (type),
10154 fold (build (MULT_EXPR,
10155 TYPE_POINTER_TO (type),
10157 size_in_bytes (type))))));
10161 /* Load the address of the component referenced by the given
10162 COMPONENT_REF expression.
10164 Returns innermost lvalue. */
10167 bc_expand_component_address (exp)
10171 enum machine_mode mode;
10173 HOST_WIDE_INT SIval;
10176 tem = TREE_OPERAND (exp, 1);
10177 mode = DECL_MODE (tem);
10180 /* Compute cumulative bit offset for nested component refs
10181 and array refs, and find the ultimate containing object. */
10183 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10185 if (TREE_CODE (tem) == COMPONENT_REF)
10186 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
/* Constant-index array refs inside the chain also contribute bits. */
10188 if (TREE_CODE (tem) == ARRAY_REF
10189 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10190 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10192 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10193 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10194 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
/* Push the address of the containing object, then adjust it below. */
10199 bc_expand_expr (tem);
10202 /* For bitfields also push their offset and size */
10203 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10204 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
/* Deliberate assignment-in-condition: only add a byte offset if nonzero. */
10206 if (SIval = bitpos / BITS_PER_UNIT)
10207 bc_emit_instruction (addconstPSI, SIval);
10209 return (TREE_OPERAND (exp, 1));
10213 /* Emit code to push two SI constants */
10215 bc_push_offset_and_size (offset, size)
10216 HOST_WIDE_INT offset, size;
/* Pushed in this order so SIZE ends up on top of the stack. */
10218 bc_emit_instruction (constSI, offset);
10219 bc_emit_instruction (constSI, size);
10223 /* Emit byte code to push the address of the given lvalue expression to
10224 the stack. If it's a bit field, we also push offset and size info.
10226 Returns innermost component, which allows us to determine not only
10227 its type, but also whether it's a bitfield. */
/* NOTE(review): the switch's case labels for several arms are missing in
   this view; the decl-handling arms below are inferred from their bodies. */
10230 bc_expand_address (exp)
10234 if (!exp || TREE_CODE (exp) == ERROR_MARK)
10238 switch (TREE_CODE (exp))
/* ARRAY_REF: rewrite as *(a + i) and recurse. */
10242 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10244 case COMPONENT_REF:
10246 return (bc_expand_component_address (exp));
/* INDIRECT_REF: the operand's value IS the address we want. */
10250 bc_expand_expr (TREE_OPERAND (exp, 0));
10252 /* For variable-sized types: retrieve pointer. Sometimes the
10253 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
10254 also make sure we have an operand, just in case... */
10256 if (TREE_OPERAND (exp, 0)
10257 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10258 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10259 bc_emit_instruction (loadP);
10261 /* If packed, also return offset and size */
10262 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10264 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10265 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10267 return (TREE_OPERAND (exp, 0));
10269 case FUNCTION_DECL:
/* Function address: resolved through its assembler name label. */
10271 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10272 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
/* PARM_DECL (label missing in this view): address in the argument area. */
10277 bc_load_parmaddr (DECL_RTL (exp));
10279 /* For variable-sized types: retrieve pointer */
10280 if (TYPE_SIZE (TREE_TYPE (exp))
10281 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10282 bc_emit_instruction (loadP);
10284 /* If packed, also return offset and size */
10285 if (DECL_BIT_FIELD (exp))
10286 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10287 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* RESULT_DECL arm (presumably): push the return-value pointer. */
10293 bc_emit_instruction (returnP);
/* VAR_DECL arm: external vs. local storage decides the addressing form. */
10299 if (BYTECODE_LABEL (DECL_RTL (exp)))
10300 bc_load_externaddr (DECL_RTL (exp));
10303 if (DECL_EXTERNAL (exp))
10304 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10305 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10307 bc_load_localaddr (DECL_RTL (exp));
10309 /* For variable-sized types: retrieve pointer */
10310 if (TYPE_SIZE (TREE_TYPE (exp))
10311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10312 bc_emit_instruction (loadP);
10314 /* If packed, also return offset and size */
10315 if (DECL_BIT_FIELD (exp))
10316 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10317 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* Constant arm: emit it into the constant pool and push its address. */
10325 bc_emit_bytecode (constP);
10326 r = output_constant_def (exp);
10327 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10329 #ifdef DEBUG_PRINT_CODE
10330 fputc ('\n', stderr);
10341 /* Most lvalues don't have components. */
10346 /* Emit a type code to be used by the runtime support in handling
10347 parameter passing. The type code consists of the machine mode
10348 plus the minimal alignment shifted left 8 bits. */
10351 bc_runtime_type_code (type)
10356 switch (TREE_CODE (type))
10362 case ENUMERAL_TYPE:
/* Low 8 bits: machine mode; upper bits: the type's alignment. */
10366 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
/* Result is returned as an INTEGER_CST tree. */
10378 return build_int_2 (val, 0);
10382 /* Generate constructor label */
10384 bc_gen_constr_label ()
10386 static int label_counter;
10387 static char label[20];
/* "*LR<n>": the leading '*' marks an assembler-internal label name. */
10389 sprintf (label, "*LR%d", label_counter++);
/* Copy onto the permanent obstack; the static buffer is reused. */
10391 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10395 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10396 expand the constructor data as static data, and push a pointer to it.
10397 The pointer is put in the pointer table and is retrieved by a constP
10398 bytecode instruction. We then loop and store each constructor member in
10399 the corresponding component. Finally, we return the original pointer on
/* NOTE(review): this region is a sampled extraction of the original
   file -- the return-type line, the "tree constr" parameter
   declaration, braces, "else" keywords, and the local declarations of
   l, elt, i, bitsize, bitpos and unsignedp fall on lines that are not
   visible here.  Do not read the visible lines as contiguous code.  */
10403 bc_expand_constructor (constr)
/* PTROFFS: offset of this constructor's pointer within the pointer
   table, as returned by bc_define_pointer below.  */
10407 HOST_WIDE_INT ptroffs;
10411 /* Literal constructors are handled as constants, whereas
10412 non-literals are evaluated and stored element by element
10413 into the data segment. */
10415 /* Allocate space in proper segment and push pointer to space on stack.
/* Fresh label under which the constructor's static data is emitted.  */
10418 l = bc_gen_constr_label ();
10420 if (TREE_CONSTANT (constr))
/* Literal case: the whole constructor is emitted as fully
   initialized static data under label L.  (The section-selection
   call presumably on the missing preceding line -- verify.)  */
10424 bc_emit_const_labeldef (l);
10425 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
/* Non-literal case: only reserve data space here; the element values
   are stored at run time by the loops further down.  */
10431 bc_emit_data_labeldef (l);
10432 bc_output_data_constructor (constr);
10436 /* Add reference to pointer table and recall pointer to stack;
10437 this code is common for both types of constructors: literals
10438 and non-literals. */
10440 ptroffs = bc_define_pointer (l);
10441 bc_emit_instruction (constP, ptroffs);
10443 /* This is all that has to be done if it's a literal. */
/* For TREE_CONSTANT constructors the function presumably returns
   here (the "return;" line is among those not visible).  */
10444 if (TREE_CONSTANT (constr))
10448 /* At this point, we have the pointer to the structure on top of the stack.
10449 Generate sequences of store_memory calls for the constructor. */
10451 /* constructor type is structure */
10452 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10456 /* If the constructor has fewer fields than the structure,
10457 clear the whole structure first. */
10459 if (list_length (CONSTRUCTOR_ELTS (constr))
10460 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
/* Duplicate the target pointer, push the structure's byte size, and
   clear the whole block so fields not named in the constructor read
   as zero.  */
10462 bc_emit_instruction (duplicate);
10463 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10464 bc_emit_instruction (clearBLK);
10467 /* Store each element of the constructor into the corresponding
10468 field of TARGET. */
10470 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
/* TREE_PURPOSE of each constructor element is the FIELD_DECL,
   TREE_VALUE the initializer expression.  */
10472 register tree field = TREE_PURPOSE (elt);
10473 register enum machine_mode mode;
/* Field geometry in bits, taken straight from the FIELD_DECL.  */
10478 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10479 mode = DECL_MODE (field);
10480 unsignedp = TREE_UNSIGNED (field);
10482 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
/* NOTE(review): the argument lines between the visible ones (the
   value_mode/unsignedp arguments of bc_store_field) are among the
   lines missing from this extraction.  */
10484 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10485 /* The alignment of TARGET is
10486 at least what its type requires. */
10488 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10489 int_size_in_bytes (TREE_TYPE (constr)));
10494 /* Constructor type is array */
10495 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
/* Index range of the array domain -- used only to compute the
   element count for the "clear first" test below.  */
10499 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10500 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10501 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10502 tree elttype = TREE_TYPE (TREE_TYPE (constr));
10504 /* If the constructor has fewer fields than the structure,
10505 clear the whole structure first. */
10507 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
/* Same clear sequence as the RECORD_TYPE case: dup pointer, push
   total byte size, clear block.  */
10509 bc_emit_instruction (duplicate);
10510 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10511 bc_emit_instruction (clearBLK);
10515 /* Store each element of the constructor into the corresponding
10516 element of TARGET, determined by counting the elements. */
/* I counts elements so that BITPOS can be derived from the element
   size; array elements all share ELTTYPE's mode and size.  */
10518 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
10520 elt = TREE_CHAIN (elt), i++)
10522 register enum machine_mode mode;
10527 mode = TYPE_MODE (elttype);
10528 bitsize = GET_MODE_BITSIZE (mode);
10529 unsignedp = TREE_UNSIGNED (elttype);
10531 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10532 /* * TYPE_SIZE_UNIT (elttype) */ );
/* NOTE(review): as above, the value_mode/unsignedp argument lines of
   this call are not visible in this extraction.  */
10534 bc_store_field (elt, bitsize, bitpos, mode,
10535 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10536 /* The alignment of TARGET is
10537 at least what its type requires. */
10539 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10540 int_size_in_bytes (TREE_TYPE (constr)));
10547 /* Store the value of EXP (an expression tree) into member FIELD of
10548 structure at address on stack, which has type TYPE, mode MODE and
10549 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
10552 ALIGN is the alignment that TARGET is known to have, measured in bytes.
10553 TOTAL_SIZE is its size in bytes, or -1 if variable. */
/* NOTE(review): sampled extraction -- the return-type line, the
   remaining parameter declarations (unsignedp, align, total_size),
   braces and the "else" keyword are on lines not visible here.  */
10556 bc_store_field (field, bitsize, bitpos, mode, exp, type,
10557 value_mode, unsignedp, align, total_size)
10558 int bitsize, bitpos;
10559 enum machine_mode mode;
10560 tree field, exp, type;
10561 enum machine_mode value_mode;
10567 /* Expand expression and copy pointer */
/* Push EXP's value, then "over" copies the target pointer above it so
   both value and destination address are available for the store.  */
10568 bc_expand_expr (exp);
10569 bc_emit_instruction (over);
10572 /* If the component is a bit field, we cannot use addressing to access
10573 it. Use bit-field techniques to store in it. */
10575 if (DECL_BIT_FIELD (field))
10577 bc_store_bit_field (bitpos, bitsize, unsignedp);
10581 /* Not bit field */
/* Non-bit-field members are assumed to start on a byte boundary, so
   the bit position converts exactly to a byte offset -- TODO confirm
   the division never truncates here.  */
10583 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10585 /* Advance pointer to the desired member */
10587 bc_emit_instruction (addconstPSI, offset);
/* Store the value through the (now adjusted) pointer.  */
10590 bc_store_memory (type, field);
10595 /* Store SI/SU in bitfield */
10597 bc_store_bit_field (offset, size, unsignedp)
10598 int offset, size, unsignedp;
10600 /* Push bitfield offset and size */
10601 bc_push_offset_and_size (offset, size);
10604 bc_emit_instruction (sstoreBI);
10608 /* Load SI/SU from bitfield */
10610 bc_load_bit_field (offset, size, unsignedp)
10611 int offset, size, unsignedp;
10613 /* Push bitfield offset and size */
10614 bc_push_offset_and_size (offset, size);
10616 /* Load: sign-extend if signed, else zero-extend */
10617 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10621 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
10622 (adjust stack pointer upwards), negative means add that number of
10623 levels (adjust the stack pointer downwards). Only positive values
10624 normally make sense. */
10627 bc_adjust_stack (nlevels)
10636 bc_emit_instruction (drop);
10639 bc_emit_instruction (drop);
10644 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10645 stack_depth -= nlevels;
10648 #if defined (VALIDATE_STACK_FOR_BC)
10649 VALIDATE_STACK_FOR_BC ();