/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
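/* For example, CEIL (10, 4) is 3: ten bytes occupy three 4-byte words.  */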
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED      /* If it's last to first */
#endif
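/* A minimal sketch of how a caller honors this macro; the args array,
   nargs and expand_one_arg are hypothetical names, not part of this file:

        #ifdef PUSH_ARGS_REVERSED
          for (i = nargs - 1; i >= 0; i--)
            expand_one_arg (args[i]);
        #else
          for (i = 0; i < nargs; i++)
            expand_one_arg (args[i]);
        #endif
*/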
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
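/* For example, with BITS_PER_UNIT == 8 and STACK_BOUNDARY == 64,
   STACK_BYTES is 8.  */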
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
extern tree get_set_constructor_words PROTO((tree, HOST_WIDE_INT*, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
        mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}
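/* In effect, the loop above asks recog whether a pattern such as

        (set (reg:SI 0) (mem:SI (reg:SI sp)))

   matches some insn; register number 0 and SImode here are illustrative
   only.  If any hard register can be loaded or stored directly in a
   given mode, the corresponding direct_load/direct_store entry
   becomes 1.  */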
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
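/* The QUEUED rtx built above has the shape

        (queued VAR nil nil BODY NEXT)

   where the two nil slots are filled in later: QUEUED_INSN with the
   increment insn once emit_queue emits it, and QUEUED_COPY, on demand,
   with a copy of the pre-increment value (see protect_from_queue).  */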
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
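/* A minimal sketch of the intended usage (the names op0 and target are
   illustrative only):

        op0 = protect_from_queue (op0, 0);        read-only use
        target = protect_from_queue (target, 1);  will be modified
        emit_insn (gen_move_insn (target, op0));

   with no intervening emit_queue between these calls and the insn.  */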
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (to_real)
    {
      rtx value;

#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif
      /* No insn above handled it; do the conversion via a library
         call, chosen from the mode pair.  */

      libcall = (rtx) 0;
      if (from_mode == SFmode)
        {
          if (to_mode == DFmode)
            libcall = extendsfdf2_libfunc;
          else if (to_mode == XFmode)
            libcall = extendsfxf2_libfunc;
          else if (to_mode == TFmode)
            libcall = extendsftf2_libfunc;
        }
      else if (from_mode == DFmode)
        {
          if (to_mode == SFmode)
            libcall = truncdfsf2_libfunc;
          else if (to_mode == XFmode)
            libcall = extenddfxf2_libfunc;
          else if (to_mode == TFmode)
            libcall = extenddftf2_libfunc;
        }
      else if (from_mode == XFmode)
        {
          if (to_mode == SFmode)
            libcall = truncxfsf2_libfunc;
          else if (to_mode == DFmode)
            libcall = truncxfdf2_libfunc;
        }
      else if (from_mode == TFmode)
        {
          if (to_mode == SFmode)
            libcall = trunctfsf2_libfunc;
          else if (to_mode == DFmode)
            libcall = trunctfdf2_libfunc;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */                       /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
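/* A typical use of convert_move, widening a QImode value into an
   SImode register with sign extension (a sketch, not code from this
   file; qi_value stands for any QImode rtx):

        rtx si_reg = gen_reg_rtx (SImode);
        convert_move (si_reg, qi_value, 0);

   Passing 1 instead of 0 would request zero extension.  */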
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
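/* A worked instance of the CONST_INT case above: converting the
   QImode constant 0xff (WIDTH == 8) to a wider integer mode yields
   GEN_INT (-1) when UNSIGNEDP is zero, because bit 7 is set, and
   GEN_INT (255) when UNSIGNEDP is nonzero.  */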
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
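/* For instance, on a 32-bit target with MOVE_MAX == 4 and SImode,
   HImode and QImode all having mov patterns, copying LEN == 7 with
   ALIGN == 4 emits one SImode, one HImode and one QImode move:
   7 = 4 + 2 + 1.  */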
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
               ? gen_rtx (MEM, mode, data->from_addr)
               : change_address (data->from, mode,
                                 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= GET_MODE_MASK (mode)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#endif
    }
}
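/* A typical call (a sketch; DST and SRC are BLKmode MEMs obtained
   elsewhere):

        emit_block_move (dst, src, GEN_INT (16), 4);

   copies 16 bytes assuming at most 4-byte alignment: by pieces if
   that takes fewer than MOVE_RATIO insns, else with a movstr* insn,
   else through the memcpy/bcopy library call above.  */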
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx (REG, word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
               gen_rtx (USE, VOIDmode, reg), *call_fusage);
}
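/* The list built here has the shape

        (expr_list (use (reg A)) (expr_list (use (reg B)) ...))

   and is typically attached to a call insn as its
   CALL_INSN_FUNCTION_USAGE.  */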
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_realpart (submode, x), gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
        }

      return get_last_insn ();
    }
  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
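/* On a machine where the stack grows downward, so that STACK_PUSH_CODE
   is PRE_DEC, gen_push_operand yields

        (pre_dec:SI (reg sp))

   (SImode standing in for Pmode here); callers wrap it in a MEM, as in
   gen_rtx (MEM, BLKmode, gen_push_operand ()) below.  */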
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

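/* Example of the PARTIAL/REG rules above: with 32-bit words, pushing a
   12-byte argument with PARTIAL == 1 and REG == some hard register
   sends the first word to that register and pushes only the remaining
   8 bytes; the stack space used shrinks by one word.  This is a reading
   of the rules above, not a transcript from any particular target.  */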
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  rtx temp;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (used)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                            INTVAL (size), align);
          else
            {
              /* Try the most limited insn first, because there's no point
                 including more than one in the machine description unless
                 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
              if (HAVE_movstrqi
                  && GET_CODE (size) == CONST_INT
                  && ((unsigned) INTVAL (size)
                      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
                {
                  rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                          xinner, size, GEN_INT (align));
                  if (pat)
                    {
                      emit_insn (pat);
                      goto ret;
                    }
                }
#endif
#ifdef HAVE_movstrhi
              if (HAVE_movstrhi
                  && GET_CODE (size) == CONST_INT
                  && ((unsigned) INTVAL (size)
                      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
                {
                  rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                          xinner, size, GEN_INT (align));
                  if (pat)
                    {
                      emit_insn (pat);
                      goto ret;
                    }
                }
#endif
#ifdef HAVE_movstrsi
              if (HAVE_movstrsi)
                {
                  rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                          xinner, size, GEN_INT (align));
                  if (pat)
                    {
                      emit_insn (pat);
                      goto ret;
                    }
                }
#endif
#ifdef HAVE_movstrdi
              if (HAVE_movstrdi)
                {
                  rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                          xinner, size, GEN_INT (align));
                  if (pat)
                    {
                      emit_insn (pat);
                      goto ret;
                    }
                }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
#endif

              /* Make inhibit_defer_pop nonzero around the library call
                 to force it to pop the bcopy-arguments right away.  */
              NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
              emit_library_call (memcpy_libfunc, 0,
                                 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                                 convert_to_mode (TYPE_MODE (sizetype),
                                                  size, TREE_UNSIGNED (sizetype)),
                                 TYPE_MODE (sizetype));
#else
              emit_library_call (bcopy_libfunc, 0,
                                 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                                 convert_to_mode (TYPE_MODE (sizetype),
                                                  size, TREE_UNSIGNED (sizetype)),
                                 TYPE_MODE (sizetype));
#endif
              OK_DEFER_POP;
            }
        }
    }
2311 else if (partial > 0)
2313 /* Scalar partly in registers. */
2315 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2318 /* # words of start of argument
2319 that we must make space for but need not store. */
2320 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2321 int args_offset = INTVAL (args_so_far);
2324 /* Push padding now if padding above and stack grows down,
2325 or if padding below and stack grows up.
2326 But if space already allocated, this has already been done. */
2327 if (extra && args_addr == 0
2328 && where_pad != none && where_pad != stack_direction)
2329 anti_adjust_stack (GEN_INT (extra));
2331 /* If we make space by pushing it, we might as well push
2332 the real data. Otherwise, we can leave OFFSET nonzero
2333 and leave the space uninitialized. */
2337 /* Now NOT_STACK gets the number of words that we don't need to
2338 allocate on the stack. */
2339 not_stack = partial - offset;
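/* Worked example (an illustration, not original text): with
   BITS_PER_WORD == 32 and PARM_BOUNDARY == 64, a scalar passed with
   PARTIAL == 3 words in registers gives

       offset    = 3 % (64 / 32) = 1
       not_stack = 3 - 1         = 2

   so two words live purely in registers, while the third word gets
   stack space that is skipped by the loop below and left
   uninitialized when the space was preallocated.  */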
2341 /* If the partial register-part of the arg counts in its stack size,
2342 skip the part of stack space corresponding to the registers.
2343 Otherwise, start copying to the beginning of the stack space,
2344 by setting SKIP to 0. */
2345 #ifndef REG_PARM_STACK_SPACE
2351 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2352 x = validize_mem (force_const_mem (mode, x));
2354 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2355 SUBREGs of such registers are not allowed. */
2356 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2357 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2358 x = copy_to_reg (x);
2360 /* Loop over all the words allocated on the stack for this arg. */
2361 /* We can do it by words, because any scalar bigger than a word
2362 has a size that is a multiple of a word. */
2363 #ifndef PUSH_ARGS_REVERSED
2364 for (i = not_stack; i < size; i++)
2366 for (i = size - 1; i >= not_stack; i--)
2368 if (i >= not_stack + offset)
2369 emit_push_insn (operand_subword_force (x, i, mode),
2370 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2372 GEN_INT (args_offset + ((i - not_stack + skip)
2373 * UNITS_PER_WORD)));
2379 /* Push padding now if padding above and stack grows down,
2380 or if padding below and stack grows up.
2381 But if space already allocated, this has already been done. */
2382 if (extra && args_addr == 0
2383 && where_pad != none && where_pad != stack_direction)
2384 anti_adjust_stack (GEN_INT (extra));
2386 #ifdef PUSH_ROUNDING
2388 addr = gen_push_operand ();
2391 if (GET_CODE (args_so_far) == CONST_INT)
2393 = memory_address (mode,
2394 plus_constant (args_addr, INTVAL (args_so_far)));
2396 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2399 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2403 /* If part should go in registers, copy that part
2404 into the appropriate registers. Do this now, at the end,
2405 since mem-to-mem copies above may do function calls. */
2406 if (partial > 0 && reg != 0)
2407 move_block_to_reg (REGNO (reg), x, partial, mode);
2409 if (extra && args_addr == 0 && where_pad == stack_direction)
2410 anti_adjust_stack (GEN_INT (extra));
2413 /* Expand an assignment that stores the value of FROM into TO.
2414 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2415 (This may contain a QUEUED rtx;
2416 if the value is constant, this rtx is a constant.)
2417 Otherwise, the returned value is NULL_RTX.
2419 SUGGEST_REG is no longer actually used.
2420 It used to mean, copy the value through a register
2421 and return that register, if that is possible.
2422 We now use WANT_VALUE to decide whether to do this. */
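/* For instance (a sketch of the cases handled below, not original
   text): a C assignment `s.f = x' takes the COMPONENT_REF branch and
   ends in store_field; `val = setjmp (buf)' takes the CALL_EXPR
   branch that expands the call first; a plain `v = x' falls through
   to store_expr at the end.  */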
2425 expand_assignment (to, from, want_value, suggest_reg)
2430 register rtx to_rtx = 0;
2433 /* Don't crash if the lhs of the assignment was erroneous. */
2435 if (TREE_CODE (to) == ERROR_MARK)
2437 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2438 return want_value ? result : NULL_RTX;
2441 if (output_bytecode)
2443 tree dest_innermost;
2445 bc_expand_expr (from);
2446 bc_emit_instruction (duplicate);
2448 dest_innermost = bc_expand_address (to);
2450 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2451 take care of it here. */
2453 bc_store_memory (TREE_TYPE (to), dest_innermost);
2457 /* Assignment of a structure component needs special treatment
2458 if the structure component's rtx is not simply a MEM.
2459 Assignment of an array element at a constant index, and assignment of
2460 an array element in an unaligned packed structure field, have the same problem. */
2463 if (TREE_CODE (to) == COMPONENT_REF
2464 || TREE_CODE (to) == BIT_FIELD_REF
2465 || (TREE_CODE (to) == ARRAY_REF
2466 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2467 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2468 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2470 enum machine_mode mode1;
2480 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2481 &mode1, &unsignedp, &volatilep);
2483 /* If we are going to use store_bit_field and extract_bit_field,
2484 make sure to_rtx will be safe for multiple use. */
2486 if (mode1 == VOIDmode && want_value)
2487 tem = stabilize_reference (tem);
2489 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2490 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2493 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2495 if (GET_CODE (to_rtx) != MEM)
2497 to_rtx = change_address (to_rtx, VOIDmode,
2498 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2499 force_reg (Pmode, offset_rtx)));
2500 /* If we have a variable offset, the known alignment
2501 is only that of the innermost structure containing the field.
2502 (Actually, we could sometimes do better by using the
2503 align of an element of the innermost array, but no need.) */
2504 if (TREE_CODE (to) == COMPONENT_REF
2505 || TREE_CODE (to) == BIT_FIELD_REF)
2507 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2511 if (GET_CODE (to_rtx) == MEM)
2512 MEM_VOLATILE_P (to_rtx) = 1;
2513 #if 0 /* This was turned off because, when a field is volatile
2514 in an object which is not volatile, the object may be in a register,
2515 and then we would abort over here. */
2521 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2523 /* Spurious cast makes HPUX compiler happy. */
2524 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2527 /* Required alignment of containing datum. */
2529 int_size_in_bytes (TREE_TYPE (tem)));
2530 preserve_temp_slots (result);
2534 /* If the value is meaningful, convert RESULT to the proper mode.
2535 Otherwise, return nothing. */
2536 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2537 TYPE_MODE (TREE_TYPE (from)),
2539 TREE_UNSIGNED (TREE_TYPE (to)))
2543 /* If the rhs is a function call and its value is not an aggregate,
2544 call the function before we start to compute the lhs.
2545 This is needed for correct code for cases such as
2546 val = setjmp (buf) on machines where reference to val
2547 requires loading up part of an address in a separate insn.
2549 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2550 a promoted variable where the zero- or sign-extension needs to be done.
2551 Handling this in the normal way is safe because no computation is done before the call. */
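/* Sketch of the hazard being avoided (an interpretation, not
   original text): if part of VAL's address were loaded into a pseudo
   before the call in `val = setjmp (buf)', the second return from
   setjmp, via longjmp, could find that pseudo clobbered; expanding
   the call before any lhs computation means no pre-call temporary is
   relied upon.  */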
2553 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2554 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2559 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2561 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2563 if (GET_MODE (to_rtx) == BLKmode)
2565 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2566 emit_block_move (to_rtx, value, expr_size (from), align);
2569 emit_move_insn (to_rtx, value);
2570 preserve_temp_slots (to_rtx);
2573 return want_value ? to_rtx : NULL_RTX;
2576 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2577 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2580 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2582 /* Don't move directly into a return register. */
2583 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2588 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2589 emit_move_insn (to_rtx, temp);
2590 preserve_temp_slots (to_rtx);
2593 return want_value ? to_rtx : NULL_RTX;
2596 /* In case we are returning the contents of an object which overlaps
2597 the place the value is being stored, use a safe function when copying
2598 a value through a pointer into a structure value return block. */
2599 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2600 && current_function_returns_struct
2601 && !current_function_returns_pcc_struct)
2606 size = expr_size (from);
2607 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2609 #ifdef TARGET_MEM_FUNCTIONS
2610 emit_library_call (memcpy_libfunc, 0,
2611 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2612 XEXP (from_rtx, 0), Pmode,
2613 convert_to_mode (TYPE_MODE (sizetype),
2614 size, TREE_UNSIGNED (sizetype)),
2615 TYPE_MODE (sizetype));
2617 emit_library_call (bcopy_libfunc, 0,
2618 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2619 XEXP (to_rtx, 0), Pmode,
2620 convert_to_mode (TYPE_MODE (sizetype),
2621 size, TREE_UNSIGNED (sizetype)),
2622 TYPE_MODE (sizetype));
2625 preserve_temp_slots (to_rtx);
2628 return want_value ? to_rtx : NULL_RTX;
2631 /* Compute FROM and store the value in the rtx we got. */
2634 result = store_expr (from, to_rtx, want_value);
2635 preserve_temp_slots (result);
2638 return want_value ? result : NULL_RTX;
2641 /* Generate code for computing expression EXP,
2642 and storing the value into TARGET.
2643 TARGET may contain a QUEUED rtx.
2645 If WANT_VALUE is nonzero, return a copy of the value
2646 not in TARGET, so that we can be sure to use the proper
2647 value in a containing expression even if TARGET has something
2648 else stored in it. If possible, we copy the value through a pseudo
2649 and return that pseudo. Or, if the value is constant, we try to
2650 return the constant. In some cases, we return a pseudo
2651 copied *from* TARGET.
2653 If the mode is BLKmode then we may return TARGET itself.
2654 It turns out that in BLKmode it doesn't cause a problem,
2655 because C has no operators that could combine two different
2656 assignments into the same BLKmode object with different values
2657 with no sequence point.  Will other languages need this to be fixed?
2660 If WANT_VALUE is 0, we return NULL, to make sure
2661 to catch quickly any cases where the caller uses the value
2662 and fails to set WANT_VALUE. */
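/* Usage sketch (an illustration, not original text):
   expand_assignment above does

       result = store_expr (from, to_rtx, want_value);

   and returns RESULT to its caller only when WANT_VALUE is nonzero,
   which matches the contract described here.  */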
2665 store_expr (exp, target, want_value)
2667 register rtx target;
2671 int dont_return_target = 0;
2673 if (TREE_CODE (exp) == COMPOUND_EXPR)
2675 /* Perform first part of compound expression, then assign from second
2677 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2679 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2681 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2683 /* For conditional expression, get safe form of the target. Then
2684 test the condition, doing the appropriate assignment on either
2685 side. This avoids the creation of unnecessary temporaries.
2686 For non-BLKmode, it is more efficient not to do this. */
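/* E.g. (illustrative): for a BLKmode assignment `s = p ? a : b' the
   code below emits, in effect,

       jumpifnot (p) -> lab1;  s = a;  goto lab2;
       lab1:  s = b;
       lab2:

   storing into S directly on both arms rather than through a
   temporary.  */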
2688 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2691 target = protect_from_queue (target, 1);
2694 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2695 store_expr (TREE_OPERAND (exp, 1), target, 0);
2697 emit_jump_insn (gen_jump (lab2));
2700 store_expr (TREE_OPERAND (exp, 2), target, 0);
2704 return want_value ? target : NULL_RTX;
2706 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2707 && GET_MODE (target) != BLKmode)
2708 /* If target is in memory and caller wants value in a register instead,
2709 arrange that. Pass TARGET as target for expand_expr so that,
2710 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2711 We know expand_expr will not use the target in that case.
2712 Don't do this if TARGET is volatile because we are supposed
2713 to write it and then read it. */
2715 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2716 GET_MODE (target), 0);
2717 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2718 temp = copy_to_reg (temp);
2719 dont_return_target = 1;
2721 else if (queued_subexp_p (target))
2722 /* If target contains a postincrement, let's not risk
2723 using it as the place to generate the rhs. */
2725 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2727 /* Expand EXP into a new pseudo. */
2728 temp = gen_reg_rtx (GET_MODE (target));
2729 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2732 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2734 /* If target is volatile, ANSI requires accessing the value
2735 *from* the target, if it is accessed. So make that happen.
2736 In no case return the target itself. */
2737 if (! MEM_VOLATILE_P (target) && want_value)
2738 dont_return_target = 1;
2740 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2741 /* If this is a scalar in a register that is stored in a wider mode
2742 than the declared mode, compute the result into its declared mode
2743 and then convert to the wider mode. Our value is the computed
2746 /* If we don't want a value, we can do the conversion inside EXP,
2747 which will often result in some optimizations. */
2749 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2750 SUBREG_PROMOTED_UNSIGNED_P (target)),
2753 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2755 /* If TEMP is a volatile MEM and we want a result value, make
2756 the access now so it gets done only once. */
2757 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2758 temp = copy_to_reg (temp);
2760 /* If TEMP is a VOIDmode constant, use convert_modes to make
2761 sure that we properly convert it. */
2762 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2763 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2764 TYPE_MODE (TREE_TYPE (exp)), temp,
2765 SUBREG_PROMOTED_UNSIGNED_P (target));
2767 convert_move (SUBREG_REG (target), temp,
2768 SUBREG_PROMOTED_UNSIGNED_P (target));
2769 return want_value ? temp : NULL_RTX;
2773 temp = expand_expr (exp, target, GET_MODE (target), 0);
2774 /* Return TARGET if it's a specified hardware register.
2775 If TARGET is a volatile mem ref, either return TARGET
2776 or return a reg copied *from* TARGET; ANSI requires this.
2778 Otherwise, if TEMP is not TARGET, return TEMP
2779 if it is constant (for efficiency),
2780 or if we really want the correct value. */
2781 if (!(target && GET_CODE (target) == REG
2782 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2783 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2785 && (CONSTANT_P (temp) || want_value))
2786 dont_return_target = 1;
2789 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2790 the same as that of TARGET, adjust the constant. This is needed, for
2791 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
2793 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2794 && TREE_CODE (exp) != ERROR_MARK
2795 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2796 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2797 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2799 /* If value was not generated in the target, store it there.
2800 Convert the value to TARGET's type first if necessary. */
2802 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2804 target = protect_from_queue (target, 1);
2805 if (GET_MODE (temp) != GET_MODE (target)
2806 && GET_MODE (temp) != VOIDmode)
2808 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2809 if (dont_return_target)
2811 /* In this case, we will return TEMP,
2812 so make sure it has the proper mode.
2813 But don't forget to store the value into TARGET. */
2814 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2815 emit_move_insn (target, temp);
2818 convert_move (target, temp, unsignedp);
2821 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2823 /* Handle copying a string constant into an array.
2824 The string constant may be shorter than the array.
2825 So copy just the string's actual length, and clear the rest. */
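/* Worked example (not in the original): for `char buf[8] = "hi";'
   the target size is 8 while TREE_STRING_LENGTH is 3 ("hi" plus the
   terminating null), so 3 bytes are block-moved and the remaining
   5 bytes of BUF are cleared by the code below.  */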
2829 /* Get the size of the data type of the string,
2830 which is actually the size of the target. */
2831 size = expr_size (exp);
2832 if (GET_CODE (size) == CONST_INT
2833 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2834 emit_block_move (target, temp, size,
2835 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2838 /* Compute the size of the data to copy from the string. */
2840 = size_binop (MIN_EXPR,
2841 make_tree (sizetype, size),
2843 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2844 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2848 /* Copy that much. */
2849 emit_block_move (target, temp, copy_size_rtx,
2850 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2852 /* Figure out how much is left in TARGET
2853 that we have to clear. */
2854 if (GET_CODE (copy_size_rtx) == CONST_INT)
2856 addr = plus_constant (XEXP (target, 0),
2857 TREE_STRING_LENGTH (exp));
2858 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2862 enum machine_mode size_mode = Pmode;
2864 addr = force_reg (Pmode, XEXP (target, 0));
2865 addr = expand_binop (size_mode, add_optab, addr,
2866 copy_size_rtx, NULL_RTX, 0,
2869 size = expand_binop (size_mode, sub_optab, size,
2870 copy_size_rtx, NULL_RTX, 0,
2873 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2874 GET_MODE (size), 0, 0);
2875 label = gen_label_rtx ();
2876 emit_jump_insn (gen_blt (label));
2879 if (size != const0_rtx)
2881 #ifdef TARGET_MEM_FUNCTIONS
2882 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2883 Pmode, const0_rtx, Pmode, size, Pmode);
2885 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2886 addr, Pmode, size, Pmode);
2894 else if (GET_MODE (temp) == BLKmode)
2895 emit_block_move (target, temp, expr_size (exp),
2896 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2898 emit_move_insn (target, temp);
2901 /* If we don't want a value, return NULL_RTX. */
2905 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2906 ??? The latter test doesn't seem to make sense. */
2907 else if (dont_return_target && GET_CODE (temp) != MEM)
2910 /* Return TARGET itself if it is a hard register. */
2911 else if (want_value && GET_MODE (target) != BLKmode
2912 && ! (GET_CODE (target) == REG
2913 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2914 return copy_to_reg (target);
2920 /* Store the value of constructor EXP into the rtx TARGET.
2921 TARGET is either a REG or a MEM. */
2924 store_constructor (exp, target)
2928 tree type = TREE_TYPE (exp);
2930 /* We know our target cannot conflict, since safe_from_p has been called. */
2932 /* Don't try copying piece by piece into a hard register
2933 since that is vulnerable to being clobbered by EXP.
2934 Instead, construct in a pseudo register and then copy it all. */
2935 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2937 rtx temp = gen_reg_rtx (GET_MODE (target));
2938 store_constructor (exp, temp);
2939 emit_move_insn (target, temp);
2944 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2945 || TREE_CODE (type) == QUAL_UNION_TYPE)
2949 /* Inform later passes that the whole union value is dead. */
2950 if (TREE_CODE (type) == UNION_TYPE
2951 || TREE_CODE (type) == QUAL_UNION_TYPE)
2952 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2954 /* If we are building a static constructor into a register,
2955 set the initial value as zero so we can fold the value into a constant. */
2957 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2958 emit_move_insn (target, const0_rtx);
2960 /* If the constructor has fewer fields than the structure,
2961 clear the whole structure first. */
2962 else if (list_length (CONSTRUCTOR_ELTS (exp))
2963 != list_length (TYPE_FIELDS (type)))
2964 clear_storage (target, int_size_in_bytes (type));
2966 /* Inform later passes that the old value is dead. */
2967 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2969 /* Store each element of the constructor into
2970 the corresponding field of TARGET. */
2972 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2974 register tree field = TREE_PURPOSE (elt);
2975 register enum machine_mode mode;
2979 tree pos, constant = 0, offset = 0;
2980 rtx to_rtx = target;
2982 /* Just ignore missing fields.
2983 We cleared the whole structure, above,
2984 if any fields are missing. */
2988 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2989 unsignedp = TREE_UNSIGNED (field);
2990 mode = DECL_MODE (field);
2991 if (DECL_BIT_FIELD (field))
2994 pos = DECL_FIELD_BITPOS (field);
2995 if (TREE_CODE (pos) == INTEGER_CST)
2997 else if (TREE_CODE (pos) == PLUS_EXPR
2998 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2999 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3004 bitpos = TREE_INT_CST_LOW (constant);
3010 if (contains_placeholder_p (offset))
3011 offset = build (WITH_RECORD_EXPR, sizetype,
3014 offset = size_binop (FLOOR_DIV_EXPR, offset,
3015 size_int (BITS_PER_UNIT));
3017 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3018 if (GET_CODE (to_rtx) != MEM)
3022 = change_address (to_rtx, VOIDmode,
3023 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
3024 force_reg (Pmode, offset_rtx)));
3027 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3028 /* The alignment of TARGET is
3029 at least what its type requires. */
3031 TYPE_ALIGN (type) / BITS_PER_UNIT,
3032 int_size_in_bytes (type));
3035 else if (TREE_CODE (type) == ARRAY_TYPE)
3039 tree domain = TYPE_DOMAIN (type);
3040 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3041 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3042 tree elttype = TREE_TYPE (type);
3044 /* If the constructor has fewer fields than the structure,
3045 clear the whole structure first.  Similarly if this is a
3046 static constructor of a non-BLKmode object. */
3048 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3049 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3050 clear_storage (target, int_size_in_bytes (type));
3052 /* Inform later passes that the old value is dead. */
3053 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3055 /* Store each element of the constructor into
3056 the corresponding element of TARGET, determined
3057 by counting the elements. */
3058 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3060 elt = TREE_CHAIN (elt), i++)
3062 register enum machine_mode mode;
3066 tree index = TREE_PURPOSE (elt);
3067 rtx xtarget = target;
3069 mode = TYPE_MODE (elttype);
3070 bitsize = GET_MODE_BITSIZE (mode);
3071 unsignedp = TREE_UNSIGNED (elttype);
3073 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3074 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3076 rtx pos_rtx, addr, xtarget;
3080 index = size_int (i);
3082 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3083 size_int (BITS_PER_UNIT));
3084 position = size_binop (MULT_EXPR, index, position);
3085 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3086 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3087 xtarget = change_address (target, mode, addr);
3088 store_expr (TREE_VALUE (elt), xtarget, 0);
3093 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3094 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3096 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3098 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3099 /* The alignment of TARGET is
3100 at least what its type requires. */
3102 TYPE_ALIGN (type) / BITS_PER_UNIT,
3103 int_size_in_bytes (type));
3107 /* set constructor assignments */
3108 else if (TREE_CODE (type) == SET_TYPE)
3111 rtx xtarget = XEXP (target, 0);
3112 int set_word_size = TYPE_ALIGN (type);
3113 int nbytes = int_size_in_bytes (type);
3115 tree non_const_elements;
3116 int need_to_clear_first;
3117 tree domain = TYPE_DOMAIN (type);
3118 tree domain_min, domain_max, bitlength;
3120 /* The default implementation strategy is to extract the constant
3121 parts of the constructor, use that to initialize the target,
3122 and then "or" in whatever non-constant ranges we need in addition.
3124 If a large set is all zero or all ones, it is
3125 probably better to set it using memset (if available) or bzero.
3126 Also, if a large set has just a single range, it may also be
3127 better to first clear the set (using
3128 bzero/memset), and then set the bits we want.  */
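/* Illustrative example (an assumption about the source language):
   for a Pascal-style `set of 0..63' initialized with the constant
   members {1, 3} plus a variable range [i..j], the constant part is
   folded into words by get_set_constructor_words and stored with
   plain moves, after which the [i..j] range is or'ed in through the
   __setbits library call further down.  */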
3130 /* Check for all zeros. */
3131 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3133 clear_storage (target, nbytes);
3140 nwords = (nbytes * BITS_PER_UNIT) / set_word_size;
3144 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3145 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3146 bitlength = size_binop (PLUS_EXPR,
3147 size_binop (MINUS_EXPR, domain_max, domain_min),
3150 /* Check for range all ones, or at most a single range.
3151 (This optimization is only a win for big sets.) */
3152 if (GET_MODE (target) == BLKmode && nbytes > 16
3153 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3155 need_to_clear_first = 1;
3156 non_const_elements = CONSTRUCTOR_ELTS (exp);
3160 HOST_WIDE_INT *buffer
3161 = (HOST_WIDE_INT*) alloca (sizeof (HOST_WIDE_INT) * nwords);
3162 non_const_elements = get_set_constructor_words (exp, buffer, nwords);
3164 if (nbytes * BITS_PER_UNIT <= set_word_size)
3166 if (BITS_BIG_ENDIAN)
3167 buffer[0] >>= set_word_size - nbytes * BITS_PER_UNIT;
3168 emit_move_insn (target, GEN_INT (buffer[0]));
3172 rtx addr = XEXP (target, 0);
3175 enum machine_mode mode
3176 = mode_for_size (set_word_size, MODE_INT, 1);
3178 for (i = 0; i < nwords; i++)
3180 int offset = i * set_word_size / BITS_PER_UNIT;
3181 rtx datum = GEN_INT (buffer[i]);
3182 rtx to_rtx = change_address (target, mode,
3183 plus_constant (addr, offset));
3184 MEM_IN_STRUCT_P (to_rtx) = 1;
3185 emit_move_insn (to_rtx, datum);
3188 need_to_clear_first = 0;
3191 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3193 /* start of range of element or NULL */
3194 tree startbit = TREE_PURPOSE (elt);
3195 /* end of range of element, or element value */
3196 tree endbit = TREE_VALUE (elt);
3197 HOST_WIDE_INT startb, endb;
3198 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3200 bitlength_rtx = expand_expr (bitlength,
3201 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3203 /* handle non-range tuple element like [ expr ] */
3204 if (startbit == NULL_TREE)
3206 startbit = save_expr (endbit);
3209 startbit = convert (sizetype, startbit);
3210 endbit = convert (sizetype, endbit);
3211 if (! integer_zerop (domain_min))
3213 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3214 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3216 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3217 EXPAND_CONST_ADDRESS);
3218 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3219 EXPAND_CONST_ADDRESS);
3223 targetx = assign_stack_temp (GET_MODE (target),
3224 GET_MODE_SIZE (GET_MODE (target)),
3226 emit_move_insn (targetx, target);
3228 else if (GET_CODE (target) == MEM)
3233 #ifdef TARGET_MEM_FUNCTIONS
3234 /* Optimization: If startbit and endbit are
3235 constants divisible by BITS_PER_UNIT,
3236 call memset instead. */
3237 if (TREE_CODE (startbit) == INTEGER_CST
3238 && TREE_CODE (endbit) == INTEGER_CST
3239 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3240 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3243 if (need_to_clear_first
3244 && endb - startb != nbytes * BITS_PER_UNIT)
3245 clear_storage (target, nbytes);
3246 need_to_clear_first = 0;
3247 emit_library_call (memset_libfunc, 0,
3249 plus_constant (XEXP (targetx, 0), startb),
3252 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3258 if (need_to_clear_first)
3260 clear_storage (target, nbytes);
3261 need_to_clear_first = 0;
3263 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3264 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3265 bitlength_rtx, TYPE_MODE (sizetype),
3266 startbit_rtx, TYPE_MODE (sizetype),
3267 endbit_rtx, TYPE_MODE (sizetype));
3270 emit_move_insn (target, targetx);
3278 /* Store the value of EXP (an expression tree)
3279 into a subfield of TARGET which has mode MODE and occupies
3280 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3281 If MODE is VOIDmode, it means that we are storing into a bit-field.
3283 If VALUE_MODE is VOIDmode, return nothing in particular.
3284 UNSIGNEDP is not used in this case.
3286 Otherwise, return an rtx for the value stored. This rtx
3287 has mode VALUE_MODE if that is convenient to do.
3288 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3290 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3291 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
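/* For instance (a sketch): storing into a 6-bit bit-field that
   starts at bit 10 of an aligned structure arrives here with
   BITSIZE == 6, BITPOS == 10 and MODE == VOIDmode, and is handled by
   the store_bit_field path below; a naturally aligned `int' member
   instead arrives with a real MODE and becomes a simple store.  */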
3294 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3295 unsignedp, align, total_size)
3297 int bitsize, bitpos;
3298 enum machine_mode mode;
3300 enum machine_mode value_mode;
3305 HOST_WIDE_INT width_mask = 0;
3307 if (bitsize < HOST_BITS_PER_WIDE_INT)
3308 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3310 /* If we are storing into an unaligned field of an aligned union that is
3311 in a register, we may have the mode of TARGET being an integer mode but
3312 MODE == BLKmode. In that case, get an aligned object whose size and
3313 alignment are the same as TARGET and store TARGET into it (we can avoid
3314 the store if the field being stored is the entire width of TARGET). Then
3315 call ourselves recursively to store the field into a BLKmode version of
3316 that object. Finally, load from the object into TARGET. This is not
3317 very efficient in general, but should only be slightly more expensive
3318 than the otherwise-required unaligned accesses. Perhaps this can be
3319 cleaned up later. */
3322 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3324 rtx object = assign_stack_temp (GET_MODE (target),
3325 GET_MODE_SIZE (GET_MODE (target)), 0);
3326 rtx blk_object = copy_rtx (object);
3328 MEM_IN_STRUCT_P (object) = 1;
3329 MEM_IN_STRUCT_P (blk_object) = 1;
3330 PUT_MODE (blk_object, BLKmode);
3332 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3333 emit_move_insn (object, target);
3335 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3338 /* Even though we aren't returning target, we need to
3339 give it the updated value. */
3340 emit_move_insn (target, object);
3345 /* If the structure is in a register or if the component
3346 is a bit field, we cannot use addressing to access it.
3347 Use bit-field techniques or SUBREG to store in it. */
3349 if (mode == VOIDmode
3350 || (mode != BLKmode && ! direct_store[(int) mode])
3351 || GET_CODE (target) == REG
3352 || GET_CODE (target) == SUBREG
3353 /* If the field isn't aligned enough to store as an ordinary memref,
3354 store it as a bit field. */
3355 || (SLOW_UNALIGNED_ACCESS
3356 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3357 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3359 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3361 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
3363 if (mode != VOIDmode && mode != BLKmode
3364 && mode != TYPE_MODE (TREE_TYPE (exp)))
3365 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3367 /* Store the value in the bitfield. */
3368 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3369 if (value_mode != VOIDmode)
3371 /* The caller wants an rtx for the value. */
3372 /* If possible, avoid refetching from the bitfield itself. */
3374 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3377 enum machine_mode tmode;
3380 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3381 tmode = GET_MODE (temp);
3382 if (tmode == VOIDmode)
3384 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3385 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3386 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3388 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3389 NULL_RTX, value_mode, 0, align,
3396 rtx addr = XEXP (target, 0);
3399 /* If a value is wanted, it must be the lhs;
3400 so make the address stable for multiple use. */
3402 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3403 && ! CONSTANT_ADDRESS_P (addr)
3404 /* A frame-pointer reference is already stable. */
3405 && ! (GET_CODE (addr) == PLUS
3406 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3407 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3408 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3409 addr = copy_to_reg (addr);
3411 /* Now build a reference to just the desired component. */
3413 to_rtx = change_address (target, mode,
3414 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3415 MEM_IN_STRUCT_P (to_rtx) = 1;
3417 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3421 /* Return true if any object containing the innermost array is an unaligned
3422 packed structure field. */
3425 get_inner_unaligned_p (exp)
3428 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3432 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3434 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3438 else if (TREE_CODE (exp) != ARRAY_REF
3439 && TREE_CODE (exp) != NON_LVALUE_EXPR
3440 && ! ((TREE_CODE (exp) == NOP_EXPR
3441 || TREE_CODE (exp) == CONVERT_EXPR)
3442 && (TYPE_MODE (TREE_TYPE (exp))
3443 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3446 exp = TREE_OPERAND (exp, 0);
3452 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3453 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3454 ARRAY_REFs and find the ultimate containing object, which we return.
3456 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3457 bit position, and *PUNSIGNEDP to the signedness of the field.
3458 If the position of the field is variable, we store a tree
3459 giving the variable offset (in units) in *POFFSET.
3460 This offset is in addition to the bit position.
3461 If the position is not variable, we store 0 in *POFFSET.
3463 If any of the extraction expressions is volatile,
3464 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3466 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3467 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
3470 If the field describes a variable-sized object, *PMODE is set to
3471 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3472 this case, but the address of the object can be found. */
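/* Example (illustrative): for `s.a[i].b' this walks the COMPONENT_REF
   and ARRAY_REF nodes down to the containing object S, accumulates
   the constant part of B's position into *PBITPOS, and returns the
   I-dependent part as a byte-offset tree in *POFFSET.  */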
3475 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3476 punsignedp, pvolatilep)
3481 enum machine_mode *pmode;
3485 tree orig_exp = exp;
3487 enum machine_mode mode = VOIDmode;
3488 tree offset = integer_zero_node;
3490 if (TREE_CODE (exp) == COMPONENT_REF)
3492 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3493 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3494 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3495 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3497 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3499 size_tree = TREE_OPERAND (exp, 1);
3500 *punsignedp = TREE_UNSIGNED (exp);
3504 mode = TYPE_MODE (TREE_TYPE (exp));
3505 *pbitsize = GET_MODE_BITSIZE (mode);
3506 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3511 if (TREE_CODE (size_tree) != INTEGER_CST)
3512 mode = BLKmode, *pbitsize = -1;
3514 *pbitsize = TREE_INT_CST_LOW (size_tree);
3517 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3518 and find the ultimate containing object. */
3524 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3526 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3527 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3528 : TREE_OPERAND (exp, 2));
3529 tree constant = integer_zero_node, var = pos;
3531 /* If this field hasn't been filled in yet, don't go
3532 past it. This should only happen when folding expressions
3533 made during type construction. */
3537 /* Assume here that the offset is a multiple of a unit.
3538 If not, there should be an explicitly added constant. */
3539 if (TREE_CODE (pos) == PLUS_EXPR
3540 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3541 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3542 else if (TREE_CODE (pos) == INTEGER_CST)
3543 constant = pos, var = integer_zero_node;
3545 *pbitpos += TREE_INT_CST_LOW (constant);
3548 offset = size_binop (PLUS_EXPR, offset,
3549 size_binop (EXACT_DIV_EXPR, var,
3550 size_int (BITS_PER_UNIT)));
3553 else if (TREE_CODE (exp) == ARRAY_REF)
3555 /* This code is based on the code in case ARRAY_REF in expand_expr
3556 below. We assume here that the size of an array element is
3557 always an integral multiple of BITS_PER_UNIT. */
3559 tree index = TREE_OPERAND (exp, 1);
3560 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3562 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3563 tree index_type = TREE_TYPE (index);
3565 if (! integer_zerop (low_bound))
3566 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3568 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3570 index = convert (type_for_size (POINTER_SIZE, 0), index);
3571 index_type = TREE_TYPE (index);
3574 index = fold (build (MULT_EXPR, index_type, index,
3575 TYPE_SIZE (TREE_TYPE (exp))));
3577 if (TREE_CODE (index) == INTEGER_CST
3578 && TREE_INT_CST_HIGH (index) == 0)
3579 *pbitpos += TREE_INT_CST_LOW (index);
3581 offset = size_binop (PLUS_EXPR, offset,
3582 size_binop (FLOOR_DIV_EXPR, index,
3583 size_int (BITS_PER_UNIT)));
3585 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3586 && ! ((TREE_CODE (exp) == NOP_EXPR
3587 || TREE_CODE (exp) == CONVERT_EXPR)
3588 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3589 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3591 && (TYPE_MODE (TREE_TYPE (exp))
3592 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3595 /* If any reference in the chain is volatile, the effect is volatile. */
3596 if (TREE_THIS_VOLATILE (exp))
3598 exp = TREE_OPERAND (exp, 0);
3601 /* If this was a bit-field, see if there is a mode that allows direct
3602 access in case EXP is in memory. */
3603 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3605 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3606 if (mode == BLKmode)
3610 if (integer_zerop (offset))
3613 if (offset != 0 && contains_placeholder_p (offset))
3614 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3621 /* Given an rtx VALUE that may contain additions and multiplications,
3622 return an equivalent value that just refers to a register or memory.
3623 This is done by generating instructions to perform the arithmetic
3624 and returning a pseudo-register containing the value.
3626 The returned value may be a REG, SUBREG, MEM or constant. */
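/* E.g. (a sketch): given (plus:SI (reg:SI 100) (const_int 4)), the
   code below selects add_optab and returns a pseudo holding the sum;
   a VALUE that is already a REG, MEM or constant comes back
   unchanged.  */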
3629 force_operand (value, target)
3632 register optab binoptab = 0;
3633 /* Use a temporary to force order of execution of calls to `force_operand'.  */
3637 /* Use subtarget as the target for operand 0 of a binary operation. */
3638 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3640 if (GET_CODE (value) == PLUS)
3641 binoptab = add_optab;
3642 else if (GET_CODE (value) == MINUS)
3643 binoptab = sub_optab;
3644 else if (GET_CODE (value) == MULT)
3646 op2 = XEXP (value, 1);
3647 if (!CONSTANT_P (op2)
3648 && !(GET_CODE (op2) == REG && op2 != subtarget))
3650 tmp = force_operand (XEXP (value, 0), subtarget);
3651 return expand_mult (GET_MODE (value), tmp,
3652 force_operand (op2, NULL_RTX),
3658 op2 = XEXP (value, 1);
3659 if (!CONSTANT_P (op2)
3660 && !(GET_CODE (op2) == REG && op2 != subtarget))
3662 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3664 binoptab = add_optab;
3665 op2 = negate_rtx (GET_MODE (value), op2);
3668 /* Check for an addition with OP2 a constant integer and our first
3669 operand a PLUS of a virtual register and something else. In that
3670 case, we want to emit the sum of the virtual register and the
3671 constant first and then add the other value. This allows virtual
3672 register instantiation to simply modify the constant rather than
3673 creating another one around this addition. */
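/* Concrete case (illustrative): for

       (plus (plus (reg virtual-stack-vars) (reg 101)) (const_int 8))

   the code below first forms `virtual-stack-vars + 8', which
   instantiation can fold into a single frame-pointer offset, and
   only then adds (reg 101).  */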
3674 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3675 && GET_CODE (XEXP (value, 0)) == PLUS
3676 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3677 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3678 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3680 rtx temp = expand_binop (GET_MODE (value), binoptab,
3681 XEXP (XEXP (value, 0), 0), op2,
3682 subtarget, 0, OPTAB_LIB_WIDEN);
3683 return expand_binop (GET_MODE (value), binoptab, temp,
3684 force_operand (XEXP (XEXP (value, 0), 1), 0),
3685 target, 0, OPTAB_LIB_WIDEN);
3688 tmp = force_operand (XEXP (value, 0), subtarget);
3689 return expand_binop (GET_MODE (value), binoptab, tmp,
3690 force_operand (op2, NULL_RTX),
3691 target, 0, OPTAB_LIB_WIDEN);
3692 /* We give UNSIGNEDP = 0 to expand_binop
3693 because the only operations we are expanding here are signed ones. */
3698 /* Subroutine of expand_expr:
3699 save the non-copied parts (LIST) of an expr (LHS), and return a list
3700 which can restore these values to their previous values,
3701 should something modify their storage. */
3704 save_noncopied_parts (lhs, list)
3711 for (tail = list; tail; tail = TREE_CHAIN (tail))
3712 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3713 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3716 tree part = TREE_VALUE (tail);
3717 tree part_type = TREE_TYPE (part);
3718 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3719 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3720 int_size_in_bytes (part_type), 0);
3721 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3722 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3723 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3724 parts = tree_cons (to_be_saved,
3725 build (RTL_EXPR, part_type, NULL_TREE,
3728 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3733 /* Subroutine of expand_expr:
3734 record the non-copied parts (LIST) of an expr (LHS), and return a list
3735 which specifies the initial values of these parts. */
3738 init_noncopied_parts (lhs, list)
3745 for (tail = list; tail; tail = TREE_CHAIN (tail))
3746 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3747 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3750 tree part = TREE_VALUE (tail);
3751 tree part_type = TREE_TYPE (part);
3752 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3753 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3758 /* Subroutine of expand_expr: return nonzero iff there is no way that
3759 EXP can reference X, which is being modified. */
3762 safe_from_p (x, exp)
3770 /* If EXP has varying size, we MUST use a target since we currently
3771 have no way of allocating temporaries of variable size. So we
3772 assume here that something at a higher level has prevented a
3773 clash. This is somewhat bogus, but the best we can do. */
3774 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3778 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3779 find the underlying pseudo. */
3780 if (GET_CODE (x) == SUBREG)
3783 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3787 /* If X is a location in the outgoing argument area, it is always safe. */
3788 if (GET_CODE (x) == MEM
3789 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3790 || (GET_CODE (XEXP (x, 0)) == PLUS
3791 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3794 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3797 exp_rtl = DECL_RTL (exp);
3804 if (TREE_CODE (exp) == TREE_LIST)
3805 return ((TREE_VALUE (exp) == 0
3806 || safe_from_p (x, TREE_VALUE (exp)))
3807 && (TREE_CHAIN (exp) == 0
3808 || safe_from_p (x, TREE_CHAIN (exp))));
3813 return safe_from_p (x, TREE_OPERAND (exp, 0));
3817 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3818 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3822 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3823 the expression. If it is set, we conflict iff we are that rtx or
3824 both are in memory. Otherwise, we check all operands of the
3825 expression recursively. */
3827 switch (TREE_CODE (exp))
3830 return (staticp (TREE_OPERAND (exp, 0))
3831 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3834 if (GET_CODE (x) == MEM)
3839 exp_rtl = CALL_EXPR_RTL (exp);
3842 /* Assume that the call will clobber all hard registers and all of memory.  */
3844 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3845 || GET_CODE (x) == MEM)
3852 exp_rtl = RTL_EXPR_RTL (exp);
3854 /* We don't know what this can modify. */
3859 case WITH_CLEANUP_EXPR:
3860 exp_rtl = RTL_EXPR_RTL (exp);
3863 case CLEANUP_POINT_EXPR:
3864 return safe_from_p (x, TREE_OPERAND (exp, 0));
3867 exp_rtl = SAVE_EXPR_RTL (exp);
3871 /* The only operand we look at is operand 1. The rest aren't
3872 part of the expression. */
3873 return safe_from_p (x, TREE_OPERAND (exp, 1));
3875 case METHOD_CALL_EXPR:
3876 /* This takes an rtx argument, but shouldn't appear here. */
3880 /* If we have an rtx, we do not need to scan our operands. */
3884 nops = tree_code_length[(int) TREE_CODE (exp)];
3885 for (i = 0; i < nops; i++)
3886 if (TREE_OPERAND (exp, i) != 0
3887 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3891 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
3895 if (GET_CODE (exp_rtl) == SUBREG)
3897 exp_rtl = SUBREG_REG (exp_rtl);
3898 if (GET_CODE (exp_rtl) == REG
3899 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3903 /* If the rtl is X, then it is not safe.  Otherwise, it is safe unless both
3904 are memory and EXP is not readonly. */
3905 return ! (rtx_equal_p (x, exp_rtl)
3906 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3907 && ! TREE_READONLY (exp)));
3910 /* If we reach here, it is safe. */
3914 /* Subroutine of expand_expr: return nonzero iff EXP is an
3915 expression whose type is statically determinable. */
3921 if (TREE_CODE (exp) == PARM_DECL
3922 || TREE_CODE (exp) == VAR_DECL
3923 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3924 || TREE_CODE (exp) == COMPONENT_REF
3925 || TREE_CODE (exp) == ARRAY_REF)
3930 /* expand_expr: generate code for computing expression EXP.
3931 An rtx for the computed value is returned. The value is never null.
3932 In the case of a void EXP, const0_rtx is returned.
3934 The value may be stored in TARGET if TARGET is nonzero.
3935 TARGET is just a suggestion; callers must assume that
3936 the rtx returned may not be the same as TARGET.
3938 If TARGET is CONST0_RTX, it means that the value will be ignored.
3940 If TMODE is not VOIDmode, it suggests generating the
3941 result in mode TMODE. But this is done only when convenient.
3942 Otherwise, TMODE is ignored and the value generated in its natural mode.
3943 TMODE is just a suggestion; callers must assume that
3944 the rtx returned may not have mode TMODE.
3946 Note that TARGET may have neither TMODE nor MODE. In that case, it
3947 probably will not be used.
3949 If MODIFIER is EXPAND_SUM then when EXP is an addition
3950 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3951 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3952 products as above, or REG or MEM, or constant.
3953 Ordinarily in such cases we would output mul or add instructions
3954 and then return a pseudo reg containing the sum.
3956 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3957 it also marks a label as absolutely required (it can't be dead).
3958 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3959 This is used for outputting expressions used in initializers.
3961 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3962 with a constant address even if that address is not normally legitimate.
3963 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
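/* Example of the EXPAND_SUM convention (a sketch, assuming 4-byte
   array elements): asked for the address in `&a[i]', expand_expr may
   return

       (plus (reg A) (mult (reg I) (const_int 4)))

   leaving the caller to fold the terms into an addressing mode
   instead of emitting the mul and add insns immediately.  */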
3966 expand_expr (exp, target, tmode, modifier)
3969 enum machine_mode tmode;
3970 enum expand_modifier modifier;
3972 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3973 This is static so it will be accessible to our recursive callees. */
3974 static tree placeholder_list = 0;
3975 register rtx op0, op1, temp;
3976 tree type = TREE_TYPE (exp);
3977 int unsignedp = TREE_UNSIGNED (type);
3978 register enum machine_mode mode = TYPE_MODE (type);
3979 register enum tree_code code = TREE_CODE (exp);
3981 /* Use subtarget as the target for operand 0 of a binary operation. */
3982 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3983 rtx original_target = target;
3984 /* Maybe defer this until sure not doing bytecode? */
3985 int ignore = (target == const0_rtx
3986 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3987 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3988 || code == COND_EXPR)
3989 && TREE_CODE (type) == VOID_TYPE));
3993 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3995 bc_expand_expr (exp);
3999 /* Don't use hard regs as subtargets, because the combiner
4000 can only handle pseudo regs. */
4001 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4003 /* Avoid subtargets inside loops,
4004 since they hide some invariant expressions. */
4005 if (preserve_subexpressions_p ())
4008 /* If we are going to ignore this result, we need only do something
4009 if there is a side-effect somewhere in the expression. If there
4010 is, short-circuit the most common cases here. Note that we must
4011 not call expand_expr with anything but const0_rtx in case this
4012 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4016 if (! TREE_SIDE_EFFECTS (exp))
4019 /* Ensure we reference a volatile object even if value is ignored. */
4020 if (TREE_THIS_VOLATILE (exp)
4021 && TREE_CODE (exp) != FUNCTION_DECL
4022 && mode != VOIDmode && mode != BLKmode)
4024 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4025 if (GET_CODE (temp) == MEM)
4026 temp = copy_to_reg (temp);
4030 if (TREE_CODE_CLASS (code) == '1')
4031 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4032 VOIDmode, modifier);
4033 else if (TREE_CODE_CLASS (code) == '2'
4034 || TREE_CODE_CLASS (code) == '<')
4036 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4037 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4040 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4041 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4042 /* If the second operand has no side effects, just evaluate the first.  */
4044 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4045 VOIDmode, modifier);
4050 /* If we will do cse, generate all results into pseudo registers
4051 since 1) that allows cse to find more things
4052 and 2) otherwise cse could produce an insn the machine cannot support.  */
4055 if (! cse_not_expected && mode != BLKmode && target
4056 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4063 tree function = decl_function_context (exp);
4064 /* Handle using a label in a containing function. */
4065 if (function != current_function_decl && function != 0)
4067 struct function *p = find_function_data (function);
4068 /* Allocate in the memory associated with the function
4069 that the label is in. */
4070 push_obstacks (p->function_obstack,
4071 p->function_maybepermanent_obstack);
4073 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4074 label_rtx (exp), p->forced_labels);
4077 else if (modifier == EXPAND_INITIALIZER)
4078 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4079 label_rtx (exp), forced_labels);
4080 temp = gen_rtx (MEM, FUNCTION_MODE,
4081 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4082 if (function != current_function_decl && function != 0)
4083 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4088 if (DECL_RTL (exp) == 0)
4090 error_with_decl (exp, "prior parameter's size depends on `%s'");
4091 return CONST0_RTX (mode);
4094 /* ... fall through ... */
4097 /* If a static var's type was incomplete when the decl was written,
4098 but the type is complete now, lay out the decl now. */
4099 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4100 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4102 push_obstacks_nochange ();
4103 end_temporary_allocation ();
4104 layout_decl (exp, 0);
4105 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4109 /* ... fall through ... */
4113 if (DECL_RTL (exp) == 0)
4116 /* Ensure variable marked as used even if it doesn't go through
4117 a parser.  If it hasn't been used yet, write out an external definition.  */
4119 if (! TREE_USED (exp))
4121 assemble_external (exp);
4122 TREE_USED (exp) = 1;
4125 /* Handle variables inherited from containing functions. */
4126 context = decl_function_context (exp);
4128 /* We treat inline_function_decl as an alias for the current function
4129 because that is the inline function whose vars, types, etc.
4130 are being merged into the current function.
4131 See expand_inline_function. */
4133 if (context != 0 && context != current_function_decl
4134 && context != inline_function_decl
4135 /* If var is static, we don't need a static chain to access it. */
4136 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4137 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4141 /* Mark as non-local and addressable. */
4142 DECL_NONLOCAL (exp) = 1;
4143 mark_addressable (exp);
4144 if (GET_CODE (DECL_RTL (exp)) != MEM)
4146 addr = XEXP (DECL_RTL (exp), 0);
4147 if (GET_CODE (addr) == MEM)
4148 addr = gen_rtx (MEM, Pmode,
4149 fix_lexical_addr (XEXP (addr, 0), exp));
4151 addr = fix_lexical_addr (addr, exp);
4152 return change_address (DECL_RTL (exp), mode, addr);
4155 /* This is the case of an array whose size is to be determined
4156 from its initializer, while the initializer is still being parsed.
4159 if (GET_CODE (DECL_RTL (exp)) == MEM
4160 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4161 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4162 XEXP (DECL_RTL (exp), 0));
4164 /* If DECL_RTL is memory, we are in the normal case and either
4165 the address is not valid or it is not a register and -fforce-addr
4166 is specified, get the address into a register. */
4168 if (GET_CODE (DECL_RTL (exp)) == MEM
4169 && modifier != EXPAND_CONST_ADDRESS
4170 && modifier != EXPAND_SUM
4171 && modifier != EXPAND_INITIALIZER
4172 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4174 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4175 return change_address (DECL_RTL (exp), VOIDmode,
4176 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4178 /* If the mode of DECL_RTL does not match that of the decl, it
4179 must be a promoted value. We return a SUBREG of the wanted mode,
4180 but mark it so that we know that it was already extended. */
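/* Sketch (illustrative): a `short' variable kept promoted in
   (reg:SI 105) comes back as (subreg:HI (reg:SI 105) 0) with
   SUBREG_PROMOTED_VAR_P set, telling later code that the upper bits
   already hold the proper zero- or sign-extension.  */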
4182 if (GET_CODE (DECL_RTL (exp)) == REG
4183 && GET_MODE (DECL_RTL (exp)) != mode)
4185 /* Get the signedness used for this variable. Ensure we get the
4186 same mode we got when the variable was declared. */
4187 if (GET_MODE (DECL_RTL (exp))
4188 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4191 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4192 SUBREG_PROMOTED_VAR_P (temp) = 1;
4193 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4197 return DECL_RTL (exp);
4200 return immed_double_const (TREE_INT_CST_LOW (exp),
4201 TREE_INT_CST_HIGH (exp),
4205 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4208 /* If optimized, generate immediate CONST_DOUBLE
4209 which will be turned into memory by reload if necessary.
4211 We used to force a register so that loop.c could see it. But
4212 this does not allow gen_* patterns to perform optimizations with
4213 the constants. It also produces two insns in cases like "x = 1.0;".
4214 On most machines, floating-point constants are not permitted in
4215 many insns, so we'd end up copying it to a register in any case.
4217 Now, we do the copying in expand_binop, if appropriate. */
4218 return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
4223 output_constant_def (exp);
4225 /* TREE_CST_RTL probably contains a constant address.
4226 On RISC machines where a constant address isn't valid,
4227 make some insns to get that address into a register. */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
    case SAVE_EXPR:
      context = decl_function_context (exp);
4242 /* We treat inline_function_decl as an alias for the current function
4243 because that is the inline function whose vars, types, etc.
4244 are being merged into the current function.
4245 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == BLKmode)
	    {
	      temp
		= assign_stack_temp (mode, int_size_in_bytes (type), 0);
	      MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
	    }
	  else
	    temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);
	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */
	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}
4295 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4296 must be a promoted value. We return a SUBREG of the wanted mode,
4297 but mark it so that we know that it was already extended. */
4299 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4300 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
4312 case PLACEHOLDER_EXPR:
4313 /* If there is an object on the head of the placeholder list,
	 see if some object in its references is of type TYPE.  For
4315 further information, see tree.def. */
4316 if (placeholder_list)
	{
	  tree object;
	  tree old_list = placeholder_list;
4321 for (object = TREE_PURPOSE (placeholder_list);
4322 TREE_TYPE (object) != type
4323 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4324 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4325 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4326 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4327 object = TREE_OPERAND (object, 0))
4330 if (object && TREE_TYPE (object) == type)
	      /* Expand this object skipping the list entries before
		 it was found in case it is also a PLACEHOLDER_EXPR.
		 In that case, we want to translate it using subsequent
		 entries.  */
	      placeholder_list = TREE_CHAIN (placeholder_list);
4337 temp = expand_expr (object, original_target, tmode, modifier);
	      placeholder_list = old_list;
	      return temp;
	    }
	}
      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
4346 case WITH_RECORD_EXPR:
4347 /* Put the object on the placeholder list, expand our first operand,
4348 and pop the list. */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
			    modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
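      /* A sketch of how these two codes cooperate, on a hypothetical tree:
	 for a field whose size is stored in another field of the same
	 record, the front end builds something like

	     (WITH_RECORD_EXPR
	       (ARRAY_REF ... (COMPONENT_REF (PLACEHOLDER_EXPR rec_type)
					     len_field))
	       rec)

	 and while REC sits on placeholder_list, the PLACEHOLDER_EXPR
	 handling above resolves to REC.  */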
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();
      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
4373 int vars_need_expansion = 0;
	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);
4379 /* Mark the corresponding BLOCK for output in its proper place. */
4380 if (TREE_OPERAND (exp, 2) != 0
4381 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4382 insert_block (TREE_OPERAND (exp, 2));
	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
4396 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
4408 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4409 free_temps_for_rtl_expr (exp);
4410 return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
	  return const0_rtx;
	}
4423 /* All elts simple constants => refer to a constant in memory. But
4424 if this is a non-BLKmode mode, let it store a field at a time
4425 since that should make a CONST_INT or CONST_DOUBLE when we
4426 fold. Likewise, if we have a target we can use, it is best to
4427 store directly into the target unless the type is large enough
4428 that memcpy will be used. If we are making an initializer and
4429 all operands are constant, put it in memory as well. */
4430 else if ((TREE_STATIC (exp)
4431 && ((mode == BLKmode
4432 && ! (target != 0 && safe_from_p (target, exp)))
4433 || TREE_ADDRESSABLE (exp)
4434 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4435 && (move_by_pieces_ninsns
4436 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4439 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4441 rtx constructor = output_constant_def (exp);
4442 if (modifier != EXPAND_CONST_ADDRESS
4443 && modifier != EXPAND_INITIALIZER
4444 && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
4449 constructor = change_address (constructor, VOIDmode,
4450 XEXP (constructor, 0));
4456 if (target == 0 || ! safe_from_p (target, exp))
4458 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4459 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4463 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4464 if (AGGREGATE_TYPE_P (type))
4465 MEM_IN_STRUCT_P (target) = 1;
4468 store_constructor (exp, target);
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
4477 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4478 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4479 This code has the same general effect as simply doing
4480 expand_expr on the save expr, except that the expression PTR
4481 is computed for use as a memory address. This means different
4482 code, suitable for indexing, may be generated. */
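	/* For instance (illustrative C source):

	       *p += f ();

	   Here the front end wraps P in a SAVE_EXPR; expanding it with
	   EXPAND_SUM forms the address once, in a shape the addressing
	   modes can use for both the read and the write of *P.  */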
4483 if (TREE_CODE (exp1) == SAVE_EXPR
4484 && SAVE_EXPR_RTL (exp1) == 0
4485 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4486 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4487 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
	  {
	    temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
				VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, temp);
	    op0 = copy_all_regs (op0);
	    SAVE_EXPR_RTL (exp1) = op0;
	  }
	else
	  {
	    op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, op0);
	  }
4501 temp = gen_rtx (MEM, mode, op0);
4502 /* If address was computed by addition,
4503 mark this as an element of an aggregate. */
4504 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4505 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4506 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4507 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4508 || (TREE_CODE (exp1) == ADDR_EXPR
4509 && (exp2 = TREE_OPERAND (exp1, 0))
4510 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4511 MEM_IN_STRUCT_P (temp) = 1;
4512 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4513 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4514 a location is accessed through a pointer to const does not mean
4515 that the value there can never change. */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
	return temp;
      }
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
4527 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4528 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4529 tree index = TREE_OPERAND (exp, 1);
4530 tree index_type = TREE_TYPE (index);
4533 if (TREE_CODE (low_bound) != INTEGER_CST
4534 && contains_placeholder_p (low_bound))
4535 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
	/* Optimize the special case of a zero lower bound.
4539 We convert the low_bound to sizetype to avoid some problems
4540 with constant folding. (E.g. suppose the lower bound is 1,
4541 and its mode is QI. Without the conversion, (ARRAY
4542 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4543 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4545 But sizetype isn't quite right either (especially if
4546 the lowbound is negative). FIXME */
4548 if (! integer_zerop (low_bound))
4549 index = fold (build (MINUS_EXPR, index_type, index,
4550 convert (sizetype, low_bound)));
4552 if ((TREE_CODE (index) != INTEGER_CST
4553 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4554 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4556 /* Nonconstant array index or nonconstant element size, and
4557 not an array in an unaligned (packed) structure field.
4558 Generate the tree for *(&array+index) and expand that,
4559 except do it in a language-independent way
4560 and don't complain about non-lvalue arrays.
4561 `mark_addressable' should already have been called
4562 for any array for which this case will be reached. */
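	    /* Concretely (an illustration, not the exact tree built): for
	       `int a[n]; ... a[i]' this produces the equivalent of

		   *(int *) ((char *) &a + i * sizeof (int))

	       and expands that, so the normal INDIRECT_REF machinery can
	       pick an indexed addressing mode.  */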
	  /* Don't forget the const or volatile flag from the array
	     element.  */
4566 tree variant_type = build_type_variant (type,
4567 TREE_READONLY (exp),
4568 TREE_THIS_VOLATILE (exp));
4569 tree array_adr = build1 (ADDR_EXPR,
4570 build_pointer_type (variant_type), array);
4572 tree size = size_in_bytes (type);
4574 /* Convert the integer argument to a type the same size as a
4575 pointer so the multiply won't overflow spuriously. */
4576 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4577 index = convert (type_for_size (POINTER_SIZE, 0), index);
4579 if (TREE_CODE (size) != INTEGER_CST
4580 && contains_placeholder_p (size))
4581 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4583 /* Don't think the address has side effects
4584 just because the array does.
4585 (In some cases the address might have side effects,
4586 and we fail to record that fact here. However, it should not
4587 matter, since expand_expr should not care.) */
4588 TREE_SIDE_EFFECTS (array_adr) = 0;
	  elt = build1 (INDIRECT_REF, type,
			fold (build (PLUS_EXPR,
				     TYPE_POINTER_TO (variant_type),
				     array_adr,
				     fold (build (MULT_EXPR,
						  TYPE_POINTER_TO (variant_type),
						  index, size)))));

	  /* Volatility, etc., of new expression is same as old
	     expression.  */
4600 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4601 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4602 TREE_READONLY (elt) = TREE_READONLY (exp);
4604 return expand_expr (elt, target, tmode, modifier);
4607 /* Fold an expression like: "foo"[2].
4608 This is not done in fold so it won't happen inside &.
4609 Don't fold if this is for wide characters since it's too
4610 difficult to do correctly and this is a very rare case. */
4612 if (TREE_CODE (array) == STRING_CST
4613 && TREE_CODE (index) == INTEGER_CST
4614 && !TREE_INT_CST_HIGH (index)
4615 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4616 && GET_MODE_CLASS (mode) == MODE_INT
4617 && GET_MODE_SIZE (mode) == 1)
4618 return GEN_INT (TREE_STRING_POINTER (array)[i]);
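	/* E.g. "foo"[2] is folded right here to the character constant
	   'o'; no reference to the string's memory survives.  */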
4620 /* If this is a constant index into a constant array,
4621 just get the value from the array. Handle both the cases when
4622 we have an explicit constructor and when our operand is a variable
4623 that was declared const. */
4625 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4627 if (TREE_CODE (index) == INTEGER_CST
4628 && TREE_INT_CST_HIGH (index) == 0)
	      tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

	      i = TREE_INT_CST_LOW (index);
	      while (elem && i--)
		elem = TREE_CHAIN (elem);
	      if (elem)
		return expand_expr (fold (TREE_VALUE (elem)), target,
				    tmode, modifier);
4641 else if (optimize >= 1
4642 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4643 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4644 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4646 if (TREE_CODE (index) == INTEGER_CST
4647 && TREE_INT_CST_HIGH (index) == 0)
4649 tree init = DECL_INITIAL (array);
4651 i = TREE_INT_CST_LOW (index);
4652 if (TREE_CODE (init) == CONSTRUCTOR)
		  tree elem = CONSTRUCTOR_ELTS (init);

		  while (elem
			 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		    elem = TREE_CHAIN (elem);
		  if (elem)
		    return expand_expr (fold (TREE_VALUE (elem)), target,
					tmode, modifier);
4663 else if (TREE_CODE (init) == STRING_CST
4664 && i < TREE_STRING_LENGTH (init))
4665 return GEN_INT (TREE_STRING_POINTER (init)[i]);
      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
4674 /* If the operand is a CONSTRUCTOR, we can just extract the
4675 appropriate field if it is present. Don't do this if we have
4676 already written the data since we want to refer to that copy
4677 and varasm.c assumes that's what we'll do. */
4678 if (code != ARRAY_REF
4679 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4680 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
4686 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4687 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
4706 /* In some cases, we will be offsetting OP0's address by a constant.
4707 So get it as a sum, if possible. If we will be using it
4708 directly in an insn, we validate it. */
4709 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4711 /* If this is a constant, put it into a register if it is a
4712 legitimate constant and memory if it isn't. */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));

	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
4722 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();
4729 op0 = change_address (op0, VOIDmode,
4730 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4731 force_reg (Pmode, offset_rtx)));
4732 /* If we have a variable offset, the known alignment
4733 is only that of the innermost structure containing the field.
4734 (Actually, we could sometimes do better by using the
4735 size of an element of the innermost array, but no need.) */
4736 if (TREE_CODE (exp) == COMPONENT_REF
4737 || TREE_CODE (exp) == BIT_FIELD_REF)
	      alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			   / BITS_PER_UNIT);
	  }
4742 /* Don't forget about volatility even if this is a bitfield. */
4743 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }
4749 /* In cases where an aligned union has an unaligned object
4750 as a field, we might be extracting a BLKmode value from
4751 an integer-mode (e.g., SImode) object. Handle this case
4752 by doing the extract into an object as wide as the field
4753 (which we know to be the width of a basic mode), then
4754 storing into memory, and changing the mode to BLKmode. */
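	/* Sketch of the situation being handled: a word-aligned union may
	   hold a 3-byte BLKmode structure inside an SImode word.  We pull
	   the field out with extract_bit_field, spill it to a stack slot,
	   and relabel that slot as BLKmode, so callers see an ordinary
	   memory operand.  */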
4755 if (mode1 == VOIDmode
4756 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4757 && modifier != EXPAND_CONST_ADDRESS
4758 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4759 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4760 /* If the field isn't aligned enough to fetch as a memref,
4761 fetch it as a bit field. */
4762 || (SLOW_UNALIGNED_ACCESS
4763 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4764 || (SLOW_UNALIGNED_ACCESS
4765 && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4767 enum machine_mode ext_mode = mode;
4769 if (ext_mode == BLKmode)
4770 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
	    if (ext_mode == BLKmode)
	      abort ();

	    op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));
	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }
4793 /* Get a reference to just this component. */
4794 if (modifier == EXPAND_CONST_ADDRESS
4795 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4796 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4797 (bitpos / BITS_PER_UNIT)));
4799 op0 = change_address (op0, mode1,
4800 plus_constant (XEXP (op0, 0),
4801 (bitpos / BITS_PER_UNIT)));
4802 MEM_IN_STRUCT_P (op0) = 1;
4803 MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
4814 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4815 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4816 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4817 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4818 MEM_IN_STRUCT_P (temp) = 1;
4819 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4820 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4821 a location is accessed through a pointer to const does not mean
4822 that the value there can never change. */
      RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
      return temp;
4828 /* Intended for a reference to a buffer of a file-object in Pascal.
4829 But it's not certain that a special tree code will really be
4830 necessary for these. INDIRECT_REF might work for them. */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
4839 rlo = set_low - (set_low%bits_per_word);
4840 the_word = set [ (index - rlo)/bits_per_word ];
4841 bit_index = index % bits_per_word;
4842 bitmask = 1 << bit_index;
4843 return !!(the_word & bitmask); */
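	/* A worked instance of the algorithm above (assuming 8-bit units):
	   for a set with bounds [8..31] and index 13, rlo = 8, the_word =
	   set[(13-8)/8] = set[0], bit_index = 13 % 8 = 5, so the result is
	   bit 5 of the set's first byte.  */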
4845 tree set = TREE_OPERAND (exp, 0);
4846 tree index = TREE_OPERAND (exp, 1);
4847 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4848 tree set_type = TREE_TYPE (set);
4849 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4850 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4851 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4852 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4853 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4854 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4855 rtx setaddr = XEXP (setval, 0);
4856 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
4860 preexpand_calls (exp);
4862 /* If domain is empty, answer is no. Likewise if index is constant
4863 and out of bounds. */
4864 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4865 && TREE_CODE (set_low_bound) == INTEGER_CST
4866 && tree_int_cst_lt (set_high_bound, set_low_bound)
4867 || (TREE_CODE (index) == INTEGER_CST
4868 && TREE_CODE (set_low_bound) == INTEGER_CST
4869 && tree_int_cst_lt (index, set_low_bound))
4870 || (TREE_CODE (set_high_bound) == INTEGER_CST
4871 && TREE_CODE (index) == INTEGER_CST
4872 && tree_int_cst_lt (set_high_bound, index))))
4876 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4878 /* If we get here, we have to generate the code for both cases
4879 (in range and out of range). */
4881 op0 = gen_label_rtx ();
4882 op1 = gen_label_rtx ();
4884 if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_blt (op1));
	  }
4892 if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_bgt (op1));
	  }
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
4902 if (GET_CODE (lo_r) == CONST_INT)
4903 rlow = GEN_INT (INTVAL (lo_r)
4904 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4910 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4911 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4913 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4914 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4915 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4916 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));
	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx (MEM, byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
4928 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4929 GET_MODE (target) == byte_mode ? target : 0,
4930 1, OPTAB_LIB_WIDEN);
4932 if (result != target)
4933 convert_move (target, result, 1);
	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
4943 case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  cleanups_this_call
	    = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	  (*interim_eh_hook) (NULL_TREE);
	}
      return RTL_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	extern int temp_slot_level;
	tree old_cleanups = cleanups_this_call;
	int old_temp_level = target_temp_slot_level;
	push_temp_slots ();
	target_temp_slot_level = temp_slot_level;
	op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
	expand_cleanups_to (old_cleanups);
	preserve_temp_slots (op0);
	free_temp_slots ();
	pop_temp_slots ();
	target_temp_slot_level = old_temp_level;
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
4974 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4975 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4977 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4978 return expand_builtin (exp, target, subtarget, tmode, ignore);
4980 /* If this call was expanded already by preexpand_calls,
4981 just return the result we got. */
4982 if (CALL_EXPR_RTL (exp) != 0)
4983 return CALL_EXPR_RTL (exp);
4985 return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (target == 0)
	    {
	      if (mode == BLKmode)
		{
		  if (TYPE_SIZE (type) == 0
		      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		    abort ();
		  target = assign_stack_temp (BLKmode,
					      (TREE_INT_CST_LOW (TYPE_SIZE (type))
					       + BITS_PER_UNIT - 1)
					      / BITS_PER_UNIT, 0);
		  MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
		}
	      else
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
5044 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;
5048 /* If OP0 is a constant, just convert it into the proper mode. */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5054 if (modifier == EXPAND_INITIALIZER)
5055 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5057 if (flag_force_mem && GET_CODE (op0) == MEM)
5058 op0 = copy_to_reg (op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;
5074 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5075 something else, make sure we add the register to the constant and
5076 then to the other thing. This case can occur during strength
5077 reduction and doing it this way will produce better code if the
5078 frame pointer or argument pointer is eliminated.
5080 fold-const.c will ensure that the constant is always in the inner
5081 PLUS_EXPR, so the only case we need to do anything about is if
5082 sp, ap, or fp is our second argument, in which case we must swap
5083 the innermost first argument and our second argument. */
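      /* E.g. (hypothetical operands): (X + 4) + FP is rearranged so it
	 expands as (FP + 4) + X; the register-plus-constant pair then
	 stays foldable into a single address even after the frame pointer
	 is eliminated.  */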
5085 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5086 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5087 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5088 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5089 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5090 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
5098 /* If the result is to be Pmode and we are adding an integer to
5099 something, we might be forming a constant. So try to use
5100 plus_constant. If it produces a sum and we can't accept it,
5101 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
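      /* For instance, &arr[3] for a global `int arr[]' (assuming 4-byte
	 ints) arrives here as ARR + 12; plus_constant can then fold the
	 whole thing into one constant address, with no insns emitted.  */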
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == Pmode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
5121 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5122 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5123 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != Pmode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
5160 /* Make sure any term that's a sum with a constant comes last. */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
	}
5199 /* Put a constant term last and put a multiplication first. */
5200 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5201 temp = op1, op1 = op0, op0 = temp;
5203 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5204 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
5212 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5213 && really_constant_p (TREE_OPERAND (exp, 0))
5214 && really_constant_p (TREE_OPERAND (exp, 1)))
5216 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5217 VOIDmode, modifier);
5218 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5219 VOIDmode, modifier);
5221 /* If the last operand is a CONST_INT, use plus_constant of
5222 the negated constant. Else make the MINUS. */
5223 if (GET_CODE (op1) == CONST_INT)
5224 return plus_constant (op0, - INTVAL (op1));
5226 return gen_rtx (MINUS, mode, op0, op1);
5228 /* Convert A - const to A + (-const). */
5229 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));
5234 /* Deal with the case where we can't negate the constant
5236 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
5239 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5240 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5241 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
    case MULT_EXPR:
      preexpand_calls (exp);
5260 /* If first operand is constant, swap them.
5261 Thus the following special case checks need only
5262 check the second operand. */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}
5270 /* Attempt to return something suitable for generating an
5271 indexed address, for machines that support that. */
5273 if (modifier == EXPAND_SUM && mode == Pmode
5274 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5275 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);
5279 /* Apply distributive law if OP0 is x+c. */
5280 if (GET_CODE (op0) == PLUS
5281 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5282 return gen_rtx (PLUS, mode,
5283 gen_rtx (MULT, mode, XEXP (op0, 0),
5284 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5285 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5286 * INTVAL (XEXP (op0, 1))));
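	  /* E.g. with op0 = (plus (reg X) (const_int 4)) and a constant
	     multiplier of 8, this returns

		 (plus (mult (reg X) (const_int 8)) (const_int 32))

	     which is exactly the shape indexed addressing wants.  */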
5288 if (GET_CODE (op0) != REG)
5289 op0 = force_operand (op0, NULL_RTX);
5290 if (GET_CODE (op0) != REG)
5291 op0 = copy_to_mode_reg (mode, op0);
	  return gen_rtx (MULT, mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
5300 /* Check for multiplying things that have been extended
5301 from a narrower type. If this machine supports multiplying
5302 in that narrower type with a result in the desired type,
5303 do it that way, and avoid the explicit type-conversion. */
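      /* E.g. (int) s1 * (int) s2 with `short' s1, s2: on a machine that
	 provides a widening mulhisi3 pattern this becomes one
	 HImode-operand, SImode-result multiply instead of two extensions
	 followed by a full SImode multiply.  */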
5304 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5305 && TREE_CODE (type) == INTEGER_TYPE
5306 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5307 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5308 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5309 && int_fits_type_p (TREE_OPERAND (exp, 1),
5310 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5311 /* Don't use a widening multiply if a shift will do. */
5312 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5313 > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5326 enum machine_mode innermode
5327 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5328 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5329 ? umul_widen_optab : smul_widen_optab);
5330 if (mode == GET_MODE_WIDER_MODE (innermode)
5331 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
	  {
	    op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
			       NULL_RTX, VOIDmode, 0);
	    if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, 0);
	    else
	      op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				 NULL_RTX, VOIDmode, 0);
	    goto binop2;
	  }
	}
5344 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5345 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5346 return expand_mult (mode, op0, op1, target, unsignedp);
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
5356 /* Possible optimization: compute the dividend with EXPAND_SUM
5357 then if the divisor is constant can optimize the case
5358 where some terms of the dividend have coeffs divisible by it. */
5359 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5361 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
5374 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5375 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5376 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */
5383 case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5413 /* Handle complex values specially. */
5414 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5415 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5416 return expand_complex_abs (mode, op0, target, unsignedp);
      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0)));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
5429 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5430 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5431 || GET_MODE (target) != mode
5432 || (GET_CODE (target) == REG
5433 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5434 target = gen_reg_rtx (mode);
5435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5436 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
5442 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5443 : (code == MIN_EXPR ? smin_optab : smax_optab));
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
5453 if (GET_CODE (target) == MEM)
5454 target = gen_reg_rtx (mode);
5457 emit_move_insn (target, op0);
5459 op0 = gen_label_rtx ();
5461 /* If this mode is an integer too wide to compare properly,
5462 compare word by word. Rely on cse to optimize constant cases. */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
5466 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5467 target, op1, NULL_RTX, op0);
5469 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5470 op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
5476 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5477 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5478 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5481 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5482 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5483 if (temp == const0_rtx)
5484 emit_move_insn (target, op1);
5485 else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
5511 /* ??? Can optimize bitwise operations with one arg constant.
5512 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5513 and (a bitwise1 b) bitwise2 b (etc)
5514 but that is probably not worth while. */
5516 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5517 boolean values when we want in all cases to compute both of them. In
5518 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5519 as actual zero-or-1 values and then bitwise anding. In cases where
5520 there cannot be any side effects, better code would be made by
5521 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5522 how to recognize those cases. */
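      /* E.g. `f () && g ()' must not evaluate g when f is false, so it is
	 expanded as TRUTH_ANDIF_EXPR with a branch; for a side-effect-free
	 `p && q' a front end may use TRUTH_AND_EXPR instead, computing
	 both operands as 0/1 values and ANDing them.  */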
    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
5563 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
5567 && (GET_MODE (original_target)
5568 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);
5573 if (temp != original_target)
5574 temp = copy_to_reg (temp);
5576 op1 = gen_label_rtx ();
5577 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5578 GET_MODE (temp), unsignedp, 0);
5579 emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
5585 /* If no set-flag instruction, must generate a conditional
5586 store into a temporary variable. Drop through
5587 and handle this like && and ||. */
5589 case TRUTH_ANDIF_EXPR:
5590 case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp)
5593 /* Make sure we don't have a hard reg (such as function's return
5594 value) live across basic blocks, if not optimizing. */
5595 || (!optimize && GET_CODE (target) == REG
5596 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5597 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
5611 case TRUTH_NOT_EXPR:
5612 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5613 /* The parser is careful to generate TRUTH_NOT_EXPR
5614 only with operands that are always zero or one. */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      {
	rtx flag = NULL_RTX;
5631 tree left_cleanups = NULL_TREE;
5632 tree right_cleanups = NULL_TREE;
5634 /* Used to save a pointer to the place to put the setting of
5635 the flag that indicates if this side of the conditional was
5636 taken. We backpatch the code, if we find out later that we
5637 have any conditional cleanups that need to be performed. */
5638 rtx dest_right_flag = NULL_RTX;
5639 rtx dest_left_flag = NULL_RTX;
5641 /* Note that COND_EXPRs whose type is a structure or union
5642 are required to be constructed to contain assignments of
5643 a temporary variable, so that we can evaluate them here
5644 for side effect only. If type is void, we must do likewise. */
5646 /* If an arm of the branch requires a cleanup,
5647 only that cleanup is performed. */
	    tree singleton = 0;
	    tree binary_op = 0, unary_op = 0;
5651 tree old_cleanups = cleanups_this_call;
5653 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5654 convert it to our mode, if necessary. */
5655 if (integer_onep (TREE_OPERAND (exp, 1))
5656 && integer_zerop (TREE_OPERAND (exp, 2))
5657 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		if (ignore)
		  {
		    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
				 modifier);
		    return const0_rtx;
		  }

		op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
		if (GET_MODE (op0) == mode)
		  return op0;

		if (target == 0)
		  target = gen_reg_rtx (mode);
		convert_move (target, op0, unsignedp);
		return target;
	      }
	    /* If we are not to produce a result, we have no target.  Otherwise,
	       if a target was specified use it; it will not be used as an
	       intermediate target unless it is safe.  If no target, use a
	       temporary and see if we can copy the value there.  */

	    if (ignore)
	      temp = 0;
5683 else if (original_target
5684 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5685 && GET_MODE (original_target) == mode
5686 && ! (GET_CODE (original_target) == MEM
5687 && MEM_VOLATILE_P (original_target)))
5688 temp = original_target;
	    else if (mode == BLKmode)
	      {
		if (TYPE_SIZE (type) == 0
		    || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		  abort ();

		temp = assign_stack_temp (BLKmode,
5696 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5697 + BITS_PER_UNIT - 1)
5698 / BITS_PER_UNIT, 0);
5699 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
	      }
	    else
	      temp = gen_reg_rtx (mode);
5704 /* Check for X ? A + B : A. If we have this, we can copy
5705 A to the output and conditionally add B. Similarly for unary
5706 operations. Don't do this if X has side-effects because
5707 those side effects might affect A or B and the "?" operation is
5708 a sequence point in ANSI. (We test for side effects later.) */
5710 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5711 && operand_equal_p (TREE_OPERAND (exp, 2),
5712 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5713 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5714 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5715 && operand_equal_p (TREE_OPERAND (exp, 1),
5716 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5717 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5718 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5719 && operand_equal_p (TREE_OPERAND (exp, 2),
5720 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5721 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5722 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5723 && operand_equal_p (TREE_OPERAND (exp, 1),
5724 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5725 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5727 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5728 operation, do this as A + (X != 0). Similarly for other simple
5729 binary operators. */
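	    /* E.g. `x ? a + 1 : a' where X is a comparison: when
	       do_store_flag succeeds this becomes a + (x != 0), one
	       store-flag insn plus an add, with no branch at all.  */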
5730 if (temp && singleton && binary_op
5731 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5732 && (TREE_CODE (binary_op) == PLUS_EXPR
5733 || TREE_CODE (binary_op) == MINUS_EXPR
5734 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5735 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5736 && integer_onep (TREE_OPERAND (binary_op, 1))
5737 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
				: TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
				: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
				: xor_optab);
5745 /* If we had X ? A : A + 1, do this as A + (X == 0).
5747 We have to invert the truth value here and then put it
5748 back later if do_store_flag fails. We cannot simply copy
5749 TREE_OPERAND (exp, 0) to another variable and modify that
		   because invert_truthvalue can modify the tree pointed to
		   by its argument.  */
5752 if (singleton == TREE_OPERAND (exp, 1))
5753 TREE_OPERAND (exp, 0)
5754 = invert_truthvalue (TREE_OPERAND (exp, 0));
		result = do_store_flag (TREE_OPERAND (exp, 0),
					(safe_from_p (temp, singleton)
					 ? temp : NULL_RTX),
					mode, BRANCH_COST <= 1);

		if (result)
		  {
		    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		    return expand_binop (mode, boptab, op1, result, temp,
					 unsignedp, OPTAB_LIB_WIDEN);
		  }
		else if (singleton == TREE_OPERAND (exp, 1))
5768 TREE_OPERAND (exp, 0)
5769 = invert_truthvalue (TREE_OPERAND (exp, 0));
	      }

	    op0 = gen_label_rtx ();
5775 flag = gen_reg_rtx (word_mode);
	    if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	      {
		if (temp != 0)
		  {
		    /* If the target conflicts with the other operand of the
		       binary op, we can't use it.  Also, we can't use the target
		       if it is a hard register, because evaluating the condition
		       might clobber it.  */
		    if ((binary_op
			 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
			|| (GET_CODE (temp) == REG
			    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
		      temp = gen_reg_rtx (mode);
		    store_expr (singleton, temp, 0);
		  }
		else
		  expand_expr (singleton,
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5794 dest_left_flag = get_last_insn ();
5795 if (singleton == TREE_OPERAND (exp, 1))
5796 jumpif (TREE_OPERAND (exp, 0), op0);
5798 jumpifnot (TREE_OPERAND (exp, 0), op0);
5800 /* Allows cleanups up to here. */
5801 old_cleanups = cleanups_this_call;
5802 if (binary_op && temp == 0)
5803 /* Just touch the other operand. */
5804 expand_expr (TREE_OPERAND (binary_op, 1),
5805 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		else if (binary_op)
		  store_expr (build (TREE_CODE (binary_op), type,
				     make_tree (type, temp),
				     TREE_OPERAND (binary_op, 1)),
			      temp, 0);
		else
		  store_expr (build1 (TREE_CODE (unary_op), type,
				      make_tree (type, temp)),
			      temp, 0);
5816 dest_right_flag = get_last_insn ();
	      }
#if 0 /* This is now done in jump.c and is better done there because it
	 produces shorter register lifetimes.  */
5822 /* Check for both possibilities either constants or variables
5823 in registers (but not the same as the target!). If so, can
5824 save branches by assigning one, branching, and assigning the
5826 else if (temp && GET_MODE (temp) != BLKmode
5827 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5828 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5829 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5830 && DECL_RTL (TREE_OPERAND (exp, 1))
5831 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5832 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5833 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5834 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5835 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5836 && DECL_RTL (TREE_OPERAND (exp, 2))
5837 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5838 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5841 temp = gen_reg_rtx (mode);
5842 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5843 dest_left_flag = get_last_insn ();
5844 jumpifnot (TREE_OPERAND (exp, 0), op0);
5846 /* Allows cleanups up to here. */
5847 old_cleanups = cleanups_this_call;
5848 store_expr (TREE_OPERAND (exp, 1), temp, 0);
		dest_right_flag = get_last_insn ();
	      }
#endif

	    /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5854 comparison operator. If we have one of these cases, set the
5855 output to A, branch on A (cse will merge these two references),
5856 then set the output to FOO. */
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5859 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5860 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5861 TREE_OPERAND (exp, 1), 0)
5862 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5863 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5866 temp = gen_reg_rtx (mode);
5867 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5868 dest_left_flag = get_last_insn ();
5869 jumpif (TREE_OPERAND (exp, 0), op0);
5871 /* Allows cleanups up to here. */
5872 old_cleanups = cleanups_this_call;
5873 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5875 dest_right_flag = get_last_insn ();
	      }
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5879 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5880 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5881 TREE_OPERAND (exp, 2), 0)
5882 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5883 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	      {
		if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5886 temp = gen_reg_rtx (mode);
5887 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5888 dest_left_flag = get_last_insn ();
5889 jumpifnot (TREE_OPERAND (exp, 0), op0);
5891 /* Allows cleanups up to here. */
5892 old_cleanups = cleanups_this_call;
5893 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5895 dest_right_flag = get_last_insn ();
	      }
	    else
	      {
		op1 = gen_label_rtx ();
5900 jumpifnot (TREE_OPERAND (exp, 0), op0);
5902 /* Allows cleanups up to here. */
5903 old_cleanups = cleanups_this_call;
		if (temp != 0)
		  store_expr (TREE_OPERAND (exp, 1), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5909 dest_left_flag = get_last_insn ();
5911 /* Handle conditional cleanups, if any. */
5912 left_cleanups = defer_cleanups_to (old_cleanups);
		emit_jump_insn (gen_jump (op1));
		emit_barrier ();
		emit_label (op0);
		if (temp != 0)
		  store_expr (TREE_OPERAND (exp, 2), temp, 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5923 dest_right_flag = get_last_insn ();
5926 /* Handle conditional cleanups, if any. */
		right_cleanups = defer_cleanups_to (old_cleanups);
	      }

	    emit_queue ();
	    emit_label (op1);
	    /* Add back in any conditional cleanups.  */
	    if (left_cleanups || right_cleanups)
	      {
		tree new_cleanups;
		tree cond;
		rtx last;
5940 /* Now that we know that a flag is needed, go back and add in the
5941 setting of the flag. */
5943 /* Do the left side flag. */
5944 last = get_last_insn ();
5945 /* Flag left cleanups as needed. */
5946 emit_move_insn (flag, const1_rtx);
5947 /* ??? deprecated, use sequences instead. */
5948 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5950 /* Do the right side flag. */
5951 last = get_last_insn ();
		/* Flag right cleanups as needed.  */
5953 emit_move_insn (flag, const0_rtx);
5954 /* ??? deprecated, use sequences instead. */
5955 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
		/* Convert flag, which is an rtx, into a tree.  */
5958 cond = make_node (RTL_EXPR);
5959 TREE_TYPE (cond) = integer_type_node;
5960 RTL_EXPR_RTL (cond) = flag;
5961 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5963 if (! left_cleanups)
5964 left_cleanups = integer_zero_node;
5965 if (! right_cleanups)
5966 right_cleanups = integer_zero_node;
5967 new_cleanups = build (COND_EXPR, void_type_node,
5968 truthvalue_conversion (cond),
5969 left_cleanups, right_cleanups);
5970 new_cleanups = fold (new_cleanups);
5972 /* Now add in the conditionalized cleanups. */
		cleanups_this_call
		  = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
5975 (*interim_eh_hook) (NULL_TREE);
	      }
	    return temp;
	  }

    case TARGET_EXPR:
      {
	int need_exception_region = 0;
5983 /* Something needs to be initialized, but we didn't know
5984 where that thing was when building the tree. For example,
5985 it could be the return value of a function, or a parameter
5986 to a function which lays down in the stack, or a temporary
5987 variable which must be passed by reference.
5989 We guarantee that the expression will either be constructed
5990 or copied into our original target. */
	tree slot = TREE_OPERAND (exp, 0);
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();
	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
		MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
6017 /* Since SLOT is not known to the called function
6018 to belong to its stack frame, we must build an explicit
6019 cleanup. This case occurs when we must build up a reference
6020 to pass the reference as an argument. In this case,
		   it is very likely that such a reference need not be
		   built here.  */
6024 if (TREE_OPERAND (exp, 2) == 0)
6025 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		if (TREE_OPERAND (exp, 2))
		  {
		    cleanups_this_call = tree_cons (NULL_TREE,
						    TREE_OPERAND (exp, 2),
						    cleanups_this_call);
		    need_exception_region = 1;
		  }
	      }
	  }
6037 /* This case does occur, when expanding a parameter which
6038 needs to be constructed on the stack. The target
6039 is the actual stack address that we want to initialize.
6040 The function we call will perform the cleanup in this case. */
6042 /* If we have already assigned it space, use that space,
6043 not the target that we were passed in, as our target
6044 parameter is only a hint. */
6045 if (DECL_RTL (slot) != 0)
6047 target = DECL_RTL (slot);
6048 /* If we have already expanded the slot, don't do it again. */
6050 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6054 DECL_RTL (slot) = target;
6057 exp1 = TREE_OPERAND (exp, 1);
6058 /* Mark it as expanded. */
6059 TREE_OPERAND (exp, 1) = NULL_TREE;
6061 temp = expand_expr (exp1, target, tmode, modifier);
6063 if (need_exception_region)
6064 (*interim_eh_hook) (NULL_TREE);
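/* Note the asymmetry above: when we had to allocate the slot ourselves
   we also register an explicit cleanup, since the called function cannot
   know the slot belongs to our frame; when a target was supplied, the
   called function performs the cleanup itself.  */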
6071 tree lhs = TREE_OPERAND (exp, 0);
6072 tree rhs = TREE_OPERAND (exp, 1);
6073 tree noncopied_parts = 0;
6074 tree lhs_type = TREE_TYPE (lhs);
6076 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6077 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6078 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6079 TYPE_NONCOPIED_PARTS (lhs_type));
6080 while (noncopied_parts != 0)
6082 expand_assignment (TREE_VALUE (noncopied_parts),
6083 TREE_PURPOSE (noncopied_parts), 0, 0);
6084 noncopied_parts = TREE_CHAIN (noncopied_parts);
6091 /* If lhs is complex, expand calls in rhs before computing it.
6092 That's so we don't compute a pointer and save it over a call.
6093 If lhs is simple, compute it first so we can give it as a
6094 target if the rhs is just a call. This avoids an extra temp and copy
6095 and prevents a partial subsumption which makes bad code.
6096 Actually we could treat component_ref's of vars like vars. */
6098 tree lhs = TREE_OPERAND (exp, 0);
6099 tree rhs = TREE_OPERAND (exp, 1);
6100 tree noncopied_parts = 0;
6101 tree lhs_type = TREE_TYPE (lhs);
6105 if (TREE_CODE (lhs) != VAR_DECL
6106 && TREE_CODE (lhs) != RESULT_DECL
6107 && TREE_CODE (lhs) != PARM_DECL)
6108 preexpand_calls (exp);
6110 /* Check for |= or &= of a bitfield of size 1 into another bitfield
6111 of size 1. In this case, (unless we need the result of the
6112 assignment) we can do this more efficiently with a
6113 test followed by an assignment, if necessary.
6115 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6116 things change so we do, this code should be enhanced to handle it. */
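/* Concretely, for 1-bit fields this turns `s.a |= s.b' into
   `if (s.b) s.a = 1;' and `s.a &= s.b' into `if (!s.b) s.a = 0;'.  */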
6119 && TREE_CODE (lhs) == COMPONENT_REF
6120 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6121 || TREE_CODE (rhs) == BIT_AND_EXPR)
6122 && TREE_OPERAND (rhs, 0) == lhs
6123 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6124 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6125 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6127 rtx label = gen_label_rtx ();
6129 do_jump (TREE_OPERAND (rhs, 1),
6130 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6131 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6132 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6133 (TREE_CODE (rhs) == BIT_IOR_EXPR
6135 : integer_zero_node)),
6137 do_pending_stack_adjust ();
6142 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6143 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6144 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6145 TYPE_NONCOPIED_PARTS (lhs_type));
6147 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6148 while (noncopied_parts != 0)
6150 expand_assignment (TREE_PURPOSE (noncopied_parts),
6151 TREE_VALUE (noncopied_parts), 0, 0);
6152 noncopied_parts = TREE_CHAIN (noncopied_parts);
6157 case PREINCREMENT_EXPR:
6158 case PREDECREMENT_EXPR:
6159 return expand_increment (exp, 0);
6161 case POSTINCREMENT_EXPR:
6162 case POSTDECREMENT_EXPR:
6163 /* Faster to treat as pre-increment if result is not used. */
6164 return expand_increment (exp, ! ignore);
6167 /* If nonzero, TEMP will be set to the address of something that might
6168 be a MEM corresponding to a stack slot. */
6171 /* Are we taking the address of a nested function? */
6172 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6173 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6175 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6176 op0 = force_operand (op0, target);
6178 /* If we are taking the address of something erroneous, just return a zero. */
6180 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6184 /* We make sure to pass const0_rtx down if we came in with
6185 ignore set, to avoid doing the cleanups twice for something. */
6186 op0 = expand_expr (TREE_OPERAND (exp, 0),
6187 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6188 (modifier == EXPAND_INITIALIZER
6189 ? modifier : EXPAND_CONST_ADDRESS));
6191 /* If we are going to ignore the result, OP0 will have been set
6192 to const0_rtx, so just return it. Don't get confused and
6193 think we are taking the address of the constant. */
6197 /* We would like the object in memory. If it is a constant,
6198 we can have it be statically allocated into memory. For
6199 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6200 memory and store the value into it. */
6202 if (CONSTANT_P (op0))
6203 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6205 else if (GET_CODE (op0) == MEM)
6207 mark_temp_addr_taken (op0);
6208 temp = XEXP (op0, 0);
6211 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6212 || GET_CODE (op0) == CONCAT)
6214 /* If this object is in a register, copy it into a memory temporary so its address can be taken. */
6216 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6217 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6219 = assign_stack_temp (inner_mode,
6220 int_size_in_bytes (inner_type), 1);
6221 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6223 mark_temp_addr_taken (memloc);
6224 emit_move_insn (memloc, op0);
6228 if (GET_CODE (op0) != MEM)
6231 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6232 return XEXP (op0, 0);
6234 op0 = force_operand (XEXP (op0, 0), target);
6237 if (flag_force_addr && GET_CODE (op0) != REG)
6238 op0 = force_reg (Pmode, op0);
6240 if (GET_CODE (op0) == REG)
6241 mark_reg_pointer (op0);
6243 /* If we might have had a temp slot, add an equivalent address for it. */
6246 update_temp_slot_address (temp, op0);
6250 case ENTRY_VALUE_EXPR:
6253 /* COMPLEX type for Extended Pascal & Fortran */
6256 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6259 /* Get the rtx code of the operands. */
6260 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6261 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6264 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6268 /* Move the real (op0) and imaginary (op1) parts to their location. */
6269 emit_move_insn (gen_realpart (mode, target), op0);
6270 emit_move_insn (gen_imagpart (mode, target), op1);
6272 insns = get_insns ();
6275 /* Complex construction should appear as a single unit. */
6276 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6277 each with a separate pseudo as destination.
6278 It's not correct for flow to treat them as a unit. */
6279 if (GET_CODE (target) != CONCAT)
6280 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6288 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6289 return gen_realpart (mode, op0);
6292 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6293 return gen_imagpart (mode, op0);
6297 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6301 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6304 target = gen_reg_rtx (mode);
6308 /* Store the realpart and the negated imagpart to target. */
6309 emit_move_insn (gen_realpart (partmode, target),
6310 gen_realpart (partmode, op0));
6312 imag_t = gen_imagpart (partmode, target);
6313 temp = expand_unop (partmode, neg_optab,
6314 gen_imagpart (partmode, op0), imag_t, 0);
6316 emit_move_insn (imag_t, temp);
6318 insns = get_insns ();
6321 /* Conjugate should appear as a single unit.
6322 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6323 each with a separate pseudo as destination.
6324 It's not correct for flow to treat them as a unit. */
6325 if (GET_CODE (target) != CONCAT)
6326 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
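/* That is, for z = a + bi the code above computes conj(z) = a - bi:
   the real part is copied unchanged and the imaginary part negated.  */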
6334 op0 = CONST0_RTX (tmode);
6340 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6343 /* Here to do an ordinary binary operator, generating an instruction
6344 from the optab already placed in `this_optab'. */
6346 preexpand_calls (exp);
6347 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6349 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6350 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6352 temp = expand_binop (mode, this_optab, op0, op1, target,
6353 unsignedp, OPTAB_LIB_WIDEN);
6360 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6362 bc_expand_expr (exp)
6365 enum tree_code code;
6368 struct binary_operator *binoptab;
6369 struct unary_operator *unoptab;
6370 struct increment_operator *incroptab;
6371 struct bc_label *lab, *lab1;
6372 enum bytecode_opcode opcode;
6375 code = TREE_CODE (exp);
6381 if (DECL_RTL (exp) == 0)
6383 error_with_decl (exp, "prior parameter's size depends on `%s'");
6387 bc_load_parmaddr (DECL_RTL (exp));
6388 bc_load_memory (TREE_TYPE (exp), exp);
6394 if (DECL_RTL (exp) == 0)
6398 if (BYTECODE_LABEL (DECL_RTL (exp)))
6399 bc_load_externaddr (DECL_RTL (exp));
6401 bc_load_localaddr (DECL_RTL (exp));
6403 if (TREE_PUBLIC (exp))
6404 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6405 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6407 bc_load_localaddr (DECL_RTL (exp));
6409 bc_load_memory (TREE_TYPE (exp), exp);
6414 #ifdef DEBUG_PRINT_CODE
6415 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6417 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6419 : TYPE_MODE (TREE_TYPE (exp)))],
6420 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6426 #ifdef DEBUG_PRINT_CODE
6427 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6429 /* FIX THIS: find a better way to pass real_cst's. -bson */
6430 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6431 (double) TREE_REAL_CST (exp));
6440 /* We build a call description vector describing the type of
6441 the return value and of the arguments; this call vector,
6442 together with a pointer to a location for the return value
6443 and the base of the argument list, is passed to the low
6444 level machine dependent call subroutine, which is responsible
6445 for putting the arguments wherever real functions expect
6446 them, as well as getting the return value back. */
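/* The vector built below holds, roughly: the argument count, a
   (type code, size) record for the return value, and one such record
   per argument; the exact element order follows from the tree_cons
   calls below, which prepend as they go.  */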
6448 tree calldesc = 0, arg;
6452 /* Push the evaluated args on the evaluation stack in reverse
6453 order. Also make an entry for each arg in the calldesc
6454 vector while we're at it. */
6456 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6458 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6461 bc_expand_expr (TREE_VALUE (arg));
6463 calldesc = tree_cons ((tree) 0,
6464 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6466 calldesc = tree_cons ((tree) 0,
6467 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6471 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6473 /* Allocate a location for the return value and push its
6474 address on the evaluation stack. Also make an entry
6475 at the front of the calldesc for the return value type. */
6477 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6478 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6479 bc_load_localaddr (retval);
6481 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6482 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6484 /* Prepend the argument count. */
6485 calldesc = tree_cons ((tree) 0,
6486 build_int_2 (nargs, 0),
6489 /* Push the address of the call description vector on the stack. */
6490 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6491 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6492 build_index_type (build_int_2 (nargs * 2, 0)));
6493 r = output_constant_def (calldesc);
6494 bc_load_externaddr (r);
6496 /* Push the address of the function to be called. */
6497 bc_expand_expr (TREE_OPERAND (exp, 0));
6499 /* Call the function, popping its address and the calldesc vector
6500 address off the evaluation stack in the process. */
6501 bc_emit_instruction (call);
6503 /* Pop the arguments off the stack. */
6504 bc_adjust_stack (nargs);
6506 /* Load the return value onto the stack. */
6507 bc_load_localaddr (retval);
6508 bc_load_memory (type, TREE_OPERAND (exp, 0));
6514 if (!SAVE_EXPR_RTL (exp))
6516 /* First time around: copy to local variable */
6517 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6518 TYPE_ALIGN (TREE_TYPE(exp)));
6519 bc_expand_expr (TREE_OPERAND (exp, 0));
6520 bc_emit_instruction (duplicate);
6522 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6523 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6527 /* Consecutive reference: use saved copy */
6528 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6529 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6534 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6535 how are they handled instead? */
6538 TREE_USED (exp) = 1;
6539 bc_expand_expr (STMT_BODY (exp));
6546 bc_expand_expr (TREE_OPERAND (exp, 0));
6547 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6552 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6557 bc_expand_address (TREE_OPERAND (exp, 0));
6562 bc_expand_expr (TREE_OPERAND (exp, 0));
6563 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6568 bc_expand_expr (bc_canonicalize_array_ref (exp));
6573 bc_expand_component_address (exp);
6575 /* If we have a bitfield, generate a proper load */
6576 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6581 bc_expand_expr (TREE_OPERAND (exp, 0));
6582 bc_emit_instruction (drop);
6583 bc_expand_expr (TREE_OPERAND (exp, 1));
6588 bc_expand_expr (TREE_OPERAND (exp, 0));
6589 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6590 lab = bc_get_bytecode_label ();
6591 bc_emit_bytecode (xjumpifnot);
6592 bc_emit_bytecode_labelref (lab);
6594 #ifdef DEBUG_PRINT_CODE
6595 fputc ('\n', stderr);
6597 bc_expand_expr (TREE_OPERAND (exp, 1));
6598 lab1 = bc_get_bytecode_label ();
6599 bc_emit_bytecode (jump);
6600 bc_emit_bytecode_labelref (lab1);
6602 #ifdef DEBUG_PRINT_CODE
6603 fputc ('\n', stderr);
6606 bc_emit_bytecode_labeldef (lab);
6607 bc_expand_expr (TREE_OPERAND (exp, 2));
6608 bc_emit_bytecode_labeldef (lab1);
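/* The emitted bytecode has the usual if/else shape:
     <cond>; xjumpifnot L1; <then-part>; jump L2; L1: <else-part>; L2:  */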
6611 case TRUTH_ANDIF_EXPR:
6613 opcode = xjumpifnot;
6616 case TRUTH_ORIF_EXPR:
6623 binoptab = optab_plus_expr;
6628 binoptab = optab_minus_expr;
6633 binoptab = optab_mult_expr;
6636 case TRUNC_DIV_EXPR:
6637 case FLOOR_DIV_EXPR:
6639 case ROUND_DIV_EXPR:
6640 case EXACT_DIV_EXPR:
6642 binoptab = optab_trunc_div_expr;
6645 case TRUNC_MOD_EXPR:
6646 case FLOOR_MOD_EXPR:
6648 case ROUND_MOD_EXPR:
6650 binoptab = optab_trunc_mod_expr;
6653 case FIX_ROUND_EXPR:
6654 case FIX_FLOOR_EXPR:
6656 abort (); /* Not used for C. */
6658 case FIX_TRUNC_EXPR:
6665 abort (); /* FIXME */
6669 binoptab = optab_rdiv_expr;
6674 binoptab = optab_bit_and_expr;
6679 binoptab = optab_bit_ior_expr;
6684 binoptab = optab_bit_xor_expr;
6689 binoptab = optab_lshift_expr;
6694 binoptab = optab_rshift_expr;
6697 case TRUTH_AND_EXPR:
6699 binoptab = optab_truth_and_expr;
6704 binoptab = optab_truth_or_expr;
6709 binoptab = optab_lt_expr;
6714 binoptab = optab_le_expr;
6719 binoptab = optab_ge_expr;
6724 binoptab = optab_gt_expr;
6729 binoptab = optab_eq_expr;
6734 binoptab = optab_ne_expr;
6739 unoptab = optab_negate_expr;
6744 unoptab = optab_bit_not_expr;
6747 case TRUTH_NOT_EXPR:
6749 unoptab = optab_truth_not_expr;
6752 case PREDECREMENT_EXPR:
6754 incroptab = optab_predecrement_expr;
6757 case PREINCREMENT_EXPR:
6759 incroptab = optab_preincrement_expr;
6762 case POSTDECREMENT_EXPR:
6764 incroptab = optab_postdecrement_expr;
6767 case POSTINCREMENT_EXPR:
6769 incroptab = optab_postincrement_expr;
6774 bc_expand_constructor (exp);
6784 tree vars = TREE_OPERAND (exp, 0);
6785 int vars_need_expansion = 0;
6787 /* Need to open a binding contour here because
6788 if there are any cleanups they must be contained here. */
6789 expand_start_bindings (0);
6791 /* Mark the corresponding BLOCK for output. */
6792 if (TREE_OPERAND (exp, 2) != 0)
6793 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6795 /* If VARS have not yet been expanded, expand them now. */
6798 if (DECL_RTL (vars) == 0)
6800 vars_need_expansion = 1;
6803 expand_decl_init (vars);
6804 vars = TREE_CHAIN (vars);
6807 bc_expand_expr (TREE_OPERAND (exp, 1));
6809 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6819 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6820 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6826 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6832 bc_expand_expr (TREE_OPERAND (exp, 0));
6833 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6834 lab = bc_get_bytecode_label ();
6836 bc_emit_instruction (duplicate);
6837 bc_emit_bytecode (opcode);
6838 bc_emit_bytecode_labelref (lab);
6840 #ifdef DEBUG_PRINT_CODE
6841 fputc ('\n', stderr);
6844 bc_emit_instruction (drop);
6846 bc_expand_expr (TREE_OPERAND (exp, 1));
6847 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6848 bc_emit_bytecode_labeldef (lab);
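/* Short-circuit shape: the first operand is duplicated and tested; if
   it decides the result we jump to LAB, keeping the copy as the value,
   otherwise the copy is dropped and the second operand is evaluated.  */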
6854 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6856 /* Push the quantum. */
6857 bc_expand_expr (TREE_OPERAND (exp, 1));
6859 /* Convert it to the lvalue's type. */
6860 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6862 /* Push the address of the lvalue */
6863 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6865 /* Perform actual increment */
6866 bc_expand_increment (incroptab, type);
6870 /* Return the alignment in bits of EXP, a pointer valued expression.
6871 But don't return more than MAX_ALIGN no matter what.
6872 The alignment returned is, by default, the alignment of the thing that
6873 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6875 Otherwise, look at the expression to see if we can do better, i.e., if the
6876 expression is actually pointing at an object whose alignment is tighter. */
6879 get_pointer_alignment (exp, max_align)
6883 unsigned align, inner;
6885 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6888 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6889 align = MIN (align, max_align);
6893 switch (TREE_CODE (exp))
6897 case NON_LVALUE_EXPR:
6898 exp = TREE_OPERAND (exp, 0);
6899 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6901 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6902 align = MIN (inner, max_align);
6906 /* If sum of pointer + int, restrict our maximum alignment to that
6907 imposed by the integer. If not, we can't do any better than ALIGN. */
6909 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6912 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6917 exp = TREE_OPERAND (exp, 0);
6921 /* See what we are pointing at and look at its alignment. */
6922 exp = TREE_OPERAND (exp, 0);
6923 if (TREE_CODE (exp) == FUNCTION_DECL)
6924 align = FUNCTION_BOUNDARY;
6925 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6926 align = DECL_ALIGN (exp);
6927 #ifdef CONSTANT_ALIGNMENT
6928 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6929 align = CONSTANT_ALIGNMENT (exp, align);
6931 return MIN (align, max_align);
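/* For example, for `(int *) ((char *) p + 2)' with P 32-bit aligned,
   the PLUS_EXPR case halves MAX_ALIGN until it divides the offset in
   bits (here 16), capping the result at a 16-bit alignment.  */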
6939 /* Return the tree node and offset if a given argument corresponds to
6940 a string constant. */
6943 string_constant (arg, ptr_offset)
6949 if (TREE_CODE (arg) == ADDR_EXPR
6950 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6952 *ptr_offset = integer_zero_node;
6953 return TREE_OPERAND (arg, 0);
6955 else if (TREE_CODE (arg) == PLUS_EXPR)
6957 tree arg0 = TREE_OPERAND (arg, 0);
6958 tree arg1 = TREE_OPERAND (arg, 1);
6963 if (TREE_CODE (arg0) == ADDR_EXPR
6964 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6967 return TREE_OPERAND (arg0, 0);
6969 else if (TREE_CODE (arg1) == ADDR_EXPR
6970 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6973 return TREE_OPERAND (arg1, 0);
6980 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6981 way, because it could contain a zero byte in the middle.
6982 TREE_STRING_LENGTH is the size of the character array, not the string.
6984 Unfortunately, string_constant can't access the values of const char
6985 arrays with initializers, so neither can we do so here. */
6995 src = string_constant (src, &offset_node);
6998 max = TREE_STRING_LENGTH (src);
6999 ptr = TREE_STRING_POINTER (src);
7000 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7002 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7003 compute the offset to the following null if we don't know where to
7004 start searching for it. */
7006 for (i = 0; i < max; i++)
7009 /* We don't know the starting offset, but we do know that the string
7010 has no internal zero bytes. We can assume that the offset falls
7011 within the bounds of the string; otherwise, the programmer deserves
7012 what he gets. Subtract the offset from the length of the string, and return that. */
7014 /* This would perhaps not be valid if we were dealing with named
7015 arrays in addition to literal string constants. */
7016 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7019 /* We have a known offset into the string. Start searching there for
7020 a null character. */
7021 if (offset_node == 0)
7025 /* Did we get a long long offset? If so, punt. */
7026 if (TREE_INT_CST_HIGH (offset_node) != 0)
7028 offset = TREE_INT_CST_LOW (offset_node);
7030 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7032 if (offset < 0 || offset > max)
7034 warning ("offset outside bounds of constant string");
7037 /* Use strlen to search for the first zero byte. Since any strings
7038 constructed with build_string will have nulls appended, we win even
7039 if we get handed something like (char[4])"abcd".
7041 Since OFFSET is our starting index into the string, no further
7042 calculation is needed. */
7043 return size_int (strlen (ptr + offset));
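/* So c_strlen of `"hello" + 2' yields 3.  When the offset is not a
   compile-time constant and the string contains an embedded zero byte,
   no length can be computed, since we cannot tell which null byte
   terminates the string.  */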
7046 /* Expand an expression EXP that calls a built-in function,
7047 with result going to TARGET if that's convenient
7048 (and in mode MODE if that's convenient).
7049 SUBTARGET may be used as the target for computing one of EXP's operands.
7050 IGNORE is nonzero if the value is to be ignored. */
7052 #define CALLED_AS_BUILT_IN(NODE) \
7053 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
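/* This test lets an explicit `__builtin_foo' call be expanded inline
   even when not optimizing; a plain `foo' call gets the inline
   treatment only when `optimize' is set (see the uses below).  */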
7056 expand_builtin (exp, target, subtarget, mode, ignore)
7060 enum machine_mode mode;
7063 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7064 tree arglist = TREE_OPERAND (exp, 1);
7067 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7068 optab builtin_optab;
7070 switch (DECL_FUNCTION_CODE (fndecl))
7075 /* build_function_call changes these into ABS_EXPR. */
7080 /* Treat these like sqrt, but only if the user asks for them. */
7081 if (! flag_fast_math)
7083 case BUILT_IN_FSQRT:
7084 /* If not optimizing, call the library function. */
7089 /* Arg could be wrong type if user redeclared this fcn wrong. */
7090 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7093 /* Stabilize and compute the argument. */
7094 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7095 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7097 exp = copy_node (exp);
7098 arglist = copy_node (arglist);
7099 TREE_OPERAND (exp, 1) = arglist;
7100 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7102 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7104 /* Make a suitable register to place result in. */
7105 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7110 switch (DECL_FUNCTION_CODE (fndecl))
7113 builtin_optab = sin_optab; break;
7115 builtin_optab = cos_optab; break;
7116 case BUILT_IN_FSQRT:
7117 builtin_optab = sqrt_optab; break;
7122 /* Compute into TARGET.
7123 Set TARGET to wherever the result comes back. */
7124 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7125 builtin_optab, op0, target, 0);
7127 /* If we were unable to expand via the builtin, stop the
7128 sequence (without outputting the insns) and break, causing
7129 a call to the library function. */
7136 /* Check the results by default. But if flag_fast_math is turned on,
7137 then assume sqrt will always be called with valid arguments. */
7139 if (! flag_fast_math)
7141 /* Don't define the builtin FP instructions
7142 if your machine is not IEEE. */
7143 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7146 lab1 = gen_label_rtx ();
7148 /* Test the result; if it is NaN, set errno=EDOM because
7149 the argument was not in the domain. */
7150 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7151 emit_jump_insn (gen_beq (lab1));
7155 #ifdef GEN_ERRNO_RTX
7156 rtx errno_rtx = GEN_ERRNO_RTX;
7159 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7162 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7165 /* We can't set errno=EDOM directly; let the library call do it.
7166 Pop the arguments right away in case the call gets deleted. */
7168 expand_call (exp, target, 0);
7175 /* Output the entire sequence. */
7176 insns = get_insns ();
7182 /* __builtin_apply_args returns a block of memory allocated on
7183 the stack into which is stored the arg pointer, structure
7184 value address, static chain, and all the registers that might
7185 possibly be used in performing a function call. The code is
7186 moved to the start of the function so the incoming values are saved. */
7188 case BUILT_IN_APPLY_ARGS:
7189 /* Don't do __builtin_apply_args more than once in a function.
7190 Save the result of the first call and reuse it. */
7191 if (apply_args_value != 0)
7192 return apply_args_value;
7194 /* When this function is called, it means that registers must be
7195 saved on entry to this function. So we migrate the
7196 call to the first insn of this function. */
7201 temp = expand_builtin_apply_args ();
7205 apply_args_value = temp;
7207 /* Put the sequence after the NOTE that starts the function.
7208 If this is inside a SEQUENCE, make the outer-level insn
7209 chain current, so the code is placed at the start of the function. */
7211 push_topmost_sequence ();
7212 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7213 pop_topmost_sequence ();
7217 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7218 FUNCTION with a copy of the parameters described by
7219 ARGUMENTS, and ARGSIZE. It returns a block of memory
7220 allocated on the stack into which is stored all the registers
7221 that might possibly be used for returning the result of a
7222 function. ARGUMENTS is the value returned by
7223 __builtin_apply_args. ARGSIZE is the number of bytes of
7224 arguments that must be copied. ??? How should this value be
7225 computed? We'll also need a safe worst case value for varargs functions. */
7227 case BUILT_IN_APPLY:
7229 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7230 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7231 || TREE_CHAIN (arglist) == 0
7232 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7233 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7234 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7242 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7243 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7245 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7248 /* __builtin_return (RESULT) causes the function to return the
7249 value described by RESULT. RESULT is the address of the block of
7250 memory returned by __builtin_apply. */
7251 case BUILT_IN_RETURN:
7253 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7254 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7255 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7256 NULL_RTX, VOIDmode, 0));
7259 case BUILT_IN_SAVEREGS:
7260 /* Don't do __builtin_saveregs more than once in a function.
7261 Save the result of the first call and reuse it. */
7262 if (saveregs_value != 0)
7263 return saveregs_value;
7265 /* When this function is called, it means that registers must be
7266 saved on entry to this function. So we migrate the
7267 call to the first insn of this function. */
7271 /* Now really call the function. `expand_call' does not call
7272 expand_builtin, so there is no danger of infinite recursion here. */
7275 #ifdef EXPAND_BUILTIN_SAVEREGS
7276 /* Do whatever the machine needs done in this case. */
7277 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7279 /* The register where the function returns its value
7280 is likely to have something else in it, such as an argument.
7281 So preserve that register around the call. */
7283 if (value_mode != VOIDmode)
7285 rtx valreg = hard_libcall_value (value_mode);
7286 rtx saved_valreg = gen_reg_rtx (value_mode);
7288 emit_move_insn (saved_valreg, valreg);
7289 temp = expand_call (exp, target, ignore);
7290 emit_move_insn (valreg, saved_valreg);
7293 /* Generate the call, putting the value in a pseudo. */
7294 temp = expand_call (exp, target, ignore);
7300 saveregs_value = temp;
7302 /* Put the sequence after the NOTE that starts the function.
7303 If this is inside a SEQUENCE, make the outer-level insn
7304 chain current, so the code is placed at the start of the function. */
7306 push_topmost_sequence ();
7307 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7308 pop_topmost_sequence ();
7312 /* __builtin_args_info (N) returns word N of the arg space info
7313 for the current function. The number and meanings of words
7314 are controlled by the definition of CUMULATIVE_ARGS. */
7315 case BUILT_IN_ARGS_INFO:
7317 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7319 int *word_ptr = (int *) &current_function_args_info;
7320 tree type, elts, result;
7322 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7323 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7324 __FILE__, __LINE__);
7328 tree arg = TREE_VALUE (arglist);
7329 if (TREE_CODE (arg) != INTEGER_CST)
7330 error ("argument of `__builtin_args_info' must be constant");
7333 int wordnum = TREE_INT_CST_LOW (arg);
7335 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7336 error ("argument of `__builtin_args_info' out of range");
7338 return GEN_INT (word_ptr[wordnum]);
7342 error ("missing argument in `__builtin_args_info'");
7347 for (i = 0; i < nwords; i++)
7348 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7350 type = build_array_type (integer_type_node,
7351 build_index_type (build_int_2 (nwords, 0)));
7352 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7353 TREE_CONSTANT (result) = 1;
7354 TREE_STATIC (result) = 1;
7355 result = build (INDIRECT_REF, build_pointer_type (type), result);
7356 TREE_CONSTANT (result) = 1;
7357 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7361 /* Return the address of the first anonymous stack arg. */
7362 case BUILT_IN_NEXT_ARG:
7364 tree fntype = TREE_TYPE (current_function_decl);
7366 if ((TYPE_ARG_TYPES (fntype) == 0
7367 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7369 && ! current_function_varargs)
7371 error ("`va_start' used in function with fixed args");
7377 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7378 tree arg = TREE_VALUE (arglist);
7380 /* Strip off all nops for the sake of the comparison. This
7381 is not quite the same as STRIP_NOPS. It does more. */
7382 while (TREE_CODE (arg) == NOP_EXPR
7383 || TREE_CODE (arg) == CONVERT_EXPR
7384 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7385 arg = TREE_OPERAND (arg, 0);
7386 if (arg != last_parm)
7387 warning ("second parameter of `va_start' not last named argument");
7390 /* Evidently an out of date version of <stdarg.h>; can't validate
7391 va_start's second argument, but can still work as intended. */
7392 warning ("`__builtin_next_arg' called without an argument");
7395 return expand_binop (Pmode, add_optab,
7396 current_function_internal_arg_pointer,
7397 current_function_arg_offset_rtx,
7398 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7400 case BUILT_IN_CLASSIFY_TYPE:
7403 tree type = TREE_TYPE (TREE_VALUE (arglist));
7404 enum tree_code code = TREE_CODE (type);
7405 if (code == VOID_TYPE)
7406 return GEN_INT (void_type_class);
7407 if (code == INTEGER_TYPE)
7408 return GEN_INT (integer_type_class);
7409 if (code == CHAR_TYPE)
7410 return GEN_INT (char_type_class);
7411 if (code == ENUMERAL_TYPE)
7412 return GEN_INT (enumeral_type_class);
7413 if (code == BOOLEAN_TYPE)
7414 return GEN_INT (boolean_type_class);
7415 if (code == POINTER_TYPE)
7416 return GEN_INT (pointer_type_class);
7417 if (code == REFERENCE_TYPE)
7418 return GEN_INT (reference_type_class);
7419 if (code == OFFSET_TYPE)
7420 return GEN_INT (offset_type_class);
7421 if (code == REAL_TYPE)
7422 return GEN_INT (real_type_class);
7423 if (code == COMPLEX_TYPE)
7424 return GEN_INT (complex_type_class);
7425 if (code == FUNCTION_TYPE)
7426 return GEN_INT (function_type_class);
7427 if (code == METHOD_TYPE)
7428 return GEN_INT (method_type_class);
7429 if (code == RECORD_TYPE)
7430 return GEN_INT (record_type_class);
7431 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7432 return GEN_INT (union_type_class);
7433 if (code == ARRAY_TYPE)
7435 if (TYPE_STRING_FLAG (type))
7436 return GEN_INT (string_type_class);
7438 return GEN_INT (array_type_class);
7440 if (code == SET_TYPE)
7441 return GEN_INT (set_type_class);
7442 if (code == FILE_TYPE)
7443 return GEN_INT (file_type_class);
7444 if (code == LANG_TYPE)
7445 return GEN_INT (lang_type_class);
7447 return GEN_INT (no_type_class);
7449 case BUILT_IN_CONSTANT_P:
7453 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7454 ? const1_rtx : const0_rtx);
7456 case BUILT_IN_FRAME_ADDRESS:
7457 /* The argument must be a nonnegative integer constant.
7458 It counts the number of frames to scan up the stack.
7459 The value is the address of that frame. */
7460 case BUILT_IN_RETURN_ADDRESS:
7461 /* The argument must be a nonnegative integer constant.
7462 It counts the number of frames to scan up the stack.
7463 The value is the return address saved in that frame. */
7465 /* Warning about missing arg was already issued. */
7467 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7469 error ("invalid arg to `__builtin_return_address'");
7472 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7474 error ("invalid arg to `__builtin_return_address'");
7479 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7480 rtx tem = frame_pointer_rtx;
7483 /* Some machines need special handling before we can access arbitrary
7484 frames. For example, on the sparc, we must first flush all
7485 register windows to the stack. */
7486 #ifdef SETUP_FRAME_ADDRESSES
7487 SETUP_FRAME_ADDRESSES ();
7490 /* On the sparc, the return address is not in the frame, it is
7491 in a register. There is no way to access it off of the current
7492 frame pointer, but it can be accessed off the previous frame
7493 pointer by reading the value from the register window save
7495 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7496 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7500 /* Scan back COUNT frames to the specified frame. */
7501 for (i = 0; i < count; i++)
7503 /* Assume the dynamic chain pointer is in the word that
7504 the frame address points to, unless otherwise specified. */
7505 #ifdef DYNAMIC_CHAIN_ADDRESS
7506 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7508 tem = memory_address (Pmode, tem);
7509 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7512 /* For __builtin_frame_address, return what we've got. */
7513 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7516 /* For __builtin_return_address,
7517 get the return address from that frame. */
7518 #ifdef RETURN_ADDR_RTX
7519 return RETURN_ADDR_RTX (count, tem);
7521 tem = memory_address (Pmode,
7522 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7523 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
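/* This default assumes the return address is saved in the word just
   after the one the frame address points to; machines where that is
   not true are expected to define RETURN_ADDR_RTX instead.  */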
7527 case BUILT_IN_ALLOCA:
7529 /* Arg could be non-integer if user redeclared this fcn wrong. */
7530 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7533 /* Compute the argument. */
7534 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7536 /* Allocate the desired space. */
7537 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7540 /* If not optimizing, call the library function. */
7541 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7545 /* Arg could be non-integer if user redeclared this fcn wrong. */
7546 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7549 /* Compute the argument. */
7550 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7551 /* Compute ffs, into TARGET if possible.
7552 Set TARGET to wherever the result comes back. */
7553 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7554 ffs_optab, op0, target, 1);
7559 case BUILT_IN_STRLEN:
7560 /* If not optimizing, call the library function. */
7561 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7565 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7566 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7570 tree src = TREE_VALUE (arglist);
7571 tree len = c_strlen (src);
7574 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7576 rtx result, src_rtx, char_rtx;
7577 enum machine_mode insn_mode = value_mode, char_mode;
7578 enum insn_code icode;
7580 /* If the length is known, just return it. */
7582 return expand_expr (len, target, mode, 0);
7584 /* If SRC is not a pointer type, don't do this operation inline. */
7588 /* Call a function if we can't compute strlen in the right mode. */
7590 while (insn_mode != VOIDmode)
7592 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7593 if (icode != CODE_FOR_nothing)
7596 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7598 if (insn_mode == VOIDmode)
7601 /* Make a place to write the result of the instruction. */
7604 && GET_CODE (result) == REG
7605 && GET_MODE (result) == insn_mode
7606 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7607 result = gen_reg_rtx (insn_mode);
7609 /* Make sure the operands are acceptable to the predicates. */
7611 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7612 result = gen_reg_rtx (insn_mode);
7614 src_rtx = memory_address (BLKmode,
7615 expand_expr (src, NULL_RTX, Pmode,
7617 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7618 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7620 char_rtx = const0_rtx;
7621 char_mode = insn_operand_mode[(int)icode][2];
7622 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7623 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7625 emit_insn (GEN_FCN (icode) (result,
7626 gen_rtx (MEM, BLKmode, src_rtx),
7627 char_rtx, GEN_INT (align)));
7629 /* Return the value in the proper mode for this function. */
7630 if (GET_MODE (result) == value_mode)
7632 else if (target != 0)
7634 convert_move (target, result, 0);
7638 return convert_to_mode (value_mode, result, 0);
7641 case BUILT_IN_STRCPY:
7642 /* If not optimizing, call the library function. */
7643 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7647 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7648 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7649 || TREE_CHAIN (arglist) == 0
7650 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7654 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7659 len = size_binop (PLUS_EXPR, len, integer_one_node);
7661 chainon (arglist, build_tree_list (NULL_TREE, len));
7665 case BUILT_IN_MEMCPY:
7666 /* If not optimizing, call the library function. */
7667 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7671 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7672 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7673 || TREE_CHAIN (arglist) == 0
7674 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7675 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7676 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7680 tree dest = TREE_VALUE (arglist);
7681 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7682 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7685 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7687 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7688 rtx dest_rtx, dest_mem, src_mem;
7690 /* If either SRC or DEST is not a pointer type, don't do
7691 this operation in-line. */
7692 if (src_align == 0 || dest_align == 0)
7694 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7695 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7699 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7700 dest_mem = gen_rtx (MEM, BLKmode,
7701 memory_address (BLKmode, dest_rtx));
7702 src_mem = gen_rtx (MEM, BLKmode,
7703 memory_address (BLKmode,
7704 expand_expr (src, NULL_RTX,
7708 /* Copy word part most expediently. */
7709 emit_block_move (dest_mem, src_mem,
7710 expand_expr (len, NULL_RTX, VOIDmode, 0),
7711 MIN (src_align, dest_align));
7715 /* These comparison functions need an instruction that returns an actual
7716 index. An ordinary compare that just sets the condition codes is not enough. */
7718 #ifdef HAVE_cmpstrsi
7719 case BUILT_IN_STRCMP:
7720 /* If not optimizing, call the library function. */
7721 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7725 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7726 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7727 || TREE_CHAIN (arglist) == 0
7728 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7730 else if (!HAVE_cmpstrsi)
7733 tree arg1 = TREE_VALUE (arglist);
7734 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7738 len = c_strlen (arg1);
7740 len = size_binop (PLUS_EXPR, integer_one_node, len);
7741 len2 = c_strlen (arg2);
7743 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7745 /* If we don't have a constant length for the first, use the length
7746 of the second, if we know it. We don't require a constant for
7747 this case; some cost analysis could be done if both are available
7748 but neither is constant. For now, assume they're equally cheap.
7750 If both strings have constant lengths, use the smaller. This
7751 could arise if optimization results in strcpy being called with
7752 two fixed strings, or if the code was machine-generated. We should
7753 add some code to the `memcmp' handler below to deal with such
7754 situations, someday. */
7755 if (!len || TREE_CODE (len) != INTEGER_CST)
7762 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7764 if (tree_int_cst_lt (len2, len))
7768 chainon (arglist, build_tree_list (NULL_TREE, len));
7772 case BUILT_IN_MEMCMP:
7773 /* If not optimizing, call the library function. */
7774 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7778 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7779 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7780 || TREE_CHAIN (arglist) == 0
7781 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7782 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7783 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7785 else if (!HAVE_cmpstrsi)
7788 tree arg1 = TREE_VALUE (arglist);
7789 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7790 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7794 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7796 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7797 enum machine_mode insn_mode
7798 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7800 /* If we don't have POINTER_TYPE, call the function. */
7801 if (arg1_align == 0 || arg2_align == 0)
7803 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7804 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7808 /* Make a place to write the result of the instruction. */
7811 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7812 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7813 result = gen_reg_rtx (insn_mode);
7815 emit_insn (gen_cmpstrsi (result,
7816 gen_rtx (MEM, BLKmode,
7817 expand_expr (arg1, NULL_RTX, Pmode,
7819 gen_rtx (MEM, BLKmode,
7820 expand_expr (arg2, NULL_RTX, Pmode,
7822 expand_expr (len, NULL_RTX, VOIDmode, 0),
7823 GEN_INT (MIN (arg1_align, arg2_align))));
7825 /* Return the value in the proper mode for this function. */
7826 mode = TYPE_MODE (TREE_TYPE (exp));
7827 if (GET_MODE (result) == mode)
7829 else if (target != 0)
7831 convert_move (target, result, 0);
7835 return convert_to_mode (mode, result, 0);
7838 case BUILT_IN_STRCMP:
7839 case BUILT_IN_MEMCMP:
7843 default: /* just do library call, if unknown builtin */
7844 error ("built-in function `%s' not currently supported",
7845 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7848 /* The switch statement above can drop through to cause the function
7849 to be called normally. */
7851 return expand_call (exp, target, ignore);
7854 /* Built-in functions to perform an untyped call and return. */
7856 /* For each register that may be used for calling a function, this
7857 gives a mode used to copy the register's value. VOIDmode indicates
7858 the register is not used for calling a function. If the machine
7859 has register windows, this gives only the outbound registers.
7860 INCOMING_REGNO gives the corresponding inbound register. */
7861 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7863 /* For each register that may be used for returning values, this gives
7864 a mode used to copy the register's value. VOIDmode indicates the
7865 register is not used for returning values. If the machine has
7866 register windows, this gives only the outbound registers.
7867 INCOMING_REGNO gives the corresponding inbound register. */
7868 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7870 /* For each register that may be used for calling a function, this
7871 gives the offset of that register into the block returned by
7872 __builtin_apply_args. 0 indicates that the register is not
7873 used for calling a function. */
7874 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7876 /* Return the offset of register REGNO into the block returned by
7877 __builtin_apply_args. This is not declared static, since it is
7878 needed in objc-act.c. */
7881 apply_args_register_offset (regno)
7886 /* Arguments are always put in outgoing registers (in the argument
7887 block) when that makes sense. */
7888 #ifdef OUTGOING_REGNO
7889 regno = OUTGOING_REGNO(regno);
7891 return apply_args_reg_offset[regno];
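/* The block laid out by apply_args_size below starts with the incoming
   arg pointer, then the structure value address (unless it is passed as
   an invisible first argument), then each argument register in turn,
   each offset rounded up to the alignment of the register's mode.  */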
7894 /* Return the size required for the block returned by __builtin_apply_args,
7895 and initialize apply_args_mode. */
7900 static int size = -1;
7902 enum machine_mode mode;
7904 /* The values computed by this function never change. */
7907 /* The first value is the incoming arg-pointer. */
7908 size = GET_MODE_SIZE (Pmode);
7910 /* The second value is the structure value address unless this is
7911 passed as an "invisible" first argument. */
7912 if (struct_value_rtx)
7913 size += GET_MODE_SIZE (Pmode);
7915 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7916 if (FUNCTION_ARG_REGNO_P (regno))
7918 /* Search for the proper mode for copying this register's
7919 value. I'm not sure this is right, but it works so far. */
7920 enum machine_mode best_mode = VOIDmode;
7922 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7924 mode = GET_MODE_WIDER_MODE (mode))
7925 if (HARD_REGNO_MODE_OK (regno, mode)
7926 && HARD_REGNO_NREGS (regno, mode) == 1)
7929 if (best_mode == VOIDmode)
7930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7932 mode = GET_MODE_WIDER_MODE (mode))
7933 if (HARD_REGNO_MODE_OK (regno, mode)
7934 && (mov_optab->handlers[(int) mode].insn_code
7935 != CODE_FOR_nothing))
7939 if (mode == VOIDmode)
7942 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7943 if (size % align != 0)
7944 size = CEIL (size, align) * align;
7945 apply_args_reg_offset[regno] = size;
7946 size += GET_MODE_SIZE (mode);
7947 apply_args_mode[regno] = mode;
7951 apply_args_mode[regno] = VOIDmode;
7952 apply_args_reg_offset[regno] = 0;
7958 /* Return the size required for the block returned by __builtin_apply,
7959 and initialize apply_result_mode. */
7962 apply_result_size ()
7964 static int size = -1;
7966 enum machine_mode mode;
7968 /* The values computed by this function never change. */
7973 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7974 if (FUNCTION_VALUE_REGNO_P (regno))
7976 /* Search for the proper mode for copying this register's
7977 value. I'm not sure this is right, but it works so far. */
7978 enum machine_mode best_mode = VOIDmode;
7980 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7982 mode = GET_MODE_WIDER_MODE (mode))
7983 if (HARD_REGNO_MODE_OK (regno, mode))
7986 if (best_mode == VOIDmode)
7987 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7989 mode = GET_MODE_WIDER_MODE (mode))
7990 if (HARD_REGNO_MODE_OK (regno, mode)
7991 && (mov_optab->handlers[(int) mode].insn_code
7992 != CODE_FOR_nothing))
7996 if (mode == VOIDmode)
7999 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8000 if (size % align != 0)
8001 size = CEIL (size, align) * align;
8002 size += GET_MODE_SIZE (mode);
8003 apply_result_mode[regno] = mode;
8006 apply_result_mode[regno] = VOIDmode;
8008 /* Allow targets that use untyped_call and untyped_return to override
8009 the size so that machine-specific information can be stored here. */
8010 #ifdef APPLY_RESULT_SIZE
8011 size = APPLY_RESULT_SIZE;
8017 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8018 /* Create a vector describing the result block RESULT. If SAVEP is true,
8019 the result block is used to save the values; otherwise it is used to
8020 restore the values. */
8023 result_vector (savep, result)
8027 int regno, size, align, nelts;
8028 enum machine_mode mode;
8030 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8033 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8034 if ((mode = apply_result_mode[regno]) != VOIDmode)
8036 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8037 if (size % align != 0)
8038 size = CEIL (size, align) * align;
8039 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8040 mem = change_address (result, mode,
8041 plus_constant (XEXP (result, 0), size));
8042 savevec[nelts++] = (savep
8043 ? gen_rtx (SET, VOIDmode, mem, reg)
8044 : gen_rtx (SET, VOIDmode, reg, mem));
8045 size += GET_MODE_SIZE (mode);
8047 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8049 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8051 /* Save the state required to perform an untyped call with the same
8052 arguments as were passed to the current function. */
8055 expand_builtin_apply_args ()
8058 int size, align, regno;
8059 enum machine_mode mode;
8061 /* Create a block where the arg-pointer, structure value address,
8062 and argument registers can be saved. */
8063 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8065 /* Walk past the arg-pointer and structure value address. */
8066 size = GET_MODE_SIZE (Pmode);
8067 if (struct_value_rtx)
8068 size += GET_MODE_SIZE (Pmode);
8070 /* Save each register used in calling a function to the block. */
8071 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8072 if ((mode = apply_args_mode[regno]) != VOIDmode)
8074 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8075 if (size % align != 0)
8076 size = CEIL (size, align) * align;
8077 emit_move_insn (change_address (registers, mode,
8078 plus_constant (XEXP (registers, 0),
8080 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
8081 size += GET_MODE_SIZE (mode);
8084 /* Save the arg pointer to the block. */
8085 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8086 copy_to_reg (virtual_incoming_args_rtx));
8087 size = GET_MODE_SIZE (Pmode);
8089 /* Save the structure value address unless this is passed as an
8090 "invisible" first argument. */
8091 if (struct_value_incoming_rtx)
8093 emit_move_insn (change_address (registers, Pmode,
8094 plus_constant (XEXP (registers, 0),
8096 copy_to_reg (struct_value_incoming_rtx));
8097 size += GET_MODE_SIZE (Pmode);
8100 /* Return the address of the block. */
8101 return copy_addr_to_reg (XEXP (registers, 0));
8104 /* Perform an untyped call and save the state required to perform an
8105 untyped return of whatever value was returned by the given function. */
8108 expand_builtin_apply (function, arguments, argsize)
8109 rtx function, arguments, argsize;
8111 int size, align, regno;
8112 enum machine_mode mode;
8113 rtx incoming_args, result, reg, dest, call_insn;
8114 rtx old_stack_level = 0;
8115 rtx call_fusage = 0;
8117 /* Create a block where the return registers can be saved. */
8118 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8120 /* ??? The argsize value should be adjusted here. */
8122 /* Fetch the arg pointer from the ARGUMENTS block. */
8123 incoming_args = gen_reg_rtx (Pmode);
8124 emit_move_insn (incoming_args,
8125 gen_rtx (MEM, Pmode, arguments));
8126 #ifndef STACK_GROWS_DOWNWARD
8127 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8128 incoming_args, 0, OPTAB_LIB_WIDEN);
8131 /* Perform postincrements before actually calling the function. */
8134 /* Push a new argument block and copy the arguments. */
8135 do_pending_stack_adjust ();
8136 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8138 /* Push a block of memory onto the stack to store the memory arguments.
8139 Save the address in a register, and copy the memory arguments. ??? I
8140 haven't figured out how the calling convention macros affect this,
8141 but it's likely that the source and/or destination addresses in
8142 the block copy will need updating in machine specific ways. */
8143 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8144 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8145 gen_rtx (MEM, BLKmode, incoming_args),
8147 PARM_BOUNDARY / BITS_PER_UNIT);
8149 /* Refer to the argument block. */
8151 arguments = gen_rtx (MEM, BLKmode, arguments);
8153 /* Walk past the arg-pointer and structure value address. */
8154 size = GET_MODE_SIZE (Pmode);
8155 if (struct_value_rtx)
8156 size += GET_MODE_SIZE (Pmode);
8158 /* Restore each of the registers previously saved. Make USE insns
8159 for each of these registers for use in making the call. */
8160 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8161 if ((mode = apply_args_mode[regno]) != VOIDmode)
8163 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8164 if (size % align != 0)
8165 size = CEIL (size, align) * align;
8166 reg = gen_rtx (REG, mode, regno);
8167 emit_move_insn (reg,
8168 change_address (arguments, mode,
8169 plus_constant (XEXP (arguments, 0),
8172 use_reg (&call_fusage, reg);
8173 size += GET_MODE_SIZE (mode);
8176 /* Restore the structure value address unless this is passed as an
8177 "invisible" first argument. */
8178 size = GET_MODE_SIZE (Pmode);
8179 if (struct_value_rtx)
8181 rtx value = gen_reg_rtx (Pmode);
8182 emit_move_insn (value,
8183 change_address (arguments, Pmode,
8184 plus_constant (XEXP (arguments, 0),
8186 emit_move_insn (struct_value_rtx, value);
8187 if (GET_CODE (struct_value_rtx) == REG)
8188 use_reg (&call_fusage, struct_value_rtx);
8189 size += GET_MODE_SIZE (Pmode);
8192 /* All arguments and registers used for the call are set up by now! */
8193 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8195 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8196 and we don't want to load it into a register as an optimization,
8197 because prepare_call_address already did it if it should be done. */
8198 if (GET_CODE (function) != SYMBOL_REF)
8199 function = memory_address (FUNCTION_MODE, function);
8201 /* Generate the actual call instruction and save the return value. */
8202 #ifdef HAVE_untyped_call
8203 if (HAVE_untyped_call)
8204 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8205 result, result_vector (1, result)));
8208 #ifdef HAVE_call_value
8209 if (HAVE_call_value)
8213 /* Locate the unique return register. It is not possible to
8214 express a call that sets more than one return register using
8215 call_value; use untyped_call for that. In fact, untyped_call
8216 only needs to save the return registers in the given block. */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort ();	/* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }
8225 emit_call_insn (gen_call_value (valreg,
8226 gen_rtx (MEM, FUNCTION_MODE, function),
8227 const0_rtx, NULL_RTX, const0_rtx));
      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();
8237 /* Find the CALL insn we just emitted. */
8238 for (call_insn = get_last_insn ();
8239 call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();
8246 /* Put the register usage information on the CALL. If there is already
8247 some usage information, put ours at the end. */
8248 if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
8259 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8261 /* Restore the stack. */
8262 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8264 /* Return the address of the result block. */
8265 return copy_addr_to_reg (XEXP (result, 0));
8268 /* Perform an untyped return. */
void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;
8279 apply_result_size ();
8280 result = gen_rtx (MEM, BLKmode, result);
8282 #ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif
  /* Restore the return value and note that each value is used.  */
  size = 0;
8293 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8294 if ((mode = apply_result_mode[regno]) != VOIDmode)
8296 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8297 if (size % align != 0)
8298 size = CEIL (size, align) * align;
8299 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8300 emit_move_insn (reg,
8301 change_address (result, mode,
					     plus_constant (XEXP (result, 0),
							    size)));
8305 push_to_sequence (call_fusage);
8306 emit_insn (gen_rtx (USE, VOIDmode, reg));
	  call_fusage = get_insns ();
	  end_sequence ();
8309 size += GET_MODE_SIZE (mode);
8312 /* Put the USE insns before the return. */
8313 emit_insns (call_fusage);
  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
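/* Editor's note -- usage sketch, not part of the original source: the
   builtins expanded above are typically used together to forward an
   arbitrary call.  `target_fn' and the argument-block size of 64 are
   hypothetical placeholders.  */
#if 0
void
forwarder ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (result);
}
#endif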
8320 /* Expand code for a post- or pre- increment or decrement
8321 and return the RTX for the result.
8322 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
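/* Editor's illustration (hypothetical example, not original code): for
   `b = a++;' (POST == 1) the rtx returned holds the old value of `a';
   for `b = ++a;' (POST == 0) it holds the incremented value.  */
#if 0
int post_example (int a) { return a++; }	/* yields the old value */
int pre_example (int a) { return ++a; }		/* yields the new value */
#endif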
8325 expand_increment (exp, post)
8329 register rtx op0, op1;
8330 register rtx temp, value;
8331 register tree incremented = TREE_OPERAND (exp, 0);
8332 optab this_optab = add_optab;
8334 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8335 int op0_is_copy = 0;
8336 int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;
  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL_RTX;
    }
8348 /* Stabilize any component ref that might need to be
8349 evaluated more than once below. */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
8352 || (TREE_CODE (incremented) == COMPONENT_REF
8353 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8354 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8355 incremented = stabilize_reference (incremented);
8356 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8357 ones into save exprs so that they don't accidentally get evaluated
8358 more than once by the code below. */
8359 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8360 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8361 incremented = save_expr (incremented);
8363 /* Compute the operands as RTX.
8364 Note whether OP0 is the actual lvalue or a copy of it:
8365 I believe it is a copy iff it is a register or subreg
8366 and insns were generated in computing it. */
8368 temp = get_last_insn ();
8369 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */
8379 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8382 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8386 else if (GET_CODE (op0) == SUBREG
8387 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8389 /* We cannot increment this SUBREG in place. If we are
8390 post-incrementing, get a copy of the old value. Otherwise,
8391 just mark that we cannot increment in place. */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }
8398 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8399 && temp != get_last_insn ());
8400 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8402 /* Decide whether incrementing or decrementing. */
8403 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8404 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8405 this_optab = sub_optab;
8407 /* Convert decrement by a constant into a negative increment. */
8408 if (this_optab == sub_optab
8409 && GET_CODE (op1) == CONST_INT)
8411 op1 = GEN_INT (- INTVAL (op1));
8412 this_optab = add_optab;
8415 /* For a preincrement, see if we can do this with a single instruction. */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
8419 if (icode != (int) CODE_FOR_nothing
8420 /* Make sure that OP0 is valid for operands 0 and 1
8421 of the insn we want to queue. */
8422 && (*insn_operand_predicate[icode][0]) (op0, mode)
8423 && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }
8428 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8429 then we cannot just increment OP0. We must therefore contrive to
8430 increment the original value. Then, for postincrement, we can return
8431 OP0 since it is a copy of the old value. For preincrement, expand here
8432 unless we can do it with a single insn.
8434 Likewise if storing directly into OP0 would clobber high bits
8435 we need to preserve (bad_subreg). */
8436 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8438 /* This is the easiest way to increment the value wherever it is.
8439 Problems with multiple evaluation of INCREMENTED are prevented
8440 because either (1) it is a component_ref or preincrement,
8441 in which case it was stabilized above, or (2) it is an array_ref
8442 with constant index in an array in a register, which is
8443 safe to reevaluate. */
8444 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8445 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8446 ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));
8450 temp = expand_assignment (incremented, newexp, ! post, 0);
8451 return post ? op0 : temp;
8456 /* We have a true reference to the value in OP0.
8457 If there is an insn to add or subtract in this mode, queue it.
8458 Queueing the increment insn avoids the register shuffling
8459 that often results if we must increment now and first save
8460 the old value for subsequent use. */
8462 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
  op0 = stabilize (op0);
#endif
8466 icode = (int) this_optab->handlers[(int) mode].insn_code;
8467 if (icode != (int) CODE_FOR_nothing
8468 /* Make sure that OP0 is valid for operands 0 and 1
8469 of the insn we want to queue. */
8470 && (*insn_operand_predicate[icode][0]) (op0, mode)
8471 && (*insn_operand_predicate[icode][1]) (op0, mode))
8473 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8474 op1 = force_reg (mode, op1);
8476 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);
8491 /* Increment however we can. */
8492 op1 = expand_binop (mode, this_optab, value, op1, op0,
8493 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
8501 /* Expand all function calls contained within EXP, innermost ones first.
8502 But don't look within expressions that have sequence points.
8503 For each CALL_EXPR, record the rtx for its value
8504 in the CALL_EXPR_RTL field. */
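/* Editor's example (hypothetical, not from the original source): in
   `f (g (x), h (y))', the calls to `g' and `h' are expanded first and
   their result rtx saved in CALL_EXPR_RTL, so the outer call can treat
   them like ordinary operands.  */
#if 0
extern int f (int, int), g (int), h (int);
int outer (int x, int y) { return f (g (x), h (y)); }
#endif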
static void
preexpand_calls (exp)
     tree exp;
{
8510 register int nops, i;
8511 int type = TREE_CODE_CLASS (TREE_CODE (exp));
  if (! do_preexpand_calls)
    return;
8516 /* Only expressions and references can contain calls. */
  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;
  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;
8528 /* Do nothing to built-in functions. */
8529 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8530 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8531 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8532 /* Do nothing if the call returns a variable-sized object. */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
8539 case TRUTH_ANDIF_EXPR:
8540 case TRUTH_ORIF_EXPR:
8541 /* If we find one of these, then we can be sure
8542 the adjust will be done for it (since it makes jumps).
8543 Do it now, so that if this is inside an argument
8544 of a function, we don't get the stack adjustment
8545 after some other args have already been pushed. */
      do_pending_stack_adjust ();
      break;

    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }
8559 nops = tree_code_length[(int) TREE_CODE (exp)];
8560 for (i = 0; i < nops; i++)
8561 if (TREE_OPERAND (exp, i) != 0)
8563 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
8566 preexpand_calls (TREE_OPERAND (exp, i));
8570 /* At the start of a function, record that we have no previously-pushed
8571 arguments waiting to be popped. */
8574 init_pending_stack_adjust ()
8576 pending_stack_adjust = 0;
8579 /* When exiting from function, if safe, clear out any pending stack adjust
8580 so the adjustment won't get done. */
8583 clear_pending_stack_adjust ()
8585 #ifdef EXIT_IGNORE_STACK
8586 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8587 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8588 && ! flag_inline_functions)
8589 pending_stack_adjust = 0;
8593 /* Pop any previously-pushed arguments that have not been popped yet. */
8596 do_pending_stack_adjust ()
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
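/* Editor's illustration (sketch only): for two calls in a row, the pops
   of their argument blocks are deferred and merged here, so a single
   stack adjustment is emitted instead of one per call.  */
#if 0
extern void f (int), g (int);
void caller () { f (1); g (2); }	/* one merged pop, not two */
#endif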
/* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8607 Returns the cleanups to be performed. */
static tree
defer_cleanups_to (old_cleanups)
     tree old_cleanups;
{
8613 tree new_cleanups = NULL_TREE;
8614 tree cleanups = cleanups_this_call;
8615 tree last = NULL_TREE;
8617 while (cleanups_this_call != old_cleanups)
8619 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8620 last = cleanups_this_call;
8621 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
  if (last)
    {
      /* Remove the list from the chain of cleanups.  */
      TREE_CHAIN (last) = NULL_TREE;
      /* Reverse them so that we can build them in the right order.  */
8630 cleanups = nreverse (cleanups);
      while (cleanups)
	{
	  if (new_cleanups)
	    new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
				  TREE_VALUE (cleanups), new_cleanups);
	  else
	    new_cleanups = TREE_VALUE (cleanups);

	  cleanups = TREE_CHAIN (cleanups);
	}
    }
  return new_cleanups;
}
8647 /* Expand all cleanups up to OLD_CLEANUPS.
8648 Needed here, and also for language-dependent calls. */
8651 expand_cleanups_to (old_cleanups)
8654 while (cleanups_this_call != old_cleanups)
8656 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8657 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8658 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8662 /* Expand conditional expressions. */
8664 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */
void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}
8676 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
8686 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8687 the result is zero, or IF_TRUE_LABEL if the result is one.
8688 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8689 meaning fall through in that case.
8691 do_jump always does any pending stack adjust except when it does not
8692 actually perform a jump. An example where there is no jump
8693 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8695 This function is responsible for optimizing cases such as
8696 &&, || and comparison operators in EXP. */
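/* Editor's sketch (not original code) of the jump structure this
   function produces for `if (a && b) stmt ();'; `stmt' is a
   hypothetical placeholder.  */
#if 0
void
sketch (int a, int b)
{
  if (! a) goto if_false;	/* jump emitted for the first operand */
  if (! b) goto if_false;	/* jump emitted for the second operand */
  stmt ();
 if_false:
  ;
}
#endif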
void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
8703 register enum tree_code code = TREE_CODE (exp);
8704 /* Some cases need to create a label to jump to
8705 in order to properly fall through.
8706 These cases set DROP_THROUGH_LABEL nonzero. */
8707 rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;
  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;
#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;

    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
8744 if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
8747 case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
8753 /* These cannot change zero->non-zero or vice versa. */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never fewer insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
8762 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8763 TREE_OPERAND (exp, 0),
8764 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8765 TREE_OPERAND (exp, 1))));
8766 /* Process as MINUS. */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
8771 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8772 TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
8778 /* If we are AND'ing with a small constant, do this comparison in the
8779 smallest type that fits. If the machine doesn't have comparisons
8780 that small, it will be converted back to the wider comparison.
8781 This helps if we are testing the sign bit of a narrower object.
8782 combine can't do this for us because it can't know whether a
8783 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8785 if (! SLOW_BYTE_ACCESS
8786 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8787 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8788 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8789 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8790 && (type = type_for_mode (mode, 1)) != 0
8791 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8792 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8793 != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
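      /* Editor's example of the narrowing above (assuming the machine
	 has byte compares): the AND below fits in a byte, so the test
	 can be done as a QImode comparison instead of a full-word one.  */
#if 0
      int sign_bit_set (unsigned char *p) { return (*p & 0x80) != 0; }
#endif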
8800 case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;
8804 case TRUTH_ANDIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;
8809 if (if_false_label == 0)
8810 if_false_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;
	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);
	    /* Convert flag, which is an rtx, into a tree.  */
8838 cond = make_node (RTL_EXPR);
8839 TREE_TYPE (cond) = integer_type_node;
8840 RTL_EXPR_RTL (cond) = flag;
8841 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8843 new_cleanups = build (COND_EXPR, void_type_node,
8844 truthvalue_conversion (cond),
8845 cleanups, integer_zero_node);
8846 new_cleanups = fold (new_cleanups);
8848 /* Now add in the conditionalized cleanups. */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;
8861 case TRUTH_ORIF_EXPR:
      {
	rtx seq1, seq2;
	tree cleanups, old_cleanups;
8866 if (if_true_label == 0)
8867 if_true_label = drop_through_label = gen_label_rtx ();
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
	seq1 = get_insns ();
	end_sequence ();

	old_cleanups = cleanups_this_call;
	start_sequence ();
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	seq2 = get_insns ();
	end_sequence ();

	cleanups = defer_cleanups_to (old_cleanups);
	if (cleanups)
	  {
	    rtx flag = gen_reg_rtx (word_mode);
	    tree new_cleanups;
	    tree cond;
	    /* Flag cleanups as not needed.  */
	    emit_move_insn (flag, const0_rtx);
	    emit_insns (seq1);

	    /* Flag cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    emit_insns (seq2);
	    /* Convert flag, which is an rtx, into a tree.  */
8895 cond = make_node (RTL_EXPR);
8896 TREE_TYPE (cond) = integer_type_node;
8897 RTL_EXPR_RTL (cond) = flag;
8898 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8900 new_cleanups = build (COND_EXPR, void_type_node,
8901 truthvalue_conversion (cond),
8902 cleanups, integer_zero_node);
8903 new_cleanups = fold (new_cleanups);
8905 /* Now add in the conditionalized cleanups. */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	else
	  {
	    emit_insns (seq1);
	    emit_insns (seq2);
	  }
      }
      break;

    case COMPOUND_EXPR:
8920 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8924 do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
8938 /* Get description of this reference. We don't actually care
8939 about the underlying object here. */
8940 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8941 &mode, &unsignedp, &volatilep);
8943 type = type_for_size (bitsize, unsignedp);
8944 if (! SLOW_BYTE_ACCESS
8945 && type != 0 && bitsize >= 0
8946 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8947 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8948 != CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
8957 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8958 if (integer_onep (TREE_OPERAND (exp, 1))
8959 && integer_zerop (TREE_OPERAND (exp, 2)))
8960 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8962 else if (integer_zerop (TREE_OPERAND (exp, 1))
8963 && integer_onep (TREE_OPERAND (exp, 2)))
8964 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else
	{
	  register rtx label1 = gen_label_rtx ();
8969 drop_through_label = gen_label_rtx ();
8970 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8971 /* Now the THEN-expression. */
8972 do_jump (TREE_OPERAND (exp, 1),
8973 if_false_label ? if_false_label : drop_through_label,
8974 if_true_label ? if_true_label : drop_through_label);
8975 /* In case the do_jump just above never jumps. */
8976 do_pending_stack_adjust ();
8977 emit_label (label1);
8978 /* Now the ELSE-expression. */
8979 do_jump (TREE_OPERAND (exp, 2),
8980 if_false_label ? if_false_label : drop_through_label,
8981 if_true_label ? if_true_label : drop_through_label);
	}
      break;

    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
8987 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		 == MODE_INT)
		&& !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	       || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
	       || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
	do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
	comparison = compare (exp, EQ, EQ);
      break;
    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
9001 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		 == MODE_INT)
		&& !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	       || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
	       || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
	do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
	comparison = compare (exp, NE, NE);
      break;
    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;
    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;
    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;
    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
9061 do_pending_stack_adjust ();
9062 if (GET_CODE (temp) == CONST_INT)
9063 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9064 else if (GET_CODE (temp) == LABEL_REF)
9065 comparison = const_true_rtx;
9066 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9067 && !can_compare_p (GET_MODE (temp)))
9068 /* Note swapping the labels gives us not-equal. */
9069 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9070 else if (GET_MODE (temp) != VOIDmode)
9071 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9072 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9073 GET_MODE (temp), NULL_RTX, 0);
      break;
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();
9081 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9082 straight into a conditional jump instruction as the jump condition.
9083 Otherwise, all the work has been done already. */
  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
9095 else if (comparison)
9096 do_jump_for_compare (comparison, if_false_label, if_true_label);
9098 if (drop_through_label)
9100 /* If do_jump produces code that might be jumped around,
9101 do any stack adjusts from that code, before the place
9102 where control merges in. */
9103 do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
9108 /* Given a comparison expression EXP for values too wide to be compared
9109 with one insn, test the comparison and jump to the appropriate label.
9110 The code of EXP is ignored; we always test GT if SWAP is 0,
9111 and LT if SWAP is 1. */
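/* Editor's sketch of the word-at-a-time comparison below, written out
   for a two-word unsigned value (hypothetical helper, not part of the
   original file).  */
#if 0
int
greater_by_parts (unsigned long hi0, unsigned long lo0,
		  unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)
    return 1;		/* high-order words decide when they differ */
  if (hi0 != hi1)
    return 0;
  return lo0 > lo1;	/* lower words are always compared unsigned */
}
#endif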
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
9119 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9120 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9121 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9122 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9123 rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;
9127 if (! if_true_label || ! if_false_label)
9128 drop_through_label = gen_label_rtx ();
9129 if (! if_true_label)
9130 if_true_label = drop_through_label;
9131 if (! if_false_label)
9132 if_false_label = drop_through_label;
9134 /* Compare a word at a time, high order first. */
9135 for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}
9151 /* All but high-order word must be compared as unsigned. */
9152 comp = compare_from_rtx (op0_word, op1_word,
9153 (unsignedp || i > 0) ? GTU : GT,
9154 unsignedp, word_mode, NULL_RTX, 0);
9155 if (comp == const_true_rtx)
9156 emit_jump (if_true_label);
9157 else if (comp != const0_rtx)
9158 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9160 /* Consider lower words only if these are equal. */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
9163 if (comp == const_true_rtx)
9164 emit_jump (if_false_label);
9165 else if (comp != const0_rtx)
9166 do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
9171 if (drop_through_label)
    emit_label (drop_through_label);
}
9175 /* Compare OP0 with OP1, word at a time, in mode MODE.
9176 UNSIGNEDP says to do unsigned comparison.
9177 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
9186 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;
9190 if (! if_true_label || ! if_false_label)
9191 drop_through_label = gen_label_rtx ();
9192 if (! if_true_label)
9193 if_true_label = drop_through_label;
9194 if (! if_false_label)
9195 if_false_label = drop_through_label;
9197 /* Compare a word at a time, high order first. */
9198 for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}
9214 /* All but high-order word must be compared as unsigned. */
9215 comp = compare_from_rtx (op0_word, op1_word,
9216 (unsignedp || i > 0) ? GTU : GT,
9217 unsignedp, word_mode, NULL_RTX, 0);
9218 if (comp == const_true_rtx)
9219 emit_jump (if_true_label);
9220 else if (comp != const0_rtx)
9221 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9223 /* Consider lower words only if these are equal. */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
9226 if (comp == const_true_rtx)
9227 emit_jump (if_false_label);
9228 else if (comp != const0_rtx)
9229 do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
9234 if (drop_through_label)
    emit_label (drop_through_label);
}
9238 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9239 with one insn, test the comparison and jump to the appropriate label. */
static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
9246 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9247 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9248 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9249 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;
9253 if (! if_false_label)
9254 drop_through_label = if_false_label = gen_label_rtx ();
9256 for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9259 operand_subword_force (op1, i, mode),
9260 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9261 word_mode, NULL_RTX, 0);
9262 if (comp == const_true_rtx)
9263 emit_jump (if_false_label);
9264 else if (comp != const0_rtx)
9265 do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
9270 if (drop_through_label)
    emit_label (drop_through_label);
}
9274 /* Jump according to whether OP0 is 0.
9275 We assume that OP0 has an integer mode that is too wide
9276 for the available compare insns. */
static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
9283 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;
9287 if (! if_false_label)
9288 drop_through_label = if_false_label = gen_label_rtx ();
9290 for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9295 if (comp == const_true_rtx)
9296 emit_jump (if_false_label);
9297 else if (comp != const0_rtx)
9298 do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
9303 if (drop_through_label)
    emit_label (drop_through_label);
}
9307 /* Given a comparison expression in rtl form, output conditional branches to
9308 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx prev = get_last_insn ();
      rtx branch = 0;
      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();
9341 /* Here we get the first insn that was just emitted. It used to be the
9342 case that, on some machines, emitting the branch would discard
9343 the previous compare insn and emit a replacement. This isn't
9344 done anymore, but abort if we see that PREV is deleted. */
      if (prev == 0)
	insn = get_insns ();
      else if (INSN_DELETED_P (prev))
	abort ();
      else
	insn = NEXT_INSN (prev);
      for (; insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();
9364 JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
	{
9367 if_true_label = gen_label_rtx ();
9368 redirect_jump (branch, if_true_label);
9369 emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
9375 /* Generate code for a comparison expression EXP
9376 (including code to compute the values to be compared)
9377 and set (CC0) according to the result.
9378 SIGNED_CODE should be the rtx operation for this comparison for
9379 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9381 We force a stack adjustment unless there are currently
9382 things pushed on the stack that aren't yet used. */
static rtx
compare (exp, signed_code, unsigned_code)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9393 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9394 register enum machine_mode mode = TYPE_MODE (type);
9395 int unsignedp = TREE_UNSIGNED (type);
9396 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}
9404 /* Like compare but expects the values to compare as two rtx's.
9405 The decision as to signed or unsigned comparison must be made by the caller.
   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.
9410 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9411 size of MODE should be used. */
rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;
9424 /* If one operand is constant, make it the second one. Only do this
9425 if the other operand is not constant as well. */
9427 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0; op0 = op1; op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }
9442 do_pending_stack_adjust ();
9444 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
9449 /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */
9453 /* If this is a signed equality comparison, we can do it as an
9454 unsigned comparison since zero-extension is cheaper than sign
9455 extension and comparisons with zero are done as unsigned. This is
9456 the case even on machines that can do fast sign extension, since
9457 zero-extension is easier to combine with other operations than
9458 sign-extension is. If we are comparing against a constant, we must
9459 convert it to what it would look like unsigned. */
9460 if ((code == EQ || code == NE) && ! unsignedp
9461 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
9464 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif
9470 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
9475 /* Generate code to calculate EXP using a store-flag instruction
9476 and return an rtx for the result. EXP is either a comparison
9477 or a TRUTH_NOT_EXPR whose operand is a comparison.
9479 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.
9484 Return zero if there is no suitable set-flag instruction
9485 available on this machine.
9487 Once expand_expr has been called on the arguments of the comparison,
9488 we are committed to doing the store flag, since it is not safe to
9489 re-evaluate the expression. We emit the store-flag insn by calling
9490 emit_store_flag, but only expand the arguments if we have a reason
9491 to believe that emit_store_flag will be successful. If we think that
9492 it will, but it isn't, we have to simulate the store-flag with a
9493 set/jump/set sequence. */
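/* Editor's illustration of the set/jump/set fallback mentioned above
   (sketch only, with INVERT clear): computing `flag = (a < b);' without
   a store-flag insn is emitted roughly as follows.  */
#if 0
  flag = 1;
  if (a < b) goto done;
  flag = 0;
done:
  ;
#endif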
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
9510 rtx subtarget = target;
9511 rtx result, label, pattern, jump_pat;
9513 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9514 result at the end. We can't simply invert the test since it would
9515 have already been inverted if it were valid. This case occurs for
9516 some floating-point comparisons. */
9518 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9519 invert = 1, exp = TREE_OPERAND (exp, 0);
9521 arg0 = TREE_OPERAND (exp, 0);
9522 arg1 = TREE_OPERAND (exp, 1);
9523 type = TREE_TYPE (arg0);
9524 operand_mode = TYPE_MODE (type);
9525 unsignedp = TREE_UNSIGNED (type);
9527 /* We won't bother with BLKmode store-flag operations because it would mean
9528 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
9535 /* Get the rtx comparison code to use. We know that EXP is a comparison
9536 operation of some type. Some comparisons against 1 and -1 can be
9537 converted to comparisons with zero. Do so here so that the tests
9538 below will be aware that we have a comparison with zero. These
9539 tests will not catch constants in the first operand, but constants
9540 are rarely passed as the first operand. */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
9578 /* Put a constant second. */
9579 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
9585 /* If this is an equality or inequality test of a single bit, we can
9586 do this by shifting the bit being tested to the low-order bit and
9587 masking the result with the constant 1. If the condition was EQ,
9588 we xor it with 1. This does not require an scc insn and is faster
9589 than an scc insn even if we have it. */
9591 if ((code == NE || code == EQ)
9592 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9593 && integer_pow2p (TREE_OPERAND (arg0, 1))
9594 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
9597 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9598 NULL_RTX, VOIDmode, 0)));
9601 /* If INNER is a right shift of a constant and it plus BITNUM does
9602 not overflow, adjust BITNUM and INNER. */
9604 if (TREE_CODE (inner) == RSHIFT_EXPR
9605 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9606 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9607 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}
9614 /* If we are going to be able to omit the AND below, we must do our
9615 operations as unsigned. If we must use the AND, we have a choice.
9616 Normally unsigned is faster, but for some machines signed is. */
9617 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9618 #ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );
9625 if (subtarget == 0 || GET_CODE (subtarget) != REG
9626 || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;
9630 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9634 size_int (bitnum), subtarget, ops_unsignedp);
9636 if (GET_MODE (op0) != mode)
9637 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9639 if ((code == EQ && ! invert) || (code == NE && invert))
9640 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9641 ops_unsignedp, OPTAB_LIB_WIDEN);
9643 /* Put the AND last so it can combine with more things. */
9644 if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
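  /* Editor's example of the rewrite above (illustrative C only):
     `(x & 0x10) != 0' is computed as `(x >> 4) & 1', and
     `(x & 0x10) == 0' as `((x >> 4) & 1) ^ 1' -- no scc insn needed.  */
#if 0
  int bit_ne (unsigned x) { return (x >> 4) & 1; }
  int bit_eq (unsigned x) { return ((x >> 4) & 1) ^ 1; }
#endif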
9650 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (operand_mode))
    return 0;
9653 icode = setcc_gen_code[(int) code];
9654 if (icode == CODE_FOR_nothing
9655 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
9658 can be handled without an scc insn. */
9659 if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
9662 else if (BRANCH_COST >= 0
9663 && ! only_cheap && (code == NE || code == EQ)
9664 && TREE_CODE (type) != REAL_TYPE
9665 && ((abs_optab->handlers[(int) operand_mode].insn_code
9666 != CODE_FOR_nothing)
9667 || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
9674 preexpand_calls (exp);
9675 if (subtarget == 0 || GET_CODE (subtarget) != REG
9676 || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;
9680 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9681 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
9686 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9687 because, if the emit_store_flag does anything it will succeed and
9688 OP0 and OP1 will not be used subsequently. */
9690 result = emit_store_flag (target, code,
9691 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9692 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9693 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
9703 /* If this failed, we have to do this with set/compare/jump/set code. */
9704 if (target == 0 || GET_CODE (target) != REG
9705 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9706 target = gen_reg_rtx (GET_MODE (target));
9708 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9709 result = compare_from_rtx (op0, op1, code, unsignedp,
9710 operand_mode, NULL_RTX, 0);
9711 if (GET_CODE (result) == CONST_INT)
9712 return (((result == const0_rtx && ! invert)
9713 || (result != const0_rtx && invert))
9714 ? const0_rtx : const1_rtx);
9716 label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();
9720 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
9727 /* Generate a tablejump instruction (used for switch statements). */
9729 #ifdef HAVE_tablejump
9731 /* INDEX is the value being switched on, with the lowest value
9732 in the table already subtracted.
9733 MODE is its expected mode (needed if INDEX is constant).
9734 RANGE is the length of the jump table.
9735 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9737 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9738 index value is out of range. */
void
do_tablejump (index, mode, range, table_label, default_label)
9742 rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
9745 register rtx temp, vector;
9747 /* Do an unsigned comparison (in the proper mode) between the index
9748 expression and the value which represents the length of the range.
9749 Since we just finished subtracting the lower bound of the range
9750 from the index expression, this comparison allows us to simultaneously
9751 check that the original index expression value is both greater than
9752 or equal to the minimum value of the range and less than or equal to
9753 the maximum value of the range. */
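  /* Editor's illustration (not original code): because the lower bound
     was already subtracted, the single unsigned comparison below checks
     both ends of the range -- an index below the minimum wraps around
     to a large unsigned value.  */
#if 0
  if ((unsigned) (index - low) > (unsigned) (high - low))
    goto default_label;
#endif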
9755 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9756 emit_jump_insn (gen_bgtu (default_label));
9758 /* If index is in range, it must fit in Pmode.
9759 Convert to Pmode so we can index with it. */
9761 index = convert_to_mode (Pmode, index, 1);
9763 /* Don't let a MEM slip thru, because then INDEX that comes
9764 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9765 and break_out_memory_refs will go to work on it and mess it up. */
9766 #ifdef PIC_CASE_VECTOR_ADDRESS
9767 if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
9771 /* If flag_force_addr were to affect this address
9772 it could interfere with the tricky assumptions made
9773 about addresses that contain label-refs,
9774 which may be valid only very near the tablejump itself. */
9775 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9776 GET_MODE_SIZE, because this indicates how large insns are. The other
9777 uses should all be Pmode, because they are addresses. This code
9778 could fail if addresses and insns are not the same size. */
9779 index = gen_rtx (PLUS, Pmode,
9780 gen_rtx (MULT, Pmode, index,
9781 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9782 gen_rtx (LABEL_REF, Pmode, table_label));
9783 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
9788 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9789 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9790 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9791 RTX_UNCHANGING_P (vector) = 1;
9792 convert_move (temp, vector, 0);
9794 emit_jump_insn (gen_tablejump (temp, table_label));
9796 #ifndef CASE_VECTOR_PC_RELATIVE
9797 /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}
9804 #endif /* HAVE_tablejump */
9807 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9808 to that value is on the top of the stack. The resulting type is TYPE, and
9809 the source declaration is DECL. */
void
bc_load_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;
9818 /* Bit fields are special. We only know about signed and
     unsigned ints, and enums.  The latter are treated as
     signed integers.  */
9822 if (DECL_BIT_FIELD (decl))
    if (TREE_CODE (type) == ENUMERAL_TYPE
	|| TREE_CODE (type) == INTEGER_TYPE)
      opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
    else
      abort ();
  else
    /* See corresponding comment in bc_store_memory ().  */
    if (TYPE_MODE (type) == BLKmode
	|| TYPE_MODE (type) == VOIDmode)
      abort ();
    else
      opcode = mode_to_load_map [(int) TYPE_MODE (type)];
  if (opcode == neverneverland)
    abort ();
9839 bc_emit_bytecode (opcode);
9841 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
9847 /* Store the contents of the second stack slot to the address in the
9848 top stack slot. DECL is the declaration of the destination and is used
9849 to determine whether we're dealing with a bitfield. */
void
bc_store_memory (type, decl)
     tree type, decl;
{
  enum bytecode_opcode opcode;
9858 if (DECL_BIT_FIELD (decl))
9860 if (TREE_CODE (type) == ENUMERAL_TYPE
9861 || TREE_CODE (type) == INTEGER_TYPE)
  if (TYPE_MODE (type) == BLKmode)
    {
9869 /* Copy structure. This expands to a block copy instruction, storeBLK.
9870 In addition to the arguments expected by the other store instructions,
9871 it also expects a type size (SImode) on top of the stack, which is the
9872 structure size in size units (usually bytes). The two first arguments
9873 are already on the stack; so we just put the size on level 1. For some
9874 other languages, the size may be variable, this is why we don't encode
9875 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
      bc_expand_expr (TYPE_SIZE (type));
      opcode = storeBLK;
    }
  else
9881 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
  if (opcode == neverneverland)
    abort ();
9886 bc_emit_bytecode (opcode);
9888 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
9894 /* Allocate local stack space sufficient to hold a value of the given
9895 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9896 integral power of 2. A special case is locals of type VOID, which
9897 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9898 remapped into the corresponding attribute of SI. */
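/* Editor's sketch of the round-up performed below (assuming a
   power-of-two byte alignment); an equivalent mask formulation is
   shown.  */
#if 0
  offset = (offset + align - 1) & ~(align - 1);	/* round OFFSET up */
#endif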
rtx
bc_allocate_local (size, alignment)
     int size, alignment;
{
  rtx retval;
  int byte_alignment;

  if (size < 0)
    abort ();
  /* Normalize size and alignment.  */
  if (!size)
    size = UNITS_PER_WORD;
9914 if (alignment < BITS_PER_UNIT)
9915 byte_alignment = 1 << (INT_ALIGN - 1);
  else
    byte_alignment = alignment / BITS_PER_UNIT;
9920 if (local_vars_size & (byte_alignment - 1))
9921 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9923 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += size;

  return retval;
}
9930 /* Allocate variable-sized local array. Variable-sized arrays are
9931 actually pointers to the address in memory where they are stored. */
rtx
bc_allocate_variable_array (size)
     tree size;
{
  rtx retval;
  const int ptralign = (1 << (PTR_ALIGN - 1));
9941 if (local_vars_size & ptralign)
9942 local_vars_size += ptralign - (local_vars_size & ptralign);
  /* Note down local space needed: pointer to block; also return
     dummy rtx.  */
9947 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
  local_vars_size += POINTER_SIZE / BITS_PER_UNIT;

  return retval;
}
9953 /* Push the machine address for the given external variable offset. */
void
bc_load_externaddr (externaddr)
     rtx externaddr;
{
9958 bc_emit_bytecode (constP);
9959 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9960 BYTECODE_BC_LABEL (externaddr)->offset);
9962 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
/* Allocate a copy of the string S, using xmalloc.  */

static char *
bc_xstrdup (s)
     char *s;
{
  char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);

  strcpy (new, s);
  return new;
}
9978 /* Like above, but expects an IDENTIFIER. */
void
bc_load_externaddr_id (id, offset)
     tree id;
     HOST_WIDE_INT offset;
{
  if (!IDENTIFIER_POINTER (id))
    abort ();
9987 bc_emit_bytecode (constP);
9988 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9990 #ifdef DEBUG_PRINT_CODE
  fputc ('\n', stderr);
#endif
}
9996 /* Push the machine address for the given local variable offset. */
void
bc_load_localaddr (localaddr)
     rtx localaddr;
{
  bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
}
10005 /* Push the machine address for the given parameter offset.
10006 NOTE: offset is in bits. */
void
bc_load_parmaddr (parmaddr)
     rtx parmaddr;
{
  bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
			      / BITS_PER_UNIT));
}
10016 /* Convert a[i] into *(a + i). */
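/* Editor's example (hypothetical, not from the original source): with
   `int a[10];', the reference `a[i]' becomes the equivalent of
   `*(&a[0] + i * sizeof (int))' before byte codes are emitted.  */
#if 0
int element (int *a, int i) { return *(a + i); }	/* same as a[i] */
#endif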
tree
bc_canonicalize_array_ref (exp)
     tree exp;
{
10021 tree type = TREE_TYPE (exp);
10022 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10023 TREE_OPERAND (exp, 0));
10024 tree index = TREE_OPERAND (exp, 1);
10027 /* Convert the integer argument to a type the same size as a pointer
10028 so the multiply won't overflow spuriously. */
10030 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10031 index = convert (type_for_size (POINTER_SIZE, 0), index);
10033 /* The array address isn't volatile even if the array is.
10034 (Of course this isn't terribly relevant since the bytecode
10035 translator treats nearly everything as volatile anyway.) */
10036 TREE_THIS_VOLATILE (array_adr) = 0;
10038 return build1 (INDIRECT_REF, type,
10039 fold (build (PLUS_EXPR,
			 TYPE_POINTER_TO (type),
			 array_adr,
			 fold (build (MULT_EXPR,
				      TYPE_POINTER_TO (type),
				      index,
				      size_in_bytes (type))))));
}
10049 /* Load the address of the component referenced by the given
10050 COMPONENT_REF expression.
10052 Returns innermost lvalue. */
10055 bc_expand_component_address (exp)
10059 enum machine_mode mode;
10061 HOST_WIDE_INT SIval;
10064 tem = TREE_OPERAND (exp, 1);
10065 mode = DECL_MODE (tem);
10068 /* Compute cumulative bit offset for nested component refs
10069 and array refs, and find the ultimate containing object. */
10071 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10073 if (TREE_CODE (tem) == COMPONENT_REF)
10074 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10076 if (TREE_CODE (tem) == ARRAY_REF
10077 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10078 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10080 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10081 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10082 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10087 bc_expand_expr (tem);
10090 /* For bitfields also push their offset and size */
10091 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10092 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
  else
    if (SIval = bitpos / BITS_PER_UNIT)
      bc_emit_instruction (addconstPSI, SIval);
  return (TREE_OPERAND (exp, 1));
}
10101 /* Emit code to push two SI constants */
void
bc_push_offset_and_size (offset, size)
     HOST_WIDE_INT offset, size;
{
10106 bc_emit_instruction (constSI, offset);
  bc_emit_instruction (constSI, size);
}
10111 /* Emit byte code to push the address of the given lvalue expression to
10112 the stack. If it's a bit field, we also push offset and size info.
10114 Returns innermost component, which allows us to determine not only
10115 its type, but also whether it's a bitfield. */
tree
bc_expand_address (exp)
     tree exp;
{
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);
  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:
      return (bc_expand_address (bc_canonicalize_array_ref (exp)));
10132 case COMPONENT_REF:
10134 return (bc_expand_component_address (exp));
    case INDIRECT_REF:
      bc_expand_expr (TREE_OPERAND (exp, 0));
10140 /* For variable-sized types: retrieve pointer. Sometimes the
10141 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
10142 also make sure we have an operand, just in case... */
10144 if (TREE_OPERAND (exp, 0)
10145 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
10146 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
10147 bc_emit_instruction (loadP);
10149 /* If packed, also return offset and size */
10150 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
10152 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
10153 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
10155 return (TREE_OPERAND (exp, 0));
10157 case FUNCTION_DECL:
10159 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10160 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:
      bc_load_parmaddr (DECL_RTL (exp));
10167 /* For variable-sized types: retrieve pointer */
10168 if (TYPE_SIZE (TREE_TYPE (exp))
10169 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10170 bc_emit_instruction (loadP);
10172 /* If packed, also return offset and size */
10173 if (DECL_BIT_FIELD (exp))
10174 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10175 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10181 bc_emit_instruction (returnP);
10187 if (BYTECODE_LABEL (DECL_RTL (exp)))
10188 bc_load_externaddr (DECL_RTL (exp));
10191 if (DECL_EXTERNAL (exp))
10192 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
10193 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
10195 bc_load_localaddr (DECL_RTL (exp));
10197 /* For variable-sized types: retrieve pointer */
10198 if (TYPE_SIZE (TREE_TYPE (exp))
10199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10200 bc_emit_instruction (loadP);
10202 /* If packed, also return offset and size */
10203 if (DECL_BIT_FIELD (exp))
10204 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10205 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10213 bc_emit_bytecode (constP);
10214 r = output_constant_def (exp);
10215 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10217 #ifdef DEBUG_PRINT_CODE
10218 fputc ('\n', stderr);
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
10234 /* Emit a type code to be used by the runtime support in handling
10235 parameter passing. The type code consists of the machine mode
10236 plus the minimal alignment shifted left 8 bits. */
10239 bc_runtime_type_code (type)
10244 switch (TREE_CODE (type))
10250 case ENUMERAL_TYPE:
10254 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10266 return build_int_2 (val, 0);
10270 /* Generate constructor label */
10272 bc_gen_constr_label ()
10274 static int label_counter;
10275 static char label[20];
10277 sprintf (label, "*LR%d", label_counter++);
  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
10283 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10284 expand the constructor data as static data, and push a pointer to it.
10285 The pointer is put in the pointer table and is retrieved by a constP
10286 bytecode instruction. We then loop and store each constructor member in
10287 the corresponding component. Finally, we return the original pointer on
10291 bc_expand_constructor (constr)
10295 HOST_WIDE_INT ptroffs;
10299 /* Literal constructors are handled as constants, whereas
10300 non-literals are evaluated and stored element by element
10301 into the data segment. */
10303 /* Allocate space in proper segment and push pointer to space on stack.
10306 l = bc_gen_constr_label ();
  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }
10324 /* Add reference to pointer table and recall pointer to stack;
10325 this code is common for both types of constructors: literals
10326 and non-literals. */
10328 ptroffs = bc_define_pointer (l);
10329 bc_emit_instruction (constP, ptroffs);
10331 /* This is all that has to be done if it's a literal. */
  if (TREE_CONSTANT (constr))
    return;
  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI,
			       (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}
      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize, bitpos, unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, unsignedp,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  /* Constructor type is array.  */
  else if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
      int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (TREE_TYPE (constr));

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI,
			       (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}
      /* Store each element of the constructor into the corresponding
	 element of TARGET, determined by counting the elements.  */

      for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize, bitpos, unsignedp;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  bitpos = i * TREE_INT_CST_LOW (TYPE_SIZE (elttype));

	  bc_store_field (elt, bitsize, bitpos, mode,
			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, unsignedp,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
}
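/* By way of example (instruction names as above): for a non-literal
   constructor initializing only the first field of a two-field, 8-byte
   structure, the code emitted above amounts roughly to

	constP <ptroffs>	; push pointer to the static space
	duplicate		; copy it for the clear
	constSI 8		; size of the structure in bytes
	clearBLK		; zero the whole structure
				; then, for the one member given:
				; expand its value, `over', store

   leaving the original pointer on top of the stack, as promised.  */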
/* Store the value of EXP (an expression tree) into member FIELD of the
   structure at the address on top of the stack; the member has type
   TYPE and mode MODE, and occupies BITSIZE bits, starting BITPOS bits
   from the beginning of the structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand the expression and copy the pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not a bit field.  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance the pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      bc_store_memory (type, field);
    }
}
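/* Stack discipline of the function above, for reference: on entry the
   address of the aggregate is on top of the stack.  bc_expand_expr
   pushes the value of EXP above it, `over' copies the aggregate
   address back on top, and the store consumes that copy together with
   the value, so the original address survives for the next member.  */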
/* Store SI/SU in bitfield.  */

static void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push the bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store.  */
  bc_emit_instruction (sstoreBI);
}
/* Load SI/SU from bitfield.  */

static void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push the bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
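/* Example: a signed 5-bit field at bit offset 3 within the object
   whose address is on top of the stack is fetched with

	bc_load_bit_field (3, 5, 0);

   which pushes the offset and size and then emits sxloadBI, the
   sign-extending load; with UNSIGNEDP nonzero, zxloadBI would be
   emitted instead.  */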
/* Adjust the interpreter stack by NLEVELS.  Positive means drop
   NLEVELS levels (adjust the stack pointer upwards), negative means
   add that number of levels (adjust the stack pointer downwards).
   Only positive values normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to drop the second level as well.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:
      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
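/* Examples: bc_adjust_stack (1) and bc_adjust_stack (2) emit one and
   two `drop' instructions respectively, while bc_adjust_stack (5)
   emits a single `adjstackSI 5'; small counts are special-cased above
   on the assumption that the dedicated drop opcode is more compact
   than a general stack adjustment.  */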