/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
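/* For example, CEIL (7, 4) == (7 + 4 - 1) / 4 == 2: the integer division
   rounds up instead of down.  */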
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
  int explicit_inc_from;
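  /* (Sketch of the full layout, inferred from the uses of this structure
     in move_by_pieces and move_by_pieces_1 below; the other members are
     elided above:

	 struct move_by_pieces
	 {
	   rtx to, to_addr;     int autinc_to, explicit_inc_to;
	   rtx from, from_addr; int autinc_from, explicit_inc_from;
	   int len, offset, reverse;
	 };
   */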
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)

/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

bc_init_mode_to_opcode_maps ()

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;
#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
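/* Each DEF_MODEMAP line in modemap.def expands to the three assignments
   above.  A hypothetical entry (illustrative names, not the real ones):

       DEF_MODEMAP (SImode, SIcode, USIcode, constSI, loadSI, storeSI)

   becomes

       mode_to_const_map[(int) SImode] = constSI;
       mode_to_load_map[(int) SImode] = loadSI;
       mode_to_store_map[(int) SImode] = storeSI;
   */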
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    reg = gen_rtx (REG, mode, regno);

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
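  /* For example, on a typical 32-bit target whose movsi pattern accepts
     general operands, (set (reg:SI) (mem:SI (reg sp))) is recognizable,
     so direct_load[(int) SImode] and direct_store[(int) SImode] both end
     up 1; a mode with no mov pattern keeps 0 and is accessed through an
     integer mode instead.  (Illustrative; the result is target-specific.)  */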
/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */
  /* Instead of saving the postincrement queue, empty it.  */

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

restore_expr_status (p)

  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (var, body)

  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
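/* The protocol, as a sketch: expansion of a POSTINCREMENT_EXPR queues the
   side effect, consumers filter operands through protect_from_queue, and
   emit_queue finally emits the queued bodies:

       q = enqueue_insn (var, body);     remember "VAR gets incremented"
       x = protect_from_queue (q, 0);    pre-increment value of VAR
       ... build insns that use x ...
       emit_queue ();                    now the increment itself is emitted
   */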
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (x, modify)

  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)

      register rtx y = XEXP (x, 0);
      register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  register rtx temp = gen_reg_rtx (GET_MODE (new));
	  emit_insn_before (gen_move_insn (temp, new),
  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)

      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));

/* Perform all the pending incrementations.  */

  while (p = pending_chain)

      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (to, from, unsignedp)
     register rtx to, from;

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */
  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))

      emit_move_insn (to, from);

#ifdef HAVE_extendqfhf2
  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfsf2
  if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfdf2
  if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfxf2
  if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendqftf2
  if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);

#ifdef HAVE_extendhftqf2
  if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
      emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfsf2
  if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfdf2
  if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfxf2
  if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendhftf2
  if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);

#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	libcall = extendsfdf2_libfunc;
	libcall = extendsfxf2_libfunc;
	libcall = extendsftf2_libfunc;
	libcall = truncdfsf2_libfunc;
	libcall = extenddfxf2_libfunc;
	libcall = extenddftf2_libfunc;
	libcall = truncxfsf2_libfunc;
	libcall = truncxfdf2_libfunc;
	libcall = trunctfsf2_libfunc;
	libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
      emit_move_insn (to, value);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))

	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))

	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */

      fill_value = const0_rtx;

	  && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	  && STORE_FLAG_VALUE == -1)

	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));

	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)

	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)

      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)

      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (HAVE_extendpsisi2)
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */

  if (to_mode == PDImode)

      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)

      if (to_mode != DImode)
	  from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))

      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)

	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))

		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.  */

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))

      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
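/* Illustrative use of convert_move (a sketch, not part of the original
   file): widen a SImode value FROM into a fresh DImode pseudo,
   sign-extending on the way:

       rtx to = gen_reg_rtx (DImode);
       convert_move (to, from, 0);      unsignedp == 0: sign-extend;
                                        unsignedp == 1 would zero-extend
   */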
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
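  /* Concretely: with a 32-bit HOST_WIDE_INT, converting (const_int -1)
     to an unsigned 64-bit mode must yield 0x00000000FFFFFFFF (low word
     all ones, high word zero), not the all-ones value that gen_lowpart
     would produce.  (Worked example assuming a 32-bit HOST_WIDE_INT.)  */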
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))

      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))

	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
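	  /* E.g. X == (const_int -1) with OLDMODE == QImode (width 8):
	     the mask leaves val == 255; an unsigned conversion returns
	     GEN_INT (255), while a signed one sees bit 7 set and
	     restores -1.  */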
      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

move_by_pieces (to, from, len, align)

  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)

#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;

#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;

#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

move_by_pieces_ninsns (l, align)

  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
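  /* Worked example (assuming a 32-bit target with MOVE_MAX == 4 and
     adequate alignment): for L == 10 and ALIGN == 4 the loop counts
     10/4 = 2 SImode moves and then 2/2 = 1 HImode move, returning 3.  */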
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     enum machine_mode mode;
     struct move_by_pieces *data;

  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)

      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
	     ? gen_rtx (MEM, mode, data->from_addr)
	     : change_address (data->from, mode,
			       plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.
   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

emit_block_move (x, y, size, align)

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))

	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,

	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
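/* Illustrative call (a sketch): copying N constant bytes between two
   BLKmode MEMs X and Y whose alignment is known to be 4 bytes:

       emit_block_move (x, y, GEN_INT (n), 4);

   Small constant sizes go through move_by_pieces; larger ones use a
   movstr pattern or the memcpy/bcopy library call above.  */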
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)

      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
	  delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)

      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)

      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
	  delete_insns_since (last);

  for (i = 0; i < nregs; i++)

      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (call_fusage, reg)
     rtx *call_fusage, reg;

  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)

  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

clear_storage (object, size)

  if (GET_MODE (object) == BLKmode)

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
      emit_library_call (bzero_libfunc, 0,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);

    emit_move_insn (object, const0_rtx);

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

emit_move_insn (x, y)

  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)

  return emit_move_insn_1 (x, y);

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

emit_move_insn_1 (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)

    emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))

      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));

      return get_last_insn ();

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))

	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);

	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))

	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);

	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)

	  last_insn = emit_move_insn (xpart, ypart);

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

push_block (size, extra, below)

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);

      rtx temp = copy_to_mode_reg (Pmode, size);
      temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			   temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);

  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
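/* On a machine where the stack grows downward, STACK_PUSH_CODE is PRE_DEC
   (see the top of this file), so this returns (pre_dec:P (reg sp)); then
   gen_rtx (MEM, mode, gen_push_operand ()) is a push destination, and a
   move into it decrements the stack pointer and stores the value.  */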
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
   ARGS_SO_FAR is the size of args previously pushed for this call.  */

emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     enum machine_mode mode;

  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)

      /* Copy a block into the stack, entirely or partially.  */

      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      xinner = change_address (xinner, BLKmode,
			       plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
	  && GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))

	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

#endif /* PUSH_ROUNDING */

	/* Otherwise make space on the stack and copy the data
	   to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */

	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */

	      temp = push_block (size, extra, where_pad == downward);

	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)

	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);

	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))

	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));

#ifdef HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))

	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));

#ifdef HAVE_movstrsi

	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));

#ifdef HAVE_movstrdi

	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */

#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));

  else if (partial > 0)

      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;

      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
2353 #ifndef PUSH_ARGS_REVERSED
2354 for (i = not_stack; i < size; i++)
2356 for (i = size - 1; i >= not_stack; i--)
2358 if (i >= not_stack + offset)
2359 emit_push_insn (operand_subword_force (x, i, mode),
2360 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2362 GEN_INT (args_offset + ((i - not_stack + skip)
2363 * UNITS_PER_WORD)));
2369 /* Push padding now if padding above and stack grows down,
2370 or if padding below and stack grows up.
2371 But if space already allocated, this has already been done. */
2372 if (extra && args_addr == 0
2373 && where_pad != none && where_pad != stack_direction)
2374 anti_adjust_stack (GEN_INT (extra));
2376 #ifdef PUSH_ROUNDING
2378 addr = gen_push_operand ();
2381 if (GET_CODE (args_so_far) == CONST_INT)
2383 = memory_address (mode,
2384 plus_constant (args_addr, INTVAL (args_so_far)));
2386 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2389 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2393 /* If part should go in registers, copy that part
2394 into the appropriate registers. Do this now, at the end,
2395 since mem-to-mem copies above may do function calls. */
2396 if (partial > 0 && reg != 0)
2397 move_block_to_reg (REGNO (reg), x, partial, mode);
2399 if (extra && args_addr == 0 && where_pad == stack_direction)
2400 anti_adjust_stack (GEN_INT (extra));
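/* Illustrative example (editor's sketch, not part of GNU CC): a call
   such as

	struct s { char c[10]; };
	extern void g ();
	void f (p) struct s *p; { g (*p); }

   pushes a BLKmode argument.  With PUSH_ROUNDING and a constant size,
   the block may be pushed piecemeal by move_by_pieces above; otherwise
   stack space is allocated first and the data copied into it, through
   a movstr pattern or a memcpy/bcopy library call if need be.  */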
2403 /* Expand an assignment that stores the value of FROM into TO.
2404 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2405 (This may contain a QUEUED rtx;
2406 if the value is constant, this rtx is a constant.)
2407 Otherwise, the returned value is NULL_RTX.
2409 SUGGEST_REG is no longer actually used.
2410 It used to mean, copy the value through a register
2411 and return that register, if that is possible.
2412 We now use WANT_VALUE to decide whether to do this. */
2415 expand_assignment (to, from, want_value, suggest_reg)
2420 register rtx to_rtx = 0;
2423 /* Don't crash if the lhs of the assignment was erroneous. */
2425 if (TREE_CODE (to) == ERROR_MARK)
2427 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2428 return want_value ? result : NULL_RTX;
2431 if (output_bytecode)
2433 tree dest_innermost;
2435 bc_expand_expr (from);
2436 bc_emit_instruction (duplicate);
2438 dest_innermost = bc_expand_address (to);
2440 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2441 take care of it here. */
2443 bc_store_memory (TREE_TYPE (to), dest_innermost);
2447 /* Assignment of a structure component needs special treatment
2448 if the structure component's rtx is not simply a MEM.
2449 Assignment of an array element at a constant index, and assignment of
2450 an array element in an unaligned packed structure field, has the same problem.  */
2453 if (TREE_CODE (to) == COMPONENT_REF
2454 || TREE_CODE (to) == BIT_FIELD_REF
2455 || (TREE_CODE (to) == ARRAY_REF
2456 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2457 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2458 || (STRICT_ALIGNMENT && get_inner_unaligned_p (to)))))
2460 enum machine_mode mode1;
2470 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2471 &mode1, &unsignedp, &volatilep);
2473 /* If we are going to use store_bit_field and extract_bit_field,
2474 make sure to_rtx will be safe for multiple use. */
2476 if (mode1 == VOIDmode && want_value)
2477 tem = stabilize_reference (tem);
2479 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2480 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2483 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2485 if (GET_CODE (to_rtx) != MEM)
2487 to_rtx = change_address (to_rtx, VOIDmode,
2488 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2489 force_reg (Pmode, offset_rtx)));
2490 /* If we have a variable offset, the known alignment
2491 is only that of the innermost structure containing the field.
2492 (Actually, we could sometimes do better by using the
2493 align of an element of the innermost array, but no need.) */
2494 if (TREE_CODE (to) == COMPONENT_REF
2495 || TREE_CODE (to) == BIT_FIELD_REF)
2497 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2501 if (GET_CODE (to_rtx) == MEM)
2502 MEM_VOLATILE_P (to_rtx) = 1;
2503 #if 0 /* This was turned off because, when a field is volatile
2504 in an object which is not volatile, the object may be in a register,
2505 and then we would abort over here. */
2511 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2513 /* Spurious cast makes HPUX compiler happy. */
2514 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2517 /* Required alignment of containing datum. */
2519 int_size_in_bytes (TREE_TYPE (tem)));
2520 preserve_temp_slots (result);
2524 /* If the value is meaningful, convert RESULT to the proper mode.
2525 Otherwise, return nothing. */
2526 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2527 TYPE_MODE (TREE_TYPE (from)),
2529 TREE_UNSIGNED (TREE_TYPE (to)))
2533 /* If the rhs is a function call and its value is not an aggregate,
2534 call the function before we start to compute the lhs.
2535 This is needed for correct code for cases such as
2536 val = setjmp (buf) on machines where reference to val
2537 requires loading up part of an address in a separate insn.
2539 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2540 a promoted variable where the zero- or sign- extension needs to be done.
2541 Handling this in the normal way is safe because no computation is done before the call.  */
2543 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2544 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2549 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2551 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2552 emit_move_insn (to_rtx, value);
2553 preserve_temp_slots (to_rtx);
2556 return want_value ? to_rtx : NULL_RTX;
2559 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2560 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2563 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2565 /* Don't move directly into a return register. */
2566 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2571 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2572 emit_move_insn (to_rtx, temp);
2573 preserve_temp_slots (to_rtx);
2576 return want_value ? to_rtx : NULL_RTX;
2579 /* In case we are returning the contents of an object which overlaps
2580 the place the value is being stored, use a safe function when copying
2581 a value through a pointer into a structure value return block. */
2582 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2583 && current_function_returns_struct
2584 && !current_function_returns_pcc_struct)
2589 size = expr_size (from);
2590 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2592 #ifdef TARGET_MEM_FUNCTIONS
2593 emit_library_call (memcpy_libfunc, 0,
2594 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2595 XEXP (from_rtx, 0), Pmode,
2596 convert_to_mode (TYPE_MODE (sizetype),
2597 size, TREE_UNSIGNED (sizetype)),
2598 TYPE_MODE (sizetype));
2600 emit_library_call (bcopy_libfunc, 0,
2601 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2602 XEXP (to_rtx, 0), Pmode,
2603 convert_to_mode (TYPE_MODE (sizetype),
2604 size, TREE_UNSIGNED (sizetype)),
2605 TYPE_MODE (sizetype));
2608 preserve_temp_slots (to_rtx);
2611 return want_value ? to_rtx : NULL_RTX;
2614 /* Compute FROM and store the value in the rtx we got. */
2617 result = store_expr (from, to_rtx, want_value);
2618 preserve_temp_slots (result);
2621 return want_value ? result : NULL_RTX;
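/* Illustrative example (editor's sketch): for the C source

	int a, b, c;
	void f () { a = (b = c); }

   the inner assignment is typically expanded with WANT_VALUE nonzero,
   so its result rtx can serve directly as the rhs of the outer
   assignment; the outer assignment, whose value is unused, is expanded
   with WANT_VALUE zero and yields NULL_RTX.  */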
2624 /* Generate code for computing expression EXP,
2625 and storing the value into TARGET.
2626 TARGET may contain a QUEUED rtx.
2628 If WANT_VALUE is nonzero, return a copy of the value
2629 not in TARGET, so that we can be sure to use the proper
2630 value in a containing expression even if TARGET has something
2631 else stored in it. If possible, we copy the value through a pseudo
2632 and return that pseudo. Or, if the value is constant, we try to
2633 return the constant. In some cases, we return a pseudo
2634 copied *from* TARGET.
2636 If the mode is BLKmode then we may return TARGET itself.
2637 It turns out that in BLKmode it doesn't cause a problem,
2638 because C has no operators that could combine two different
2639 assignments into the same BLKmode object with different values
2640 with no sequence point. Will other languages need this to be more thorough?
2643 If WANT_VALUE is 0, we return NULL, to make sure
2644 to catch quickly any cases where the caller uses the value
2645 and fails to set WANT_VALUE. */
2648 store_expr (exp, target, want_value)
2650 register rtx target;
2654 int dont_return_target = 0;
2656 if (TREE_CODE (exp) == COMPOUND_EXPR)
2658 /* Perform first part of compound expression, then assign from second part.  */
2660 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2662 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2664 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2666 /* For conditional expression, get safe form of the target. Then
2667 test the condition, doing the appropriate assignment on either
2668 side. This avoids the creation of unnecessary temporaries.
2669 For non-BLKmode, it is more efficient not to do this. */
2671 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2674 target = protect_from_queue (target, 1);
2677 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2678 store_expr (TREE_OPERAND (exp, 1), target, 0);
2680 emit_jump_insn (gen_jump (lab2));
2683 store_expr (TREE_OPERAND (exp, 2), target, 0);
2687 return want_value ? target : NULL_RTX;
2689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2690 && GET_MODE (target) != BLKmode)
2691 /* If target is in memory and caller wants value in a register instead,
2692 arrange that. Pass TARGET as target for expand_expr so that,
2693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2694 We know expand_expr will not use the target in that case.
2695 Don't do this if TARGET is volatile because we are supposed
2696 to write it and then read it. */
2698 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2699 GET_MODE (target), 0);
2700 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2701 temp = copy_to_reg (temp);
2702 dont_return_target = 1;
2704 else if (queued_subexp_p (target))
2705 /* If target contains a postincrement, let's not risk
2706 using it as the place to generate the rhs. */
2708 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2710 /* Expand EXP into a new pseudo. */
2711 temp = gen_reg_rtx (GET_MODE (target));
2712 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2715 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2717 /* If target is volatile, ANSI requires accessing the value
2718 *from* the target, if it is accessed. So make that happen.
2719 In no case return the target itself. */
2720 if (! MEM_VOLATILE_P (target) && want_value)
2721 dont_return_target = 1;
2723 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2724 /* If this is a scalar in a register that is stored in a wider mode
2725 than the declared mode, compute the result into its declared mode
2726 and then convert to the wider mode. Our value is the computed expression.  */
2729 /* If we don't want a value, we can do the conversion inside EXP,
2730 which will often result in some optimizations. */
2732 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2733 SUBREG_PROMOTED_UNSIGNED_P (target)),
2736 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2738 /* If TEMP is a volatile MEM and we want a result value, make
2739 the access now so it gets done only once. */
2740 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2741 temp = copy_to_reg (temp);
2743 /* If TEMP is a VOIDmode constant, use convert_modes to make
2744 sure that we properly convert it. */
2745 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2746 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2747 TYPE_MODE (TREE_TYPE (exp)), temp,
2748 SUBREG_PROMOTED_UNSIGNED_P (target));
2750 convert_move (SUBREG_REG (target), temp,
2751 SUBREG_PROMOTED_UNSIGNED_P (target));
2752 return want_value ? temp : NULL_RTX;
2756 temp = expand_expr (exp, target, GET_MODE (target), 0);
2757 /* Return TARGET if it's a specified hardware register.
2758 If TARGET is a volatile mem ref, either return TARGET
2759 or return a reg copied *from* TARGET; ANSI requires this.
2761 Otherwise, if TEMP is not TARGET, return TEMP
2762 if it is constant (for efficiency),
2763 or if we really want the correct value. */
2764 if (!(target && GET_CODE (target) == REG
2765 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2766 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2768 && (CONSTANT_P (temp) || want_value))
2769 dont_return_target = 1;
2772 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2773 the same as that of TARGET, adjust the constant. This is needed, for
2774 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
2776 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2777 && TREE_CODE (exp) != ERROR_MARK
2778 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2779 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2780 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2782 /* If value was not generated in the target, store it there.
2783 Convert the value to TARGET's type first if necessary.  */
2785 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2787 target = protect_from_queue (target, 1);
2788 if (GET_MODE (temp) != GET_MODE (target)
2789 && GET_MODE (temp) != VOIDmode)
2791 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2792 if (dont_return_target)
2794 /* In this case, we will return TEMP,
2795 so make sure it has the proper mode.
2796 But don't forget to store the value into TARGET. */
2797 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2798 emit_move_insn (target, temp);
2801 convert_move (target, temp, unsignedp);
2804 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2806 /* Handle copying a string constant into an array.
2807 The string constant may be shorter than the array.
2808 So copy just the string's actual length, and clear the rest. */
2812 /* Get the size of the data type of the string,
2813 which is actually the size of the target. */
2814 size = expr_size (exp);
2815 if (GET_CODE (size) == CONST_INT
2816 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2817 emit_block_move (target, temp, size,
2818 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2821 /* Compute the size of the data to copy from the string. */
2823 = size_binop (MIN_EXPR,
2824 make_tree (sizetype, size),
2826 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2827 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2831 /* Copy that much. */
2832 emit_block_move (target, temp, copy_size_rtx,
2833 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2835 /* Figure out how much is left in TARGET
2836 that we have to clear. */
2837 if (GET_CODE (copy_size_rtx) == CONST_INT)
2839 addr = plus_constant (XEXP (target, 0),
2840 TREE_STRING_LENGTH (exp));
2841 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2845 enum machine_mode size_mode = Pmode;
2847 addr = force_reg (Pmode, XEXP (target, 0));
2848 addr = expand_binop (size_mode, add_optab, addr,
2849 copy_size_rtx, NULL_RTX, 0,
2852 size = expand_binop (size_mode, sub_optab, size,
2853 copy_size_rtx, NULL_RTX, 0,
2856 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2857 GET_MODE (size), 0, 0);
2858 label = gen_label_rtx ();
2859 emit_jump_insn (gen_blt (label));
2862 if (size != const0_rtx)
2864 #ifdef TARGET_MEM_FUNCTIONS
2865 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2866 Pmode, const0_rtx, Pmode, size, Pmode);
2868 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2869 addr, Pmode, size, Pmode);
2877 else if (GET_MODE (temp) == BLKmode)
2878 emit_block_move (target, temp, expr_size (exp),
2879 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2881 emit_move_insn (target, temp);
2884 /* If we don't want a value, return NULL_RTX. */
2888 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2889 ??? The latter test doesn't seem to make sense. */
2890 else if (dont_return_target && GET_CODE (temp) != MEM)
2893 /* Return TARGET itself if it is a hard register. */
2894 else if (want_value && GET_MODE (target) != BLKmode
2895 && ! (GET_CODE (target) == REG
2896 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2897 return copy_to_reg (target);
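/* Illustrative example (editor's sketch) of the STRING_CST path above:
   for an initialization such as

	char buf[8] = "hi";

   the string constant occupies 3 bytes (TREE_STRING_LENGTH counts the
   terminating null), so only those bytes are block-copied and the
   remaining 5 bytes of BUF are cleared via memset or bzero.  */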
2903 /* Store the value of constructor EXP into the rtx TARGET.
2904 TARGET is either a REG or a MEM. */
2907 store_constructor (exp, target)
2911 tree type = TREE_TYPE (exp);
2913 /* We know our target cannot conflict, since safe_from_p has been called. */
2915 /* Don't try copying piece by piece into a hard register
2916 since that is vulnerable to being clobbered by EXP.
2917 Instead, construct in a pseudo register and then copy it all. */
2918 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2920 rtx temp = gen_reg_rtx (GET_MODE (target));
2921 store_constructor (exp, temp);
2922 emit_move_insn (target, temp);
2927 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2928 || TREE_CODE (type) == QUAL_UNION_TYPE)
2932 /* Inform later passes that the whole union value is dead. */
2933 if (TREE_CODE (type) == UNION_TYPE
2934 || TREE_CODE (type) == QUAL_UNION_TYPE)
2935 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2937 /* If we are building a static constructor into a register,
2938 set the initial value as zero so we can fold the value into a constant.  */
2940 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2941 emit_move_insn (target, const0_rtx);
2943 /* If the constructor has fewer fields than the structure,
2944 clear the whole structure first. */
2945 else if (list_length (CONSTRUCTOR_ELTS (exp))
2946 != list_length (TYPE_FIELDS (type)))
2947 clear_storage (target, int_size_in_bytes (type));
2949 /* Inform later passes that the old value is dead. */
2950 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2952 /* Store each element of the constructor into
2953 the corresponding field of TARGET. */
2955 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2957 register tree field = TREE_PURPOSE (elt);
2958 register enum machine_mode mode;
2962 tree pos, constant = 0, offset = 0;
2963 rtx to_rtx = target;
2965 /* Just ignore missing fields.
2966 We cleared the whole structure, above,
2967 if any fields are missing. */
2971 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2972 unsignedp = TREE_UNSIGNED (field);
2973 mode = DECL_MODE (field);
2974 if (DECL_BIT_FIELD (field))
2977 pos = DECL_FIELD_BITPOS (field);
2978 if (TREE_CODE (pos) == INTEGER_CST)
2980 else if (TREE_CODE (pos) == PLUS_EXPR
2981 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2982 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2987 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2993 if (contains_placeholder_p (offset))
2994 offset = build (WITH_RECORD_EXPR, sizetype,
2997 offset = size_binop (FLOOR_DIV_EXPR, offset,
2998 size_int (BITS_PER_UNIT));
3000 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3001 if (GET_CODE (to_rtx) != MEM)
3005 = change_address (to_rtx, VOIDmode,
3006 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
3007 force_reg (Pmode, offset_rtx)));
3010 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3011 /* The alignment of TARGET is
3012 at least what its type requires.  */
3014 TYPE_ALIGN (type) / BITS_PER_UNIT,
3015 int_size_in_bytes (type));
3018 else if (TREE_CODE (type) == ARRAY_TYPE)
3022 tree domain = TYPE_DOMAIN (type);
3023 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3024 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3025 tree elttype = TREE_TYPE (type);
3027 /* If the constructor has fewer elements than the array,
3028 clear the whole array first. Similarly if this is a
3029 static constructor of a non-BLKmode object. */
3031 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3032 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3033 clear_storage (target, int_size_in_bytes (type));
3035 /* Inform later passes that the old value is dead. */
3036 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3038 /* Store each element of the constructor into
3039 the corresponding element of TARGET, determined
3040 by counting the elements. */
3041 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3043 elt = TREE_CHAIN (elt), i++)
3045 register enum machine_mode mode;
3049 tree index = TREE_PURPOSE (elt);
3050 rtx xtarget = target;
3052 mode = TYPE_MODE (elttype);
3053 bitsize = GET_MODE_BITSIZE (mode);
3054 unsignedp = TREE_UNSIGNED (elttype);
3056 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
3058 /* We don't currently allow variable indices in a
3059 C initializer, but let's try here to support them. */
3060 rtx pos_rtx, addr, xtarget;
3063 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
3064 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3065 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3066 xtarget = change_address (target, mode, addr);
3067 store_expr (TREE_VALUE (elt), xtarget, 0);
3072 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3073 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3075 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3077 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3078 /* The alignment of TARGET is
3079 at least what its type requires.  */
3081 TYPE_ALIGN (type) / BITS_PER_UNIT,
3082 int_size_in_bytes (type));
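/* Illustrative example (editor's sketch) for store_constructor: for an
   automatic variable initialized as

	struct s { int a, b; } x = { 1 };

   the constructor typically lists fewer elements than the type has
   fields, so X is cleared in full first and then 1 is stored into `a';
   array constructors are stored element by element, by counting, as in
   the loop above.  */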
3091 /* Store the value of EXP (an expression tree)
3092 into a subfield of TARGET which has mode MODE and occupies
3093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3094 If MODE is VOIDmode, it means that we are storing into a bit-field.
3096 If VALUE_MODE is VOIDmode, return nothing in particular.
3097 UNSIGNEDP is not used in this case.
3099 Otherwise, return an rtx for the value stored. This rtx
3100 has mode VALUE_MODE if that is convenient to do.
3101 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3103 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3104 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3107 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3108 unsignedp, align, total_size)
3110 int bitsize, bitpos;
3111 enum machine_mode mode;
3113 enum machine_mode value_mode;
3118 HOST_WIDE_INT width_mask = 0;
3120 if (bitsize < HOST_BITS_PER_WIDE_INT)
3121 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3123 /* If we are storing into an unaligned field of an aligned union that is
3124 in a register, we may have the mode of TARGET being an integer mode but
3125 MODE == BLKmode. In that case, get an aligned object whose size and
3126 alignment are the same as TARGET and store TARGET into it (we can avoid
3127 the store if the field being stored is the entire width of TARGET). Then
3128 call ourselves recursively to store the field into a BLKmode version of
3129 that object. Finally, load from the object into TARGET. This is not
3130 very efficient in general, but should only be slightly more expensive
3131 than the otherwise-required unaligned accesses. Perhaps this can be
3132 cleaned up later. */
3135 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3137 rtx object = assign_stack_temp (GET_MODE (target),
3138 GET_MODE_SIZE (GET_MODE (target)), 0);
3139 rtx blk_object = copy_rtx (object);
3141 MEM_IN_STRUCT_P (object) = 1;
3142 MEM_IN_STRUCT_P (blk_object) = 1;
3143 PUT_MODE (blk_object, BLKmode);
3145 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3146 emit_move_insn (object, target);
3148 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3151 /* Even though we aren't returning target, we need to
3152 give it the updated value. */
3153 emit_move_insn (target, object);
3158 /* If the structure is in a register or if the component
3159 is a bit field, we cannot use addressing to access it.
3160 Use bit-field techniques or SUBREG to store in it. */
3162 if (mode == VOIDmode
3163 || (mode != BLKmode && ! direct_store[(int) mode])
3164 || GET_CODE (target) == REG
3165 || GET_CODE (target) == SUBREG
3166 /* If the field isn't aligned enough to store as an ordinary memref,
3167 store it as a bit field. */
3168 || (STRICT_ALIGNMENT
3169 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3170 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3172 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3174 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
3176 if (mode != VOIDmode && mode != BLKmode
3177 && mode != TYPE_MODE (TREE_TYPE (exp)))
3178 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3180 /* Store the value in the bitfield. */
3181 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3182 if (value_mode != VOIDmode)
3184 /* The caller wants an rtx for the value. */
3185 /* If possible, avoid refetching from the bitfield itself. */
3187 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3190 enum machine_mode tmode;
3193 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3194 tmode = GET_MODE (temp);
3195 if (tmode == VOIDmode)
3197 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3198 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3199 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3201 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3202 NULL_RTX, value_mode, 0, align,
3209 rtx addr = XEXP (target, 0);
3212 /* If a value is wanted, it must be the lhs;
3213 so make the address stable for multiple use. */
3215 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3216 && ! CONSTANT_ADDRESS_P (addr)
3217 /* A frame-pointer reference is already stable. */
3218 && ! (GET_CODE (addr) == PLUS
3219 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3220 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3221 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3222 addr = copy_to_reg (addr);
3224 /* Now build a reference to just the desired component. */
3226 to_rtx = change_address (target, mode,
3227 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3228 MEM_IN_STRUCT_P (to_rtx) = 1;
3230 return store_expr (exp, to_rtx, value_mode != VOIDmode);
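/* Illustrative example (editor's sketch) for store_field: given

	struct s { unsigned int a : 3, b : 13; } x;
	void f () { x.b = 5; }

   the store into `b' arrives with MODE == VOIDmode, BITSIZE == 13 and,
   under the usual bit-field layout, BITPOS == 3, so the bit-field path
   above uses store_bit_field instead of an ordinary memory reference.  */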
3234 /* Return true if any object containing the innermost array is an unaligned
3235 packed structure field. */
3238 get_inner_unaligned_p (exp)
3241 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3245 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3247 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3251 else if (TREE_CODE (exp) != ARRAY_REF
3252 && TREE_CODE (exp) != NON_LVALUE_EXPR
3253 && ! ((TREE_CODE (exp) == NOP_EXPR
3254 || TREE_CODE (exp) == CONVERT_EXPR)
3255 && (TYPE_MODE (TREE_TYPE (exp))
3256 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3259 exp = TREE_OPERAND (exp, 0);
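/* Illustrative example (editor's sketch, using the GNU C packed
   attribute): in

	struct p { char c; int arr[4] __attribute__ ((packed)); } x;

   the array `x.arr' need not be int-aligned, so for a reference such
   as `x.arr[i]' this function returns nonzero, and on a
   STRICT_ALIGNMENT machine expand_assignment above routes the store
   through the component-reference path.  */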
3265 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3266 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3267 ARRAY_REFs and find the ultimate containing object, which we return.
3269 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3270 bit position, and *PUNSIGNEDP to the signedness of the field.
3271 If the position of the field is variable, we store a tree
3272 giving the variable offset (in units) in *POFFSET.
3273 This offset is in addition to the bit position.
3274 If the position is not variable, we store 0 in *POFFSET.
3276 If any of the extraction expressions is volatile,
3277 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3279 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3280 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
3283 If the field describes a variable-sized object, *PMODE is set to
3284 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3285 this case, but the address of the object can be found. */
3288 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3289 punsignedp, pvolatilep)
3294 enum machine_mode *pmode;
3298 tree orig_exp = exp;
3300 enum machine_mode mode = VOIDmode;
3301 tree offset = integer_zero_node;
3303 if (TREE_CODE (exp) == COMPONENT_REF)
3305 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3306 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3307 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3308 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3310 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3312 size_tree = TREE_OPERAND (exp, 1);
3313 *punsignedp = TREE_UNSIGNED (exp);
3317 mode = TYPE_MODE (TREE_TYPE (exp));
3318 *pbitsize = GET_MODE_BITSIZE (mode);
3319 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3324 if (TREE_CODE (size_tree) != INTEGER_CST)
3325 mode = BLKmode, *pbitsize = -1;
3327 *pbitsize = TREE_INT_CST_LOW (size_tree);
3330 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3331 and find the ultimate containing object. */
3337 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3339 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3340 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3341 : TREE_OPERAND (exp, 2));
3343 /* If this field hasn't been filled in yet, don't go
3344 past it. This should only happen when folding expressions
3345 made during type construction. */
3349 if (TREE_CODE (pos) == PLUS_EXPR)
3352 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3354 constant = TREE_OPERAND (pos, 0);
3355 var = TREE_OPERAND (pos, 1);
3357 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3359 constant = TREE_OPERAND (pos, 1);
3360 var = TREE_OPERAND (pos, 0);
3365 *pbitpos += TREE_INT_CST_LOW (constant);
3366 offset = size_binop (PLUS_EXPR, offset,
3367 size_binop (FLOOR_DIV_EXPR, var,
3368 size_int (BITS_PER_UNIT)));
3370 else if (TREE_CODE (pos) == INTEGER_CST)
3371 *pbitpos += TREE_INT_CST_LOW (pos);
3374 /* Assume here that the offset is a multiple of a unit.
3375 If not, there should be an explicitly added constant. */
3376 offset = size_binop (PLUS_EXPR, offset,
3377 size_binop (FLOOR_DIV_EXPR, pos,
3378 size_int (BITS_PER_UNIT)));
3382 else if (TREE_CODE (exp) == ARRAY_REF)
3384 /* This code is based on the code in case ARRAY_REF in expand_expr
3385 below. We assume here that the size of an array element is
3386 always an integral multiple of BITS_PER_UNIT. */
3388 tree index = TREE_OPERAND (exp, 1);
3389 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3391 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3392 tree index_type = TREE_TYPE (index);
3394 if (! integer_zerop (low_bound))
3395 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3397 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3399 index = convert (type_for_size (POINTER_SIZE, 0), index);
3400 index_type = TREE_TYPE (index);
3403 index = fold (build (MULT_EXPR, index_type, index,
3404 TYPE_SIZE (TREE_TYPE (exp))));
3406 if (TREE_CODE (index) == INTEGER_CST
3407 && TREE_INT_CST_HIGH (index) == 0)
3408 *pbitpos += TREE_INT_CST_LOW (index);
3410 offset = size_binop (PLUS_EXPR, offset,
3411 size_binop (FLOOR_DIV_EXPR, index,
3412 size_int (BITS_PER_UNIT)));
3414 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3415 && ! ((TREE_CODE (exp) == NOP_EXPR
3416 || TREE_CODE (exp) == CONVERT_EXPR)
3417 && (TYPE_MODE (TREE_TYPE (exp))
3418 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3421 /* If any reference in the chain is volatile, the effect is volatile. */
3422 if (TREE_THIS_VOLATILE (exp))
3424 exp = TREE_OPERAND (exp, 0);
3427 /* If this was a bit-field, see if there is a mode that allows direct
3428 access in case EXP is in memory. */
3429 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3431 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3432 if (mode == BLKmode)
3436 if (integer_zerop (offset))
3439 if (offset != 0 && contains_placeholder_p (offset))
3440 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
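/* Illustrative example (editor's sketch): for

	struct s { int i; struct { char c[10]; } in; } x;

   decomposing `x.in.c[3]' yields the ultimate containing object `x'
   with *PBITSIZE == 8 and, assuming 32-bit ints and 8-bit units,
   *PBITPOS == 32 + 3 * 8 == 56; *POFFSET is 0 because every position
   involved is constant.  */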
3447 /* Given an rtx VALUE that may contain additions and multiplications,
3448 return an equivalent value that just refers to a register or memory.
3449 This is done by generating instructions to perform the arithmetic
3450 and returning a pseudo-register containing the value.
3452 The returned value may be a REG, SUBREG, MEM or constant. */
3455 force_operand (value, target)
3458 register optab binoptab = 0;
3459 /* Use a temporary to force order of execution of calls to force_operand.  */
3463 /* Use subtarget as the target for operand 0 of a binary operation. */
3464 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3466 if (GET_CODE (value) == PLUS)
3467 binoptab = add_optab;
3468 else if (GET_CODE (value) == MINUS)
3469 binoptab = sub_optab;
3470 else if (GET_CODE (value) == MULT)
3472 op2 = XEXP (value, 1);
3473 if (!CONSTANT_P (op2)
3474 && !(GET_CODE (op2) == REG && op2 != subtarget))
3476 tmp = force_operand (XEXP (value, 0), subtarget);
3477 return expand_mult (GET_MODE (value), tmp,
3478 force_operand (op2, NULL_RTX),
3484 op2 = XEXP (value, 1);
3485 if (!CONSTANT_P (op2)
3486 && !(GET_CODE (op2) == REG && op2 != subtarget))
3488 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3490 binoptab = add_optab;
3491 op2 = negate_rtx (GET_MODE (value), op2);
3494 /* Check for an addition with OP2 a constant integer and our first
3495 operand a PLUS of a virtual register and something else. In that
3496 case, we want to emit the sum of the virtual register and the
3497 constant first and then add the other value. This allows virtual
3498 register instantiation to simply modify the constant rather than
3499 creating another one around this addition. */
3500 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3501 && GET_CODE (XEXP (value, 0)) == PLUS
3502 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3503 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3504 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3506 rtx temp = expand_binop (GET_MODE (value), binoptab,
3507 XEXP (XEXP (value, 0), 0), op2,
3508 subtarget, 0, OPTAB_LIB_WIDEN);
3509 return expand_binop (GET_MODE (value), binoptab, temp,
3510 force_operand (XEXP (XEXP (value, 0), 1), 0),
3511 target, 0, OPTAB_LIB_WIDEN);
3514 tmp = force_operand (XEXP (value, 0), subtarget);
3515 return expand_binop (GET_MODE (value), binoptab, tmp,
3516 force_operand (op2, NULL_RTX),
3517 target, 0, OPTAB_LIB_WIDEN);
3518 /* We give UNSIGNEDP = 0 to expand_binop
3519 because the only operations we are expanding here are signed ones. */
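/* Illustrative example (editor's sketch): given VALUE ==
   (plus:SI (mult:SI (reg:SI 70) (const_int 4)) (const_int 8)),
   force_operand emits the multiply via expand_mult and the addition
   via expand_binop, returning a pseudo register that holds the sum
   and is therefore valid as a general operand.  */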
3524 /* Subroutine of expand_expr:
3525 save the non-copied parts (LIST) of an expr (LHS), and return a list
3526 which can restore these values to their previous values,
3527 should something modify their storage. */
3530 save_noncopied_parts (lhs, list)
3537 for (tail = list; tail; tail = TREE_CHAIN (tail))
3538 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3539 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3542 tree part = TREE_VALUE (tail);
3543 tree part_type = TREE_TYPE (part);
3544 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3545 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3546 int_size_in_bytes (part_type), 0);
3547 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3548 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3549 parts = tree_cons (to_be_saved,
3550 build (RTL_EXPR, part_type, NULL_TREE,
3553 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3558 /* Subroutine of expand_expr:
3559 record the non-copied parts (LIST) of an expr (LHS), and return a list
3560 which specifies the initial values of these parts. */
3563 init_noncopied_parts (lhs, list)
3570 for (tail = list; tail; tail = TREE_CHAIN (tail))
3571 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3572 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3575 tree part = TREE_VALUE (tail);
3576 tree part_type = TREE_TYPE (part);
3577 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3578 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3583 /* Subroutine of expand_expr: return nonzero iff there is no way that
3584 EXP can reference X, which is being modified. */
3587 safe_from_p (x, exp)
3595 /* If EXP has varying size, we MUST use a target since we currently
3596 have no way of allocating temporaries of variable size. So we
3597 assume here that something at a higher level has prevented a
3598 clash. This is somewhat bogus, but the best we can do. */
3599 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3600 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3603 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3604 find the underlying pseudo. */
3605 if (GET_CODE (x) == SUBREG)
3608 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3612 /* If X is a location in the outgoing argument area, it is always safe. */
3613 if (GET_CODE (x) == MEM
3614 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3615 || (GET_CODE (XEXP (x, 0)) == PLUS
3616 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3619 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3622 exp_rtl = DECL_RTL (exp);
3629 if (TREE_CODE (exp) == TREE_LIST)
3630 return ((TREE_VALUE (exp) == 0
3631 || safe_from_p (x, TREE_VALUE (exp)))
3632 && (TREE_CHAIN (exp) == 0
3633 || safe_from_p (x, TREE_CHAIN (exp))));
3638 return safe_from_p (x, TREE_OPERAND (exp, 0));
3642 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3643 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3647 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3648 the expression. If it is set, we conflict iff we are that rtx or
3649 both are in memory. Otherwise, we check all operands of the
3650 expression recursively. */
3652 switch (TREE_CODE (exp))
3655 return (staticp (TREE_OPERAND (exp, 0))
3656 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3659 if (GET_CODE (x) == MEM)
3664 exp_rtl = CALL_EXPR_RTL (exp);
3667 /* Assume that the call will clobber all hard registers and all of memory.  */
3669 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3670 || GET_CODE (x) == MEM)
3677 exp_rtl = RTL_EXPR_RTL (exp);
3679 /* We don't know what this can modify. */
3684 case WITH_CLEANUP_EXPR:
3685 exp_rtl = RTL_EXPR_RTL (exp);
3688 case CLEANUP_POINT_EXPR:
3689 return safe_from_p (x, TREE_OPERAND (exp, 0));
3692 exp_rtl = SAVE_EXPR_RTL (exp);
3696 /* The only operand we look at is operand 1. The rest aren't
3697 part of the expression. */
3698 return safe_from_p (x, TREE_OPERAND (exp, 1));
3700 case METHOD_CALL_EXPR:
3701 /* This takes an rtx argument, but shouldn't appear here. */
3705 /* If we have an rtx, we do not need to scan our operands. */
3709 nops = tree_code_length[(int) TREE_CODE (exp)];
3710 for (i = 0; i < nops; i++)
3711 if (TREE_OPERAND (exp, i) != 0
3712 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3716 /* If we have an rtl, find any enclosed object. Then see if we conflict with it.  */
3720 if (GET_CODE (exp_rtl) == SUBREG)
3722 exp_rtl = SUBREG_REG (exp_rtl);
3723 if (GET_CODE (exp_rtl) == REG
3724 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3728 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3729 are memory and EXP is not readonly. */
3730 return ! (rtx_equal_p (x, exp_rtl)
3731 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3732 && ! TREE_READONLY (exp)));
3735 /* If we reach here, it is safe. */
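/* Illustrative example (editor's sketch, using a GNU C constructor
   expression): in

	struct s { int a, b; } x;
	void f () { x = ((struct s) { x.b, x.a }); }

   storing into X field by field would clobber operands not yet read,
   so safe_from_p (DECL_RTL of `x', the constructor) returns 0 and the
   caller must build the value in a temporary first.  */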
3739 /* Subroutine of expand_expr: return nonzero iff EXP is an
3740 expression whose type is statically determinable. */
3746 if (TREE_CODE (exp) == PARM_DECL
3747 || TREE_CODE (exp) == VAR_DECL
3748 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3749 || TREE_CODE (exp) == COMPONENT_REF
3750 || TREE_CODE (exp) == ARRAY_REF)
3755 /* expand_expr: generate code for computing expression EXP.
3756 An rtx for the computed value is returned. The value is never null.
3757 In the case of a void EXP, const0_rtx is returned.
3759 The value may be stored in TARGET if TARGET is nonzero.
3760 TARGET is just a suggestion; callers must assume that
3761 the rtx returned may not be the same as TARGET.
3763 If TARGET is CONST0_RTX, it means that the value will be ignored.
3765 If TMODE is not VOIDmode, it suggests generating the
3766 result in mode TMODE. But this is done only when convenient.
3767 Otherwise, TMODE is ignored and the value generated in its natural mode.
3768 TMODE is just a suggestion; callers must assume that
3769 the rtx returned may not have mode TMODE.
3771 Note that TARGET may have neither TMODE nor MODE. In that case, it
3772 probably will not be used.
3774 If MODIFIER is EXPAND_SUM then when EXP is an addition
3775 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3776 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3777 products as above, or REG or MEM, or constant.
3778 Ordinarily in such cases we would output mul or add instructions
3779 and then return a pseudo reg containing the sum.
3781 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3782 it also marks a label as absolutely required (it can't be dead).
3783 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3784 This is used for outputting expressions used in initializers.
3786 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3787 with a constant address even if that address is not normally legitimate.
3788 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
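/* Illustrative example (editor's sketch): expanding `p + 4', with P a
   pointer held in a pseudo register and a 32-bit Pmode, under
   EXPAND_SUM may simply return (plus:SI (reg:SI 65) (const_int 4))
   for use inside a memory address, where the ordinary expansion would
   emit an add insn and return a fresh pseudo register.  */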
3791 expand_expr (exp, target, tmode, modifier)
3794 enum machine_mode tmode;
3795 enum expand_modifier modifier;
3797 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3798 This is static so it will be accessible to our recursive callees. */
3799 static tree placeholder_list = 0;
3800 register rtx op0, op1, temp;
3801 tree type = TREE_TYPE (exp);
3802 int unsignedp = TREE_UNSIGNED (type);
3803 register enum machine_mode mode = TYPE_MODE (type);
3804 register enum tree_code code = TREE_CODE (exp);
3806 /* Use subtarget as the target for operand 0 of a binary operation. */
3807 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3808 rtx original_target = target;
3809 /* Maybe defer this until sure not doing bytecode? */
3810 int ignore = (target == const0_rtx
3811 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3812 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3813 || code == COND_EXPR)
3814 && TREE_CODE (type) == VOID_TYPE));
3818 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3820 bc_expand_expr (exp);
3824 /* Don't use hard regs as subtargets, because the combiner
3825 can only handle pseudo regs. */
3826 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3828 /* Avoid subtargets inside loops,
3829 since they hide some invariant expressions. */
3830 if (preserve_subexpressions_p ())
3833 /* If we are going to ignore this result, we need only do something
3834 if there is a side-effect somewhere in the expression. If there
3835 is, short-circuit the most common cases here. Note that we must
3836 not call expand_expr with anything but const0_rtx in case this
3837 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3841 if (! TREE_SIDE_EFFECTS (exp))
3844 /* Ensure we reference a volatile object even if value is ignored. */
3845 if (TREE_THIS_VOLATILE (exp)
3846 && TREE_CODE (exp) != FUNCTION_DECL
3847 && mode != VOIDmode && mode != BLKmode)
3849 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3850 if (GET_CODE (temp) == MEM)
3851 temp = copy_to_reg (temp);
3855 if (TREE_CODE_CLASS (code) == '1')
3856 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3857 VOIDmode, modifier);
3858 else if (TREE_CODE_CLASS (code) == '2'
3859 || TREE_CODE_CLASS (code) == '<')
3861 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3862 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3865 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3866 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3867 /* If the second operand has no side effects, just evaluate the first.  */
3869 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3870 VOIDmode, modifier);
3875 /* If we will do cse, generate all results into pseudo registers
3876 since 1) that allows cse to find more things
3877 and 2) otherwise cse could produce an insn the machine cannot support.  */
3880 if (! cse_not_expected && mode != BLKmode && target
3881 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3888 tree function = decl_function_context (exp);
3889 /* Handle using a label in a containing function. */
3890 if (function != current_function_decl && function != 0)
3892 struct function *p = find_function_data (function);
3893 /* Allocate in the memory associated with the function
3894 that the label is in. */
3895 push_obstacks (p->function_obstack,
3896 p->function_maybepermanent_obstack);
3898 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3899 label_rtx (exp), p->forced_labels);
3902 else if (modifier == EXPAND_INITIALIZER)
3903 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3904 label_rtx (exp), forced_labels);
3905 temp = gen_rtx (MEM, FUNCTION_MODE,
3906 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3907 if (function != current_function_decl && function != 0)
3908 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3913 if (DECL_RTL (exp) == 0)
3915 error_with_decl (exp, "prior parameter's size depends on `%s'");
3916 return CONST0_RTX (mode);
3919 /* ... fall through ... */
3922 /* If a static var's type was incomplete when the decl was written,
3923 but the type is complete now, lay out the decl now. */
3924 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3925 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3927 push_obstacks_nochange ();
3928 end_temporary_allocation ();
3929 layout_decl (exp, 0);
3930 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3934 /* ... fall through ... */
3938 if (DECL_RTL (exp) == 0)
3941 /* Ensure variable marked as used even if it doesn't go through
3942 a parser. If it hasn't been used yet, write out an external definition.  */
3944 if (! TREE_USED (exp))
3946 assemble_external (exp);
3947 TREE_USED (exp) = 1;
3950 /* Handle variables inherited from containing functions. */
3951 context = decl_function_context (exp);
3953 /* We treat inline_function_decl as an alias for the current function
3954 because that is the inline function whose vars, types, etc.
3955 are being merged into the current function.
3956 See expand_inline_function. */
3958 if (context != 0 && context != current_function_decl
3959 && context != inline_function_decl
3960 /* If var is static, we don't need a static chain to access it. */
3961 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3962 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3966 /* Mark as non-local and addressable. */
3967 DECL_NONLOCAL (exp) = 1;
3968 mark_addressable (exp);
3969 if (GET_CODE (DECL_RTL (exp)) != MEM)
3971 addr = XEXP (DECL_RTL (exp), 0);
3972 if (GET_CODE (addr) == MEM)
3973 addr = gen_rtx (MEM, Pmode,
3974 fix_lexical_addr (XEXP (addr, 0), exp));
3976 addr = fix_lexical_addr (addr, exp);
3977 return change_address (DECL_RTL (exp), mode, addr);
3980 /* This is the case of an array whose size is to be determined
3981 from its initializer, while the initializer is still being parsed. See expand_decl.  */
3984 if (GET_CODE (DECL_RTL (exp)) == MEM
3985 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3986 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3987 XEXP (DECL_RTL (exp), 0));
3989 /* If DECL_RTL is memory, we are in the normal case and either
3990 the address is not valid or it is not a register and -fforce-addr
3991 is specified, get the address into a register. */
3993 if (GET_CODE (DECL_RTL (exp)) == MEM
3994 && modifier != EXPAND_CONST_ADDRESS
3995 && modifier != EXPAND_SUM
3996 && modifier != EXPAND_INITIALIZER
3997 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3999 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4000 return change_address (DECL_RTL (exp), VOIDmode,
4001 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4003 /* If the mode of DECL_RTL does not match that of the decl, it
4004 must be a promoted value. We return a SUBREG of the wanted mode,
4005 but mark it so that we know that it was already extended. */
4007 if (GET_CODE (DECL_RTL (exp)) == REG
4008 && GET_MODE (DECL_RTL (exp)) != mode)
4010 /* Get the signedness used for this variable. Ensure we get the
4011 same mode we got when the variable was declared. */
4012 if (GET_MODE (DECL_RTL (exp))
4013 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4016 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4017 SUBREG_PROMOTED_VAR_P (temp) = 1;
4018 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4022 return DECL_RTL (exp);
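/* Illustrative example (editor's sketch): on a machine whose
   PROMOTE_MODE widens QImode values to SImode, a `char' variable may
   live in an SImode pseudo; the code above then returns
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, telling
   callers that the value is already zero- or sign-extended in the
   wider register.  */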
4025 return immed_double_const (TREE_INT_CST_LOW (exp),
4026 TREE_INT_CST_HIGH (exp),
4030 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4033 /* If optimized, generate immediate CONST_DOUBLE
4034 which will be turned into memory by reload if necessary.
4036 We used to force a register so that loop.c could see it. But
4037 this does not allow gen_* patterns to perform optimizations with
4038 the constants. It also produces two insns in cases like "x = 1.0;".
4039 On most machines, floating-point constants are not permitted in
4040 many insns, so we'd end up copying it to a register in any case.
4042 Now, we do the copying in expand_binop, if appropriate. */
4043 return immed_real_const (exp);
4047 if (! TREE_CST_RTL (exp))
4048 output_constant_def (exp);
4050 /* TREE_CST_RTL probably contains a constant address.
4051 On RISC machines where a constant address isn't valid,
4052 make some insns to get that address into a register. */
4053 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4054 && modifier != EXPAND_CONST_ADDRESS
4055 && modifier != EXPAND_INITIALIZER
4056 && modifier != EXPAND_SUM
4057 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4059 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4060 return change_address (TREE_CST_RTL (exp), VOIDmode,
4061 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4062 return TREE_CST_RTL (exp);
4065 context = decl_function_context (exp);
4067 /* We treat inline_function_decl as an alias for the current function
4068 because that is the inline function whose vars, types, etc.
4069 are being merged into the current function.
4070 See expand_inline_function. */
4071 if (context == current_function_decl || context == inline_function_decl)
4074 /* If this is non-local, handle it. */
4077 temp = SAVE_EXPR_RTL (exp);
4078 if (temp && GET_CODE (temp) == REG)
4080 put_var_into_stack (exp);
4081 temp = SAVE_EXPR_RTL (exp);
4083 if (temp == 0 || GET_CODE (temp) != MEM)
4085 return change_address (temp, mode,
4086 fix_lexical_addr (XEXP (temp, 0), exp));
4088 if (SAVE_EXPR_RTL (exp) == 0)
4090 if (mode == BLKmode)
4093 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4094 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4097 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4099 SAVE_EXPR_RTL (exp) = temp;
4100 if (!optimize && GET_CODE (temp) == REG)
4101 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4104 /* If the mode of TEMP does not match that of the expression, it
4105 must be a promoted value. We pass store_expr a SUBREG of the
4106 wanted mode but mark it so that we know that it was already
4107 extended. Note that `unsignedp' was modified above in this case.  */
4110 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4112 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4113 SUBREG_PROMOTED_VAR_P (temp) = 1;
4114 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4117 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4120 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4121 must be a promoted value. We return a SUBREG of the wanted mode,
4122 but mark it so that we know that it was already extended. */
4124 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4125 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4127 /* Compute the signedness and make the proper SUBREG. */
4128 promote_mode (type, mode, &unsignedp, 0);
4129 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4130 SUBREG_PROMOTED_VAR_P (temp) = 1;
4131 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4135 return SAVE_EXPR_RTL (exp);
4137 case PLACEHOLDER_EXPR:
4138 /* If there is an object on the head of the placeholder list,
4139 see if some object in its references is of type TYPE. For
4140 further information, see tree.def. */
4141 if (placeholder_list)
4144 tree old_list = placeholder_list;
4146 for (object = TREE_PURPOSE (placeholder_list);
4147 TREE_TYPE (object) != type
4148 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4149 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4150 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4151 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4152 object = TREE_OPERAND (object, 0))
4155 if (object && TREE_TYPE (object) == type)
4157 /* Expand this object skipping the list entries before
4158 it was found in case it is also a PLACEHOLDER_EXPR.
4159 In that case, we want to translate it using subsequent entries.  */
4161 placeholder_list = TREE_CHAIN (placeholder_list);
4162 temp = expand_expr (object, original_target, tmode, modifier);
4163 placeholder_list = old_list;
4168 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4171 case WITH_RECORD_EXPR:
4172 /* Put the object on the placeholder list, expand our first operand,
4173 and pop the list. */
4174 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4176 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4178 placeholder_list = TREE_CHAIN (placeholder_list);
4182 expand_exit_loop_if_false (NULL_PTR,
4183 invert_truthvalue (TREE_OPERAND (exp, 0)));
4188 expand_start_loop (1);
4189 expand_expr_stmt (TREE_OPERAND (exp, 0));
4197 tree vars = TREE_OPERAND (exp, 0);
4198 int vars_need_expansion = 0;
4200 /* Need to open a binding contour here because
4201 if there are any cleanups they must be contained here.  */
4202 expand_start_bindings (0);
4204 /* Mark the corresponding BLOCK for output in its proper place. */
4205 if (TREE_OPERAND (exp, 2) != 0
4206 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4207 insert_block (TREE_OPERAND (exp, 2));
4209 /* If VARS have not yet been expanded, expand them now. */
4212 if (DECL_RTL (vars) == 0)
4214 vars_need_expansion = 1;
4217 expand_decl_init (vars);
4218 vars = TREE_CHAIN (vars);
4221 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4223 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4229 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4231 emit_insns (RTL_EXPR_SEQUENCE (exp));
4232 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4233 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4234 free_temps_for_rtl_expr (exp);
4235 return RTL_EXPR_RTL (exp);
4238 /* If we don't need the result, just ensure we evaluate any subexpressions. */
4243 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4244 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4248 /* All elts simple constants => refer to a constant in memory. But
4249 if this is a non-BLKmode mode, let it store a field at a time
4250 since that should make a CONST_INT or CONST_DOUBLE when we
4251 fold. Likewise, if we have a target we can use, it is best to
4252 store directly into the target unless the type is large enough
4253 that memcpy will be used. If we are making an initializer and
4254 all operands are constant, put it in memory as well. */
4255 else if ((TREE_STATIC (exp)
4256 && ((mode == BLKmode
4257 && ! (target != 0 && safe_from_p (target, exp)))
4258 || TREE_ADDRESSABLE (exp)
4259 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4260 && (move_by_pieces_ninsns
4261 (TREE_INT_CST_LOW (TYPE_SIZE (type)),
4264 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4266 rtx constructor = output_constant_def (exp);
4267 if (modifier != EXPAND_CONST_ADDRESS
4268 && modifier != EXPAND_INITIALIZER
4269 && modifier != EXPAND_SUM
4270 && (! memory_address_p (GET_MODE (constructor),
4271 XEXP (constructor, 0))
4273 && GET_CODE (XEXP (constructor, 0)) != REG)))
4274 constructor = change_address (constructor, VOIDmode,
4275 XEXP (constructor, 0));
4281 if (target == 0 || ! safe_from_p (target, exp))
4283 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4284 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4288 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4289 if (AGGREGATE_TYPE_P (type))
4290 MEM_IN_STRUCT_P (target) = 1;
4293 store_constructor (exp, target);
4299 tree exp1 = TREE_OPERAND (exp, 0);
4302 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4303 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4304 This code has the same general effect as simply doing
4305 expand_expr on the save expr, except that the expression PTR
4306 is computed for use as a memory address. This means different
4307 code, suitable for indexing, may be generated. */
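/* Illustrative example, not from the original source: for `*p += 1'
the front end wraps P in a SAVE_EXPR. The test below expands that
address once with EXPAND_SUM, legitimizes it, and caches it in
SAVE_EXPR_RTL, so the read and the write of *P share one address
computation that is suitable for use in addressing modes. */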
4308 if (TREE_CODE (exp1) == SAVE_EXPR
4309 && SAVE_EXPR_RTL (exp1) == 0
4310 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4311 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4312 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4314 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4315 VOIDmode, EXPAND_SUM);
4316 op0 = memory_address (mode, temp);
4317 op0 = copy_all_regs (op0);
4318 SAVE_EXPR_RTL (exp1) = op0;
4322 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4323 op0 = memory_address (mode, op0);
4326 temp = gen_rtx (MEM, mode, op0);
4327 /* If address was computed by addition,
4328 mark this as an element of an aggregate. */
4329 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4330 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4331 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4332 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4333 || (TREE_CODE (exp1) == ADDR_EXPR
4334 && (exp2 = TREE_OPERAND (exp1, 0))
4335 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4336 MEM_IN_STRUCT_P (temp) = 1;
4337 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4338 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4339 a location is accessed through a pointer to const does not mean
4340 that the value there can never change. */
4341 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4347 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4351 tree array = TREE_OPERAND (exp, 0);
4352 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4353 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4354 tree index = TREE_OPERAND (exp, 1);
4355 tree index_type = TREE_TYPE (index);
4358 if (TREE_CODE (low_bound) != INTEGER_CST
4359 && contains_placeholder_p (low_bound))
4360 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4362 /* Optimize the special case of a zero lower bound.
4364 We convert the low_bound to sizetype to avoid some problems
4365 with constant folding. (E.g. suppose the lower bound is 1,
4366 and its mode is QI. Without the conversion, (ARRAY
4367 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4368 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4370 But sizetype isn't quite right either (especially if
4371 the lowbound is negative). FIXME */
4373 if (! integer_zerop (low_bound))
4374 index = fold (build (MINUS_EXPR, index_type, index,
4375 convert (sizetype, low_bound)));
4377 if ((TREE_CODE (index) != INTEGER_CST
4378 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4379 && (! STRICT_ALIGNMENT || ! get_inner_unaligned_p (exp)))
4381 /* Nonconstant array index or nonconstant element size, and
4382 not an array in an unaligned (packed) structure field.
4383 Generate the tree for *(&array+index) and expand that,
4384 except do it in a language-independent way
4385 and don't complain about non-lvalue arrays.
4386 `mark_addressable' should already have been called
4387 for any array for which this case will be reached. */
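/* Illustrative example, not from the original source: for
`int a[n]; ... a[i]' with nonconstant I, the code below builds the
equivalent of *(int *) ((char *) &a + i * sizeof (int)) -- an
INDIRECT_REF of the array's address plus the scaled index -- and
expands that tree instead. */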
4389 /* Don't forget the const or volatile flag from the array
4391 tree variant_type = build_type_variant (type,
4392 TREE_READONLY (exp),
4393 TREE_THIS_VOLATILE (exp));
4394 tree array_adr = build1 (ADDR_EXPR,
4395 build_pointer_type (variant_type), array);
4397 tree size = size_in_bytes (type);
4399 /* Convert the integer argument to a type the same size as a
4400 pointer so the multiply won't overflow spuriously. */
4401 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4402 index = convert (type_for_size (POINTER_SIZE, 0), index);
4404 if (TREE_CODE (size) != INTEGER_CST
4405 && contains_placeholder_p (size))
4406 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4408 /* Don't think the address has side effects
4409 just because the array does.
4410 (In some cases the address might have side effects,
4411 and we fail to record that fact here. However, it should not
4412 matter, since expand_expr should not care.) */
4413 TREE_SIDE_EFFECTS (array_adr) = 0;
4415 elt = build1 (INDIRECT_REF, type,
4416 fold (build (PLUS_EXPR,
4417 TYPE_POINTER_TO (variant_type),
4419 fold (build (MULT_EXPR,
4420 TYPE_POINTER_TO (variant_type),
4423 /* Volatility, etc., of new expression is same as old
4425 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4426 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4427 TREE_READONLY (elt) = TREE_READONLY (exp);
4429 return expand_expr (elt, target, tmode, modifier);
4432 /* Fold an expression like: "foo"[2].
4433 This is not done in fold so it won't happen inside &.
4434 Don't fold if this is for wide characters since it's too
4435 difficult to do correctly and this is a very rare case. */
4437 if (TREE_CODE (array) == STRING_CST
4438 && TREE_CODE (index) == INTEGER_CST
4439 && !TREE_INT_CST_HIGH (index)
4440 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4441 && GET_MODE_CLASS (mode) == MODE_INT
4442 && GET_MODE_SIZE (mode) == 1)
4443 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4445 /* If this is a constant index into a constant array,
4446 just get the value from the array. Handle both the cases when
4447 we have an explicit constructor and when our operand is a variable
4448 that was declared const. */
4450 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4452 if (TREE_CODE (index) == INTEGER_CST
4453 && TREE_INT_CST_HIGH (index) == 0)
4455 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4457 i = TREE_INT_CST_LOW (index);
4459 elem = TREE_CHAIN (elem);
4461 return expand_expr (fold (TREE_VALUE (elem)), target,
4466 else if (optimize >= 1
4467 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4468 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4469 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4471 if (TREE_CODE (index) == INTEGER_CST
4472 && TREE_INT_CST_HIGH (index) == 0)
4474 tree init = DECL_INITIAL (array);
4476 i = TREE_INT_CST_LOW (index);
4477 if (TREE_CODE (init) == CONSTRUCTOR)
4479 tree elem = CONSTRUCTOR_ELTS (init);
4482 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4483 elem = TREE_CHAIN (elem);
4485 return expand_expr (fold (TREE_VALUE (elem)), target,
4488 else if (TREE_CODE (init) == STRING_CST
4489 && i < TREE_STRING_LENGTH (init))
4490 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4495 /* Treat array-ref with constant index as a component-ref. */
4499 /* If the operand is a CONSTRUCTOR, we can just extract the
4500 appropriate field if it is present. Don't do this if we have
4501 already written the data since we want to refer to that copy
4502 and varasm.c assumes that's what we'll do. */
4503 if (code != ARRAY_REF
4504 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4505 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4509 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4510 elt = TREE_CHAIN (elt))
4511 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4512 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4516 enum machine_mode mode1;
4521 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4522 &mode1, &unsignedp, &volatilep);
4525 /* If we got back the original object, something is wrong. Perhaps
4526 we are evaluating an expression too early. In any event, don't
4527 infinitely recurse. */
4531 /* In some cases, we will be offsetting OP0's address by a constant.
4532 So get it as a sum, if possible. If we will be using it
4533 directly in an insn, we validate it. */
4534 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4536 /* If this is a constant, put it into a register if it is a
4537 legitimate constant and memory if it isn't. */
4538 if (CONSTANT_P (op0))
4540 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4541 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4542 op0 = force_reg (mode, op0);
4544 op0 = validize_mem (force_const_mem (mode, op0));
4547 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4550 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4552 if (GET_CODE (op0) != MEM)
4554 op0 = change_address (op0, VOIDmode,
4555 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4556 force_reg (Pmode, offset_rtx)));
4557 /* If we have a variable offset, the known alignment
4558 is only that of the innermost structure containing the field.
4559 (Actually, we could sometimes do better by using the
4560 size of an element of the innermost array, but no need.) */
4561 if (TREE_CODE (exp) == COMPONENT_REF
4562 || TREE_CODE (exp) == BIT_FIELD_REF)
4563 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4567 /* Don't forget about volatility even if this is a bitfield. */
4568 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4570 op0 = copy_rtx (op0);
4571 MEM_VOLATILE_P (op0) = 1;
4574 /* In cases where an aligned union has an unaligned object
4575 as a field, we might be extracting a BLKmode value from
4576 an integer-mode (e.g., SImode) object. Handle this case
4577 by doing the extract into an object as wide as the field
4578 (which we know to be the width of a basic mode), then
4579 storing into memory, and changing the mode to BLKmode. */
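/* Illustrative example, not from the original source: for a union kept
in an SImode register whose member is a 3-byte BLKmode struct, the
code below extracts the 24 bits into an integer-mode value with
extract_bit_field, stores that value into a stack temporary, and
relabels the temporary's MEM as BLKmode. */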
4580 if (mode1 == VOIDmode
4581 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4582 && modifier != EXPAND_CONST_ADDRESS
4583 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4584 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4585 /* If the field isn't aligned enough to fetch as a memref,
4586 fetch it as a bit field. */
4587 || (STRICT_ALIGNMENT
4588 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4589 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4591 enum machine_mode ext_mode = mode;
4593 if (ext_mode == BLKmode)
4594 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4596 if (ext_mode == BLKmode)
4599 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4600 unsignedp, target, ext_mode, ext_mode,
4602 int_size_in_bytes (TREE_TYPE (tem)));
4603 if (mode == BLKmode)
4605 rtx new = assign_stack_temp (ext_mode,
4606 bitsize / BITS_PER_UNIT, 0);
4608 emit_move_insn (new, op0);
4609 op0 = copy_rtx (new);
4610 PUT_MODE (op0, BLKmode);
4611 MEM_IN_STRUCT_P (op0) = 1;
4617 /* Get a reference to just this component. */
4618 if (modifier == EXPAND_CONST_ADDRESS
4619 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4620 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4621 (bitpos / BITS_PER_UNIT)));
4623 op0 = change_address (op0, mode1,
4624 plus_constant (XEXP (op0, 0),
4625 (bitpos / BITS_PER_UNIT)));
4626 MEM_IN_STRUCT_P (op0) = 1;
4627 MEM_VOLATILE_P (op0) |= volatilep;
4628 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4631 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4632 convert_move (target, op0, unsignedp);
4638 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4639 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4640 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4641 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4642 MEM_IN_STRUCT_P (temp) = 1;
4643 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4644 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4645 a location is accessed through a pointer to const does not mean
4646 that the value there can never change. */
4647 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4652 /* Intended for a reference to a buffer of a file-object in Pascal.
4653 But it's not certain that a special tree code will really be
4654 necessary for these. INDIRECT_REF might work for them. */
4660 /* Pascal set IN expression.
4663 rlo = set_low - (set_low%bits_per_word);
4664 the_word = set [ (index - rlo)/bits_per_word ];
4665 bit_index = index % bits_per_word;
4666 bitmask = 1 << bit_index;
4667 return !!(the_word & bitmask); */
4669 tree set = TREE_OPERAND (exp, 0);
4670 tree index = TREE_OPERAND (exp, 1);
4671 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4672 tree set_type = TREE_TYPE (set);
4673 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4674 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4675 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4676 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4677 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4678 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4679 rtx setaddr = XEXP (setval, 0);
4680 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4682 rtx diff, quo, rem, addr, bit, result;
4684 preexpand_calls (exp);
4686 /* If domain is empty, answer is no. Likewise if index is constant
4687 and out of bounds. */
4688 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4689 && TREE_CODE (set_low_bound) == INTEGER_CST
4690 && tree_int_cst_lt (set_high_bound, set_low_bound)
4691 || (TREE_CODE (index) == INTEGER_CST
4692 && TREE_CODE (set_low_bound) == INTEGER_CST
4693 && tree_int_cst_lt (index, set_low_bound))
4694 || (TREE_CODE (set_high_bound) == INTEGER_CST
4695 && TREE_CODE (index) == INTEGER_CST
4696 && tree_int_cst_lt (set_high_bound, index))))
4700 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4702 /* If we get here, we have to generate the code for both cases
4703 (in range and out of range). */
4705 op0 = gen_label_rtx ();
4706 op1 = gen_label_rtx ();
4708 if (! (GET_CODE (index_val) == CONST_INT
4709 && GET_CODE (lo_r) == CONST_INT))
4711 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4712 GET_MODE (index_val), iunsignedp, 0);
4713 emit_jump_insn (gen_blt (op1));
4716 if (! (GET_CODE (index_val) == CONST_INT
4717 && GET_CODE (hi_r) == CONST_INT))
4719 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4720 GET_MODE (index_val), iunsignedp, 0);
4721 emit_jump_insn (gen_bgt (op1));
4724 /* Calculate the element number of bit zero in the first word of the set. */
4726 if (GET_CODE (lo_r) == CONST_INT)
4727 rlow = GEN_INT (INTVAL (lo_r)
4728 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4730 rlow = expand_binop (index_mode, and_optab, lo_r,
4731 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4732 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4734 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4735 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4737 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4738 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4739 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4740 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4742 addr = memory_address (byte_mode,
4743 expand_binop (index_mode, add_optab, diff,
4744 setaddr, NULL_RTX, iunsignedp,
4747 /* Extract the bit we want to examine. */
4748 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4749 gen_rtx (MEM, byte_mode, addr),
4750 make_tree (TREE_TYPE (index), rem),
4752 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4753 GET_MODE (target) == byte_mode ? target : 0,
4754 1, OPTAB_LIB_WIDEN);
4756 if (result != target)
4757 convert_move (target, result, 1);
4759 /* Output the code to handle the out-of-range case. */
4762 emit_move_insn (target, const0_rtx);
4767 case WITH_CLEANUP_EXPR:
4768 if (RTL_EXPR_RTL (exp) == 0)
4771 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4773 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4774 /* That's it for this cleanup. */
4775 TREE_OPERAND (exp, 2) = 0;
4776 (*interim_eh_hook) (NULL_TREE);
4778 return RTL_EXPR_RTL (exp);
4780 case CLEANUP_POINT_EXPR:
4782 extern int temp_slot_level;
4783 tree old_cleanups = cleanups_this_call;
4784 int old_temp_level = target_temp_slot_level;
4786 target_temp_slot_level = temp_slot_level;
4787 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4788 expand_cleanups_to (old_cleanups);
4789 preserve_temp_slots (op0);
4792 target_temp_slot_level = old_temp_level;
4797 /* Check for a built-in function. */
4798 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4799 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4801 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4802 return expand_builtin (exp, target, subtarget, tmode, ignore);
4804 /* If this call was expanded already by preexpand_calls,
4805 just return the result we got. */
4806 if (CALL_EXPR_RTL (exp) != 0)
4807 return CALL_EXPR_RTL (exp);
4809 return expand_call (exp, target, ignore);
4811 case NON_LVALUE_EXPR:
4814 case REFERENCE_EXPR:
4815 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4817 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
4820 /* If the signedness of the conversion differs and OP0 is
4821 a promoted SUBREG, clear that indication since we now
4822 have to do the proper extension. */
4823 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
4824 && GET_CODE (op0) == SUBREG)
4825 SUBREG_PROMOTED_VAR_P (op0) = 0;
4830 if (TREE_CODE (type) == UNION_TYPE)
4832 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4835 if (mode == BLKmode)
4837 if (TYPE_SIZE (type) == 0
4838 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4840 target = assign_stack_temp (BLKmode,
4841 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4842 + BITS_PER_UNIT - 1)
4843 / BITS_PER_UNIT, 0);
4846 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4849 if (GET_CODE (target) == MEM)
4850 /* Store data into beginning of memory target. */
4851 store_expr (TREE_OPERAND (exp, 0),
4852 change_address (target, TYPE_MODE (valtype), 0), 0);
4854 else if (GET_CODE (target) == REG)
4855 /* Store this field into a union of the proper type. */
4856 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4857 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4859 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4863 /* Return the entire union. */
4867 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4868 if (GET_MODE (op0) == mode)
4871 /* If OP0 is a constant, just convert it into the proper mode. */
4872 if (CONSTANT_P (op0))
4874 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4875 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4877 if (modifier == EXPAND_INITIALIZER)
4878 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4880 if (flag_force_mem && GET_CODE (op0) == MEM)
4881 op0 = copy_to_reg (op0);
4885 convert_to_mode (mode, op0,
4886 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4888 convert_move (target, op0,
4889 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4893 /* We come here from MINUS_EXPR when the second operand is a constant. */
4895 this_optab = add_optab;
4897 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4898 something else, make sure we add the register to the constant and
4899 then to the other thing. This case can occur during strength
4900 reduction and doing it this way will produce better code if the
4901 frame pointer or argument pointer is eliminated.
4903 fold-const.c will ensure that the constant is always in the inner
4904 PLUS_EXPR, so the only case we need to do anything about is if
4905 sp, ap, or fp is our second argument, in which case we must swap
4906 the innermost first argument and our second argument. */
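/* Illustrative example, not from the original source: given the tree
(A + 4) + FP, where FP's RTL is the frame pointer, the swap below
produces (FP + 4) + A, so FP + 4 can fold to a single offset when
the frame pointer is eliminated. */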
4908 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4909 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4910 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4911 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4912 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4913 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4915 tree t = TREE_OPERAND (exp, 1);
4917 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4918 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4921 /* If the result is to be Pmode and we are adding an integer to
4922 something, we might be forming a constant. So try to use
4923 plus_constant. If it produces a sum and we can't accept it,
4924 use force_operand. This allows P = &ARR[const] to generate
4925 efficient code on machines where a SYMBOL_REF is not a valid address.
4928 If this is an EXPAND_SUM call, always return the sum. */
4929 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4932 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4933 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4934 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4936 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4938 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4939 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4940 op1 = force_operand (op1, target);
4944 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4945 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4946 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4948 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4950 if (! CONSTANT_P (op0))
4952 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4953 VOIDmode, modifier);
4954 /* Don't go to both_summands if modifier
4955 says it's not right to return a PLUS. */
4956 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4960 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4961 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4962 op0 = force_operand (op0, target);
4967 /* No sense saving up arithmetic to be done
4968 if it's all in the wrong mode to form part of an address.
4969 And force_operand won't know whether to sign-extend or zero-extend. */
4971 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4975 preexpand_calls (exp);
4976 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4979 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4980 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4983 /* Make sure any term that's a sum with a constant comes last. */
4984 if (GET_CODE (op0) == PLUS
4985 && CONSTANT_P (XEXP (op0, 1)))
4991 /* If adding to a sum including a constant,
4992 associate it to put the constant outside. */
4993 if (GET_CODE (op1) == PLUS
4994 && CONSTANT_P (XEXP (op1, 1)))
4996 rtx constant_term = const0_rtx;
4998 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5001 /* Ensure that MULT comes first if there is one. */
5002 else if (GET_CODE (op0) == MULT)
5003 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5005 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5007 /* Let's also eliminate constants from op0 if possible. */
5008 op0 = eliminate_constant_term (op0, &constant_term);
5010 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5011 their sum should be a constant. Form it into OP1, since the
5012 result we want will then be OP0 + OP1. */
5014 temp = simplify_binary_operation (PLUS, mode, constant_term,
5019 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5022 /* Put a constant term last and put a multiplication first. */
5023 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5024 temp = op1, op1 = op0, op0 = temp;
5026 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5027 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
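/* Illustrative example, not from the original source: if OP0 expanded
to (plus REG1 4) and OP1 to (plus REG2 8), the association above
returns (plus (plus REG1 REG2) 12), keeping the entire constant
part in one term that plus_constant or cse can absorb later. */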
5030 /* For initializers, we are allowed to return a MINUS of two
5031 symbolic constants. Here we handle all cases when both operands are constant. */
5033 /* Handle difference of two symbolic constants,
5034 for the sake of an initializer. */
5035 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5036 && really_constant_p (TREE_OPERAND (exp, 0))
5037 && really_constant_p (TREE_OPERAND (exp, 1)))
5039 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5040 VOIDmode, modifier);
5041 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5042 VOIDmode, modifier);
5044 /* If one operand is a CONST_INT, put it last. */
5045 if (GET_CODE (op0) == CONST_INT)
5046 temp = op0, op0 = op1, op1 = temp;
5048 /* If the last operand is a CONST_INT, use plus_constant of
5049 the negated constant. Else make the MINUS. */
5050 if (GET_CODE (op1) == CONST_INT)
5051 return plus_constant (op0, - INTVAL (op1));
5053 return gen_rtx (MINUS, mode, op0, op1);
5055 /* Convert A - const to A + (-const). */
5056 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5058 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
5059 fold (build1 (NEGATE_EXPR, type,
5060 TREE_OPERAND (exp, 1))));
5063 this_optab = sub_optab;
5067 preexpand_calls (exp);
5068 /* If first operand is constant, swap them.
5069 Thus the following special case checks need only
5070 check the second operand. */
5071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5073 register tree t1 = TREE_OPERAND (exp, 0);
5074 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5075 TREE_OPERAND (exp, 1) = t1;
5078 /* Attempt to return something suitable for generating an
5079 indexed address, for machines that support that. */
5081 if (modifier == EXPAND_SUM && mode == Pmode
5082 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5083 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5085 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5087 /* Apply distributive law if OP0 is x+c. */
5088 if (GET_CODE (op0) == PLUS
5089 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5090 return gen_rtx (PLUS, mode,
5091 gen_rtx (MULT, mode, XEXP (op0, 0),
5092 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5093 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5094 * INTVAL (XEXP (op0, 1))));
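/* Illustrative example, not from the original source: if OP0 expanded
to (plus REG 4) and the multiplier is 10, the return above yields
(plus (mult REG 10) 40), matching the base + index * scale + offset
shape that EXPAND_SUM callers want for addresses. */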
5096 if (GET_CODE (op0) != REG)
5097 op0 = force_operand (op0, NULL_RTX);
5098 if (GET_CODE (op0) != REG)
5099 op0 = copy_to_mode_reg (mode, op0);
5101 return gen_rtx (MULT, mode, op0,
5102 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5105 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5108 /* Check for multiplying things that have been extended
5109 from a narrower type. If this machine supports multiplying
5110 in that narrower type with a result in the desired type,
5111 do it that way, and avoid the explicit type-conversion. */
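/* Illustrative example, not from the original source: for
`short a, b; ... (int) a * (int) b', both operands are NOP_EXPRs
widening HImode values, so the test below selects smul_widen_optab
and emits one HImode-to-SImode widening multiply instead of two
extensions followed by a full SImode multiply. */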
5112 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5113 && TREE_CODE (type) == INTEGER_TYPE
5114 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5115 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5116 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5117 && int_fits_type_p (TREE_OPERAND (exp, 1),
5118 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5119 /* Don't use a widening multiply if a shift will do. */
5120 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5121 > HOST_BITS_PER_WIDE_INT)
5122 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5124 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5125 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5127 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5128 /* If both operands are extended, they must either both
5129 be zero-extended or both be sign-extended. */
5130 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5132 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5134 enum machine_mode innermode
5135 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5136 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5137 ? umul_widen_optab : smul_widen_optab);
5138 if (mode == GET_MODE_WIDER_MODE (innermode)
5139 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5141 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5142 NULL_RTX, VOIDmode, 0);
5143 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5144 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5147 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5148 NULL_RTX, VOIDmode, 0);
5152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5153 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5154 return expand_mult (mode, op0, op1, target, unsignedp);
5156 case TRUNC_DIV_EXPR:
5157 case FLOOR_DIV_EXPR:
5159 case ROUND_DIV_EXPR:
5160 case EXACT_DIV_EXPR:
5161 preexpand_calls (exp);
5162 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5164 /* Possible optimization: compute the dividend with EXPAND_SUM;
5165 then, if the divisor is constant, optimize the case
5166 where some terms of the dividend have coefficients divisible by it. */
5167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5168 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5169 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5172 this_optab = flodiv_optab;
5175 case TRUNC_MOD_EXPR:
5176 case FLOOR_MOD_EXPR:
5178 case ROUND_MOD_EXPR:
5179 preexpand_calls (exp);
5180 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5182 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5183 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5184 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5186 case FIX_ROUND_EXPR:
5187 case FIX_FLOOR_EXPR:
5189 abort (); /* Not used for C. */
5191 case FIX_TRUNC_EXPR:
5192 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5194 target = gen_reg_rtx (mode);
5195 expand_fix (target, op0, unsignedp);
5199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5201 target = gen_reg_rtx (mode);
5202 /* expand_float can't figure out what to do if FROM has VOIDmode.
5203 So give it the correct mode. With -O, cse will optimize this. */
5204 if (GET_MODE (op0) == VOIDmode)
5205 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5207 expand_float (target, op0,
5208 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5213 temp = expand_unop (mode, neg_optab, op0, target, 0);
5219 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5221 /* Handle complex values specially. */
5222 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5223 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5224 return expand_complex_abs (mode, op0, target, unsignedp);
5226 /* Unsigned abs is simply the operand. Testing here means we don't
5227 risk generating incorrect code below. */
5228 if (TREE_UNSIGNED (type))
5231 return expand_abs (mode, op0, target, unsignedp,
5232 safe_from_p (target, TREE_OPERAND (exp, 0)));
5236 target = original_target;
5237 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5238 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5239 || GET_MODE (target) != mode
5240 || (GET_CODE (target) == REG
5241 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5242 target = gen_reg_rtx (mode);
5243 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5244 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5246 /* First try to do it with a special MIN or MAX instruction.
5247 If that does not win, use a conditional jump to select the proper value. */
5249 this_optab = (TREE_UNSIGNED (type)
5250 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5251 : (code == MIN_EXPR ? smin_optab : smax_optab));
5253 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5259 emit_move_insn (target, op0);
5261 op0 = gen_label_rtx ();
5263 /* If this mode is an integer too wide to compare properly,
5264 compare word by word. Rely on cse to optimize constant cases. */
5265 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5267 if (code == MAX_EXPR)
5268 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5269 target, op1, NULL_RTX, op0);
5271 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5272 op1, target, NULL_RTX, op0);
5273 emit_move_insn (target, op1);
5277 if (code == MAX_EXPR)
5278 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5279 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5280 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5282 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5283 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5284 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5285 if (temp == const0_rtx)
5286 emit_move_insn (target, op1);
5287 else if (temp != const_true_rtx)
5289 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5290 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5293 emit_move_insn (target, op1);
5300 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5301 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5307 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5308 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5313 /* ??? Can optimize bitwise operations with one arg constant.
5314 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5315 and (a bitwise1 b) bitwise2 b (etc)
5316 but that is probably not worthwhile. */
5318 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5319 boolean values when we want in all cases to compute both of them. In
5320 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5321 as actual zero-or-1 values and then bitwise anding. In cases where
5322 there cannot be any side effects, better code would be made by
5323 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5324 how to recognize those cases. */
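/* Illustrative example, not from the original source: for `x && y'
represented as TRUTH_AND_EXPR, both operands are computed as
zero-or-one values and combined with and_optab, in effect
(x != 0) & (y != 0); TRUTH_ANDIF_EXPR would instead branch around
the evaluation of Y. */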
5326 case TRUTH_AND_EXPR:
5328 this_optab = and_optab;
5333 this_optab = ior_optab;
5336 case TRUTH_XOR_EXPR:
5338 this_optab = xor_optab;
5345 preexpand_calls (exp);
5346 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5348 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5349 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5352 /* Could determine the answer when only additive constants differ. Also,
5353 the addition of one can be handled by changing the condition. */
5360 preexpand_calls (exp);
5361 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5365 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5366 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5368 && GET_CODE (original_target) == REG
5369 && (GET_MODE (original_target)
5370 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5372 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5375 if (temp != original_target)
5376 temp = copy_to_reg (temp);
5378 op1 = gen_label_rtx ();
5379 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5380 GET_MODE (temp), unsignedp, 0);
5381 emit_jump_insn (gen_beq (op1));
5382 emit_move_insn (temp, const1_rtx);
5387 /* If no set-flag instruction, must generate a conditional
5388 store into a temporary variable. Drop through
5389 and handle this like && and ||. */
5391 case TRUTH_ANDIF_EXPR:
5392 case TRUTH_ORIF_EXPR:
5394 && (target == 0 || ! safe_from_p (target, exp)
5395 /* Make sure we don't have a hard reg (such as function's return
5396 value) live across basic blocks, if not optimizing. */
5397 || (!optimize && GET_CODE (target) == REG
5398 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5399 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5402 emit_clr_insn (target);
5404 op1 = gen_label_rtx ();
5405 jumpifnot (exp, op1);
5408 emit_0_to_1_insn (target);
5411 return ignore ? const0_rtx : target;
5413 case TRUTH_NOT_EXPR:
5414 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5415 /* The parser is careful to generate TRUTH_NOT_EXPR
5416 only with operands that are always zero or one. */
5417 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5418 target, 1, OPTAB_LIB_WIDEN);
5424 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5426 return expand_expr (TREE_OPERAND (exp, 1),
5427 (ignore ? const0_rtx : target),
5432 rtx flag = NULL_RTX;
5433 tree left_cleanups = NULL_TREE;
5434 tree right_cleanups = NULL_TREE;
5436 /* Used to save a pointer to the place to put the setting of
5437 the flag that indicates if this side of the conditional was
5438 taken. We backpatch the code if we find out later that we
5439 have any conditional cleanups that need to be performed. */
5440 rtx dest_right_flag = NULL_RTX;
5441 rtx dest_left_flag = NULL_RTX;
5443 /* Note that COND_EXPRs whose type is a structure or union
5444 are required to be constructed to contain assignments of
5445 a temporary variable, so that we can evaluate them here
5446 for side effect only. If type is void, we must do likewise. */
5448 /* If an arm of the branch requires a cleanup,
5449 only that cleanup is performed. */
5452 tree binary_op = 0, unary_op = 0;
5453 tree old_cleanups = cleanups_this_call;
5455 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5456 convert it to our mode, if necessary. */
5457 if (integer_onep (TREE_OPERAND (exp, 1))
5458 && integer_zerop (TREE_OPERAND (exp, 2))
5459 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5463 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5468 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5469 if (GET_MODE (op0) == mode)
5473 target = gen_reg_rtx (mode);
5474 convert_move (target, op0, unsignedp);
5478 /* If we are not to produce a result, we have no target. Otherwise,
5479 if a target was specified use it; it will not be used as an
5480 intermediate target unless it is safe. If no target, use a temporary. */
5485 else if (original_target
5486 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5487 && GET_MODE (original_target) == mode
5488 && ! (GET_CODE (original_target) == MEM
5489 && MEM_VOLATILE_P (original_target)))
5490 temp = original_target;
5491 else if (mode == BLKmode)
5493 if (TYPE_SIZE (type) == 0
5494 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5497 temp = assign_stack_temp (BLKmode,
5498 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5499 + BITS_PER_UNIT - 1)
5500 / BITS_PER_UNIT, 0);
5501 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5504 temp = gen_reg_rtx (mode);
5506 /* Check for X ? A + B : A. If we have this, we can copy
5507 A to the output and conditionally add B. Similarly for unary
5508 operations. Don't do this if X has side-effects because
5509 those side effects might affect A or B and the "?" operation is
5510 a sequence point in ANSI. (We test for side effects later.) */
5512 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5513 && operand_equal_p (TREE_OPERAND (exp, 2),
5514 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5515 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5516 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5517 && operand_equal_p (TREE_OPERAND (exp, 1),
5518 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5519 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5520 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5521 && operand_equal_p (TREE_OPERAND (exp, 2),
5522 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5523 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5524 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5525 && operand_equal_p (TREE_OPERAND (exp, 1),
5526 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5527 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5529 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5530 operation, do this as A + (X != 0). Similarly for other simple
5531 binary operators. */
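/* Illustrative example, not from the original source: for
`c ? i + 1 : i' where C is a comparison, SINGLETON is I and
BINARY_OP is I + 1, so the code below computes I + (C != 0) using
do_store_flag and one addition, with no branch at all. */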
5532 if (temp && singleton && binary_op
5533 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5534 && (TREE_CODE (binary_op) == PLUS_EXPR
5535 || TREE_CODE (binary_op) == MINUS_EXPR
5536 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5537 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5538 && integer_onep (TREE_OPERAND (binary_op, 1))
5539 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5542 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5543 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5544 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5547 /* If we had X ? A : A + 1, do this as A + (X == 0).
5549 We have to invert the truth value here and then put it
5550 back later if do_store_flag fails. We cannot simply copy
5551 TREE_OPERAND (exp, 0) to another variable and modify that
5552 because invert_truthvalue can modify the tree pointed to by its argument. */
5554 if (singleton == TREE_OPERAND (exp, 1))
5555 TREE_OPERAND (exp, 0)
5556 = invert_truthvalue (TREE_OPERAND (exp, 0));
5558 result = do_store_flag (TREE_OPERAND (exp, 0),
5559 (safe_from_p (temp, singleton)
5561 mode, BRANCH_COST <= 1);
5565 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5566 return expand_binop (mode, boptab, op1, result, temp,
5567 unsignedp, OPTAB_LIB_WIDEN);
5569 else if (singleton == TREE_OPERAND (exp, 1))
5570 TREE_OPERAND (exp, 0)
5571 = invert_truthvalue (TREE_OPERAND (exp, 0));
5575 op0 = gen_label_rtx ();
5577 flag = gen_reg_rtx (word_mode);
5578 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5582 /* If the target conflicts with the other operand of the
5583 binary op, we can't use it. Also, we can't use the target
5584 if it is a hard register, because evaluating the condition
5585 might clobber it. */
5587 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5588 || (GET_CODE (temp) == REG
5589 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5590 temp = gen_reg_rtx (mode);
5591 store_expr (singleton, temp, 0);
5594 expand_expr (singleton,
5595 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5596 dest_left_flag = get_last_insn ();
5597 if (singleton == TREE_OPERAND (exp, 1))
5598 jumpif (TREE_OPERAND (exp, 0), op0);
5600 jumpifnot (TREE_OPERAND (exp, 0), op0);
5602 /* Allows cleanups up to here. */
5603 old_cleanups = cleanups_this_call;
5604 if (binary_op && temp == 0)
5605 /* Just touch the other operand. */
5606 expand_expr (TREE_OPERAND (binary_op, 1),
5607 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5609 store_expr (build (TREE_CODE (binary_op), type,
5610 make_tree (type, temp),
5611 TREE_OPERAND (binary_op, 1)),
5614 store_expr (build1 (TREE_CODE (unary_op), type,
5615 make_tree (type, temp)),
5618 dest_right_flag = get_last_insn ();
5621 /* This is now done in jump.c and is better done there because it
5622 produces shorter register lifetimes. */
5624 /* Check for both possibilities either constants or variables
5625 in registers (but not the same as the target!). If so, can
5626 save branches by assigning one, branching, and assigning the other. */
5628 else if (temp && GET_MODE (temp) != BLKmode
5629 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5630 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5631 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5632 && DECL_RTL (TREE_OPERAND (exp, 1))
5633 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5634 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5635 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5636 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5637 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5638 && DECL_RTL (TREE_OPERAND (exp, 2))
5639 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5640 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5642 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5643 temp = gen_reg_rtx (mode);
5644 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5645 dest_left_flag = get_last_insn ();
5646 jumpifnot (TREE_OPERAND (exp, 0), op0);
5648 /* Allows cleanups up to here. */
5649 old_cleanups = cleanups_this_call;
5650 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5652 dest_right_flag = get_last_insn ();
5655 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5656 comparison operator. If we have one of these cases, set the
5657 output to A, branch on A (cse will merge these two references),
5658 then set the output to FOO. */
5660 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5661 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5662 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5663 TREE_OPERAND (exp, 1), 0)
5664 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5665 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5667 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5668 temp = gen_reg_rtx (mode);
5669 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5670 dest_left_flag = get_last_insn ();
5671 jumpif (TREE_OPERAND (exp, 0), op0);
5673 /* Allows cleanups up to here. */
5674 old_cleanups = cleanups_this_call;
5675 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5677 dest_right_flag = get_last_insn ();
5680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5683 TREE_OPERAND (exp, 2), 0)
5684 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5685 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5687 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5688 temp = gen_reg_rtx (mode);
5689 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5690 dest_left_flag = get_last_insn ();
5691 jumpifnot (TREE_OPERAND (exp, 0), op0);
5693 /* Allows cleanups up to here. */
5694 old_cleanups = cleanups_this_call;
5695 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5697 dest_right_flag = get_last_insn ();
5701 op1 = gen_label_rtx ();
5702 jumpifnot (TREE_OPERAND (exp, 0), op0);
5704 /* Allows cleanups up to here. */
5705 old_cleanups = cleanups_this_call;
5707 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5709 expand_expr (TREE_OPERAND (exp, 1),
5710 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5711 dest_left_flag = get_last_insn ();
5713 /* Handle conditional cleanups, if any. */
5714 left_cleanups = defer_cleanups_to (old_cleanups);
5717 emit_jump_insn (gen_jump (op1));
5721 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5723 expand_expr (TREE_OPERAND (exp, 2),
5724 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5725 dest_right_flag = get_last_insn ();
5728 /* Handle conditional cleanups, if any. */
5729 right_cleanups = defer_cleanups_to (old_cleanups);
5735 /* Add back in any conditional cleanups. */
5736 if (left_cleanups || right_cleanups)
5742 /* Now that we know that a flag is needed, go back and add in the
5743 setting of the flag. */
5745 /* Do the left side flag. */
5746 last = get_last_insn ();
5747 /* Flag left cleanups as needed. */
5748 emit_move_insn (flag, const1_rtx);
5749 /* ??? deprecated, use sequences instead. */
5750 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
5752 /* Do the right side flag. */
5753 last = get_last_insn ();
5754 /* Flag right cleanups as needed. */
5755 emit_move_insn (flag, const0_rtx);
5756 /* ??? deprecated, use sequences instead. */
5757 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
5759 /* Convert FLAG, which is an rtx, into a tree. */
5760 cond = make_node (RTL_EXPR);
5761 TREE_TYPE (cond) = integer_type_node;
5762 RTL_EXPR_RTL (cond) = flag;
5763 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
5765 if (! left_cleanups)
5766 left_cleanups = integer_zero_node;
5767 if (! right_cleanups)
5768 right_cleanups = integer_zero_node;
5769 new_cleanups = build (COND_EXPR, void_type_node,
5770 truthvalue_conversion (cond),
5771 left_cleanups, right_cleanups);
5772 new_cleanups = fold (new_cleanups);
5774 /* Now add in the conditionalized cleanups. */
5776 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
5777 (*interim_eh_hook) (NULL_TREE);
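/* Illustrative example, not from the original source: for
`c ? f (make_T (x)) : g (make_T (y))' (function names hypothetical),
each arm creates a temporary with a cleanup. FLAG is set to 1 on
the left arm and 0 on the right, and the COND_EXPR queued above
runs only the cleanups of the arm that actually executed. */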
5784 int need_exception_region = 0;
5785 /* Something needs to be initialized, but we didn't know
5786 where that thing was when building the tree. For example,
5787 it could be the return value of a function, or a parameter
5788 to a function which is laid down on the stack, or a temporary
5789 variable which must be passed by reference.
5791 We guarantee that the expression will either be constructed
5792 or copied into our original target. */
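/* Illustrative example, not from the original source: for a C++
declaration `struct S s = f ();', SLOT is S and the initializer is
expanded so that F constructs its result directly into S's stack
slot; if DECL_RTL for the slot is already set, that location is
reused and the TARGET we were handed is only a hint. */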
5794 tree slot = TREE_OPERAND (exp, 0);
5798 if (TREE_CODE (slot) != VAR_DECL)
5803 if (DECL_RTL (slot) != 0)
5805 target = DECL_RTL (slot);
5806 /* If we have already expanded the slot, don't do it again. */
5808 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5813 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
5814 /* All temp slots at this level must not conflict. */
5815 preserve_temp_slots (target);
5816 DECL_RTL (slot) = target;
5818 /* Since SLOT is not known to the called function
5819 to belong to its stack frame, we must build an explicit
5820 cleanup. This case occurs when we must build up a reference
5821 to pass the reference as an argument. In this case,
5822 it is very likely that such a reference need not be built here. */
5825 if (TREE_OPERAND (exp, 2) == 0)
5826 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5827 if (TREE_OPERAND (exp, 2))
5829 cleanups_this_call = tree_cons (NULL_TREE,
5830 TREE_OPERAND (exp, 2),
5831 cleanups_this_call);
5832 need_exception_region = 1;
5838 /* This case does occur, when expanding a parameter which
5839 needs to be constructed on the stack. The target
5840 is the actual stack address that we want to initialize.
5841 The function we call will perform the cleanup in this case. */
5843 /* If we have already assigned it space, use that space,
5844 not the target that we were passed, as our target
5845 parameter is only a hint. */
5846 if (DECL_RTL (slot) != 0)
5848 target = DECL_RTL (slot);
5849 /* If we have already expanded the slot, don't do it again. */
5851 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5855 DECL_RTL (slot) = target;
5858 exp1 = TREE_OPERAND (exp, 1);
5859 /* Mark it as expanded. */
5860 TREE_OPERAND (exp, 1) = NULL_TREE;
5862 temp = expand_expr (exp1, target, tmode, modifier);
5864 if (need_exception_region)
5865 (*interim_eh_hook) (NULL_TREE);
5872 tree lhs = TREE_OPERAND (exp, 0);
5873 tree rhs = TREE_OPERAND (exp, 1);
5874 tree noncopied_parts = 0;
5875 tree lhs_type = TREE_TYPE (lhs);
5877 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5878 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5879 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5880 TYPE_NONCOPIED_PARTS (lhs_type));
5881 while (noncopied_parts != 0)
5883 expand_assignment (TREE_VALUE (noncopied_parts),
5884 TREE_PURPOSE (noncopied_parts), 0, 0);
5885 noncopied_parts = TREE_CHAIN (noncopied_parts);
5892 /* If lhs is complex, expand calls in rhs before computing it.
5893 That's so we don't compute a pointer and save it over a call.
5894 If lhs is simple, compute it first so we can give it as a
5895 target if the rhs is just a call. This avoids an extra temp and copy
5896 and that prevents a partial-subsumption which makes bad code.
5897 Actually we could treat component_ref's of vars like vars. */
5899 tree lhs = TREE_OPERAND (exp, 0);
5900 tree rhs = TREE_OPERAND (exp, 1);
5901 tree noncopied_parts = 0;
5902 tree lhs_type = TREE_TYPE (lhs);
5906 if (TREE_CODE (lhs) != VAR_DECL
5907 && TREE_CODE (lhs) != RESULT_DECL
5908 && TREE_CODE (lhs) != PARM_DECL)
5909 preexpand_calls (exp);
5911 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5912 of size 1. In this case, (unless we need the result of the
5913 assignment) we can do this more efficiently with a
5914 test followed by an assignment, if necessary.
5916 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5917 things change so we do, this code should be enhanced to support it. */
5920 && TREE_CODE (lhs) == COMPONENT_REF
5921 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5922 || TREE_CODE (rhs) == BIT_AND_EXPR)
5923 && TREE_OPERAND (rhs, 0) == lhs
5924 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5925 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5926 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5928 rtx label = gen_label_rtx ();
5930 do_jump (TREE_OPERAND (rhs, 1),
5931 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5932 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5933 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5934 (TREE_CODE (rhs) == BIT_IOR_EXPR
5936 : integer_zero_node)),
5938 do_pending_stack_adjust ();
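/* Illustrative example, not from the original source: for one-bit
fields A and B, `s.a |= s.b;' is expanded by the code above as
`if (s.b) s.a = 1;' -- a jump on B and a constant store -- rather
than reading both fields, OR-ing them, and storing the result. */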
5943 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5944 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5945 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5946 TYPE_NONCOPIED_PARTS (lhs_type));
5948 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5949 while (noncopied_parts != 0)
5951 expand_assignment (TREE_PURPOSE (noncopied_parts),
5952 TREE_VALUE (noncopied_parts), 0, 0);
5953 noncopied_parts = TREE_CHAIN (noncopied_parts);
5958 case PREINCREMENT_EXPR:
5959 case PREDECREMENT_EXPR:
5960 return expand_increment (exp, 0);
5962 case POSTINCREMENT_EXPR:
5963 case POSTDECREMENT_EXPR:
5964 /* Faster to treat as pre-increment if result is not used. */
5965 return expand_increment (exp, ! ignore);
5968 /* If nonzero, TEMP will be set to the address of something that might
5969 be a MEM corresponding to a stack slot. */
5972 /* Are we taking the address of a nested function? */
5973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5974 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5976 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5977 op0 = force_operand (op0, target);
5979 /* If we are taking the address of something erroneous, just return a zero. */
5981 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
5985 /* We make sure to pass const0_rtx down if we came in with
5986 ignore set, to avoid doing the cleanups twice. */
5987 op0 = expand_expr (TREE_OPERAND (exp, 0),
5988 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5989 (modifier == EXPAND_INITIALIZER
5990 ? modifier : EXPAND_CONST_ADDRESS));
5992 /* If we are going to ignore the result, OP0 will have been set
5993 to const0_rtx, so just return it. Don't get confused and
5994 think we are taking the address of the constant. */
5998 /* We would like the object in memory. If it is a constant,
5999 we can have it be statically allocated into memory. For
6000 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6001 memory and store the value into it. */
6003 if (CONSTANT_P (op0))
6004 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6006 else if (GET_CODE (op0) == MEM)
6008 mark_temp_addr_taken (op0);
6009 temp = XEXP (op0, 0);
6012 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6013 || GET_CODE (op0) == CONCAT)
6015 /* If this object is in a register, it must not be BLKmode. */
6017 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6018 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6020 = assign_stack_temp (inner_mode,
6021 int_size_in_bytes (inner_type), 1);
6023 mark_temp_addr_taken (memloc);
6024 emit_move_insn (memloc, op0);
6028 if (GET_CODE (op0) != MEM)
6031 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6032 return XEXP (op0, 0);
6034 op0 = force_operand (XEXP (op0, 0), target);
6037 if (flag_force_addr && GET_CODE (op0) != REG)
6038 op0 = force_reg (Pmode, op0);
6040 if (GET_CODE (op0) == REG)
6041 mark_reg_pointer (op0);
6043 /* If we might have had a temp slot, add an equivalent address
6044 for it. */
6046 update_temp_slot_address (temp, op0);
6050 case ENTRY_VALUE_EXPR:
6053 /* COMPLEX type for Extended Pascal & Fortran */
6056 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6059 /* Get the rtx code of the operands. */
6060 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6061 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6064 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6068 /* Move the real (op0) and imaginary (op1) parts to their location. */
6069 emit_move_insn (gen_realpart (mode, target), op0);
6070 emit_move_insn (gen_imagpart (mode, target), op1);
6072 insns = get_insns ();
6075 /* Complex construction should appear as a single unit. */
6076 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6077 each with a separate pseudo as destination.
6078 It's not correct for flow to treat them as a unit. */
6079 if (GET_CODE (target) != CONCAT)
6080 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6088 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6089 return gen_realpart (mode, op0);
6092 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6093 return gen_imagpart (mode, op0);
6097 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6101 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6104 target = gen_reg_rtx (mode);
6108 /* Store the realpart and the negated imagpart to target. */
6109 emit_move_insn (gen_realpart (partmode, target),
6110 gen_realpart (partmode, op0));
6112 imag_t = gen_imagpart (partmode, target);
6113 temp = expand_unop (partmode, neg_optab,
6114 gen_imagpart (partmode, op0), imag_t, 0);
6116 emit_move_insn (imag_t, temp);
6118 insns = get_insns ();
6121 /* Conjugate should appear as a single unit.
6122 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6123 each with a separate pseudo as destination.
6124 It's not correct for flow to treat them as a unit. */
6125 if (GET_CODE (target) != CONCAT)
6126 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6134 op0 = CONST0_RTX (tmode);
6140 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6143 /* Here to do an ordinary binary operator, generating an instruction
6144 from the optab already placed in `this_optab'. */
6146 preexpand_calls (exp);
6147 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6148 subtarget = 0;
6149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6150 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6152 temp = expand_binop (mode, this_optab, op0, op1, target,
6153 unsignedp, OPTAB_LIB_WIDEN);
6160 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6162 bc_expand_expr (exp)
6165 enum tree_code code;
6168 struct binary_operator *binoptab;
6169 struct unary_operator *unoptab;
6170 struct increment_operator *incroptab;
6171 struct bc_label *lab, *lab1;
6172 enum bytecode_opcode opcode;
6175 code = TREE_CODE (exp);
6181 if (DECL_RTL (exp) == 0)
6183 error_with_decl (exp, "prior parameter's size depends on `%s'");
6187 bc_load_parmaddr (DECL_RTL (exp));
6188 bc_load_memory (TREE_TYPE (exp), exp);
6194 if (DECL_RTL (exp) == 0)
6198 if (BYTECODE_LABEL (DECL_RTL (exp)))
6199 bc_load_externaddr (DECL_RTL (exp));
6200 else
6201 bc_load_localaddr (DECL_RTL (exp));
6203 if (TREE_PUBLIC (exp))
6204 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6205 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6206 else
6207 bc_load_localaddr (DECL_RTL (exp));
6209 bc_load_memory (TREE_TYPE (exp), exp);
6214 #ifdef DEBUG_PRINT_CODE
6215 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6216 #endif
6217 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6218 ? SImode
6219 : TYPE_MODE (TREE_TYPE (exp)))],
6220 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6226 #ifdef DEBUG_PRINT_CODE
6227 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
6228 #endif
6229 /* FIX THIS: find a better way to pass real_cst's. -bson */
6230 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6231 (double) TREE_REAL_CST (exp));
6240 /* We build a call description vector describing the type of
6241 the return value and of the arguments; this call vector,
6242 together with a pointer to a location for the return value
6243 and the base of the argument list, is passed to the low
6244 level machine dependent call subroutine, which is responsible
6245 for putting the arguments wherever real functions expect
6246 them, as well as getting the return value back. */
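/* Sketch of the resulting call description vector, as deduced from
   the construction below (front to back):
       { nargs,
         return type code, return size,
         arg1 type code, arg1 size,
         arg2 type code, arg2 size, ... } */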
6248 tree calldesc = 0, arg;
6252 /* Push the evaluated args on the evaluation stack in reverse
6253 order. Also make an entry for each arg in the calldesc
6254 vector while we're at it. */
6256 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6258 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6261 bc_expand_expr (TREE_VALUE (arg));
6263 calldesc = tree_cons ((tree) 0,
6264 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6265 calldesc);
6266 calldesc = tree_cons ((tree) 0,
6267 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6268 calldesc);
6271 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6273 /* Allocate a location for the return value and push its
6274 address on the evaluation stack. Also make an entry
6275 at the front of the calldesc for the return value type. */
6277 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6278 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6279 bc_load_localaddr (retval);
6281 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6282 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6284 /* Prepend the argument count. */
6285 calldesc = tree_cons ((tree) 0,
6286 build_int_2 (nargs, 0),
6287 calldesc);
6289 /* Push the address of the call description vector on the stack. */
6290 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6291 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6292 build_index_type (build_int_2 (nargs * 2, 0)));
6293 r = output_constant_def (calldesc);
6294 bc_load_externaddr (r);
6296 /* Push the address of the function to be called. */
6297 bc_expand_expr (TREE_OPERAND (exp, 0));
6299 /* Call the function, popping its address and the calldesc vector
6300 address off the evaluation stack in the process. */
6301 bc_emit_instruction (call);
6303 /* Pop the arguments off the stack. */
6304 bc_adjust_stack (nargs);
6306 /* Load the return value onto the stack. */
6307 bc_load_localaddr (retval);
6308 bc_load_memory (type, TREE_OPERAND (exp, 0));
6314 if (!SAVE_EXPR_RTL (exp))
6316 /* First time around: copy to local variable */
6317 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6318 TYPE_ALIGN (TREE_TYPE(exp)));
6319 bc_expand_expr (TREE_OPERAND (exp, 0));
6320 bc_emit_instruction (duplicate);
6322 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6323 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6327 /* Consecutive reference: use saved copy */
6328 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6329 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
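/* E.g. (sketch): if X is a SAVE_EXPR wrapping a call, then in X * X
   the call is evaluated once; the second reference merely reloads
   the copy stored in the local allocated above. */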
6334 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6335 how are they handled instead? */
6338 TREE_USED (exp) = 1;
6339 bc_expand_expr (STMT_BODY (exp));
6346 bc_expand_expr (TREE_OPERAND (exp, 0));
6347 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6352 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6357 bc_expand_address (TREE_OPERAND (exp, 0));
6362 bc_expand_expr (TREE_OPERAND (exp, 0));
6363 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6368 bc_expand_expr (bc_canonicalize_array_ref (exp));
6373 bc_expand_component_address (exp);
6375 /* If we have a bitfield, generate a proper load */
6376 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6381 bc_expand_expr (TREE_OPERAND (exp, 0));
6382 bc_emit_instruction (drop);
6383 bc_expand_expr (TREE_OPERAND (exp, 1));
6388 bc_expand_expr (TREE_OPERAND (exp, 0));
6389 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6390 lab = bc_get_bytecode_label ();
6391 bc_emit_bytecode (xjumpifnot);
6392 bc_emit_bytecode_labelref (lab);
6394 #ifdef DEBUG_PRINT_CODE
6395 fputc ('\n', stderr);
6396 #endif
6397 bc_expand_expr (TREE_OPERAND (exp, 1));
6398 lab1 = bc_get_bytecode_label ();
6399 bc_emit_bytecode (jump);
6400 bc_emit_bytecode_labelref (lab1);
6402 #ifdef DEBUG_PRINT_CODE
6403 fputc ('\n', stderr);
6404 #endif
6406 bc_emit_bytecode_labeldef (lab);
6407 bc_expand_expr (TREE_OPERAND (exp, 2));
6408 bc_emit_bytecode_labeldef (lab1);
6411 case TRUTH_ANDIF_EXPR:
6413 opcode = xjumpifnot;
6416 case TRUTH_ORIF_EXPR:
6423 binoptab = optab_plus_expr;
6428 binoptab = optab_minus_expr;
6433 binoptab = optab_mult_expr;
6436 case TRUNC_DIV_EXPR:
6437 case FLOOR_DIV_EXPR:
6439 case ROUND_DIV_EXPR:
6440 case EXACT_DIV_EXPR:
6442 binoptab = optab_trunc_div_expr;
6445 case TRUNC_MOD_EXPR:
6446 case FLOOR_MOD_EXPR:
6448 case ROUND_MOD_EXPR:
6450 binoptab = optab_trunc_mod_expr;
6453 case FIX_ROUND_EXPR:
6454 case FIX_FLOOR_EXPR:
6456 abort (); /* Not used for C. */
6458 case FIX_TRUNC_EXPR:
6465 abort (); /* FIXME */
6469 binoptab = optab_rdiv_expr;
6474 binoptab = optab_bit_and_expr;
6479 binoptab = optab_bit_ior_expr;
6484 binoptab = optab_bit_xor_expr;
6489 binoptab = optab_lshift_expr;
6494 binoptab = optab_rshift_expr;
6497 case TRUTH_AND_EXPR:
6499 binoptab = optab_truth_and_expr;
6504 binoptab = optab_truth_or_expr;
6509 binoptab = optab_lt_expr;
6514 binoptab = optab_le_expr;
6519 binoptab = optab_ge_expr;
6524 binoptab = optab_gt_expr;
6529 binoptab = optab_eq_expr;
6534 binoptab = optab_ne_expr;
6539 unoptab = optab_negate_expr;
6544 unoptab = optab_bit_not_expr;
6547 case TRUTH_NOT_EXPR:
6549 unoptab = optab_truth_not_expr;
6552 case PREDECREMENT_EXPR:
6554 incroptab = optab_predecrement_expr;
6557 case PREINCREMENT_EXPR:
6559 incroptab = optab_preincrement_expr;
6562 case POSTDECREMENT_EXPR:
6564 incroptab = optab_postdecrement_expr;
6567 case POSTINCREMENT_EXPR:
6569 incroptab = optab_postincrement_expr;
6574 bc_expand_constructor (exp);
6584 tree vars = TREE_OPERAND (exp, 0);
6585 int vars_need_expansion = 0;
6587 /* Need to open a binding contour here because
6588 if there are any cleanups they must be contained here. */
6589 expand_start_bindings (0);
6591 /* Mark the corresponding BLOCK for output. */
6592 if (TREE_OPERAND (exp, 2) != 0)
6593 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6595 /* If VARS have not yet been expanded, expand them now. */
6598 if (DECL_RTL (vars) == 0)
6600 vars_need_expansion = 1;
6603 expand_decl_init (vars);
6604 vars = TREE_CHAIN (vars);
6607 bc_expand_expr (TREE_OPERAND (exp, 1));
6609 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6619 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6620 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6626 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6632 bc_expand_expr (TREE_OPERAND (exp, 0));
6633 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6634 lab = bc_get_bytecode_label ();
6636 bc_emit_instruction (duplicate);
6637 bc_emit_bytecode (opcode);
6638 bc_emit_bytecode_labelref (lab);
6640 #ifdef DEBUG_PRINT_CODE
6641 fputc ('\n', stderr);
6642 #endif
6644 bc_emit_instruction (drop);
6646 bc_expand_expr (TREE_OPERAND (exp, 1));
6647 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6648 bc_emit_bytecode_labeldef (lab);
6654 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6656 /* Push the quantum. */
6657 bc_expand_expr (TREE_OPERAND (exp, 1));
6659 /* Convert it to the lvalue's type. */
6660 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6662 /* Push the address of the lvalue */
6663 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6665 /* Perform actual increment */
6666 bc_expand_increment (incroptab, type);
6670 /* Return the alignment in bits of EXP, a pointer valued expression.
6671 But don't return more than MAX_ALIGN no matter what.
6672 The alignment returned is, by default, the alignment of the thing that
6673 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6675 Otherwise, look at the expression to see if we can do better, i.e., if the
6676 expression is actually pointing at an object whose alignment is tighter. */
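/* E.g. (sketch): given  double d;  the expression &d yields the
   alignment of double (clamped to MAX_ALIGN), while a plain
   char *p  yields only the alignment of char. */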
6679 get_pointer_alignment (exp, max_align)
6683 unsigned align, inner;
6685 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6688 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6689 align = MIN (align, max_align);
6693 switch (TREE_CODE (exp))
6697 case NON_LVALUE_EXPR:
6698 exp = TREE_OPERAND (exp, 0);
6699 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6701 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6702 align = MIN (inner, max_align);
6706 /* If sum of pointer + int, restrict our maximum alignment to that
6707 imposed by the integer. If not, we can't do any better than
6708 ALIGN. */
6709 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6710 return align;
6712 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6713 & (max_align - 1))
6714 != 0)
6715 max_align >>= 1;
6717 exp = TREE_OPERAND (exp, 0);
6721 /* See what we are pointing at and look at its alignment. */
6722 exp = TREE_OPERAND (exp, 0);
6723 if (TREE_CODE (exp) == FUNCTION_DECL)
6724 align = FUNCTION_BOUNDARY;
6725 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6726 align = DECL_ALIGN (exp);
6727 #ifdef CONSTANT_ALIGNMENT
6728 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6729 align = CONSTANT_ALIGNMENT (exp, align);
6730 #endif
6731 return MIN (align, max_align);
6739 /* Return the tree node and offset if a given argument corresponds to
6740 a string constant. */
6743 string_constant (arg, ptr_offset)
6749 if (TREE_CODE (arg) == ADDR_EXPR
6750 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6752 *ptr_offset = integer_zero_node;
6753 return TREE_OPERAND (arg, 0);
6755 else if (TREE_CODE (arg) == PLUS_EXPR)
6757 tree arg0 = TREE_OPERAND (arg, 0);
6758 tree arg1 = TREE_OPERAND (arg, 1);
6763 if (TREE_CODE (arg0) == ADDR_EXPR
6764 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6767 return TREE_OPERAND (arg0, 0);
6769 else if (TREE_CODE (arg1) == ADDR_EXPR
6770 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6773 return TREE_OPERAND (arg1, 0);
6780 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6781 way, because it could contain a zero byte in the middle.
6782 TREE_STRING_LENGTH is the size of the character array, not the string.
6784 Unfortunately, string_constant can't access the values of const char
6785 arrays with initializers, so neither can we do so here. */
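/* E.g. (sketch): c_strlen of  "hello" + 2  folds to 3 at compile
   time, but for  "foo\0bar" + i  with I unknown we must give up,
   since the distance to the first null depends on where we start. */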
6795 src = string_constant (src, &offset_node);
6798 max = TREE_STRING_LENGTH (src);
6799 ptr = TREE_STRING_POINTER (src);
6800 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6802 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6803 compute the offset to the following null if we don't know where to
6804 start searching for it. */
6806 for (i = 0; i < max; i++)
6809 /* We don't know the starting offset, but we do know that the string
6810 has no internal zero bytes. We can assume that the offset falls
6811 within the bounds of the string; otherwise, the programmer deserves
6812 what he gets. Subtract the offset from the length of the string,
6813 and return that. */
6814 /* This would perhaps not be valid if we were dealing with named
6815 arrays in addition to literal string constants. */
6816 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6819 /* We have a known offset into the string. Start searching there for
6820 a null character. */
6821 if (offset_node == 0)
6822 offset = 0;
6823 else
6825 /* Did we get a long long offset? If so, punt. */
6826 if (TREE_INT_CST_HIGH (offset_node) != 0)
6827 return 0;
6828 offset = TREE_INT_CST_LOW (offset_node);
6830 /* If the offset is known to be out of bounds, warn, and call strlen at
6831 runtime. */
6832 if (offset < 0 || offset > max)
6834 warning ("offset outside bounds of constant string");
6837 /* Use strlen to search for the first zero byte. Since any strings
6838 constructed with build_string will have nulls appended, we win even
6839 if we get handed something like (char[4])"abcd".
6841 Since OFFSET is our starting index into the string, no further
6842 calculation is needed. */
6843 return size_int (strlen (ptr + offset));
6846 /* Expand an expression EXP that calls a built-in function,
6847 with result going to TARGET if that's convenient
6848 (and in mode MODE if that's convenient).
6849 SUBTARGET may be used as the target for computing one of EXP's operands.
6850 IGNORE is nonzero if the value is to be ignored. */
6852 #define CALLED_AS_BUILT_IN(NODE) \
6853 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
6856 expand_builtin (exp, target, subtarget, mode, ignore)
6860 enum machine_mode mode;
6863 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6864 tree arglist = TREE_OPERAND (exp, 1);
6867 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6868 optab builtin_optab;
6870 switch (DECL_FUNCTION_CODE (fndecl))
6875 /* build_function_call changes these into ABS_EXPR. */
6880 /* Treat these like sqrt, but only if the user asks for them. */
6881 if (! flag_fast_math)
6882 break;
6883 case BUILT_IN_FSQRT:
6884 /* If not optimizing, call the library function. */
6888 if (arglist == 0
6889 /* Arg could be wrong type if user redeclared this fcn wrong. */
6890 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6891 break;
6893 /* Stabilize and compute the argument. */
6894 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6895 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6897 exp = copy_node (exp);
6898 arglist = copy_node (arglist);
6899 TREE_OPERAND (exp, 1) = arglist;
6900 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6902 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6904 /* Make a suitable register to place result in. */
6905 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6910 switch (DECL_FUNCTION_CODE (fndecl))
6913 builtin_optab = sin_optab; break;
6915 builtin_optab = cos_optab; break;
6916 case BUILT_IN_FSQRT:
6917 builtin_optab = sqrt_optab; break;
6922 /* Compute into TARGET.
6923 Set TARGET to wherever the result comes back. */
6924 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6925 builtin_optab, op0, target, 0);
6927 /* If we were unable to expand via the builtin, stop the
6928 sequence (without outputting the insns) and break, causing
6929 a call to the library function. */
6936 /* Check the results by default. But if flag_fast_math is turned on,
6937 then assume sqrt will always be called with valid arguments. */
6939 if (! flag_fast_math)
6941 /* Don't define the builtin FP instructions
6942 if your machine is not IEEE. */
6943 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6944 abort ();
6946 lab1 = gen_label_rtx ();
6948 /* Test the result; if it is NaN, set errno=EDOM because
6949 the argument was not in the domain. */
6950 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6951 emit_jump_insn (gen_beq (lab1));
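/* The test just emitted exploits the fact that a NaN compares
   unequal to itself; the generated code behaves like this sketch:
       t = sqrt (x);
       if (t != t)
         errno = EDOM;    <- via the store or library call below */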
6955 #ifdef GEN_ERRNO_RTX
6956 rtx errno_rtx = GEN_ERRNO_RTX;
6957 #else
6958 rtx errno_rtx
6959 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
6960 #endif
6962 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6965 /* We can't set errno=EDOM directly; let the library call do it.
6966 Pop the arguments right away in case the call gets deleted. */
6968 expand_call (exp, target, 0);
6975 /* Output the entire sequence. */
6976 insns = get_insns ();
6982 /* __builtin_apply_args returns block of memory allocated on
6983 the stack into which is stored the arg pointer, structure
6984 value address, static chain, and all the registers that might
6985 possibly be used in performing a function call. The code is
6986 moved to the start of the function so the incoming values are
6987 valid. */
6988 case BUILT_IN_APPLY_ARGS:
6989 /* Don't do __builtin_apply_args more than once in a function.
6990 Save the result of the first call and reuse it. */
6991 if (apply_args_value != 0)
6992 return apply_args_value;
6994 /* When this function is called, it means that registers must be
6995 saved on entry to this function. So we migrate the
6996 call to the first insn of this function. */
7001 temp = expand_builtin_apply_args ();
7005 apply_args_value = temp;
7007 /* Put the sequence after the NOTE that starts the function.
7008 If this is inside a SEQUENCE, make the outer-level insn
7009 chain current, so the code is placed at the start of the
7010 function. */
7011 push_topmost_sequence ();
7012 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7013 pop_topmost_sequence ();
7017 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7018 FUNCTION with a copy of the parameters described by
7019 ARGUMENTS, and ARGSIZE. It returns a block of memory
7020 allocated on the stack into which is stored all the registers
7021 that might possibly be used for returning the result of a
7022 function. ARGUMENTS is the value returned by
7023 __builtin_apply_args. ARGSIZE is the number of bytes of
7024 arguments that must be copied. ??? How should this value be
7025 computed? We'll also need a safe worst case value for varargs
7026 functions. */
7027 case BUILT_IN_APPLY:
7028 if (arglist == 0
7029 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7030 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7031 || TREE_CHAIN (arglist) == 0
7032 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7033 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7034 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7035 return const0_rtx;
7042 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7043 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7045 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7048 /* __builtin_return (RESULT) causes the function to return the
7049 value described by RESULT. RESULT is address of the block of
7050 memory returned by __builtin_apply. */
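/* Taken together, these builtins let one function forward its whole
   argument block to another, e.g. (sketch; SIZE is a caller-supplied
   worst-case argument-block size and TARGET any function pointer):
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply (target, args, SIZE);
       __builtin_return (res); */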
7051 case BUILT_IN_RETURN:
7052 if (arglist
7053 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7054 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7055 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7056 NULL_RTX, VOIDmode, 0));
7059 case BUILT_IN_SAVEREGS:
7060 /* Don't do __builtin_saveregs more than once in a function.
7061 Save the result of the first call and reuse it. */
7062 if (saveregs_value != 0)
7063 return saveregs_value;
7065 /* When this function is called, it means that registers must be
7066 saved on entry to this function. So we migrate the
7067 call to the first insn of this function. */
7071 /* Now really call the function. `expand_call' does not call
7072 expand_builtin, so there is no danger of infinite recursion here. */
7075 #ifdef EXPAND_BUILTIN_SAVEREGS
7076 /* Do whatever the machine needs done in this case. */
7077 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7078 #else
7079 /* The register where the function returns its value
7080 is likely to have something else in it, such as an argument.
7081 So preserve that register around the call. */
7083 if (value_mode != VOIDmode)
7085 rtx valreg = hard_libcall_value (value_mode);
7086 rtx saved_valreg = gen_reg_rtx (value_mode);
7088 emit_move_insn (saved_valreg, valreg);
7089 temp = expand_call (exp, target, ignore);
7090 emit_move_insn (valreg, saved_valreg);
7093 /* Generate the call, putting the value in a pseudo. */
7094 temp = expand_call (exp, target, ignore);
7095 #endif
7100 saveregs_value = temp;
7102 /* Put the sequence after the NOTE that starts the function.
7103 If this is inside a SEQUENCE, make the outer-level insn
7104 chain current, so the code is placed at the start of the
7105 function. */
7106 push_topmost_sequence ();
7107 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7108 pop_topmost_sequence ();
7112 /* __builtin_args_info (N) returns word N of the arg space info
7113 for the current function. The number and meanings of words
7114 is controlled by the definition of CUMULATIVE_ARGS. */
7115 case BUILT_IN_ARGS_INFO:
7117 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7119 int *word_ptr = (int *) &current_function_args_info;
7120 tree type, elts, result;
7122 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7123 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7124 __FILE__, __LINE__);
7128 tree arg = TREE_VALUE (arglist);
7129 if (TREE_CODE (arg) != INTEGER_CST)
7130 error ("argument of `__builtin_args_info' must be constant");
7133 int wordnum = TREE_INT_CST_LOW (arg);
7135 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7136 error ("argument of `__builtin_args_info' out of range");
7137 else
7138 return GEN_INT (word_ptr[wordnum]);
7142 error ("missing argument in `__builtin_args_info'");
7147 for (i = 0; i < nwords; i++)
7148 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7150 type = build_array_type (integer_type_node,
7151 build_index_type (build_int_2 (nwords, 0)));
7152 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7153 TREE_CONSTANT (result) = 1;
7154 TREE_STATIC (result) = 1;
7155 result = build (INDIRECT_REF, build_pointer_type (type), result);
7156 TREE_CONSTANT (result) = 1;
7157 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7161 /* Return the address of the first anonymous stack arg. */
7162 case BUILT_IN_NEXT_ARG:
7164 tree fntype = TREE_TYPE (current_function_decl);
7166 if ((TYPE_ARG_TYPES (fntype) == 0
7167 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7168 == void_type_node))
7169 && ! current_function_varargs)
7171 error ("`va_start' used in function with fixed args");
7177 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7178 tree arg = TREE_VALUE (arglist);
7180 /* Strip off all nops for the sake of the comparison. This
7181 is not quite the same as STRIP_NOPS. It does more. */
7182 while (TREE_CODE (arg) == NOP_EXPR
7183 || TREE_CODE (arg) == CONVERT_EXPR
7184 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7185 arg = TREE_OPERAND (arg, 0);
7186 if (arg != last_parm)
7187 warning ("second parameter of `va_start' not last named argument");
7190 /* Evidently an out of date version of <stdarg.h>; can't validate
7191 va_start's second argument, but can still work as intended. */
7192 warning ("`__builtin_next_arg' called without an argument");
7195 return expand_binop (Pmode, add_optab,
7196 current_function_internal_arg_pointer,
7197 current_function_arg_offset_rtx,
7198 NULL_RTX, 0, OPTAB_LIB_WIDEN);
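/* This expansion is what <stdarg.h> relies on: a use such as
       va_start (ap, last);
   becomes, in effect (sketch),
       ap = (char *) __builtin_next_arg (last);
   i.e. the internal arg pointer plus the offset of the first
   anonymous argument. */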
7200 case BUILT_IN_CLASSIFY_TYPE:
7203 tree type = TREE_TYPE (TREE_VALUE (arglist));
7204 enum tree_code code = TREE_CODE (type);
7205 if (code == VOID_TYPE)
7206 return GEN_INT (void_type_class);
7207 if (code == INTEGER_TYPE)
7208 return GEN_INT (integer_type_class);
7209 if (code == CHAR_TYPE)
7210 return GEN_INT (char_type_class);
7211 if (code == ENUMERAL_TYPE)
7212 return GEN_INT (enumeral_type_class);
7213 if (code == BOOLEAN_TYPE)
7214 return GEN_INT (boolean_type_class);
7215 if (code == POINTER_TYPE)
7216 return GEN_INT (pointer_type_class);
7217 if (code == REFERENCE_TYPE)
7218 return GEN_INT (reference_type_class);
7219 if (code == OFFSET_TYPE)
7220 return GEN_INT (offset_type_class);
7221 if (code == REAL_TYPE)
7222 return GEN_INT (real_type_class);
7223 if (code == COMPLEX_TYPE)
7224 return GEN_INT (complex_type_class);
7225 if (code == FUNCTION_TYPE)
7226 return GEN_INT (function_type_class);
7227 if (code == METHOD_TYPE)
7228 return GEN_INT (method_type_class);
7229 if (code == RECORD_TYPE)
7230 return GEN_INT (record_type_class);
7231 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7232 return GEN_INT (union_type_class);
7233 if (code == ARRAY_TYPE)
7235 if (TYPE_STRING_FLAG (type))
7236 return GEN_INT (string_type_class);
7237 else
7238 return GEN_INT (array_type_class);
7240 if (code == SET_TYPE)
7241 return GEN_INT (set_type_class);
7242 if (code == FILE_TYPE)
7243 return GEN_INT (file_type_class);
7244 if (code == LANG_TYPE)
7245 return GEN_INT (lang_type_class);
7247 return GEN_INT (no_type_class);
7249 case BUILT_IN_CONSTANT_P:
7253 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7254 ? const1_rtx : const0_rtx);
7256 case BUILT_IN_FRAME_ADDRESS:
7257 /* The argument must be a nonnegative integer constant.
7258 It counts the number of frames to scan up the stack.
7259 The value is the address of that frame. */
7260 case BUILT_IN_RETURN_ADDRESS:
7261 /* The argument must be a nonnegative integer constant.
7262 It counts the number of frames to scan up the stack.
7263 The value is the return address saved in that frame. */
7264 if (arglist == 0)
7265 /* Warning about missing arg was already issued. */
7266 return const0_rtx;
7267 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7269 error ("invalid arg to `__builtin_return_address'");
7272 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7274 error ("invalid arg to `__builtin_return_address'");
7279 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7280 rtx tem = frame_pointer_rtx;
7283 /* Some machines need special handling before we can access arbitrary
7284 frames. For example, on the sparc, we must first flush all
7285 register windows to the stack. */
7286 #ifdef SETUP_FRAME_ADDRESSES
7287 SETUP_FRAME_ADDRESSES ();
7288 #endif
7290 /* On the sparc, the return address is not in the frame, it is
7291 in a register. There is no way to access it off of the current
7292 frame pointer, but it can be accessed off the previous frame
7293 pointer by reading the value from the register window save
7295 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7296 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7297 count--;
7298 #endif
7300 /* Scan back COUNT frames to the specified frame. */
7301 for (i = 0; i < count; i++)
7303 /* Assume the dynamic chain pointer is in the word that
7304 the frame address points to, unless otherwise specified. */
7305 #ifdef DYNAMIC_CHAIN_ADDRESS
7306 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7307 #endif
7308 tem = memory_address (Pmode, tem);
7309 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7312 /* For __builtin_frame_address, return what we've got. */
7313 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7314 return tem;
7316 /* For __builtin_return_address,
7317 Get the return address from that frame. */
7318 #ifdef RETURN_ADDR_RTX
7319 return RETURN_ADDR_RTX (count, tem);
7320 #else
7321 tem = memory_address (Pmode,
7322 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7323 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7324 #endif
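/* Usage sketch: __builtin_return_address (0) is the current
   function's return address and __builtin_frame_address (1) the
   caller's frame; counts above zero walk the dynamic chain and are
   only as reliable as the machine macros above make them. */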
7327 case BUILT_IN_ALLOCA:
7328 if (arglist == 0
7329 /* Arg could be non-integer if user redeclared this fcn wrong. */
7330 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7331 break;
7333 /* Compute the argument. */
7334 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7336 /* Allocate the desired space. */
7337 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7340 /* If not optimizing, call the library function. */
7341 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7342 break;
7344 if (arglist == 0
7345 /* Arg could be non-integer if user redeclared this fcn wrong. */
7346 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7347 break;
7349 /* Compute the argument. */
7350 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7351 /* Compute ffs, into TARGET if possible.
7352 Set TARGET to wherever the result comes back. */
7353 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7354 ffs_optab, op0, target, 1);
7359 case BUILT_IN_STRLEN:
7360 /* If not optimizing, call the library function. */
7361 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7362 break;
7364 if (arglist == 0
7365 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7366 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7367 break;
7370 tree src = TREE_VALUE (arglist);
7371 tree len = c_strlen (src);
7373 int align
7374 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7376 rtx result, src_rtx, char_rtx;
7377 enum machine_mode insn_mode = value_mode, char_mode;
7378 enum insn_code icode;
7380 /* If the length is known, just return it. */
7382 return expand_expr (len, target, mode, 0);
7384 /* If SRC is not a pointer type, don't do this operation inline. */
7388 /* Call a function if we can't compute strlen in the right mode. */
7390 while (insn_mode != VOIDmode)
7392 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7393 if (icode != CODE_FOR_nothing)
7394 break;
7396 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7398 if (insn_mode == VOIDmode)
7399 break;
7401 /* Make a place to write the result of the instruction. */
7402 result = target;
7403 if (! (result != 0
7404 && GET_CODE (result) == REG
7405 && GET_MODE (result) == insn_mode
7406 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7407 result = gen_reg_rtx (insn_mode);
7409 /* Make sure the operands are acceptable to the predicates. */
7411 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7412 result = gen_reg_rtx (insn_mode);
7414 src_rtx = memory_address (BLKmode,
7415 expand_expr (src, NULL_RTX, Pmode,
7416 EXPAND_NORMAL));
7417 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7418 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7420 char_rtx = const0_rtx;
7421 char_mode = insn_operand_mode[(int)icode][2];
7422 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7423 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7425 emit_insn (GEN_FCN (icode) (result,
7426 gen_rtx (MEM, BLKmode, src_rtx),
7427 char_rtx, GEN_INT (align)));
7429 /* Return the value in the proper mode for this function. */
7430 if (GET_MODE (result) == value_mode)
7432 else if (target != 0)
7434 convert_move (target, result, 0);
7438 return convert_to_mode (value_mode, result, 0);
7441 case BUILT_IN_STRCPY:
7442 /* If not optimizing, call the library function. */
7443 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7444 break;
7446 if (arglist == 0
7447 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7448 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7449 || TREE_CHAIN (arglist) == 0
7450 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7451 break;
7454 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7459 len = size_binop (PLUS_EXPR, len, integer_one_node);
7461 chainon (arglist, build_tree_list (NULL_TREE, len));
7465 case BUILT_IN_MEMCPY:
7466 /* If not optimizing, call the library function. */
7467 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7468 break;
7470 if (arglist == 0
7471 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7472 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7473 || TREE_CHAIN (arglist) == 0
7474 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7475 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7476 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7477 break;
7480 tree dest = TREE_VALUE (arglist);
7481 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7482 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7484 int src_align
7485 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7486 int dest_align
7487 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7488 rtx dest_rtx, dest_mem, src_mem;
7490 /* If either SRC or DEST is not a pointer type, don't do
7491 this operation in-line. */
7492 if (src_align == 0 || dest_align == 0)
7494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7495 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7499 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7500 dest_mem = gen_rtx (MEM, BLKmode,
7501 memory_address (BLKmode, dest_rtx));
7502 src_mem = gen_rtx (MEM, BLKmode,
7503 memory_address (BLKmode,
7504 expand_expr (src, NULL_RTX,
7505 Pmode, EXPAND_NORMAL)));
7508 /* Copy word part most expediently. */
7509 emit_block_move (dest_mem, src_mem,
7510 expand_expr (len, NULL_RTX, VOIDmode, 0),
7511 MIN (src_align, dest_align));
7515 /* These comparison functions need an instruction that returns an actual
7516 index. An ordinary compare that just sets the condition codes
7517 is not enough. */
7518 #ifdef HAVE_cmpstrsi
7519 case BUILT_IN_STRCMP:
7520 /* If not optimizing, call the library function. */
7521 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7522 break;
7524 if (arglist == 0
7525 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7526 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7527 || TREE_CHAIN (arglist) == 0
7528 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7529 break;
7530 else if (!HAVE_cmpstrsi)
7531 break;
7533 tree arg1 = TREE_VALUE (arglist);
7534 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7538 len = c_strlen (arg1);
7540 len = size_binop (PLUS_EXPR, integer_one_node, len);
7541 len2 = c_strlen (arg2);
7543 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7545 /* If we don't have a constant length for the first, use the length
7546 of the second, if we know it. We don't require a constant for
7547 this case; some cost analysis could be done if both are available
7548 but neither is constant. For now, assume they're equally cheap.
7550 If both strings have constant lengths, use the smaller. This
7551 could arise if optimization results in strcpy being called with
7552 two fixed strings, or if the code was machine-generated. We should
7553 add some code to the `memcmp' handler below to deal with such
7554 situations, someday. */
7555 if (!len || TREE_CODE (len) != INTEGER_CST)
7562 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7564 if (tree_int_cst_lt (len2, len))
7565 len = len2;
7568 chainon (arglist, build_tree_list (NULL_TREE, len));
7572 case BUILT_IN_MEMCMP:
7573 /* If not optimizing, call the library function. */
7574 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7575 break;
7577 if (arglist == 0
7578 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7579 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7580 || TREE_CHAIN (arglist) == 0
7581 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7582 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7583 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7584 break;
7585 else if (!HAVE_cmpstrsi)
7586 break;
7588 tree arg1 = TREE_VALUE (arglist);
7589 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7590 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7593 int arg1_align
7594 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7595 int arg2_align
7596 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7597 enum machine_mode insn_mode
7598 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7600 /* If we don't have POINTER_TYPE, call the function. */
7601 if (arg1_align == 0 || arg2_align == 0)
7603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7604 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7608 /* Make a place to write the result of the instruction. */
7609 result = target;
7610 if (! (result != 0
7611 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7612 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7613 result = gen_reg_rtx (insn_mode);
7615 emit_insn (gen_cmpstrsi (result,
7616 gen_rtx (MEM, BLKmode,
7617 expand_expr (arg1, NULL_RTX, Pmode,
7618 EXPAND_NORMAL)),
7619 gen_rtx (MEM, BLKmode,
7620 expand_expr (arg2, NULL_RTX, Pmode,
7621 EXPAND_NORMAL)),
7622 expand_expr (len, NULL_RTX, VOIDmode, 0),
7623 GEN_INT (MIN (arg1_align, arg2_align))));
7625 /* Return the value in the proper mode for this function. */
7626 mode = TYPE_MODE (TREE_TYPE (exp));
7627 if (GET_MODE (result) == mode)
7629 else if (target != 0)
7631 convert_move (target, result, 0);
7635 return convert_to_mode (mode, result, 0);
7637 #else
7638 case BUILT_IN_STRCMP:
7639 case BUILT_IN_MEMCMP:
7640 break;
7641 #endif
7643 default: /* just do library call, if unknown builtin */
7644 error ("built-in function `%s' not currently supported",
7645 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7648 /* The switch statement above can drop through to cause the function
7649 to be called normally. */
7651 return expand_call (exp, target, ignore);
7654 /* Built-in functions to perform an untyped call and return. */
7656 /* For each register that may be used for calling a function, this
7657 gives a mode used to copy the register's value. VOIDmode indicates
7658 the register is not used for calling a function. If the machine
7659 has register windows, this gives only the outbound registers.
7660 INCOMING_REGNO gives the corresponding inbound register. */
7661 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7663 /* For each register that may be used for returning values, this gives
7664 a mode used to copy the register's value. VOIDmode indicates the
7665 register is not used for returning values. If the machine has
7666 register windows, this gives only the outbound registers.
7667 INCOMING_REGNO gives the corresponding inbound register. */
7668 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7670 /* For each register that may be used for calling a function, this
7671 gives the offset of that register into the block returned by
7672 __builtin_apply_args. 0 indicates that the register is not
7673 used for calling a function. */
7674 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7676 /* Return the offset of register REGNO into the block returned by
7677 __builtin_apply_args. This is not declared static, since it is
7678 needed in objc-act.c. */
7681 apply_args_register_offset (regno)
7686 /* Arguments are always put in outgoing registers (in the argument
7687 block) if such make sense. */
7688 #ifdef OUTGOING_REGNO
7689 regno = OUTGOING_REGNO(regno);
7691 return apply_args_reg_offset[regno];
7694 /* Return the size required for the block returned by __builtin_apply_args,
7695 and initialize apply_args_mode. */
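/* Layout of the block whose size is computed here (sketch):
       [0]              incoming arg pointer
       [+ Pmode size]   structure value address, if any
       [...]            one suitably aligned slot for each register
                        that can carry arguments, at the offset
                        recorded in apply_args_reg_offset. */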
7700 static int size = -1;
7702 enum machine_mode mode;
7704 /* The values computed by this function never change. */
7707 /* The first value is the incoming arg-pointer. */
7708 size = GET_MODE_SIZE (Pmode);
7710 /* The second value is the structure value address unless this is
7711 passed as an "invisible" first argument. */
7712 if (struct_value_rtx)
7713 size += GET_MODE_SIZE (Pmode);
7715 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7716 if (FUNCTION_ARG_REGNO_P (regno))
7718 /* Search for the proper mode for copying this register's
7719 value. I'm not sure this is right, but it works so far. */
7720 enum machine_mode best_mode = VOIDmode;
7722 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7723 mode != VOIDmode;
7724 mode = GET_MODE_WIDER_MODE (mode))
7725 if (HARD_REGNO_MODE_OK (regno, mode)
7726 && HARD_REGNO_NREGS (regno, mode) == 1)
7729 if (best_mode == VOIDmode)
7730 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7731 mode != VOIDmode;
7732 mode = GET_MODE_WIDER_MODE (mode))
7733 if (HARD_REGNO_MODE_OK (regno, mode)
7734 && (mov_optab->handlers[(int) mode].insn_code
7735 != CODE_FOR_nothing))
7738 mode = best_mode;
7739 if (mode == VOIDmode)
7740 abort ();
7742 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7743 if (size % align != 0)
7744 size = CEIL (size, align) * align;
7745 apply_args_reg_offset[regno] = size;
7746 size += GET_MODE_SIZE (mode);
7747 apply_args_mode[regno] = mode;
7751 apply_args_mode[regno] = VOIDmode;
7752 apply_args_reg_offset[regno] = 0;
7758 /* Return the size required for the block returned by __builtin_apply,
7759 and initialize apply_result_mode. */
7762 apply_result_size ()
7764 static int size = -1;
7766 enum machine_mode mode;
7768 /* The values computed by this function never change. */
7773 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7774 if (FUNCTION_VALUE_REGNO_P (regno))
7776 /* Search for the proper mode for copying this register's
7777 value. I'm not sure this is right, but it works so far. */
7778 enum machine_mode best_mode = VOIDmode;
7780 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7781 mode != VOIDmode;
7782 mode = GET_MODE_WIDER_MODE (mode))
7783 if (HARD_REGNO_MODE_OK (regno, mode))
7786 if (best_mode == VOIDmode)
7787 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7788 mode != VOIDmode;
7789 mode = GET_MODE_WIDER_MODE (mode))
7790 if (HARD_REGNO_MODE_OK (regno, mode)
7791 && (mov_optab->handlers[(int) mode].insn_code
7792 != CODE_FOR_nothing))
7795 mode = best_mode;
7796 if (mode == VOIDmode)
7797 abort ();
7799 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7800 if (size % align != 0)
7801 size = CEIL (size, align) * align;
7802 size += GET_MODE_SIZE (mode);
7803 apply_result_mode[regno] = mode;
7806 apply_result_mode[regno] = VOIDmode;
7808 /* Allow targets that use untyped_call and untyped_return to override
7809 the size so that machine-specific information can be stored here. */
7810 #ifdef APPLY_RESULT_SIZE
7811 size = APPLY_RESULT_SIZE;
7812 #endif
7817 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7818 /* Create a vector describing the result block RESULT. If SAVEP is true,
7819 the result block is used to save the values; otherwise it is used to
7820 restore the values. */
7823 result_vector (savep, result)
7827 int regno, size, align, nelts;
7828 enum machine_mode mode;
7830 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7833 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7834 if ((mode = apply_result_mode[regno]) != VOIDmode)
7836 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7837 if (size % align != 0)
7838 size = CEIL (size, align) * align;
7839 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
7840 mem = change_address (result, mode,
7841 plus_constant (XEXP (result, 0), size));
7842 savevec[nelts++] = (savep
7843 ? gen_rtx (SET, VOIDmode, mem, reg)
7844 : gen_rtx (SET, VOIDmode, reg, mem));
7845 size += GET_MODE_SIZE (mode);
7847 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7849 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7851 /* Save the state required to perform an untyped call with the same
7852 arguments as were passed to the current function. */
7855 expand_builtin_apply_args ()
7858 int size, align, regno;
7859 enum machine_mode mode;
7861 /* Create a block where the arg-pointer, structure value address,
7862 and argument registers can be saved. */
7863 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7865 /* Walk past the arg-pointer and structure value address. */
7866 size = GET_MODE_SIZE (Pmode);
7867 if (struct_value_rtx)
7868 size += GET_MODE_SIZE (Pmode);
7870 /* Save each register used in calling a function to the block. */
7871 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7872 if ((mode = apply_args_mode[regno]) != VOIDmode)
7874 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7875 if (size % align != 0)
7876 size = CEIL (size, align) * align;
7877 emit_move_insn (change_address (registers, mode,
7878 plus_constant (XEXP (registers, 0),
7879 size)),
7880 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7881 size += GET_MODE_SIZE (mode);
7884 /* Save the arg pointer to the block. */
7885 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7886 copy_to_reg (virtual_incoming_args_rtx));
7887 size = GET_MODE_SIZE (Pmode);
7889 /* Save the structure value address unless this is passed as an
7890 "invisible" first argument. */
7891 if (struct_value_incoming_rtx)
7893 emit_move_insn (change_address (registers, Pmode,
7894 plus_constant (XEXP (registers, 0),
7895 size)),
7896 copy_to_reg (struct_value_incoming_rtx));
7897 size += GET_MODE_SIZE (Pmode);
7900 /* Return the address of the block. */
7901 return copy_addr_to_reg (XEXP (registers, 0));
7904 /* Perform an untyped call and save the state required to perform an
7905 untyped return of whatever value was returned by the given function. */
7908 expand_builtin_apply (function, arguments, argsize)
7909 rtx function, arguments, argsize;
7911 int size, align, regno;
7912 enum machine_mode mode;
7913 rtx incoming_args, result, reg, dest, call_insn;
7914 rtx old_stack_level = 0;
7915 rtx call_fusage = 0;
7917 /* Create a block where the return registers can be saved. */
7918 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7920 /* ??? The argsize value should be adjusted here. */
7922 /* Fetch the arg pointer from the ARGUMENTS block. */
7923 incoming_args = gen_reg_rtx (Pmode);
7924 emit_move_insn (incoming_args,
7925 gen_rtx (MEM, Pmode, arguments));
7926 #ifndef STACK_GROWS_DOWNWARD
7927 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7928 incoming_args, 0, OPTAB_LIB_WIDEN);
7929 #endif
7931 /* Perform postincrements before actually calling the function. */
7934 /* Push a new argument block and copy the arguments. */
7935 do_pending_stack_adjust ();
7936 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7938 /* Push a block of memory onto the stack to store the memory arguments.
7939 Save the address in a register, and copy the memory arguments. ??? I
7940 haven't figured out how the calling convention macros affect this,
7941 but it's likely that the source and/or destination addresses in
7942 the block copy will need updating in machine specific ways. */
7943 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7944 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7945 gen_rtx (MEM, BLKmode, incoming_args),
7946 argsize,
7947 PARM_BOUNDARY / BITS_PER_UNIT);
7949 /* Refer to the argument block. */
7951 arguments = gen_rtx (MEM, BLKmode, arguments);
7953 /* Walk past the arg-pointer and structure value address. */
7954 size = GET_MODE_SIZE (Pmode);
7955 if (struct_value_rtx)
7956 size += GET_MODE_SIZE (Pmode);
7958 /* Restore each of the registers previously saved. Make USE insns
7959 for each of these registers for use in making the call. */
7960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7961 if ((mode = apply_args_mode[regno]) != VOIDmode)
7963 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7964 if (size % align != 0)
7965 size = CEIL (size, align) * align;
7966 reg = gen_rtx (REG, mode, regno);
7967 emit_move_insn (reg,
7968 change_address (arguments, mode,
7969 plus_constant (XEXP (arguments, 0),
7970 size)));
7972 use_reg (&call_fusage, reg);
7973 size += GET_MODE_SIZE (mode);
7976 /* Restore the structure value address unless this is passed as an
7977 "invisible" first argument. */
7978 size = GET_MODE_SIZE (Pmode);
7979 if (struct_value_rtx)
7981 rtx value = gen_reg_rtx (Pmode);
7982 emit_move_insn (value,
7983 change_address (arguments, Pmode,
7984 plus_constant (XEXP (arguments, 0),
7985 size)));
7986 emit_move_insn (struct_value_rtx, value);
7987 if (GET_CODE (struct_value_rtx) == REG)
7988 use_reg (&call_fusage, struct_value_rtx);
7989 size += GET_MODE_SIZE (Pmode);
7992 /* All arguments and registers used for the call are set up by now! */
7993 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
7995 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7996 and we don't want to load it into a register as an optimization,
7997 because prepare_call_address already did it if it should be done. */
7998 if (GET_CODE (function) != SYMBOL_REF)
7999 function = memory_address (FUNCTION_MODE, function);
8001 /* Generate the actual call instruction and save the return value. */
8002 #ifdef HAVE_untyped_call
8003 if (HAVE_untyped_call)
8004 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8005 result, result_vector (1, result)));
8006 else
8007 #endif
8008 #ifdef HAVE_call_value
8009 if (HAVE_call_value)
8013 /* Locate the unique return register. It is not possible to
8014 express a call that sets more than one return register using
8015 call_value; use untyped_call for that. In fact, untyped_call
8016 only needs to save the return registers in the given block. */
8017 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8018 if ((mode = apply_result_mode[regno]) != VOIDmode)
8020 if (valreg)
8021 abort (); /* HAVE_untyped_call required. */
8022 valreg = gen_rtx (REG, mode, regno);
8025 emit_call_insn (gen_call_value (valreg,
8026 gen_rtx (MEM, FUNCTION_MODE, function),
8027 const0_rtx, NULL_RTX, const0_rtx));
8029 emit_move_insn (change_address (result, GET_MODE (valreg),
8030 XEXP (result, 0)),
8031 valreg);
8032 }
8033 else
8034 #endif
8035 abort ();
8037 /* Find the CALL insn we just emitted. */
8038 for (call_insn = get_last_insn ();
8039 call_insn && GET_CODE (call_insn) != CALL_INSN;
8040 call_insn = PREV_INSN (call_insn))
8046 /* Put the register usage information on the CALL. If there is already
8047 some usage information, put ours at the end. */
8048 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8052 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8053 link = XEXP (link, 1))
8056 XEXP (link, 1) = call_fusage;
8058 else
8059 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8061 /* Restore the stack. */
8062 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8064 /* Return the address of the result block. */
8065 return copy_addr_to_reg (XEXP (result, 0));
8068 /* Perform an untyped return. */
8071 expand_builtin_return (result)
8074 int size, align, regno;
8075 enum machine_mode mode;
8077 rtx call_fusage = 0;
8079 apply_result_size ();
8080 result = gen_rtx (MEM, BLKmode, result);
8082 #ifdef HAVE_untyped_return
8083 if (HAVE_untyped_return)
8085 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8086 emit_barrier ();
8087 return;
8088 }
8089 #endif
8091 /* Restore the return value and note that each value is used. */
8093 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8094 if ((mode = apply_result_mode[regno]) != VOIDmode)
8096 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8097 if (size % align != 0)
8098 size = CEIL (size, align) * align;
8099 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8100 emit_move_insn (reg,
8101 change_address (result, mode,
8102 plus_constant (XEXP (result, 0),
8103 size)));
8105 push_to_sequence (call_fusage);
8106 emit_insn (gen_rtx (USE, VOIDmode, reg));
8107 call_fusage = get_insns ();
8109 size += GET_MODE_SIZE (mode);
8112 /* Put the USE insns before the return. */
8113 emit_insns (call_fusage);
8115 /* Return whatever value was restored by jumping directly to the end
8116 of the function. */
8117 expand_null_return ();
8120 /* Expand code for a post- or pre- increment or decrement
8121 and return the RTX for the result.
8122 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
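/* E.g. (sketch): for  y = x++;  POST is 1 and the rtx returned holds
   the old value of X; for  y = ++x;  POST is 0 and the rtx returned
   holds the incremented value. */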
8125 expand_increment (exp, post)
8129 register rtx op0, op1;
8130 register rtx temp, value;
8131 register tree incremented = TREE_OPERAND (exp, 0);
8132 optab this_optab = add_optab;
8134 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8135 int op0_is_copy = 0;
8136 int single_insn = 0;
8137 /* 1 means we can't store into OP0 directly,
8138 because it is a subreg narrower than a word,
8139 and we don't dare clobber the rest of the word. */
8140 int bad_subreg = 0;
8142 if (output_bytecode)
8144 bc_expand_expr (exp);
8145 return NULL_RTX;
8148 /* Stabilize any component ref that might need to be
8149 evaluated more than once below. */
8150 if (!post
8151 || TREE_CODE (incremented) == BIT_FIELD_REF
8152 || (TREE_CODE (incremented) == COMPONENT_REF
8153 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8154 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8155 incremented = stabilize_reference (incremented);
8156 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8157 ones into save exprs so that they don't accidentally get evaluated
8158 more than once by the code below. */
8159 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8160 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8161 incremented = save_expr (incremented);
8163 /* Compute the operands as RTX.
8164 Note whether OP0 is the actual lvalue or a copy of it:
8165 I believe it is a copy iff it is a register or subreg
8166 and insns were generated in computing it. */
8168 temp = get_last_insn ();
8169 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8171 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8172 in place but instead must do sign- or zero-extension during assignment,
8173 so we copy it into a new register and let the code below use it as
8174 a copy.
8176 Note that we can safely modify this SUBREG since it is known not to be
8177 shared (it was made by the expand_expr call above). */
8179 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8182 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8186 else if (GET_CODE (op0) == SUBREG
8187 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8189 /* We cannot increment this SUBREG in place. If we are
8190 post-incrementing, get a copy of the old value. Otherwise,
8191 just mark that we cannot increment in place. */
8192 if (post)
8193 op0 = copy_to_reg (op0);
8194 else
8195 bad_subreg = 1;
8198 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8199 && temp != get_last_insn ());
8200 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8202 /* Decide whether incrementing or decrementing. */
8203 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8204 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8205 this_optab = sub_optab;
8207 /* Convert decrement by a constant into a negative increment. */
8208 if (this_optab == sub_optab
8209 && GET_CODE (op1) == CONST_INT)
8211 op1 = GEN_INT (- INTVAL (op1));
8212 this_optab = add_optab;
8215 /* For a preincrement, see if we can do this with a single instruction. */
8218 icode = (int) this_optab->handlers[(int) mode].insn_code;
8219 if (icode != (int) CODE_FOR_nothing
8220 /* Make sure that OP0 is valid for operands 0 and 1
8221 of the insn we want to queue. */
8222 && (*insn_operand_predicate[icode][0]) (op0, mode)
8223 && (*insn_operand_predicate[icode][1]) (op0, mode)
8224 && (*insn_operand_predicate[icode][2]) (op1, mode))
8225 single_insn = 1;
8228 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8229 then we cannot just increment OP0. We must therefore contrive to
8230 increment the original value. Then, for postincrement, we can return
8231 OP0 since it is a copy of the old value. For preincrement, expand here
8232 unless we can do it with a single insn.
8234 Likewise if storing directly into OP0 would clobber high bits
8235 we need to preserve (bad_subreg). */
8236 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8238 /* This is the easiest way to increment the value wherever it is.
8239 Problems with multiple evaluation of INCREMENTED are prevented
8240 because either (1) it is a component_ref or preincrement,
8241 in which case it was stabilized above, or (2) it is an array_ref
8242 with constant index in an array in a register, which is
8243 safe to reevaluate. */
8244 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8245 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8246 ? MINUS_EXPR : PLUS_EXPR),
8247 TREE_TYPE (exp),
8248 incremented,
8249 TREE_OPERAND (exp, 1));
8250 temp = expand_assignment (incremented, newexp, ! post, 0);
8251 return post ? op0 : temp;
8252 }
8256 /* We have a true reference to the value in OP0.
8257 If there is an insn to add or subtract in this mode, queue it.
8258 Queueing the increment insn avoids the register shuffling
8259 that often results if we must increment now and first save
8260 the old value for subsequent use. */
8262 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8263 op0 = stabilize (op0);
8264 #endif
8266 icode = (int) this_optab->handlers[(int) mode].insn_code;
8267 if (icode != (int) CODE_FOR_nothing
8268 /* Make sure that OP0 is valid for operands 0 and 1
8269 of the insn we want to queue. */
8270 && (*insn_operand_predicate[icode][0]) (op0, mode)
8271 && (*insn_operand_predicate[icode][1]) (op0, mode))
8272 {
8273 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8274 op1 = force_reg (mode, op1);
8276 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8277 }
8280 /* Preincrement, or we can't increment with one simple insn. */
8281 if (post)
8282 /* Save a copy of the value before inc or dec, to return it later. */
8283 temp = value = copy_to_reg (op0);
8284 else
8285 /* Arrange to return the incremented value. */
8286 /* Copy the rtx because expand_binop will protect from the queue,
8287 and the results of that would be invalid for us to return
8288 if our caller does emit_queue before using our result. */
8289 temp = copy_rtx (value = op0);
8291 /* Increment however we can. */
8292 op1 = expand_binop (mode, this_optab, value, op1, op0,
8293 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8294 /* Make sure the value is stored into OP0. */
8295 if (op1 != op0)
8296 emit_move_insn (op0, op1);
8298 return temp;
8299 }
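/* A minimal sketch, in plain C with hypothetical names (not part of this
   file), of the semantics expand_increment produces: POST = 1 yields the
   old value, POST = 0 the new one.  */
#if 0
static int
sketch_postincrement (int *lvalue, int inc)
{
  int old = *lvalue;		/* the copy saved in TEMP above */
  *lvalue = old + inc;		/* the queued or expanded add */
  return old;			/* postincrement yields the old value */
}

static int
sketch_preincrement (int *lvalue, int inc)
{
  *lvalue = *lvalue + inc;	/* increment in place */
  return *lvalue;		/* preincrement yields the new value */
}
#endif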
8301 /* Expand all function calls contained within EXP, innermost ones first.
8302 But don't look within expressions that have sequence points.
8303 For each CALL_EXPR, record the rtx for its value
8304 in the CALL_EXPR_RTL field. */
8306 static void
8307 preexpand_calls (exp)
8308 tree exp;
8309 {
8310 register int nops, i;
8311 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8313 if (! do_preexpand_calls)
8314 return;
8316 /* Only expressions and references can contain calls. */
8318 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8319 return;
8321 switch (TREE_CODE (exp))
8322 {
8323 case CALL_EXPR:
8324 /* Do nothing if already expanded. */
8325 if (CALL_EXPR_RTL (exp) != 0)
8326 return;
8328 /* Do nothing to built-in functions. */
8329 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8330 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8331 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8332 /* Do nothing if the call returns a variable-sized object. */
8333 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8334 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8335 return;
8339 case TRUTH_ANDIF_EXPR:
8340 case TRUTH_ORIF_EXPR:
8341 /* If we find one of these, then we can be sure
8342 the adjust will be done for it (since it makes jumps).
8343 Do it now, so that if this is inside an argument
8344 of a function, we don't get the stack adjustment
8345 after some other args have already been pushed. */
8346 do_pending_stack_adjust ();
8347 return;
8351 case WITH_CLEANUP_EXPR:
8352 return;
8354 case SAVE_EXPR:
8355 if (SAVE_EXPR_RTL (exp) != 0)
8356 return;
8357 }
8359 nops = tree_code_length[(int) TREE_CODE (exp)];
8360 for (i = 0; i < nops; i++)
8361 if (TREE_OPERAND (exp, i) != 0)
8362 {
8363 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8364 if (type == 'e' || type == '<' || type == '1' || type == '2'
8365 || type == 'r')
8366 preexpand_calls (TREE_OPERAND (exp, i));
8367 }
8368 }
8370 /* At the start of a function, record that we have no previously-pushed
8371 arguments waiting to be popped. */
8373 void
8374 init_pending_stack_adjust ()
8375 {
8376 pending_stack_adjust = 0;
8377 }
8379 /* When exiting from function, if safe, clear out any pending stack adjust
8380 so the adjustment won't get done. */
8382 void
8383 clear_pending_stack_adjust ()
8384 {
8385 #ifdef EXIT_IGNORE_STACK
8386 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8387 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8388 && ! flag_inline_functions)
8389 pending_stack_adjust = 0;
8390 #endif
8391 }
8393 /* Pop any previously-pushed arguments that have not been popped yet. */
8395 void
8396 do_pending_stack_adjust ()
8397 {
8398 if (inhibit_defer_pop == 0)
8399 {
8400 if (pending_stack_adjust != 0)
8401 adjust_stack (GEN_INT (pending_stack_adjust));
8402 pending_stack_adjust = 0;
8403 }
8404 }
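/* A minimal sketch (hypothetical names, not part of the compiler) of the
   deferral scheme above: byte counts from returned calls accumulate and
   are flushed by a single stack adjustment when it is safe.  */
#if 0
static int sketch_pending;	/* stands in for pending_stack_adjust */

static void
sketch_note_call_pop (int nbytes)
{
  sketch_pending += nbytes;	/* defer the pop */
}

static void
sketch_flush_pops (void)
{
  if (sketch_pending != 0)
    {
      /* one stack adjustment stands in for many small pops */
      sketch_pending = 0;
    }
}
#endif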
8406 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
8407 Returns the cleanups to be performed. */
8409 static tree
8410 defer_cleanups_to (old_cleanups)
8411 tree old_cleanups;
8412 {
8413 tree new_cleanups = NULL_TREE;
8414 tree cleanups = cleanups_this_call;
8415 tree last = NULL_TREE;
8417 while (cleanups_this_call != old_cleanups)
8418 {
8419 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8420 last = cleanups_this_call;
8421 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8422 }
8424 if (last)
8425 {
8426 /* Remove the list from the chain of cleanups. */
8427 TREE_CHAIN (last) = NULL_TREE;
8429 /* reverse them so that we can build them in the right order. */
8430 cleanups = nreverse (cleanups);
8432 while (cleanups)
8433 {
8434 if (new_cleanups)
8435 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8436 TREE_VALUE (cleanups), new_cleanups);
8437 else
8438 new_cleanups = TREE_VALUE (cleanups);
8440 cleanups = TREE_CHAIN (cleanups);
8441 }
8442 }
8444 return new_cleanups;
8445 }
8447 /* Expand all cleanups up to OLD_CLEANUPS.
8448 Needed here, and also for language-dependent calls. */
8450 void
8451 expand_cleanups_to (old_cleanups)
8452 tree old_cleanups;
8453 {
8454 while (cleanups_this_call != old_cleanups)
8455 {
8456 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8457 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8458 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8459 }
8460 }
8462 /* Expand conditional expressions. */
8464 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8465 LABEL is an rtx of code CODE_LABEL, in this function and all the
8468 void
8469 jumpifnot (exp, label)
8470 tree exp;
8471 rtx label;
8472 {
8473 do_jump (exp, label, NULL_RTX);
8474 }
8476 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8478 void
8479 jumpif (exp, label)
8480 tree exp;
8481 rtx label;
8482 {
8483 do_jump (exp, NULL_RTX, label);
8484 }
8486 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8487 the result is zero, or IF_TRUE_LABEL if the result is one.
8488 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8489 meaning fall through in that case.
8491 do_jump always does any pending stack adjust except when it does not
8492 actually perform a jump. An example where there is no jump
8493 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8495 This function is responsible for optimizing cases such as
8496 &&, || and comparison operators in EXP. */
8498 void
8499 do_jump (exp, if_false_label, if_true_label)
8500 tree exp;
8501 rtx if_false_label, if_true_label;
8502 {
8503 register enum tree_code code = TREE_CODE (exp);
8504 /* Some cases need to create a label to jump to
8505 in order to properly fall through.
8506 These cases set DROP_THROUGH_LABEL nonzero. */
8507 rtx drop_through_label = 0;
8508 rtx temp;
8509 rtx comparison = 0;
8510 int i;
8511 tree type;
8512 enum machine_mode mode;
8514 emit_queue ();
8516 switch (code)
8517 {
8518 case ERROR_MARK:
8519 break;
8521 case INTEGER_CST:
8522 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8523 if (temp)
8524 emit_jump (temp);
8525 break;
8527 #if 0
8528 /* This is not true with #pragma weak */
8529 case ADDR_EXPR:
8530 /* The address of something can never be zero. */
8531 if (if_true_label)
8532 emit_jump (if_true_label);
8533 break;
8534 #endif
8536 case NOP_EXPR:
8537 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8538 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8539 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8540 goto normal;
8541 case CONVERT_EXPR:
8542 /* If we are narrowing the operand, we have to do the compare in the
8543 narrower mode. */
8544 if ((TYPE_PRECISION (TREE_TYPE (exp))
8545 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8546 goto normal;
8547 case NON_LVALUE_EXPR:
8548 case REFERENCE_EXPR:
8549 case ABS_EXPR:
8550 case NEGATE_EXPR:
8551 case LROTATE_EXPR:
8552 case RROTATE_EXPR:
8553 /* These cannot change zero->non-zero or vice versa. */
8554 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8555 break;
8557 #if 0
8558 /* This is never less insns than evaluating the PLUS_EXPR followed by
8559 a test and can be longer if the test is eliminated. */
8560 case PLUS_EXPR:
8561 /* Reduce to minus. */
8562 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8563 TREE_OPERAND (exp, 0),
8564 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8565 TREE_OPERAND (exp, 1))));
8566 /* Process as MINUS. */
8567 #endif
8569 case MINUS_EXPR:
8570 /* Non-zero iff operands of minus differ. */
8571 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8572 TREE_OPERAND (exp, 0),
8573 TREE_OPERAND (exp, 1)),
8574 NE, NE);
8575 break;
8577 case BIT_AND_EXPR:
8578 /* If we are AND'ing with a small constant, do this comparison in the
8579 smallest type that fits. If the machine doesn't have comparisons
8580 that small, it will be converted back to the wider comparison.
8581 This helps if we are testing the sign bit of a narrower object.
8582 combine can't do this for us because it can't know whether a
8583 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8585 if (! SLOW_BYTE_ACCESS
8586 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8587 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8588 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8589 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8590 && (type = type_for_mode (mode, 1)) != 0
8591 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8592 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8593 != CODE_FOR_nothing))
8594 {
8595 do_jump (convert (type, exp), if_false_label, if_true_label);
8596 break;
8597 }
8599 goto normal;
8600 case TRUTH_NOT_EXPR:
8601 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8602 break;
8604 case TRUTH_ANDIF_EXPR:
8605 {
8606 rtx seq1, seq2;
8607 tree cleanups, old_cleanups;
8609 if (if_false_label == 0)
8610 if_false_label = drop_through_label = gen_label_rtx ();
8611 start_sequence ();
8612 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8613 seq1 = get_insns ();
8614 end_sequence ();
8616 old_cleanups = cleanups_this_call;
8617 start_sequence ();
8618 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8619 seq2 = get_insns ();
8620 end_sequence ();
8622 cleanups = defer_cleanups_to (old_cleanups);
8623 if (cleanups)
8624 {
8625 rtx flag = gen_reg_rtx (word_mode);
8626 tree new_cleanups;
8627 tree cond;
8629 /* Flag cleanups as not needed. */
8630 emit_move_insn (flag, const0_rtx);
8631 emit_insns (seq1);
8633 /* Flag cleanups as needed. */
8634 emit_move_insn (flag, const1_rtx);
8635 emit_insns (seq2);
8637 /* convert flag, which is an rtx, into a tree. */
8638 cond = make_node (RTL_EXPR);
8639 TREE_TYPE (cond) = integer_type_node;
8640 RTL_EXPR_RTL (cond) = flag;
8641 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8643 new_cleanups = build (COND_EXPR, void_type_node,
8644 truthvalue_conversion (cond),
8645 cleanups, integer_zero_node);
8646 new_cleanups = fold (new_cleanups);
8648 /* Now add in the conditionalized cleanups. */
8649 cleanups_this_call
8650 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8651 (*interim_eh_hook) (NULL_TREE);
8652 }
8653 else
8654 {
8655 emit_insns (seq1);
8656 emit_insns (seq2);
8657 }
8658 }
8659 break;
8661 case TRUTH_ORIF_EXPR:
8662 {
8663 rtx seq1, seq2;
8664 tree cleanups, old_cleanups;
8666 if (if_true_label == 0)
8667 if_true_label = drop_through_label = gen_label_rtx ();
8668 start_sequence ();
8669 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8670 seq1 = get_insns ();
8671 end_sequence ();
8673 old_cleanups = cleanups_this_call;
8674 start_sequence ();
8675 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8676 seq2 = get_insns ();
8677 end_sequence ();
8679 cleanups = defer_cleanups_to (old_cleanups);
8680 if (cleanups)
8681 {
8682 rtx flag = gen_reg_rtx (word_mode);
8683 tree new_cleanups;
8684 tree cond;
8686 /* Flag cleanups as not needed. */
8687 emit_move_insn (flag, const0_rtx);
8688 emit_insns (seq1);
8690 /* Flag cleanups as needed. */
8691 emit_move_insn (flag, const1_rtx);
8692 emit_insns (seq2);
8694 /* convert flag, which is an rtx, into a tree. */
8695 cond = make_node (RTL_EXPR);
8696 TREE_TYPE (cond) = integer_type_node;
8697 RTL_EXPR_RTL (cond) = flag;
8698 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8700 new_cleanups = build (COND_EXPR, void_type_node,
8701 truthvalue_conversion (cond),
8702 cleanups, integer_zero_node);
8703 new_cleanups = fold (new_cleanups);
8705 /* Now add in the conditionalized cleanups. */
8706 cleanups_this_call
8707 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8708 (*interim_eh_hook) (NULL_TREE);
8709 }
8710 else
8711 {
8712 emit_insns (seq1);
8713 emit_insns (seq2);
8714 }
8715 }
8716 break;
8718 case COMPOUND_EXPR:
8720 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8724 do_pending_stack_adjust ();
8725 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8726 break;
8728 case COMPONENT_REF:
8729 case BIT_FIELD_REF:
8730 case ARRAY_REF:
8731 {
8732 int bitsize, bitpos, unsignedp;
8733 enum machine_mode mode;
8734 tree type;
8735 tree offset;
8736 int volatilep = 0;
8738 /* Get description of this reference. We don't actually care
8739 about the underlying object here. */
8740 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8741 &mode, &unsignedp, &volatilep);
8743 type = type_for_size (bitsize, unsignedp);
8744 if (! SLOW_BYTE_ACCESS
8745 && type != 0 && bitsize >= 0
8746 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8747 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8748 != CODE_FOR_nothing))
8749 {
8750 do_jump (convert (type, exp), if_false_label, if_true_label);
8751 break;
8752 }
8753 goto normal;
8754 }
8756 case COND_EXPR:
8757 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8758 if (integer_onep (TREE_OPERAND (exp, 1))
8759 && integer_zerop (TREE_OPERAND (exp, 2)))
8760 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8762 else if (integer_zerop (TREE_OPERAND (exp, 1))
8763 && integer_onep (TREE_OPERAND (exp, 2)))
8764 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8765 else
8766 {
8768 register rtx label1 = gen_label_rtx ();
8769 drop_through_label = gen_label_rtx ();
8770 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8771 /* Now the THEN-expression. */
8772 do_jump (TREE_OPERAND (exp, 1),
8773 if_false_label ? if_false_label : drop_through_label,
8774 if_true_label ? if_true_label : drop_through_label);
8775 /* In case the do_jump just above never jumps. */
8776 do_pending_stack_adjust ();
8777 emit_label (label1);
8778 /* Now the ELSE-expression. */
8779 do_jump (TREE_OPERAND (exp, 2),
8780 if_false_label ? if_false_label : drop_through_label,
8781 if_true_label ? if_true_label : drop_through_label);
8782 }
8783 break;
8785 case EQ_EXPR:
8786 if (integer_zerop (TREE_OPERAND (exp, 1)))
8787 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8788 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8789 == MODE_INT)
8790 &&
8791 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8792 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8793 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8794 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8795 else
8796 comparison = compare (exp, EQ, EQ);
8797 break;
8799 case NE_EXPR:
8800 if (integer_zerop (TREE_OPERAND (exp, 1)))
8801 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8802 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8803 == MODE_INT)
8804 &&
8805 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8806 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8807 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8808 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8809 else
8810 comparison = compare (exp, NE, NE);
8811 break;
8813 case LT_EXPR:
8814 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8815 == MODE_INT)
8816 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8817 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8818 else
8819 comparison = compare (exp, LT, LTU);
8820 break;
8822 case LE_EXPR:
8823 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8824 == MODE_INT)
8825 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8826 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8827 else
8828 comparison = compare (exp, LE, LEU);
8829 break;
8831 case GT_EXPR:
8832 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8833 == MODE_INT)
8834 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8835 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8836 else
8837 comparison = compare (exp, GT, GTU);
8838 break;
8840 case GE_EXPR:
8841 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8842 == MODE_INT)
8843 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8844 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8845 else
8846 comparison = compare (exp, GE, GEU);
8847 break;
8849 default:
8850 normal:
8851 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8852 #if 0
8853 /* This is not needed any more and causes poor code since it causes
8854 comparisons and tests from non-SI objects to have different code
8855 sequences. */
8856 /* Copy to register to avoid generating bad insns by cse
8857 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8858 if (!cse_not_expected && GET_CODE (temp) == MEM)
8859 temp = copy_to_reg (temp);
8860 #endif
8861 do_pending_stack_adjust ();
8862 if (GET_CODE (temp) == CONST_INT)
8863 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8864 else if (GET_CODE (temp) == LABEL_REF)
8865 comparison = const_true_rtx;
8866 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8867 && !can_compare_p (GET_MODE (temp)))
8868 /* Note swapping the labels gives us not-equal. */
8869 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8870 else if (GET_MODE (temp) != VOIDmode)
8871 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8872 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8873 GET_MODE (temp), NULL_RTX, 0);
8875 break;
8876 }
8878 /* Do any postincrements in the expression that was tested. */
8879 emit_queue ();
8881 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8882 straight into a conditional jump instruction as the jump condition.
8883 Otherwise, all the work has been done already. */
8885 if (comparison == const_true_rtx)
8886 {
8887 if (if_true_label)
8888 emit_jump (if_true_label);
8889 }
8890 else if (comparison == const0_rtx)
8891 {
8892 if (if_false_label)
8893 emit_jump (if_false_label);
8894 }
8895 else if (comparison)
8896 do_jump_for_compare (comparison, if_false_label, if_true_label);
8898 if (drop_through_label)
8899 {
8900 /* If do_jump produces code that might be jumped around,
8901 do any stack adjusts from that code, before the place
8902 where control merges in. */
8903 do_pending_stack_adjust ();
8904 emit_label (drop_through_label);
8905 }
8906 }
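/* A minimal sketch, in plain C, of the jump structure do_jump builds for
   the short-circuit operators: for `a && b' the second operand is tested
   only when the first is nonzero, and a missing label falls through.  */
#if 0
static int
sketch_andif (int a, int b)
{
  if (a == 0)
    goto if_false;		/* first operand decides alone */
  if (b == 0)
    goto if_false;
  return 1;			/* if_true */
 if_false:
  return 0;
}
#endif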
8908 /* Given a comparison expression EXP for values too wide to be compared
8909 with one insn, test the comparison and jump to the appropriate label.
8910 The code of EXP is ignored; we always test GT if SWAP is 0,
8911 and LT if SWAP is 1. */
8913 static void
8914 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8915 tree exp;
8916 int swap;
8917 rtx if_false_label, if_true_label;
8918 {
8919 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8920 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8921 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8922 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8923 rtx drop_through_label = 0;
8924 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8925 int i;
8927 if (! if_true_label || ! if_false_label)
8928 drop_through_label = gen_label_rtx ();
8929 if (! if_true_label)
8930 if_true_label = drop_through_label;
8931 if (! if_false_label)
8932 if_false_label = drop_through_label;
8934 /* Compare a word at a time, high order first. */
8935 for (i = 0; i < nwords; i++)
8936 {
8937 rtx comp;
8938 rtx op0_word, op1_word;
8940 if (WORDS_BIG_ENDIAN)
8941 {
8942 op0_word = operand_subword_force (op0, i, mode);
8943 op1_word = operand_subword_force (op1, i, mode);
8944 }
8945 else
8946 {
8947 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8948 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8949 }
8951 /* All but high-order word must be compared as unsigned. */
8952 comp = compare_from_rtx (op0_word, op1_word,
8953 (unsignedp || i > 0) ? GTU : GT,
8954 unsignedp, word_mode, NULL_RTX, 0);
8955 if (comp == const_true_rtx)
8956 emit_jump (if_true_label);
8957 else if (comp != const0_rtx)
8958 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8960 /* Consider lower words only if these are equal. */
8961 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8962 NULL_RTX, 0);
8963 if (comp == const_true_rtx)
8964 emit_jump (if_false_label);
8965 else if (comp != const0_rtx)
8966 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8967 }
8969 if (if_false_label)
8970 emit_jump (if_false_label);
8971 if (drop_through_label)
8972 emit_label (drop_through_label);
8973 }
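/* A minimal sketch (hypothetical names) of the word-at-a-time `greater'
   test above for a two-word unsigned value, high-order word first; the
   signed case would compare the high-order words signed.  */
#if 0
static int
sketch_two_word_gtu (unsigned long hi0, unsigned long lo0,
                     unsigned long hi1, unsigned long lo1)
{
  if (hi0 > hi1)
    return 1;			/* high-order words decide */
  if (hi0 != hi1)
    return 0;			/* op0 has the smaller high word */
  return lo0 > lo1;		/* lower words compared unsigned */
}
#endif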
8975 /* Compare OP0 with OP1, word at a time, in mode MODE.
8976 UNSIGNEDP says to do unsigned comparison.
8977 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8979 static void
8980 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8981 enum machine_mode mode;
8982 int unsignedp;
8983 rtx op0, op1;
8984 rtx if_false_label, if_true_label;
8985 {
8986 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8987 rtx drop_through_label = 0;
8988 int i;
8990 if (! if_true_label || ! if_false_label)
8991 drop_through_label = gen_label_rtx ();
8992 if (! if_true_label)
8993 if_true_label = drop_through_label;
8994 if (! if_false_label)
8995 if_false_label = drop_through_label;
8997 /* Compare a word at a time, high order first. */
8998 for (i = 0; i < nwords; i++)
8999 {
9000 rtx comp;
9001 rtx op0_word, op1_word;
9003 if (WORDS_BIG_ENDIAN)
9004 {
9005 op0_word = operand_subword_force (op0, i, mode);
9006 op1_word = operand_subword_force (op1, i, mode);
9007 }
9008 else
9009 {
9010 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9011 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9012 }
9014 /* All but high-order word must be compared as unsigned. */
9015 comp = compare_from_rtx (op0_word, op1_word,
9016 (unsignedp || i > 0) ? GTU : GT,
9017 unsignedp, word_mode, NULL_RTX, 0);
9018 if (comp == const_true_rtx)
9019 emit_jump (if_true_label);
9020 else if (comp != const0_rtx)
9021 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9023 /* Consider lower words only if these are equal. */
9024 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9025 NULL_RTX, 0);
9026 if (comp == const_true_rtx)
9027 emit_jump (if_false_label);
9028 else if (comp != const0_rtx)
9029 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9030 }
9032 if (if_false_label)
9033 emit_jump (if_false_label);
9034 if (drop_through_label)
9035 emit_label (drop_through_label);
9036 }
9038 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9039 with one insn, test the comparison and jump to the appropriate label. */
9041 static void
9042 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9043 tree exp;
9044 rtx if_false_label, if_true_label;
9045 {
9046 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9047 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9048 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9049 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9050 int i;
9051 rtx drop_through_label = 0;
9053 if (! if_false_label)
9054 drop_through_label = if_false_label = gen_label_rtx ();
9056 for (i = 0; i < nwords; i++)
9057 {
9058 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9059 operand_subword_force (op1, i, mode),
9060 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9061 word_mode, NULL_RTX, 0);
9062 if (comp == const_true_rtx)
9063 emit_jump (if_false_label);
9064 else if (comp != const0_rtx)
9065 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9066 }
9068 if (if_true_label)
9069 emit_jump (if_true_label);
9070 if (drop_through_label)
9071 emit_label (drop_through_label);
9072 }
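/* A minimal sketch of the multiword equality test above: any word pair
   that differs sends control to the false label; only when every word
   matches does control reach the true label.  */
#if 0
static int
sketch_two_word_eq (unsigned long hi0, unsigned long lo0,
                    unsigned long hi1, unsigned long lo1)
{
  if (hi0 != hi1)
    return 0;			/* goes to if_false_label */
  if (lo0 != lo1)
    return 0;
  return 1;			/* falls into the if_true jump */
}
#endif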
9074 /* Jump according to whether OP0 is 0.
9075 We assume that OP0 has an integer mode that is too wide
9076 for the available compare insns. */
9078 static void
9079 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9080 rtx op0;
9081 rtx if_false_label, if_true_label;
9082 {
9083 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9084 int i;
9085 rtx drop_through_label = 0;
9087 if (! if_false_label)
9088 drop_through_label = if_false_label = gen_label_rtx ();
9090 for (i = 0; i < nwords; i++)
9091 {
9092 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9093 GET_MODE (op0)),
9094 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9095 if (comp == const_true_rtx)
9096 emit_jump (if_false_label);
9097 else if (comp != const0_rtx)
9098 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9099 }
9101 if (if_true_label)
9102 emit_jump (if_true_label);
9103 if (drop_through_label)
9104 emit_label (drop_through_label);
9105 }
9107 /* Given a comparison expression in rtl form, output conditional branches to
9108 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9110 static void
9111 do_jump_for_compare (comparison, if_false_label, if_true_label)
9112 rtx comparison, if_false_label, if_true_label;
9113 {
9114 if (if_true_label)
9115 {
9116 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9117 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9118 else
9119 abort ();
9121 if (if_false_label)
9122 emit_jump (if_false_label);
9123 }
9124 else if (if_false_label)
9125 {
9126 rtx insn;
9127 rtx prev = get_last_insn ();
9128 rtx branch = 0;
9130 /* Output the branch with the opposite condition. Then try to invert
9131 what is generated. If more than one insn is a branch, or if the
9132 branch is not the last insn written, abort. If we can't invert
9133 the branch, make a true label, redirect this jump to that,
9134 emit a jump to the false label and define the true label. */
9136 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9137 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9138 else
9139 abort ();
9141 /* Here we get the first insn that was just emitted. It used to be the
9142 case that, on some machines, emitting the branch would discard
9143 the previous compare insn and emit a replacement. This isn't
9144 done anymore, but abort if we see that PREV is deleted. */
9146 if (prev == 0)
9147 insn = get_insns ();
9148 else if (INSN_DELETED_P (prev))
9149 abort ();
9150 else
9151 insn = NEXT_INSN (prev);
9153 for (; insn; insn = NEXT_INSN (insn))
9154 if (GET_CODE (insn) == JUMP_INSN)
9155 {
9156 if (branch)
9157 abort ();
9158 branch = insn;
9159 }
9161 if (branch != get_last_insn ())
9162 abort ();
9164 JUMP_LABEL (branch) = if_false_label;
9165 if (! invert_jump (branch, if_false_label))
9166 {
9167 if_true_label = gen_label_rtx ();
9168 redirect_jump (branch, if_true_label);
9169 emit_jump (if_false_label);
9170 emit_label (if_true_label);
9171 }
9172 }
9173 }
9175 /* Generate code for a comparison expression EXP
9176 (including code to compute the values to be compared)
9177 and set (CC0) according to the result.
9178 SIGNED_CODE should be the rtx operation for this comparison for
9179 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9181 We force a stack adjustment unless there are currently
9182 things pushed on the stack that aren't yet used. */
9184 static rtx
9185 compare (exp, signed_code, unsigned_code)
9186 register tree exp;
9187 enum rtx_code signed_code, unsigned_code;
9188 {
9189 register rtx op0
9190 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9191 register rtx op1
9192 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9193 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9194 register enum machine_mode mode = TYPE_MODE (type);
9195 int unsignedp = TREE_UNSIGNED (type);
9196 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9198 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9199 ((mode == BLKmode)
9200 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9201 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9202 }
9204 /* Like compare but expects the values to compare as two rtx's.
9205 The decision as to signed or unsigned comparison must be made by the caller.
9207 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9210 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9211 size of MODE should be used. */
9213 rtx
9214 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9215 register rtx op0, op1;
9216 enum rtx_code code;
9217 int unsignedp;
9218 enum machine_mode mode;
9219 rtx size;
9220 int align;
9221 {
9222 rtx tem;
9224 /* If one operand is constant, make it the second one. Only do this
9225 if the other operand is not constant as well. */
9227 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9228 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9229 {
9230 tem = op0;
9231 op0 = op1;
9232 op1 = tem;
9233 code = swap_condition (code);
9234 }
9236 if (flag_force_mem)
9237 {
9238 op0 = force_not_mem (op0);
9239 op1 = force_not_mem (op1);
9240 }
9242 do_pending_stack_adjust ();
9244 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9245 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9246 return tem;
9248 #if 0
9249 /* There's no need to do this now that combine.c can eliminate lots of
9250 sign extensions. This can be less efficient in certain cases on other
9251 machines. */
9253 /* If this is a signed equality comparison, we can do it as an
9254 unsigned comparison since zero-extension is cheaper than sign
9255 extension and comparisons with zero are done as unsigned. This is
9256 the case even on machines that can do fast sign extension, since
9257 zero-extension is easier to combine with other operations than
9258 sign-extension is. If we are comparing against a constant, we must
9259 convert it to what it would look like unsigned. */
9260 if ((code == EQ || code == NE) && ! unsignedp
9261 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9262 {
9263 if (GET_CODE (op1) == CONST_INT
9264 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9265 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9266 unsignedp = 1;
9267 }
9268 #endif
9270 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9272 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9273 }
9275 /* Generate code to calculate EXP using a store-flag instruction
9276 and return an rtx for the result. EXP is either a comparison
9277 or a TRUTH_NOT_EXPR whose operand is a comparison.
9279 If TARGET is nonzero, store the result there if convenient.
9281 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9284 Return zero if there is no suitable set-flag instruction
9285 available on this machine.
9287 Once expand_expr has been called on the arguments of the comparison,
9288 we are committed to doing the store flag, since it is not safe to
9289 re-evaluate the expression. We emit the store-flag insn by calling
9290 emit_store_flag, but only expand the arguments if we have a reason
9291 to believe that emit_store_flag will be successful. If we think that
9292 it will, but it isn't, we have to simulate the store-flag with a
9293 set/jump/set sequence. */
9295 static rtx
9296 do_store_flag (exp, target, mode, only_cheap)
9297 tree exp;
9298 rtx target;
9299 enum machine_mode mode;
9300 int only_cheap;
9301 {
9302 enum rtx_code code;
9303 tree arg0, arg1, type;
9304 tree tem;
9305 enum machine_mode operand_mode;
9306 int invert = 0;
9307 int unsignedp;
9308 rtx op0, op1;
9309 enum insn_code icode;
9310 rtx subtarget = target;
9311 rtx result, label, pattern, jump_pat;
9313 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9314 result at the end. We can't simply invert the test since it would
9315 have already been inverted if it were valid. This case occurs for
9316 some floating-point comparisons. */
9318 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9319 invert = 1, exp = TREE_OPERAND (exp, 0);
9321 arg0 = TREE_OPERAND (exp, 0);
9322 arg1 = TREE_OPERAND (exp, 1);
9323 type = TREE_TYPE (arg0);
9324 operand_mode = TYPE_MODE (type);
9325 unsignedp = TREE_UNSIGNED (type);
9327 /* We won't bother with BLKmode store-flag operations because it would mean
9328 passing a lot of information to emit_store_flag. */
9329 if (operand_mode == BLKmode)
9330 return 0;
9335 /* Get the rtx comparison code to use. We know that EXP is a comparison
9336 operation of some type. Some comparisons against 1 and -1 can be
9337 converted to comparisons with zero. Do so here so that the tests
9338 below will be aware that we have a comparison with zero. These
9339 tests will not catch constants in the first operand, but constants
9340 are rarely passed as the first operand. */
9342 switch (TREE_CODE (exp))
9343 {
9344 case EQ_EXPR:
9345 code = EQ;
9346 break;
9347 case NE_EXPR:
9348 code = NE;
9349 break;
9350 case LT_EXPR:
9351 if (integer_onep (arg1))
9352 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9353 else
9354 code = unsignedp ? LTU : LT;
9355 break;
9356 case LE_EXPR:
9357 if (! unsignedp && integer_all_onesp (arg1))
9358 arg1 = integer_zero_node, code = LT;
9359 else
9360 code = unsignedp ? LEU : LE;
9361 break;
9362 case GT_EXPR:
9363 if (! unsignedp && integer_all_onesp (arg1))
9364 arg1 = integer_zero_node, code = GE;
9365 else
9366 code = unsignedp ? GTU : GT;
9367 break;
9368 case GE_EXPR:
9369 if (integer_onep (arg1))
9370 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9371 else
9372 code = unsignedp ? GEU : GE;
9373 break;
9374 default:
9375 abort ();
9376 }
9378 /* Put a constant second. */
9379 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9380 {
9381 tem = arg0; arg0 = arg1; arg1 = tem;
9382 code = swap_condition (code);
9383 }
9385 /* If this is an equality or inequality test of a single bit, we can
9386 do this by shifting the bit being tested to the low-order bit and
9387 masking the result with the constant 1. If the condition was EQ,
9388 we xor it with 1. This does not require an scc insn and is faster
9389 than an scc insn even if we have it. */
9391 if ((code == NE || code == EQ)
9392 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9393 && integer_pow2p (TREE_OPERAND (arg0, 1))
9394 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9395 {
9396 tree inner = TREE_OPERAND (arg0, 0);
9397 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9398 NULL_RTX, VOIDmode, 0)));
9399 int ops_unsignedp;
9401 /* If INNER is a right shift of a constant and it plus BITNUM does
9402 not overflow, adjust BITNUM and INNER. */
9404 if (TREE_CODE (inner) == RSHIFT_EXPR
9405 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9406 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9407 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9408 < TYPE_PRECISION (type)))
9410 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9411 inner = TREE_OPERAND (inner, 0);
9414 /* If we are going to be able to omit the AND below, we must do our
9415 operations as unsigned. If we must use the AND, we have a choice.
9416 Normally unsigned is faster, but for some machines signed is. */
9417 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9418 #ifdef LOAD_EXTEND_OP
9419 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9420 #else
9421 : 1
9422 #endif
9423 );
9425 if (subtarget == 0 || GET_CODE (subtarget) != REG
9426 || GET_MODE (subtarget) != operand_mode
9427 || ! safe_from_p (subtarget, inner))
9428 subtarget = 0;
9430 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9432 if (bitnum != 0)
9433 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9434 size_int (bitnum), subtarget, ops_unsignedp);
9436 if (GET_MODE (op0) != mode)
9437 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9439 if ((code == EQ && ! invert) || (code == NE && invert))
9440 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9441 ops_unsignedp, OPTAB_LIB_WIDEN);
9443 /* Put the AND last so it can combine with more things. */
9444 if (bitnum != TYPE_PRECISION (type) - 1)
9445 op0 = expand_and (op0, const1_rtx, subtarget);
9447 return op0;
9448 }
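/* A minimal sketch, with hypothetical names, of the single-bit store-flag
   trick above: (x & (1 << n)) != 0 becomes (x >> n) & 1, and the EQ form
   xors the low bit; the final AND drops out when n is the top bit of an
   unsigned shift.  */
#if 0
static unsigned int
sketch_bit_ne (unsigned int x, int n)
{
  return (x >> n) & 1;		/* nonzero test of one bit */
}

static unsigned int
sketch_bit_eq (unsigned int x, int n)
{
  return ((x >> n) & 1) ^ 1;	/* zero test: invert the low bit */
}
#endif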
9450 /* Now see if we are likely to be able to do this. Return if not. */
9451 if (! can_compare_p (operand_mode))
9452 return 0;
9453 icode = setcc_gen_code[(int) code];
9454 if (icode == CODE_FOR_nothing
9455 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9456 {
9457 /* We can only do this if it is one of the special cases that
9458 can be handled without an scc insn. */
9459 if ((code == LT && integer_zerop (arg1))
9460 || (! only_cheap && code == GE && integer_zerop (arg1)))
9461 ;
9462 else if (BRANCH_COST >= 0
9463 && ! only_cheap && (code == NE || code == EQ)
9464 && TREE_CODE (type) != REAL_TYPE
9465 && ((abs_optab->handlers[(int) operand_mode].insn_code
9466 != CODE_FOR_nothing)
9467 || (ffs_optab->handlers[(int) operand_mode].insn_code
9468 != CODE_FOR_nothing)))
9469 ;
9470 else
9471 return 0;
9472 }
9474 preexpand_calls (exp);
9475 if (subtarget == 0 || GET_CODE (subtarget) != REG
9476 || GET_MODE (subtarget) != operand_mode
9477 || ! safe_from_p (subtarget, arg1))
9478 subtarget = 0;
9480 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9481 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9483 if (target == 0)
9484 target = gen_reg_rtx (mode);
9486 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9487 because, if the emit_store_flag does anything it will succeed and
9488 OP0 and OP1 will not be used subsequently. */
9490 result = emit_store_flag (target, code,
9491 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9492 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9493 operand_mode, unsignedp, 1);
9495 if (result)
9496 {
9497 if (invert)
9498 result = expand_binop (mode, xor_optab, result, const1_rtx,
9499 result, 0, OPTAB_LIB_WIDEN);
9500 return result;
9501 }
9503 /* If this failed, we have to do this with set/compare/jump/set code. */
9504 if (target == 0 || GET_CODE (target) != REG
9505 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9506 target = gen_reg_rtx (GET_MODE (target));
9508 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9509 result = compare_from_rtx (op0, op1, code, unsignedp,
9510 operand_mode, NULL_RTX, 0);
9511 if (GET_CODE (result) == CONST_INT)
9512 return (((result == const0_rtx && ! invert)
9513 || (result != const0_rtx && invert))
9514 ? const0_rtx : const1_rtx);
9516 label = gen_label_rtx ();
9517 if (bcc_gen_fctn[(int) code] == 0)
9518 abort ();
9520 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9521 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9522 emit_label (label);
9524 return target;
9525 }
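/* A minimal sketch of the set/jump/set fallback above: TARGET is preloaded
   with the value for a true comparison, and the branch skips the store of
   the opposite value when the condition holds.  */
#if 0
static int
sketch_store_flag (int condition, int invert)
{
  int target = invert ? 0 : 1;	/* value if the comparison is true */
  if (condition)
    goto done;			/* the emitted conditional branch */
  target = invert ? 1 : 0;	/* value if it is false */
 done:
  return target;
}
#endif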
9527 /* Generate a tablejump instruction (used for switch statements). */
9529 #ifdef HAVE_tablejump
9531 /* INDEX is the value being switched on, with the lowest value
9532 in the table already subtracted.
9533 MODE is its expected mode (needed if INDEX is constant).
9534 RANGE is the length of the jump table.
9535 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9537 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9538 index value is out of range. */
9540 void
9541 do_tablejump (index, mode, range, table_label, default_label)
9542 rtx index, range, table_label, default_label;
9543 enum machine_mode mode;
9544 {
9545 register rtx temp, vector;
9547 /* Do an unsigned comparison (in the proper mode) between the index
9548 expression and the value which represents the length of the range.
9549 Since we just finished subtracting the lower bound of the range
9550 from the index expression, this comparison allows us to simultaneously
9551 check that the original index expression value is both greater than
9552 or equal to the minimum value of the range and less than or equal to
9553 the maximum value of the range. */
9555 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9556 emit_jump_insn (gen_bgtu (default_label));
9558 /* If index is in range, it must fit in Pmode.
9559 Convert to Pmode so we can index with it. */
9561 index = convert_to_mode (Pmode, index, 1);
9563 /* Don't let a MEM slip through, because then INDEX that comes
9564 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9565 and break_out_memory_refs will go to work on it and mess it up. */
9566 #ifdef PIC_CASE_VECTOR_ADDRESS
9567 if (flag_pic && GET_CODE (index) != REG)
9568 index = copy_to_mode_reg (Pmode, index);
9569 #endif
9571 /* If flag_force_addr were to affect this address
9572 it could interfere with the tricky assumptions made
9573 about addresses that contain label-refs,
9574 which may be valid only very near the tablejump itself. */
9575 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9576 GET_MODE_SIZE, because this indicates how large insns are. The other
9577 uses should all be Pmode, because they are addresses. This code
9578 could fail if addresses and insns are not the same size. */
9579 index = gen_rtx (PLUS, Pmode,
9580 gen_rtx (MULT, Pmode, index,
9581 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9582 gen_rtx (LABEL_REF, Pmode, table_label));
9583 #ifdef PIC_CASE_VECTOR_ADDRESS
9584 if (flag_pic)
9585 index = PIC_CASE_VECTOR_ADDRESS (index);
9586 #endif
9588 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9589 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9590 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9591 RTX_UNCHANGING_P (vector) = 1;
9592 convert_move (temp, vector, 0);
9594 emit_jump_insn (gen_tablejump (temp, table_label));
9596 #ifndef CASE_VECTOR_PC_RELATIVE
9597 /* If we are generating PIC code or if the table is PC-relative, the
9598 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9599 if (! flag_pic)
9600 emit_barrier ();
9601 #endif
9602 }
9604 #endif /* HAVE_tablejump */
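/* A minimal sketch of the dispatch above, with hypothetical names: once
   the lowest case value has been subtracted, one unsigned comparison
   rejects both out-of-range ends, and the table is indexed directly.  */
#if 0
static void *
sketch_tablejump (long index, unsigned long range, void *table[],
                  void *default_label)
{
  if ((unsigned long) index > range)	/* also catches index < 0 */
    return default_label;
  return table[index];			/* the indirect jump target */
}
#endif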
9607 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9608 to that value is on the top of the stack. The resulting type is TYPE, and
9609 the source declaration is DECL. */
9611 void
9612 bc_load_memory (type, decl)
9613 tree type, decl;
9614 {
9615 enum bytecode_opcode opcode;
9618 /* Bit fields are special. We only know about signed and
9619 unsigned ints, and enums. The latter are treated as
9622 if (DECL_BIT_FIELD (decl))
9623 if (TREE_CODE (type) == ENUMERAL_TYPE
9624 || TREE_CODE (type) == INTEGER_TYPE)
9625 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9626 else
9627 abort ();
9628 else
9629 /* See corresponding comment in bc_store_memory(). */
9630 if (TYPE_MODE (type) == BLKmode
9631 || TYPE_MODE (type) == VOIDmode)
9632 return;
9633 else
9634 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9636 if (opcode == neverneverland)
9637 abort ();
9639 bc_emit_bytecode (opcode);
9641 #ifdef DEBUG_PRINT_CODE
9642 fputc ('\n', stderr);
9643 #endif
9644 }
9647 /* Store the contents of the second stack slot to the address in the
9648 top stack slot. DECL is the declaration of the destination and is used
9649 to determine whether we're dealing with a bitfield. */
9651 void
9652 bc_store_memory (type, decl)
9653 tree type, decl;
9654 {
9655 enum bytecode_opcode opcode;
9658 if (DECL_BIT_FIELD (decl))
9660 if (TREE_CODE (type) == ENUMERAL_TYPE
9661 || TREE_CODE (type) == INTEGER_TYPE)
9662 opcode = sstoreBI;
9663 else
9664 abort ();
9665 else
9667 if (TYPE_MODE (type) == BLKmode)
9668 {
9669 /* Copy structure. This expands to a block copy instruction, storeBLK.
9670 In addition to the arguments expected by the other store instructions,
9671 it also expects a type size (SImode) on top of the stack, which is the
9672 structure size in size units (usually bytes). The two first arguments
9673 are already on the stack; so we just put the size on level 1. For some
9674 other languages, the size may be variable, this is why we don't encode
9675 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9677 bc_expand_expr (TYPE_SIZE (type));
9678 opcode = storeBLK;
9679 }
9680 else
9681 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9683 if (opcode == neverneverland)
9684 abort ();
9686 bc_emit_bytecode (opcode);
9688 #ifdef DEBUG_PRINT_CODE
9689 fputc ('\n', stderr);
9690 #endif
9691 }
9694 /* Allocate local stack space sufficient to hold a value of the given
9695 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9696 integral power of 2. A special case is locals of type VOID, which
9697 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9698 remapped into the corresponding attribute of SI. */
9700 rtx
9701 bc_allocate_local (size, alignment)
9702 int size, alignment;
9703 {
9704 rtx retval;
9705 int byte_alignment;
9710 /* Normalize size and alignment */
9711 if (size == 0)
9712 size = UNITS_PER_WORD;
9714 if (alignment < BITS_PER_UNIT)
9715 byte_alignment = 1 << (INT_ALIGN - 1);
9717 else
9718 byte_alignment = alignment / BITS_PER_UNIT;
9720 if (local_vars_size & (byte_alignment - 1))
9721 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9723 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9724 local_vars_size += size;
9726 return retval;
9727 }
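/* A minimal sketch of the rounding above: OFFSET is bumped to the next
   multiple of BYTE_ALIGNMENT, which must be a power of two.  */
#if 0
static int
sketch_round_up (int offset, int byte_alignment)
{
  if (offset & (byte_alignment - 1))
    offset += byte_alignment - (offset & (byte_alignment - 1));
  return offset;
}
#endif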
9730 /* Allocate variable-sized local array. Variable-sized arrays are
9731 actually pointers to the address in memory where they are stored. */
9733 rtx
9734 bc_allocate_variable_array (size)
9735 tree size;
9736 {
9737 rtx retval;
9738 const int ptralign = (1 << (PTR_ALIGN - 1));
9741 if (local_vars_size & ptralign)
9742 local_vars_size += ptralign - (local_vars_size & ptralign);
9744 /* Note down local space needed: pointer to block; also return
9747 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9748 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9750 return retval;
9751 }
9753 /* Push the machine address for the given external variable offset. */
9754 void
9755 bc_load_externaddr (externaddr)
9756 rtx externaddr;
9757 {
9758 bc_emit_bytecode (constP);
9759 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9760 BYTECODE_BC_LABEL (externaddr)->offset);
9762 #ifdef DEBUG_PRINT_CODE
9763 fputc ('\n', stderr);
9764 #endif
9765 }
9767 /* Simple strdup. */
9768 static char *
9769 bc_xstrdup (s)
9770 char *s;
9771 {
9772 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9774 strcpy (new, s);
9775 return new;
9776 }
9778 /* Like above, but expects an IDENTIFIER. */
9779 void
9780 bc_load_externaddr_id (id, offset)
9781 tree id;
9782 HOST_WIDE_INT offset;
9783 {
9784 if (!IDENTIFIER_POINTER (id))
9785 abort ();
9787 bc_emit_bytecode (constP);
9788 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9790 #ifdef DEBUG_PRINT_CODE
9791 fputc ('\n', stderr);
9792 #endif
9793 }
9796 /* Push the machine address for the given local variable offset. */
9797 void
9798 bc_load_localaddr (localaddr)
9799 rtx localaddr;
9800 {
9801 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9802 }
9805 /* Push the machine address for the given parameter offset.
9806 NOTE: offset is in bits. */
9807 void
9808 bc_load_parmaddr (parmaddr)
9809 rtx parmaddr;
9810 {
9811 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9812 / BITS_PER_UNIT));
9813 }
9816 /* Convert a[i] into *(a + i). */
9817 tree
9818 bc_canonicalize_array_ref (exp)
9819 tree exp;
9820 {
9821 tree type = TREE_TYPE (exp);
9822 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9823 TREE_OPERAND (exp, 0));
9824 tree index = TREE_OPERAND (exp, 1);
9827 /* Convert the integer argument to a type the same size as a pointer
9828 so the multiply won't overflow spuriously. */
9830 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9831 index = convert (type_for_size (POINTER_SIZE, 0), index);
9833 /* The array address isn't volatile even if the array is.
9834 (Of course this isn't terribly relevant since the bytecode
9835 translator treats nearly everything as volatile anyway.) */
9836 TREE_THIS_VOLATILE (array_adr) = 0;
9838 return build1 (INDIRECT_REF, type,
9839 fold (build (PLUS_EXPR,
9840 TYPE_POINTER_TO (type),
9841 array_adr,
9842 fold (build (MULT_EXPR,
9843 TYPE_POINTER_TO (type),
9844 index,
9845 size_in_bytes (type))))));
9846 }
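/* A minimal sketch of the rewrite above in source terms: for int a[],
   a[i] is treated as an indirection through a scaled pointer sum.  */
#if 0
static int
sketch_array_ref (int *a, long i)
{
  return *(int *) ((char *) a + i * sizeof (int));
}
#endif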
9849 /* Load the address of the component referenced by the given
9850 COMPONENT_REF expression.
9852 Returns innermost lvalue. */
9854 tree
9855 bc_expand_component_address (exp)
9856 tree exp;
9857 {
9858 tree tem, chain;
9859 enum machine_mode mode;
9860 int bitpos = 0;
9861 HOST_WIDE_INT SIval;
9864 tem = TREE_OPERAND (exp, 1);
9865 mode = DECL_MODE (tem);
9868 /* Compute cumulative bit offset for nested component refs
9869 and array refs, and find the ultimate containing object. */
9871 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9872 {
9873 if (TREE_CODE (tem) == COMPONENT_REF)
9874 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9876 if (TREE_CODE (tem) == ARRAY_REF
9877 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9880 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9881 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9882 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9884 else
9885 break;
9886 }
9887 bc_expand_expr (tem);
9890 /* For bitfields also push their offset and size */
9891 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9892 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9894 if (SIval = bitpos / BITS_PER_UNIT)
9895 bc_emit_instruction (addconstPSI, SIval);
9897 return (TREE_OPERAND (exp, 1));
9898 }
9901 /* Emit code to push two SI constants */
9902 void
9903 bc_push_offset_and_size (offset, size)
9904 HOST_WIDE_INT offset, size;
9905 {
9906 bc_emit_instruction (constSI, offset);
9907 bc_emit_instruction (constSI, size);
9908 }
9911 /* Emit byte code to push the address of the given lvalue expression to
9912 the stack. If it's a bit field, we also push offset and size info.
9914 Returns innermost component, which allows us to determine not only
9915 its type, but also whether it's a bitfield. */
9917 tree
9918 bc_expand_address (exp)
9919 tree exp;
9920 {
9922 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9926 switch (TREE_CODE (exp))
9927 {
9929 case ARRAY_REF:
9930 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9933 case COMPONENT_REF:
9934 return (bc_expand_component_address (exp));
9936 case INDIRECT_REF:
9938 bc_expand_expr (TREE_OPERAND (exp, 0));
9940 /* For variable-sized types: retrieve pointer. Sometimes the
9941 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9942 also make sure we have an operand, just in case... */
9944 if (TREE_OPERAND (exp, 0)
9945 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9946 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9947 bc_emit_instruction (loadP);
9949 /* If packed, also return offset and size */
9950 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9952 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9953 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9955 return (TREE_OPERAND (exp, 0));
9958 case FUNCTION_DECL:
9959 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9960 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9961 break;
9964 case PARM_DECL:
9965 bc_load_parmaddr (DECL_RTL (exp));
9967 /* For variable-sized types: retrieve pointer */
9968 if (TYPE_SIZE (TREE_TYPE (exp))
9969 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9970 bc_emit_instruction (loadP);
9972 /* If packed, also return offset and size */
9973 if (DECL_BIT_FIELD (exp))
9974 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9975 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9976 break;
9979 case RESULT_DECL:
9981 bc_emit_instruction (returnP);
9982 break;
9985 case VAR_DECL:
9987 if (BYTECODE_LABEL (DECL_RTL (exp)))
9988 bc_load_externaddr (DECL_RTL (exp));
9989 else
9991 if (DECL_EXTERNAL (exp))
9992 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9993 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9994 else
9995 bc_load_localaddr (DECL_RTL (exp));
9997 /* For variable-sized types: retrieve pointer */
9998 if (TYPE_SIZE (TREE_TYPE (exp))
9999 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
10000 bc_emit_instruction (loadP);
10002 /* If packed, also return offset and size */
10003 if (DECL_BIT_FIELD (exp))
10004 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
10005 TREE_INT_CST_LOW (DECL_SIZE (exp)));
10006 break;
10009 case STRING_CST:
10010 {
10011 rtx r;
10013 bc_emit_bytecode (constP);
10014 r = output_constant_def (exp);
10015 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
10017 #ifdef DEBUG_PRINT_CODE
10018 fputc ('\n', stderr);
10019 #endif
10020 }
10021 break;
10023 default:
10024 break;
10025 }
10029 /* Most lvalues don't have components. */
10030 return (exp);
10031 }
10034 /* Emit a type code to be used by the runtime support in handling
10035 parameter passing. The type code consists of the machine mode
10036 plus the minimal alignment shifted left 8 bits. */
10038 tree
10039 bc_runtime_type_code (type)
10040 tree type;
10041 {
10042 int val;
10044 switch (TREE_CODE (type))
10045 {
10050 case ENUMERAL_TYPE:
10054 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
10055 break;
10066 return build_int_2 (val, 0);
10067 }
10070 /* Generate constructor label */
10071 static char *
10072 bc_gen_constr_label ()
10073 {
10074 static int label_counter;
10075 static char label[20];
10077 sprintf (label, "*LR%d", label_counter++);
10079 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
10080 }
10083 /* Evaluate constructor CONSTR and return pointer to it on level one. We
10084 expand the constructor data as static data, and push a pointer to it.
10085 The pointer is put in the pointer table and is retrieved by a constP
10086 bytecode instruction. We then loop and store each constructor member in
10087 the corresponding component. Finally, we return the original pointer on
10090 void
10091 bc_expand_constructor (constr)
10092 tree constr;
10093 {
10094 char *l;
10095 HOST_WIDE_INT ptroffs;
10099 /* Literal constructors are handled as constants, whereas
10100 non-literals are evaluated and stored element by element
10101 into the data segment. */
10103 /* Allocate space in proper segment and push pointer to space on stack.
10106 l = bc_gen_constr_label ();
10108 if (TREE_CONSTANT (constr))
10109 {
10112 bc_emit_const_labeldef (l);
10113 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
10114 }
10116 else
10117 {
10119 bc_emit_data_labeldef (l);
10120 bc_output_data_constructor (constr);
10121 }
10124 /* Add reference to pointer table and recall pointer to stack;
10125 this code is common for both types of constructors: literals
10126 and non-literals. */
10128 ptroffs = bc_define_pointer (l);
10129 bc_emit_instruction (constP, ptroffs);
10131 /* This is all that has to be done if it's a literal. */
10132 if (TREE_CONSTANT (constr))
10133 return;
10136 /* At this point, we have the pointer to the structure on top of the stack.
10137 Generate sequences of store_memory calls for the constructor. */
10139 /* constructor type is structure */
10140 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
10141 {
10142 register tree elt;
10144 /* If the constructor has fewer fields than the structure,
10145 clear the whole structure first. */
10147 if (list_length (CONSTRUCTOR_ELTS (constr))
10148 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
10149 {
10150 bc_emit_instruction (duplicate);
10151 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10152 bc_emit_instruction (clearBLK);
10153 }
10155 /* Store each element of the constructor into the corresponding
10156 field of TARGET. */
10158 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
10159 {
10160 register tree field = TREE_PURPOSE (elt);
10161 register enum machine_mode mode;
10162 int bitsize;
10163 int bitpos;
10164 int unsignedp;
10166 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
10167 mode = DECL_MODE (field);
10168 unsignedp = TREE_UNSIGNED (field);
10170 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
10172 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10173 /* The alignment of TARGET is
10174 at least what its type requires. */
10175 VOIDmode, unsignedp,
10176 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10177 int_size_in_bytes (TREE_TYPE (constr)));
10178 }
10179 }
10182 /* Constructor type is array */
10183 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
10184 {
10185 register tree elt;
10186 register int i;
10187 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
10188 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
10189 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
10190 tree elttype = TREE_TYPE (TREE_TYPE (constr));
10192 /* If the constructor has fewer fields than the structure,
10193 clear the whole structure first. */
10195 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
10196 {
10197 bc_emit_instruction (duplicate);
10198 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
10199 bc_emit_instruction (clearBLK);
10200 }
10203 /* Store each element of the constructor into the corresponding
10204 element of TARGET, determined by counting the elements. */
10206 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
10207 elt;
10208 elt = TREE_CHAIN (elt), i++)
10209 {
10210 register enum machine_mode mode;
10211 int bitsize;
10212 int bitpos;
10213 int unsignedp;
10215 mode = TYPE_MODE (elttype);
10216 bitsize = GET_MODE_BITSIZE (mode);
10217 unsignedp = TREE_UNSIGNED (elttype);
10219 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
10220 /* * TYPE_SIZE_UNIT (elttype) */ );
10222 bc_store_field (elt, bitsize, bitpos, mode,
10223 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
10224 /* The alignment of TARGET is
10225 at least what its type requires. */
10226 VOIDmode, unsignedp,
10227 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
10228 int_size_in_bytes (TREE_TYPE (constr)));
10229 }
10230 }
10231 }
10235 /* Store the value of EXP (an expression tree) into member FIELD of
10236 structure at address on stack, which has type TYPE, mode MODE and
10237 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
10240 ALIGN is the alignment that TARGET is known to have, measured in bytes.
10241 TOTAL_SIZE is its size in bytes, or -1 if variable. */
10243 static void
10244 bc_store_field (field, bitsize, bitpos, mode, exp, type,
10245 value_mode, unsignedp, align, total_size)
10246 int bitsize, bitpos;
10247 enum machine_mode mode;
10248 tree field, exp, type;
10249 enum machine_mode value_mode;
10250 int unsignedp;
10251 int align;
10252 int total_size;
10253 {
10255 /* Expand expression and copy pointer */
10256 bc_expand_expr (exp);
10257 bc_emit_instruction (over);
10260 /* If the component is a bit field, we cannot use addressing to access
10261 it. Use bit-field techniques to store in it. */
10263 if (DECL_BIT_FIELD (field))
10264 {
10265 bc_store_bit_field (bitpos, bitsize, unsignedp);
10266 return;
10267 }
10268 else
10269 /* Not bit field */
10270 {
10271 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
10273 /* Advance pointer to the desired member */
10275 bc_emit_instruction (addconstPSI, offset);
10278 bc_store_memory (type, field);
10279 }
10280 }
10283 /* Store SI/SU in bitfield */
10284 void
10285 bc_store_bit_field (offset, size, unsignedp)
10286 int offset, size, unsignedp;
10287 {
10288 /* Push bitfield offset and size */
10289 bc_push_offset_and_size (offset, size);
10292 bc_emit_instruction (sstoreBI);
10293 }
10296 /* Load SI/SU from bitfield */
10297 void
10298 bc_load_bit_field (offset, size, unsignedp)
10299 int offset, size, unsignedp;
10300 {
10301 /* Push bitfield offset and size */
10302 bc_push_offset_and_size (offset, size);
10304 /* Load: sign-extend if signed, else zero-extend */
10305 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
10306 }
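/* A minimal sketch of the bitfield load: extract SIZE bits starting at
   bit OFFSET of a word, zero- or sign-extending the result; assumes
   0 < SIZE < the number of bits in a long.  */
#if 0
static long
sketch_load_bit_field (unsigned long word, int offset, int size, int unsignedp)
{
  unsigned long mask = (1UL << size) - 1;
  unsigned long field = (word >> offset) & mask;
  if (!unsignedp && (field & (1UL << (size - 1))))
    field |= ~mask;		/* sign-extend from the top bit */
  return (long) field;
}
#endif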
10309 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
10310 (adjust stack pointer upwards), negative means add that number of
10311 levels (adjust the stack pointer downwards). Only positive values
10312 normally make sense. */
10314 void
10315 bc_adjust_stack (nlevels)
10316 int nlevels;
10317 {
10318 switch (nlevels)
10319 {
10320 case 0:
10321 break;
10323 case 2:
10324 bc_emit_instruction (drop);
10326 case 1:
10327 bc_emit_instruction (drop);
10328 break;
10330 default:
10332 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
10333 stack_depth -= nlevels;
10334 }
10336 #if defined (VALIDATE_STACK_FOR_BC)
10337 VALIDATE_STACK_FOR_BC ();
10338 #endif
10339 }