/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING. If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
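/* For instance (illustrative values, not from the original source):
   CEIL (10, 4) == 3 and CEIL (8, 4) == 2, i.e. integer division rounding up. */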
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns. */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED /* If it's last to first */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#define STACK_PUSH_CODE PRE_INC

/* Like STACK_BOUNDARY but in units of bytes, not bits. */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses. If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually. */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero. */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned. */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output. It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well. */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call. */
tree cleanups_this_call;
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned. */
static rtx saveregs_value;
/* Similarly for __builtin_apply_args. */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed. */

struct move_by_pieces
  int explicit_inc_from;
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack. (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.) */

extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory. If we can't, we won't try
   to use that mode directly when accessing a field of that mode. */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move. */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* A value of around 6 would minimize code size; infinity would minimize
   execution time. */
#define MOVE_RATIO 15
/* This array records the insn_code of insns to perform block moves. */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0

/* Register mappings for target machines without register windows. */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)

/* Maps used to convert modes to const, load, and store bytecodes. */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes. */

bc_init_mode_to_opcode_maps ()
  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
        mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
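/* Illustrative sketch, not taken from modemap.def (the opcode names below
   are hypothetical): an entry of the form

     DEF_MODEMAP (SImode, ..., constSI, loadSI, storeSI)

   would expand, via the macro above, into the three array assignments that
   record which bytecode to emit for an SImode constant, load, and store. */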
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab. */

  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine. */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory. */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))
            reg = gen_rtx (REG, mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function. */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function. */

  /* Instead of saving the postincrement queue, empty it. */

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function. */

restore_expr_status (p)

  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc. */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later. BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away. It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR. */

enqueue_insn (var, body)

  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result. */
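/* A minimal usage sketch (illustrative only, not part of the original
   source): both operands are protected immediately before the insn is
   emitted, so no queue flush can intervene:

     x = protect_from_queue (x, 1);
     y = protect_from_queue (y, 0);
     emit_insn (gen_move_insn (x, y));  */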
protect_from_queue (x, modify)

  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case. */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...))
     to facilitate use of autoincrement.
     Make a copy of the contents of the memory location
     rather than a copy of the address, but not
     if the value is of mode BLKmode. */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      register rtx y = XEXP (x, 0);
      XEXP (x, 0) = QUEUED_VAR (y);
          register rtx temp = gen_reg_rtx (GET_MODE (x));
          emit_insn_before (gen_move_insn (temp, x),

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED. */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself. */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy. */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it. */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those. */

  register enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));

/* Perform all the pending incrementations. */

  while (p = pending_chain)
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension. */
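/* Typical use (an illustrative sketch, not from the original source):

     rtx target = gen_reg_rtx (SImode);
     convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (exp)));

   where OP0 and EXP are placeholders for a value in some other integer
   mode and the tree expression it came from. */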
convert_move (to, from, unsignedp)
     register rtx to, from;

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value. */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it. We don't handle such SUBREGs as
     TO here. */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);
#ifdef HAVE_extendqfhf2
  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfsf2
  if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);

#ifdef HAVE_extendqfdf2
  if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);

#ifdef HAVE_extendqfxf2
  if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);

#ifdef HAVE_extendqftf2
  if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfsf2
  if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfdf2
  if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfxf2
  if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);

#ifdef HAVE_extendhftf2
  if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);

#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);

#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);

#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);

#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
      emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);

#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
      emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
      emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
      emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);

#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
      emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);

#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);

#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
      emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);

        libcall = extendsfdf2_libfunc;
        libcall = extendsfxf2_libfunc;
        libcall = extendsftf2_libfunc;
        libcall = truncdfsf2_libfunc;
        libcall = extenddfxf2_libfunc;
        libcall = extenddftf2_libfunc;
        libcall = truncxfsf2_libfunc;
        libcall = truncxfdf2_libfunc;
        libcall = trunctfsf2_libfunc;
        libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet. */

  value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
  emit_move_insn (to, value);
  /* Now both modes are integers. */

  /* Handle expanding beyond a word. */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported. */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register. Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG. */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word. */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand. */

      /* Get a copy of FROM widened to a word, if necessary. */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word. */
        fill_value = const0_rtx;
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
              fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words. */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less. */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Handle pointer conversion */ /* SPEE 900220 */
  if (to_mode == PSImode)
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi */

  if (from_mode == PSImode)
      if (to_mode != SImode)
          from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi
      if (HAVE_extendpsisi)
          emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi */

  /* Now follow all the conversions between integers
     no more than a word long. */

  /* For truncation, usually we can just refer to FROM in a narrower mode. */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension. */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works. */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
          /* If FROM is a SUBREG, put it into a register. Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG. */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          enum machine_mode intermediate;

          /* Search for a mode to convert via. */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode. */

  /* Support special truncate insns for certain modes. */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction. */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized. */

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it). */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it). */
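/* Illustrative sketch (not from the original source): widening a HImode
   value to SImode with zero-extension:

     rtx wide = convert_modes (SImode, HImode, narrow, 1);

   NARROW is a placeholder; the result may be NARROW itself, a constant,
   or a new pseudo, so the caller must not assume a fresh register. */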
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it. */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative. What we want to do is
     make the high-order word of the constant zero, not all ones. */
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM. Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension. This may not be
         the case, but it's the best we can do. */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case. Start by
             zero-extending, then sign extend if we need to. */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO. (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume. */
move_by_pieces (to, from, len, align)

  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available. */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.explicit_inc_to = -1;
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes. */

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything. */

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume. */

move_by_pieces_ninsns (l, align)

  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);

/* Subroutine of move_by_pieces. Move as many bytes as appropriate
   with move instructions for mode MODE. GENFUN is the gen_... function
   to make a move insn for that mode. DATA has all the other info. */

move_by_pieces_1 (genfun, mode, data)
     enum machine_mode mode;
     struct move_by_pieces *data;

  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
             ? gen_rtx (MEM, mode, data->from_addr)
             : change_address (data->from, mode,
                               plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes. */
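/* Illustrative sketch (not from the original source): copying a structure
   value whose size and alignment come from its tree type:

     emit_block_move (target, source, expr_size (exp),
                      TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

   TARGET and SOURCE stand for BLKmode MEM rtx's; EXP is the tree whose
   type supplies the size and the alignment in bytes. */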
emit_block_move (x, y, size, align)

  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)

  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage. */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask. */
              && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
              rtx last = get_last_insn ();

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);

              delete_insns_since (last);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS. */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn. */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS. SIZE indicates the number
   of bytes in the object X. */

move_block_from_reg (regno, x, nregs, size)

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory. */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx (REG, word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn. */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));

/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */

use_regs (regno, nregs)

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));

/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */

  /* Find the instructions to mark */

    insn_first = NEXT_INSN (prev);
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
                                   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
                                    REG_NOTES (insn_first));

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes. */

clear_storage (object, size)

  if (GET_MODE (object) == BLKmode)
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
      emit_library_call (bzero_libfunc, 0,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
    emit_move_insn (object, const0_rtx);

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted. */
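/* Illustrative sketch (not from the original source):

     emit_move_insn (gen_reg_rtx (SImode), GEN_INT (42));

   The constant source has VOIDmode, which is accepted; a BLKmode copy
   must go through emit_block_move instead. */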
emit_move_insn (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine. */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)

  return emit_move_insn_1 (x, y);

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid. */

emit_move_insn_1 (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible. */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
      /* Don't split destination if it is a stack push. */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set. */
      if (GET_CODE (x) == REG)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address. */

          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_highpart (submode, x), gen_highpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_lowpart (submode, x), gen_lowpart (submode, y)));

      if (GET_CODE (x) != CONCAT)
        /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
           each with a separate pseudo as destination.
           It's not correct for flow to treat them as a unit. */

      return get_last_insn ();

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions. */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      rtx prev_insn = get_last_insn ();

           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant. Otherwise, force it into a register. If we still
             can't get a part of Y, abort. */
          if (ypart == 0 && CONSTANT_P (y))
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)

          last_insn = emit_move_insn (xpart, ypart);

      /* Mark these insns as a libcall block. */
      group_insns (prev_insn);

/* Pushing data onto the stack. */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses. */

push_block (size, extra, below)

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
      rtx temp = copy_to_mode_reg (Pmode, size);
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);

  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call. We use indexing off there
   to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call. */
1970 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1971 args_addr, args_so_far)
1973 enum machine_mode mode;
1984 enum direction stack_direction
1985 #ifdef STACK_GROWS_DOWNWARD
1991 /* Decide where to pad the argument: `downward' for below,
1992 `upward' for above, or `none' for don't pad it.
1993 Default is below for small data on big-endian machines; else above. */
1994 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1996 /* Invert direction if stack is post-update. */
1997 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1998 if (where_pad != none)
1999 where_pad = (where_pad == downward ? upward : downward);
2001 xinner = x = protect_from_queue (x, 0);
2003 if (mode == BLKmode)
2005 /* Copy a block into the stack, entirely or partially. */
2008 int used = partial * UNITS_PER_WORD;
2009 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2017 /* USED is now the # of bytes we need not copy to the stack
2018 because registers will take care of them. */
2021 xinner = change_address (xinner, BLKmode,
2022 plus_constant (XEXP (xinner, 0), used));
2024 /* If the partial register-part of the arg counts in its stack size,
2025 skip the part of stack space corresponding to the registers.
2026 Otherwise, start copying to the beginning of the stack space,
2027 by setting SKIP to 0. */
2028 #ifndef REG_PARM_STACK_SPACE
2034 #ifdef PUSH_ROUNDING
2035 /* Do it with several push insns if that doesn't take lots of insns
2036 and if there is no difficulty with push insns that skip bytes
2037 on the stack for alignment purposes. */
2039 && GET_CODE (size) == CONST_INT
2041 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2043 /* Here we avoid the case of a structure whose weak alignment
2044 forces many pushes of a small amount of data,
2045 and such small pushes do rounding that causes trouble. */
2046 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2047 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2048 || PUSH_ROUNDING (align) == align)
2049 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2051 /* Push padding now if padding above and stack grows down,
2052 or if padding below and stack grows up.
2053 But if space already allocated, this has already been done. */
2054 if (extra && args_addr == 0
2055 && where_pad != none && where_pad != stack_direction)
2056 anti_adjust_stack (GEN_INT (extra));
2058 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2059 INTVAL (size) - used, align);
2062 #endif /* PUSH_ROUNDING */
2064 /* Otherwise make space on the stack and copy the data
2065 to the address of that space. */
2067 /* Deduct words put into registers from the size we must copy. */
2070 if (GET_CODE (size) == CONST_INT)
2071 size = GEN_INT (INTVAL (size) - used);
2073 size = expand_binop (GET_MODE (size), sub_optab, size,
2074 GEN_INT (used), NULL_RTX, 0,
2078 /* Get the address of the stack space.
2079 In this case, we do not deal with EXTRA separately.
2080 A single stack adjust will do. */
2083 temp = push_block (size, extra, where_pad == downward);
2086 else if (GET_CODE (args_so_far) == CONST_INT)
2087 temp = memory_address (BLKmode,
2088 plus_constant (args_addr,
2089 skip + INTVAL (args_so_far)));
2091 temp = memory_address (BLKmode,
2092 plus_constant (gen_rtx (PLUS, Pmode,
2093 args_addr, args_so_far),
2096 /* TEMP is the address of the block. Copy the data there. */
2097 if (GET_CODE (size) == CONST_INT
2098 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2101 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2102 INTVAL (size), align);
2105 /* Try the most limited insn first, because there's no point
2106 including more than one in the machine description unless
2107 the more limited one has some advantage. */
2108 #ifdef HAVE_movstrqi
2110 && GET_CODE (size) == CONST_INT
2111 && ((unsigned) INTVAL (size)
2112 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2114 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2115 xinner, size, GEN_INT (align));
2123 #ifdef HAVE_movstrhi
2125 && GET_CODE (size) == CONST_INT
2126 && ((unsigned) INTVAL (size)
2127 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2129 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2130 xinner, size, GEN_INT (align));
2138 #ifdef HAVE_movstrsi
2141 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2142 xinner, size, GEN_INT (align));
2150 #ifdef HAVE_movstrdi
2153 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2154 xinner, size, GEN_INT (align));
2163 #ifndef ACCUMULATE_OUTGOING_ARGS
2164 /* If the source is referenced relative to the stack pointer,
2165 copy it to another register to stabilize it. We do not need
2166 to do this if we know that we won't be changing sp. */
2168 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2169 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2170 temp = copy_to_reg (temp);
2173 /* Make inhibit_defer_pop nonzero around the library call
2174 to force it to pop the bcopy-arguments right away. */
2176 #ifdef TARGET_MEM_FUNCTIONS
2177 emit_library_call (memcpy_libfunc, 0,
2178 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
2181 TYPE_MODE (sizetype));
2183 emit_library_call (bcopy_libfunc, 0,
2184 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2185 convert_to_mode (TYPE_MODE (sizetype),
2186 size, TREE_UNSIGNED (sizetype)),
2187 TYPE_MODE (sizetype));
2192 else if (partial > 0)
2194 /* Scalar partly in registers. */
2196 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2199 /* # words of start of argument
2200 that we must make space for but need not store. */
2201 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2202 int args_offset = INTVAL (args_so_far);
2205 /* Push padding now if padding above and stack grows down,
2206 or if padding below and stack grows up.
2207 But if space already allocated, this has already been done. */
2208 if (extra && args_addr == 0
2209 && where_pad != none && where_pad != stack_direction)
2210 anti_adjust_stack (GEN_INT (extra));
2212 /* If we make space by pushing it, we might as well push
2213 the real data. Otherwise, we can leave OFFSET nonzero
2214 and leave the space uninitialized. */
2218 /* Now NOT_STACK gets the number of words that we don't need to
2219 allocate on the stack. */
2220 not_stack = partial - offset;
2222 /* If the partial register-part of the arg counts in its stack size,
2223 skip the part of stack space corresponding to the registers.
2224 Otherwise, start copying to the beginning of the stack space,
2225 by setting SKIP to 0. */
2226 #ifndef REG_PARM_STACK_SPACE
2232 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2233 x = validize_mem (force_const_mem (mode, x));
2235 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2236 SUBREGs of such registers are not allowed. */
2237 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2238 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2239 x = copy_to_reg (x);
2241 /* Loop over all the words allocated on the stack for this arg. */
2242 /* We can do it by words, because any scalar bigger than a word
2243 has a size a multiple of a word. */
2244 #ifndef PUSH_ARGS_REVERSED
2245 for (i = not_stack; i < size; i++)
2247 for (i = size - 1; i >= not_stack; i--)
2249 if (i >= not_stack + offset)
2250 emit_push_insn (operand_subword_force (x, i, mode),
2251 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2253 GEN_INT (args_offset + ((i - not_stack + skip)
2254 * UNITS_PER_WORD)));
2260 /* Push padding now if padding above and stack grows down,
2261 or if padding below and stack grows up.
2262 But if space already allocated, this has already been done. */
2263 if (extra && args_addr == 0
2264 && where_pad != none && where_pad != stack_direction)
2265 anti_adjust_stack (GEN_INT (extra));
2267 #ifdef PUSH_ROUNDING
2269 addr = gen_push_operand ();
2272 if (GET_CODE (args_so_far) == CONST_INT)
2274 = memory_address (mode,
2275 plus_constant (args_addr, INTVAL (args_so_far)));
2277 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2280 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2284 /* If part should go in registers, copy that part
2285 into the appropriate registers. Do this now, at the end,
2286 since mem-to-mem copies above may do function calls. */
2287 if (partial > 0 && reg != 0)
2288 move_block_to_reg (REGNO (reg), x, partial, mode);
2290 if (extra && args_addr == 0 && where_pad == stack_direction)
2291 anti_adjust_stack (GEN_INT (extra));
2294 /* Expand an assignment that stores the value of FROM into TO.
2295 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2296 (This may contain a QUEUED rtx;
2297 if the value is constant, this rtx is a constant.)
2298 Otherwise, the returned value is NULL_RTX.
2300 SUGGEST_REG is no longer actually used.
2301 It used to mean, copy the value through a register
2302 and return that register, if that is possible.
2303 We now use WANT_VALUE to decide whether to do this. */
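/* A minimal usage sketch (editorial illustration, not part of the original
   source): a front end expanding a C assignment such as `x = y' whose value
   is also needed would do roughly

       rtx val = expand_assignment (lhs_tree, rhs_tree, 1, 0);

   where `lhs_tree' and `rhs_tree' are hypothetical trees for `x' and `y';
   a nonzero WANT_VALUE asks for an rtx holding the stored value, and the
   final SUGGEST_REG argument is ignored as explained above.  */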
2306 expand_assignment (to, from, want_value, suggest_reg)
2311 register rtx to_rtx = 0;
2314 /* Don't crash if the lhs of the assignment was erroneous. */
2316 if (TREE_CODE (to) == ERROR_MARK)
2318 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2319 return want_value ? result : NULL_RTX;
2322 if (output_bytecode)
2324 tree dest_innermost;
2326 bc_expand_expr (from);
2327 bc_emit_instruction (duplicate);
2329 dest_innermost = bc_expand_address (to);
2331 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2332 take care of it here. */
2334 bc_store_memory (TREE_TYPE (to), dest_innermost);
2338 /* Assignment of a structure component needs special treatment
2339 if the structure component's rtx is not simply a MEM.
2340 Assignment of an array element at a constant index
2341 has the same problem. */
2343 if (TREE_CODE (to) == COMPONENT_REF
2344 || TREE_CODE (to) == BIT_FIELD_REF
2345 || (TREE_CODE (to) == ARRAY_REF
2346 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2347 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2349 enum machine_mode mode1;
2359 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2360 &mode1, &unsignedp, &volatilep);
2362 /* If we are going to use store_bit_field and extract_bit_field,
2363 make sure to_rtx will be safe for multiple use. */
2365 if (mode1 == VOIDmode && want_value)
2366 tem = stabilize_reference (tem);
2368 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2369 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2372 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2374 if (GET_CODE (to_rtx) != MEM)
2376 to_rtx = change_address (to_rtx, VOIDmode,
2377 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2378 force_reg (Pmode, offset_rtx)));
2379 /* If we have a variable offset, the known alignment
2380 is only that of the innermost structure containing the field.
2381 (Actually, we could sometimes do better by using the
2382 align of an element of the innermost array, but no need.) */
2383 if (TREE_CODE (to) == COMPONENT_REF
2384 || TREE_CODE (to) == BIT_FIELD_REF)
2386 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2390 if (GET_CODE (to_rtx) == MEM)
2391 MEM_VOLATILE_P (to_rtx) = 1;
2392 #if 0 /* This was turned off because, when a field is volatile
2393 in an object which is not volatile, the object may be in a register,
2394 and then we would abort over here. */
2400 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2402 /* Spurious cast makes HPUX compiler happy. */
2403 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2406 /* Required alignment of containing datum. */
2408 int_size_in_bytes (TREE_TYPE (tem)));
2409 preserve_temp_slots (result);
2413 /* If the value is meaningful, convert RESULT to the proper mode.
2414 Otherwise, return nothing. */
2415 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2416 TYPE_MODE (TREE_TYPE (from)),
2418 TREE_UNSIGNED (TREE_TYPE (to)))
2422 /* If the rhs is a function call and its value is not an aggregate,
2423 call the function before we start to compute the lhs.
2424 This is needed for correct code for cases such as
2425 val = setjmp (buf) on machines where reference to val
2426 requires loading up part of an address in a separate insn.
2428 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2429 a promoted variable where the zero- or sign- extension needs to be done.
2430 Handling this in the normal way is safe because no computation is done
2432 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2433 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2438 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2440 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2441 emit_move_insn (to_rtx, value);
2442 preserve_temp_slots (to_rtx);
2445 return want_value ? to_rtx : NULL_RTX;
2448 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2449 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2452 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2454 /* Don't move directly into a return register. */
2455 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2460 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2461 emit_move_insn (to_rtx, temp);
2462 preserve_temp_slots (to_rtx);
2465 return want_value ? to_rtx : NULL_RTX;
2468 /* In case we are returning the contents of an object which overlaps
2469 the place the value is being stored, use a safe function when copying
2470 a value through a pointer into a structure value return block. */
2471 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2472 && current_function_returns_struct
2473 && !current_function_returns_pcc_struct)
2478 size = expr_size (from);
2479 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2481 #ifdef TARGET_MEM_FUNCTIONS
2482 emit_library_call (memcpy_libfunc, 0,
2483 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2484 XEXP (from_rtx, 0), Pmode,
2485 convert_to_mode (TYPE_MODE (sizetype),
2486 size, TREE_UNSIGNED (sizetype)),
2487 TYPE_MODE (sizetype));
2489 emit_library_call (bcopy_libfunc, 0,
2490 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2491 XEXP (to_rtx, 0), Pmode,
2492 convert_to_mode (TYPE_MODE (sizetype),
2493 size, TREE_UNSIGNED (sizetype)),
2494 TYPE_MODE (sizetype));
2497 preserve_temp_slots (to_rtx);
2500 return want_value ? to_rtx : NULL_RTX;
2503 /* Compute FROM and store the value in the rtx we got. */
2506 result = store_expr (from, to_rtx, want_value);
2507 preserve_temp_slots (result);
2510 return want_value ? result : NULL_RTX;
2513 /* Generate code for computing expression EXP,
2514 and storing the value into TARGET.
2515 TARGET may contain a QUEUED rtx.
2517 If WANT_VALUE is nonzero, return a copy of the value
2518 not in TARGET, so that we can be sure to use the proper
2519 value in a containing expression even if TARGET has something
2520 else stored in it. If possible, we copy the value through a pseudo
2521 and return that pseudo. Or, if the value is constant, we try to
2522 return the constant. In some cases, we return a pseudo
2523 copied *from* TARGET.
2525 If the mode is BLKmode then we may return TARGET itself.
2526 It turns out that in BLKmode it doesn't cause a problem,
2527 because C has no operators that could combine two different
2528 assignments into the same BLKmode object with different values
2529 with no sequence point. Will other languages need this to
2532 If WANT_VALUE is 0, we return NULL, to make sure
2533 to catch quickly any cases where the caller uses the value
2534 and fails to set WANT_VALUE. */
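/* Illustrative sketch (editorial, not from the original source): the usual
   caller is expand_assignment above, which after expanding the lvalue does
   essentially

       to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
       result = store_expr (from, to_rtx, want_value);

   so TARGET here is an already-expanded lvalue and EXP is the tree whose
   value is to be stored into it.  */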
2537 store_expr (exp, target, want_value)
2539 register rtx target;
2543 int dont_return_target = 0;
2545 if (TREE_CODE (exp) == COMPOUND_EXPR)
2547 /* Perform first part of compound expression, then assign from second
2549 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2551 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2553 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2555 /* For conditional expression, get safe form of the target. Then
2556 test the condition, doing the appropriate assignment on either
2557 side. This avoids the creation of unnecessary temporaries.
2558 For non-BLKmode, it is more efficient not to do this. */
2560 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2563 target = protect_from_queue (target, 1);
2566 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2567 store_expr (TREE_OPERAND (exp, 1), target, 0);
2569 emit_jump_insn (gen_jump (lab2));
2572 store_expr (TREE_OPERAND (exp, 2), target, 0);
2576 return want_value ? target : NULL_RTX;
2578 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2579 && GET_MODE (target) != BLKmode)
2580 /* If target is in memory and caller wants value in a register instead,
2581 arrange that. Pass TARGET as target for expand_expr so that,
2582 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2583 We know expand_expr will not use the target in that case.
2584 Don't do this if TARGET is volatile because we are supposed
2585 to write it and then read it. */
2587 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2588 GET_MODE (target), 0);
2589 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2590 temp = copy_to_reg (temp);
2591 dont_return_target = 1;
2593 else if (queued_subexp_p (target))
2594 /* If target contains a postincrement, let's not risk
2595 using it as the place to generate the rhs. */
2597 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2599 /* Expand EXP into a new pseudo. */
2600 temp = gen_reg_rtx (GET_MODE (target));
2601 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2604 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2606 /* If target is volatile, ANSI requires accessing the value
2607 *from* the target, if it is accessed. So make that happen.
2608 In no case return the target itself. */
2609 if (! MEM_VOLATILE_P (target) && want_value)
2610 dont_return_target = 1;
2612 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2613 /* If this is a scalar in a register that is stored in a wider mode
2614 than the declared mode, compute the result into its declared mode
2615 and then convert to the wider mode. Our value is the computed
2618 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2620 /* If TEMP is a VOIDmode constant, use convert_modes to make
2621 sure that we properly convert it. */
2622 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2623 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2624 TYPE_MODE (TREE_TYPE (exp)), temp,
2625 SUBREG_PROMOTED_UNSIGNED_P (target));
2627 convert_move (SUBREG_REG (target), temp,
2628 SUBREG_PROMOTED_UNSIGNED_P (target));
2629 return want_value ? temp : NULL_RTX;
2633 temp = expand_expr (exp, target, GET_MODE (target), 0);
2634 /* DO return TARGET if it's a specified hardware register.
2635 expand_return relies on this.
2636 If TARGET is a volatile mem ref, either return TARGET
2637 or return a reg copied *from* TARGET; ANSI requires this.
2639 Otherwise, if TEMP is not TARGET, return TEMP
2640 if it is constant (for efficiency),
2641 or if we really want the correct value. */
2642 if (!(target && GET_CODE (target) == REG
2643 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2644 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2646 && (CONSTANT_P (temp) || want_value))
2647 dont_return_target = 1;
2650 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2651 the same as that of TARGET, adjust the constant. This is needed, for
2652 example, in case it is a CONST_DOUBLE and we want only a word-sized
2654 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2655 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2656 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2657 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2659 /* If value was not generated in the target, store it there.
2660 Convert the value to TARGET's type first if necessary. */
2662 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2664 target = protect_from_queue (target, 1);
2665 if (GET_MODE (temp) != GET_MODE (target)
2666 && GET_MODE (temp) != VOIDmode)
2668 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2669 if (dont_return_target)
2671 /* In this case, we will return TEMP,
2672 so make sure it has the proper mode.
2673 But don't forget to store the value into TARGET. */
2674 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2675 emit_move_insn (target, temp);
2678 convert_move (target, temp, unsignedp);
2681 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2683 /* Handle copying a string constant into an array.
2684 The string constant may be shorter than the array.
2685 So copy just the string's actual length, and clear the rest. */
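/* Editorial example of the case handled here (assuming the usual C front-end
   convention that TREE_STRING_LENGTH counts the terminating null): for

       char buf[8] = "abc";

   EXP is the STRING_CST "abc" and TARGET is the 8-byte array, so we copy
   only the string's own bytes and then clear the remaining bytes of BUF
   rather than copying past the end of the constant.  */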
2688 /* Get the size of the data type of the string,
2689 which is actually the size of the target. */
2690 size = expr_size (exp);
2691 if (GET_CODE (size) == CONST_INT
2692 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2693 emit_block_move (target, temp, size,
2694 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2697 /* Compute the size of the data to copy from the string. */
2699 = size_binop (MIN_EXPR,
2700 make_tree (sizetype, size),
2702 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2703 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2707 /* Copy that much. */
2708 emit_block_move (target, temp, copy_size_rtx,
2709 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2711 /* Figure out how much is left in TARGET
2712 that we have to clear. */
2713 if (GET_CODE (copy_size_rtx) == CONST_INT)
2715 temp = plus_constant (XEXP (target, 0),
2716 TREE_STRING_LENGTH (exp));
2717 size = plus_constant (size,
2718 - TREE_STRING_LENGTH (exp));
2722 enum machine_mode size_mode = Pmode;
2724 temp = force_reg (Pmode, XEXP (target, 0));
2725 temp = expand_binop (size_mode, add_optab, temp,
2726 copy_size_rtx, NULL_RTX, 0,
2729 size = expand_binop (size_mode, sub_optab, size,
2730 copy_size_rtx, NULL_RTX, 0,
2733 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2734 GET_MODE (size), 0, 0);
2735 label = gen_label_rtx ();
2736 emit_jump_insn (gen_blt (label));
2739 if (size != const0_rtx)
2741 #ifdef TARGET_MEM_FUNCTIONS
2742 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2743 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2745 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2746 temp, Pmode, size, Pmode);
2753 else if (GET_MODE (temp) == BLKmode)
2754 emit_block_move (target, temp, expr_size (exp),
2755 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2757 emit_move_insn (target, temp);
2760 if (dont_return_target && GET_CODE (temp) != MEM)
2762 if (want_value && GET_MODE (target) != BLKmode)
2763 return copy_to_reg (target);
2769 /* Store the value of constructor EXP into the rtx TARGET.
2770 TARGET is either a REG or a MEM. */
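/* Editorial example (not part of the original source): for an aggregate
   initialization such as

       struct { int a, b; } s = { 1 };

   EXP is the CONSTRUCTOR `{ 1 }' and TARGET is the rtx for `s'.  Because
   the constructor names fewer elements than the type has fields, the code
   below first clears the whole object and then stores each listed element
   into its field with store_field.  */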
2773 store_constructor (exp, target)
2777 tree type = TREE_TYPE (exp);
2779 /* We know our target cannot conflict, since safe_from_p has been called. */
2781 /* Don't try copying piece by piece into a hard register
2782 since that is vulnerable to being clobbered by EXP.
2783 Instead, construct in a pseudo register and then copy it all. */
2784 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2786 rtx temp = gen_reg_rtx (GET_MODE (target));
2787 store_constructor (exp, temp);
2788 emit_move_insn (target, temp);
2793 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2794 || TREE_CODE (type) == QUAL_UNION_TYPE)
2798 /* Inform later passes that the whole union value is dead. */
2799 if (TREE_CODE (type) == UNION_TYPE
2800 || TREE_CODE (type) == QUAL_UNION_TYPE)
2801 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2803 /* If we are building a static constructor into a register,
2804 set the initial value as zero so we can fold the value into
2806 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2807 emit_move_insn (target, const0_rtx);
2809 /* If the constructor has fewer fields than the structure,
2810 clear the whole structure first. */
2811 else if (list_length (CONSTRUCTOR_ELTS (exp))
2812 != list_length (TYPE_FIELDS (type)))
2813 clear_storage (target, int_size_in_bytes (type));
2815 /* Inform later passes that the old value is dead. */
2816 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2818 /* Store each element of the constructor into
2819 the corresponding field of TARGET. */
2821 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2823 register tree field = TREE_PURPOSE (elt);
2824 register enum machine_mode mode;
2828 tree pos, constant = 0, offset = 0;
2829 rtx to_rtx = target;
2831 /* Just ignore missing fields.
2832 We cleared the whole structure, above,
2833 if any fields are missing. */
2837 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2838 unsignedp = TREE_UNSIGNED (field);
2839 mode = DECL_MODE (field);
2840 if (DECL_BIT_FIELD (field))
2843 pos = DECL_FIELD_BITPOS (field);
2844 if (TREE_CODE (pos) == INTEGER_CST)
2846 else if (TREE_CODE (pos) == PLUS_EXPR
2847 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2848 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2853 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2859 if (contains_placeholder_p (offset))
2860 offset = build (WITH_RECORD_EXPR, sizetype,
2863 offset = size_binop (FLOOR_DIV_EXPR, offset,
2864 size_int (BITS_PER_UNIT));
2866 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2867 if (GET_CODE (to_rtx) != MEM)
2871 = change_address (to_rtx, VOIDmode,
2872 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2873 force_reg (Pmode, offset_rtx)));
2876 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2877 /* The alignment of TARGET is
2878 at least what its type requires. */
2880 TYPE_ALIGN (type) / BITS_PER_UNIT,
2881 int_size_in_bytes (type));
2884 else if (TREE_CODE (type) == ARRAY_TYPE)
2888 tree domain = TYPE_DOMAIN (type);
2889 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2890 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2891 tree elttype = TREE_TYPE (type);
2893 /* If the constructor has fewer fields than the structure,
2894 clear the whole structure first. Similarly if this is a
2895 static constructor of a non-BLKmode object. */
2897 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2898 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2899 clear_storage (target, int_size_in_bytes (type));
2901 /* Inform later passes that the old value is dead. */
2902 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2904 /* Store each element of the constructor into
2905 the corresponding element of TARGET, determined
2906 by counting the elements. */
2907 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2909 elt = TREE_CHAIN (elt), i++)
2911 register enum machine_mode mode;
2915 tree index = TREE_PURPOSE (elt);
2916 rtx xtarget = target;
2918 mode = TYPE_MODE (elttype);
2919 bitsize = GET_MODE_BITSIZE (mode);
2920 unsignedp = TREE_UNSIGNED (elttype);
2922 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2924 /* We don't currently allow variable indices in a
2925 C initializer, but let's try here to support them. */
2926 rtx pos_rtx, addr, xtarget;
2929 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2930 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2931 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2932 xtarget = change_address (target, mode, addr);
2933 store_expr (TREE_VALUE (elt), xtarget, 0);
2938 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2939 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2941 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2943 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2944 /* The alignment of TARGET is
2945 at least what its type requires. */
2947 TYPE_ALIGN (type) / BITS_PER_UNIT,
2948 int_size_in_bytes (type));
2957 /* Store the value of EXP (an expression tree)
2958 into a subfield of TARGET which has mode MODE and occupies
2959 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2960 If MODE is VOIDmode, it means that we are storing into a bit-field.
2962 If VALUE_MODE is VOIDmode, return nothing in particular.
2963 UNSIGNEDP is not used in this case.
2965 Otherwise, return an rtx for the value stored. This rtx
2966 has mode VALUE_MODE if that is convenient to do.
2967 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2969 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2970 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
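/* Editorial sketch of a typical call (the field layout is hypothetical,
   not taken from the original source): for an assignment to a bit-field
   member, say

       struct { unsigned f : 5; } s;
       s.f = v;

   expand_assignment above ends up doing roughly

       store_field (to_rtx, 5, 0, VOIDmode, from,
                    TYPE_MODE (TREE_TYPE (to)), unsignedp,
                    alignment, int_size_in_bytes (TREE_TYPE (tem)));

   where the VOIDmode MODE marks a bit-field store of BITSIZE 5 starting
   BITPOS 0 bits into TARGET.  */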
2973 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2974 unsignedp, align, total_size)
2976 int bitsize, bitpos;
2977 enum machine_mode mode;
2979 enum machine_mode value_mode;
2984 HOST_WIDE_INT width_mask = 0;
2986 if (bitsize < HOST_BITS_PER_WIDE_INT)
2987 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2989 /* If we are storing into an unaligned field of an aligned union that is
2990 in a register, we may have the mode of TARGET being an integer mode but
2991 MODE == BLKmode. In that case, get an aligned object whose size and
2992 alignment are the same as TARGET and store TARGET into it (we can avoid
2993 the store if the field being stored is the entire width of TARGET). Then
2994 call ourselves recursively to store the field into a BLKmode version of
2995 that object. Finally, load from the object into TARGET. This is not
2996 very efficient in general, but should only be slightly more expensive
2997 than the otherwise-required unaligned accesses. Perhaps this can be
2998 cleaned up later. */
3001 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3003 rtx object = assign_stack_temp (GET_MODE (target),
3004 GET_MODE_SIZE (GET_MODE (target)), 0);
3005 rtx blk_object = copy_rtx (object);
3007 PUT_MODE (blk_object, BLKmode);
3009 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3010 emit_move_insn (object, target);
3012 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3015 /* Even though we aren't returning target, we need to
3016 give it the updated value. */
3017 emit_move_insn (target, object);
3022 /* If the structure is in a register or if the component
3023 is a bit field, we cannot use addressing to access it.
3024 Use bit-field techniques or SUBREG to store in it. */
3026 if (mode == VOIDmode
3027 || (mode != BLKmode && ! direct_store[(int) mode])
3028 || GET_CODE (target) == REG
3029 || GET_CODE (target) == SUBREG
3030 /* If the field isn't aligned enough to store as an ordinary memref,
3031 store it as a bit field. */
3032 || (STRICT_ALIGNMENT
3033 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3034 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3036 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3038 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3040 if (mode != VOIDmode && mode != BLKmode
3041 && mode != TYPE_MODE (TREE_TYPE (exp)))
3042 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3044 /* Store the value in the bitfield. */
3045 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3046 if (value_mode != VOIDmode)
3048 /* The caller wants an rtx for the value. */
3049 /* If possible, avoid refetching from the bitfield itself. */
3051 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3054 enum machine_mode tmode;
3057 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3058 tmode = GET_MODE (temp);
3059 if (tmode == VOIDmode)
3061 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3062 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3063 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3065 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3066 NULL_RTX, value_mode, 0, align,
3073 rtx addr = XEXP (target, 0);
3076 /* If a value is wanted, it must be the lhs;
3077 so make the address stable for multiple use. */
3079 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3080 && ! CONSTANT_ADDRESS_P (addr)
3081 /* A frame-pointer reference is already stable. */
3082 && ! (GET_CODE (addr) == PLUS
3083 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3084 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3085 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3086 addr = copy_to_reg (addr);
3088 /* Now build a reference to just the desired component. */
3090 to_rtx = change_address (target, mode,
3091 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3092 MEM_IN_STRUCT_P (to_rtx) = 1;
3094 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3098 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3099 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3100 ARRAY_REFs and find the ultimate containing object, which we return.
3102 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3103 bit position, and *PUNSIGNEDP to the signedness of the field.
3104 If the position of the field is variable, we store a tree
3105 giving the variable offset (in units) in *POFFSET.
3106 This offset is in addition to the bit position.
3107 If the position is not variable, we store 0 in *POFFSET.
3109 If any of the extraction expressions is volatile,
3110 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3112 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3113 is a mode that can be used to access the field. In that case, *PBITSIZE
3116 If the field describes a variable-sized object, *PMODE is set to
3117 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3118 this case, but the address of the object can be found. */
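/* Editorial example (the layout values are hypothetical): for a reference
   such as `s.f' where `f' is declared `int f : 3;' and happens to start 37
   bits into `s', a call like

       tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                  &mode1, &unsignedp, &volatilep);

   returns the tree for `s' and sets BITSIZE to 3, BITPOS to 37, OFFSET to 0
   (the position is constant) and MODE1 to VOIDmode, since the field is a
   bit-field.  */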
3121 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3122 punsignedp, pvolatilep)
3127 enum machine_mode *pmode;
3131 tree orig_exp = exp;
3133 enum machine_mode mode = VOIDmode;
3134 tree offset = integer_zero_node;
3136 if (TREE_CODE (exp) == COMPONENT_REF)
3138 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3139 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3140 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3141 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3143 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3145 size_tree = TREE_OPERAND (exp, 1);
3146 *punsignedp = TREE_UNSIGNED (exp);
3150 mode = TYPE_MODE (TREE_TYPE (exp));
3151 *pbitsize = GET_MODE_BITSIZE (mode);
3152 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3157 if (TREE_CODE (size_tree) != INTEGER_CST)
3158 mode = BLKmode, *pbitsize = -1;
3160 *pbitsize = TREE_INT_CST_LOW (size_tree);
3163 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3164 and find the ultimate containing object. */
3170 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3172 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3173 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3174 : TREE_OPERAND (exp, 2));
3176 /* If this field hasn't been filled in yet, don't go
3177 past it. This should only happen when folding expressions
3178 made during type construction. */
3182 if (TREE_CODE (pos) == PLUS_EXPR)
3185 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3187 constant = TREE_OPERAND (pos, 0);
3188 var = TREE_OPERAND (pos, 1);
3190 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3192 constant = TREE_OPERAND (pos, 1);
3193 var = TREE_OPERAND (pos, 0);
3198 *pbitpos += TREE_INT_CST_LOW (constant);
3199 offset = size_binop (PLUS_EXPR, offset,
3200 size_binop (FLOOR_DIV_EXPR, var,
3201 size_int (BITS_PER_UNIT)));
3203 else if (TREE_CODE (pos) == INTEGER_CST)
3204 *pbitpos += TREE_INT_CST_LOW (pos);
3207 /* Assume here that the offset is a multiple of a unit.
3208 If not, there should be an explicitly added constant. */
3209 offset = size_binop (PLUS_EXPR, offset,
3210 size_binop (FLOOR_DIV_EXPR, pos,
3211 size_int (BITS_PER_UNIT)));
3215 else if (TREE_CODE (exp) == ARRAY_REF)
3217 /* This code is based on the code in case ARRAY_REF in expand_expr
3218 below. We assume here that the size of an array element is
3219 always an integral multiple of BITS_PER_UNIT. */
3221 tree index = TREE_OPERAND (exp, 1);
3222 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3224 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3225 tree index_type = TREE_TYPE (index);
3227 if (! integer_zerop (low_bound))
3228 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3230 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3232 index = convert (type_for_size (POINTER_SIZE, 0), index);
3233 index_type = TREE_TYPE (index);
3236 index = fold (build (MULT_EXPR, index_type, index,
3237 TYPE_SIZE (TREE_TYPE (exp))));
3239 if (TREE_CODE (index) == INTEGER_CST
3240 && TREE_INT_CST_HIGH (index) == 0)
3241 *pbitpos += TREE_INT_CST_LOW (index);
3243 offset = size_binop (PLUS_EXPR, offset,
3244 size_binop (FLOOR_DIV_EXPR, index,
3245 size_int (BITS_PER_UNIT)));
3247 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3248 && ! ((TREE_CODE (exp) == NOP_EXPR
3249 || TREE_CODE (exp) == CONVERT_EXPR)
3250 && (TYPE_MODE (TREE_TYPE (exp))
3251 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3254 /* If any reference in the chain is volatile, the effect is volatile. */
3255 if (TREE_THIS_VOLATILE (exp))
3257 exp = TREE_OPERAND (exp, 0);
3260 /* If this was a bit-field, see if there is a mode that allows direct
3261 access in case EXP is in memory. */
3262 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3264 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3265 if (mode == BLKmode)
3269 if (integer_zerop (offset))
3272 if (offset != 0 && contains_placeholder_p (offset))
3273 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3280 /* Given an rtx VALUE that may contain additions and multiplications,
3281 return an equivalent value that just refers to a register or memory.
3282 This is done by generating instructions to perform the arithmetic
3283 and returning a pseudo-register containing the value.
3285 The returned value may be a REG, SUBREG, MEM or constant. */
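/* Editorial sketch (the register number is made up): given a VALUE such as

       (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits the addition with expand_binop and, roughly speaking,
   returns a register (possibly TARGET itself) holding the sum, so the
   caller ends up with a plain REG, SUBREG, MEM or constant as promised
   above.  */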
3288 force_operand (value, target)
3291 register optab binoptab = 0;
3292 /* Use a temporary to force order of execution of calls to
3296 /* Use subtarget as the target for operand 0 of a binary operation. */
3297 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3299 if (GET_CODE (value) == PLUS)
3300 binoptab = add_optab;
3301 else if (GET_CODE (value) == MINUS)
3302 binoptab = sub_optab;
3303 else if (GET_CODE (value) == MULT)
3305 op2 = XEXP (value, 1);
3306 if (!CONSTANT_P (op2)
3307 && !(GET_CODE (op2) == REG && op2 != subtarget))
3309 tmp = force_operand (XEXP (value, 0), subtarget);
3310 return expand_mult (GET_MODE (value), tmp,
3311 force_operand (op2, NULL_RTX),
3317 op2 = XEXP (value, 1);
3318 if (!CONSTANT_P (op2)
3319 && !(GET_CODE (op2) == REG && op2 != subtarget))
3321 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3323 binoptab = add_optab;
3324 op2 = negate_rtx (GET_MODE (value), op2);
3327 /* Check for an addition with OP2 a constant integer and our first
3328 operand a PLUS of a virtual register and something else. In that
3329 case, we want to emit the sum of the virtual register and the
3330 constant first and then add the other value. This allows virtual
3331 register instantiation to simply modify the constant rather than
3332 creating another one around this addition. */
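/* Editorial illustration of the case described above (register numbers are
   made up): for

       (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 8))

   we first emit virtual-stack-vars + 8 and then add (reg 100), so that
   instantiating the virtual register later only has to fold its real offset
   into the constant 8 instead of emitting another addition.  */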
3333 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3334 && GET_CODE (XEXP (value, 0)) == PLUS
3335 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3336 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3337 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3339 rtx temp = expand_binop (GET_MODE (value), binoptab,
3340 XEXP (XEXP (value, 0), 0), op2,
3341 subtarget, 0, OPTAB_LIB_WIDEN);
3342 return expand_binop (GET_MODE (value), binoptab, temp,
3343 force_operand (XEXP (XEXP (value, 0), 1), 0),
3344 target, 0, OPTAB_LIB_WIDEN);
3347 tmp = force_operand (XEXP (value, 0), subtarget);
3348 return expand_binop (GET_MODE (value), binoptab, tmp,
3349 force_operand (op2, NULL_RTX),
3350 target, 0, OPTAB_LIB_WIDEN);
3351 /* We give UNSIGNEDP = 0 to expand_binop
3352 because the only operations we are expanding here are signed ones. */
3357 /* Subroutine of expand_expr:
3358 save the non-copied parts (LIST) of an expr (LHS), and return a list
3359 which can restore these values to their previous values,
3360 should something modify their storage. */
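/* Editorial note on the returned list (derived from the code below, not
   part of the original comments): each element appears to have TREE_PURPOSE
   set to a COMPONENT_REF designating the part within LHS and TREE_VALUE set
   to an RTL_EXPR whose rtl is a stack temporary into which the part's
   current value has been saved, so storing each TREE_VALUE back through its
   TREE_PURPOSE restores the old values.  */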
3363 save_noncopied_parts (lhs, list)
3370 for (tail = list; tail; tail = TREE_CHAIN (tail))
3371 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3372 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3375 tree part = TREE_VALUE (tail);
3376 tree part_type = TREE_TYPE (part);
3377 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3378 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3379 int_size_in_bytes (part_type), 0);
3380 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3381 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3382 parts = tree_cons (to_be_saved,
3383 build (RTL_EXPR, part_type, NULL_TREE,
3386 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3391 /* Subroutine of expand_expr:
3392 record the non-copied parts (LIST) of an expr (LHS), and return a list
3393 which specifies the initial values of these parts. */
3396 init_noncopied_parts (lhs, list)
3403 for (tail = list; tail; tail = TREE_CHAIN (tail))
3404 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3405 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3408 tree part = TREE_VALUE (tail);
3409 tree part_type = TREE_TYPE (part);
3410 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3411 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3416 /* Subroutine of expand_expr: return nonzero iff there is no way that
3417 EXP can reference X, which is being modified. */
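/* Editorial usage note (not part of the original source): callers use this
   before reusing a suggested target; the CONSTRUCTOR case of expand_expr
   below does roughly

       if (target == 0 || ! safe_from_p (target, exp))
         target = gen_reg_rtx (mode);

   (or a stack temporary for BLKmode), so that EXP is never expanded into a
   location it might itself read.  */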
3420 safe_from_p (x, exp)
3430 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3431 find the underlying pseudo. */
3432 if (GET_CODE (x) == SUBREG)
3435 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3439 /* If X is a location in the outgoing argument area, it is always safe. */
3440 if (GET_CODE (x) == MEM
3441 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3442 || (GET_CODE (XEXP (x, 0)) == PLUS
3443 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3446 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3449 exp_rtl = DECL_RTL (exp);
3456 if (TREE_CODE (exp) == TREE_LIST)
3457 return ((TREE_VALUE (exp) == 0
3458 || safe_from_p (x, TREE_VALUE (exp)))
3459 && (TREE_CHAIN (exp) == 0
3460 || safe_from_p (x, TREE_CHAIN (exp))));
3465 return safe_from_p (x, TREE_OPERAND (exp, 0));
3469 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3470 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3474 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3475 the expression. If it is set, we conflict iff we are that rtx or
3476 both are in memory. Otherwise, we check all operands of the
3477 expression recursively. */
3479 switch (TREE_CODE (exp))
3482 return (staticp (TREE_OPERAND (exp, 0))
3483 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3486 if (GET_CODE (x) == MEM)
3491 exp_rtl = CALL_EXPR_RTL (exp);
3494 /* Assume that the call will clobber all hard registers and
3496 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3497 || GET_CODE (x) == MEM)
3504 exp_rtl = RTL_EXPR_RTL (exp);
3506 /* We don't know what this can modify. */
3511 case WITH_CLEANUP_EXPR:
3512 exp_rtl = RTL_EXPR_RTL (exp);
3516 exp_rtl = SAVE_EXPR_RTL (exp);
3520 /* The only operand we look at is operand 1. The rest aren't
3521 part of the expression. */
3522 return safe_from_p (x, TREE_OPERAND (exp, 1));
3524 case METHOD_CALL_EXPR:
3525 /* This takes a rtx argument, but shouldn't appear here. */
3529 /* If we have an rtx, we do not need to scan our operands. */
3533 nops = tree_code_length[(int) TREE_CODE (exp)];
3534 for (i = 0; i < nops; i++)
3535 if (TREE_OPERAND (exp, i) != 0
3536 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3540 /* If we have an rtl, find any enclosed object. Then see if we conflict
3544 if (GET_CODE (exp_rtl) == SUBREG)
3546 exp_rtl = SUBREG_REG (exp_rtl);
3547 if (GET_CODE (exp_rtl) == REG
3548 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3552 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3553 are memory and EXP is not readonly. */
3554 return ! (rtx_equal_p (x, exp_rtl)
3555 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3556 && ! TREE_READONLY (exp)));
3559 /* If we reach here, it is safe. */
3563 /* Subroutine of expand_expr: return nonzero iff EXP is an
3564 expression whose type is statically determinable. */
3570 if (TREE_CODE (exp) == PARM_DECL
3571 || TREE_CODE (exp) == VAR_DECL
3572 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3573 || TREE_CODE (exp) == COMPONENT_REF
3574 || TREE_CODE (exp) == ARRAY_REF)
3579 /* expand_expr: generate code for computing expression EXP.
3580 An rtx for the computed value is returned. The value is never null.
3581 In the case of a void EXP, const0_rtx is returned.
3583 The value may be stored in TARGET if TARGET is nonzero.
3584 TARGET is just a suggestion; callers must assume that
3585 the rtx returned may not be the same as TARGET.
3587 If TARGET is CONST0_RTX, it means that the value will be ignored.
3589 If TMODE is not VOIDmode, it suggests generating the
3590 result in mode TMODE. But this is done only when convenient.
3591 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3592 TMODE is just a suggestion; callers must assume that
3593 the rtx returned may not have mode TMODE.
3595 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3596 with a constant address even if that address is not normally legitimate.
3597 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3599 If MODIFIER is EXPAND_SUM then when EXP is an addition
3600 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3601 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3602 products as above, or REG or MEM, or constant.
3603 Ordinarily in such cases we would output mul or add instructions
3604 and then return a pseudo reg containing the sum.
3606 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3607 it also marks a label as absolutely required (it can't be dead).
3608 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3609 This is used for outputting expressions used in initializers. */
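/* Editorial usage sketch (not part of the original source): the common
   call, as used throughout this file, is simply

       op0 = expand_expr (tree_exp, NULL_RTX, VOIDmode, 0);

   a zero MODIFIER requests ordinary expansion; EXPAND_SUM,
   EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER select the relaxed forms
   described above.  */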
3612 expand_expr (exp, target, tmode, modifier)
3615 enum machine_mode tmode;
3616 enum expand_modifier modifier;
3618 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3619 This is static so it will be accessible to our recursive callees. */
3620 static tree placeholder_list = 0;
3621 register rtx op0, op1, temp;
3622 tree type = TREE_TYPE (exp);
3623 int unsignedp = TREE_UNSIGNED (type);
3624 register enum machine_mode mode = TYPE_MODE (type);
3625 register enum tree_code code = TREE_CODE (exp);
3627 /* Use subtarget as the target for operand 0 of a binary operation. */
3628 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3629 rtx original_target = target;
3630 /* Maybe defer this until sure not doing bytecode? */
3631 int ignore = (target == const0_rtx
3632 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3633 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3634 || code == COND_EXPR)
3635 && TREE_CODE (type) == VOID_TYPE));
3639 if (output_bytecode)
3641 bc_expand_expr (exp);
3645 /* Don't use hard regs as subtargets, because the combiner
3646 can only handle pseudo regs. */
3647 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3649 /* Avoid subtargets inside loops,
3650 since they hide some invariant expressions. */
3651 if (preserve_subexpressions_p ())
3654 /* If we are going to ignore this result, we need only do something
3655 if there is a side-effect somewhere in the expression. If there
3656 is, short-circuit the most common cases here. Note that we must
3657 not call expand_expr with anything but const0_rtx in case this
3658 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3662 if (! TREE_SIDE_EFFECTS (exp))
3665 /* Ensure we reference a volatile object even if value is ignored. */
3666 if (TREE_THIS_VOLATILE (exp)
3667 && TREE_CODE (exp) != FUNCTION_DECL
3668 && mode != VOIDmode && mode != BLKmode)
3670 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3671 if (GET_CODE (temp) == MEM)
3672 temp = copy_to_reg (temp);
3676 if (TREE_CODE_CLASS (code) == '1')
3677 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3678 VOIDmode, modifier);
3679 else if (TREE_CODE_CLASS (code) == '2'
3680 || TREE_CODE_CLASS (code) == '<')
3682 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3683 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3686 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3687 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3688 /* If the second operand has no side effects, just evaluate
3690 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3691 VOIDmode, modifier);
3696 /* If will do cse, generate all results into pseudo registers
3697 since 1) that allows cse to find more things
3698 and 2) otherwise cse could produce an insn the machine
3701 if (! cse_not_expected && mode != BLKmode && target
3702 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3709 tree function = decl_function_context (exp);
3710 /* Handle using a label in a containing function. */
3711 if (function != current_function_decl && function != 0)
3713 struct function *p = find_function_data (function);
3714 /* Allocate in the memory associated with the function
3715 that the label is in. */
3716 push_obstacks (p->function_obstack,
3717 p->function_maybepermanent_obstack);
3719 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3720 label_rtx (exp), p->forced_labels);
3723 else if (modifier == EXPAND_INITIALIZER)
3724 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3725 label_rtx (exp), forced_labels);
3726 temp = gen_rtx (MEM, FUNCTION_MODE,
3727 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3728 if (function != current_function_decl && function != 0)
3729 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3734 if (DECL_RTL (exp) == 0)
3736 error_with_decl (exp, "prior parameter's size depends on `%s'");
3737 return CONST0_RTX (mode);
3741 /* If a static var's type was incomplete when the decl was written,
3742 but the type is complete now, lay out the decl now. */
3743 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3744 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3746 push_obstacks_nochange ();
3747 end_temporary_allocation ();
3748 layout_decl (exp, 0);
3749 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3754 if (DECL_RTL (exp) == 0)
3756 /* Ensure the variable is marked as used even if it doesn't go through
3757 a parser. If it hasn't been used yet, write out an external
3759 if (! TREE_USED (exp))
3761 assemble_external (exp);
3762 TREE_USED (exp) = 1;
3765 /* Handle variables inherited from containing functions. */
3766 context = decl_function_context (exp);
3768 /* We treat inline_function_decl as an alias for the current function
3769 because that is the inline function whose vars, types, etc.
3770 are being merged into the current function.
3771 See expand_inline_function. */
3772 if (context != 0 && context != current_function_decl
3773 && context != inline_function_decl
3774 /* If var is static, we don't need a static chain to access it. */
3775 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3776 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3780 /* Mark as non-local and addressable. */
3781 DECL_NONLOCAL (exp) = 1;
3782 mark_addressable (exp);
3783 if (GET_CODE (DECL_RTL (exp)) != MEM)
3785 addr = XEXP (DECL_RTL (exp), 0);
3786 if (GET_CODE (addr) == MEM)
3787 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3789 addr = fix_lexical_addr (addr, exp);
3790 return change_address (DECL_RTL (exp), mode, addr);
3793 /* This is the case of an array whose size is to be determined
3794 from its initializer, while the initializer is still being parsed.
3796 if (GET_CODE (DECL_RTL (exp)) == MEM
3797 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3798 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3799 XEXP (DECL_RTL (exp), 0));
3800 if (GET_CODE (DECL_RTL (exp)) == MEM
3801 && modifier != EXPAND_CONST_ADDRESS
3802 && modifier != EXPAND_SUM
3803 && modifier != EXPAND_INITIALIZER)
3805 /* DECL_RTL probably contains a constant address.
3806 On RISC machines where a constant address isn't valid,
3807 make some insns to get that address into a register. */
3808 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3810 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3811 return change_address (DECL_RTL (exp), VOIDmode,
3812 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3815 /* If the mode of DECL_RTL does not match that of the decl, it
3816 must be a promoted value. We return a SUBREG of the wanted mode,
3817 but mark it so that we know that it was already extended. */
3819 if (GET_CODE (DECL_RTL (exp)) == REG
3820 && GET_MODE (DECL_RTL (exp)) != mode)
3822 enum machine_mode decl_mode = DECL_MODE (exp);
3824 /* Get the signedness used for this variable. Ensure we get the
3825 same mode we got when the variable was declared. */
3827 PROMOTE_MODE (decl_mode, unsignedp, type);
3829 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3832 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3833 SUBREG_PROMOTED_VAR_P (temp) = 1;
3834 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3838 return DECL_RTL (exp);
3841 return immed_double_const (TREE_INT_CST_LOW (exp),
3842 TREE_INT_CST_HIGH (exp),
3846 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3849 /* If optimized, generate immediate CONST_DOUBLE
3850 which will be turned into memory by reload if necessary.
3852 We used to force a register so that loop.c could see it. But
3853 this does not allow gen_* patterns to perform optimizations with
3854 the constants. It also produces two insns in cases like "x = 1.0;".
3855 On most machines, floating-point constants are not permitted in
3856 many insns, so we'd end up copying it to a register in any case.
3858 Now, we do the copying in expand_binop, if appropriate. */
3859 return immed_real_const (exp);
3863 if (! TREE_CST_RTL (exp))
3864 output_constant_def (exp);
3866 /* TREE_CST_RTL probably contains a constant address.
3867 On RISC machines where a constant address isn't valid,
3868 make some insns to get that address into a register. */
3869 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3870 && modifier != EXPAND_CONST_ADDRESS
3871 && modifier != EXPAND_INITIALIZER
3872 && modifier != EXPAND_SUM
3873 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3874 return change_address (TREE_CST_RTL (exp), VOIDmode,
3875 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3876 return TREE_CST_RTL (exp);
3879 context = decl_function_context (exp);
3880 /* We treat inline_function_decl as an alias for the current function
3881 because that is the inline function whose vars, types, etc.
3882 are being merged into the current function.
3883 See expand_inline_function. */
3884 if (context == current_function_decl || context == inline_function_decl)
3887 /* If this is non-local, handle it. */
3890 temp = SAVE_EXPR_RTL (exp);
3891 if (temp && GET_CODE (temp) == REG)
3893 put_var_into_stack (exp);
3894 temp = SAVE_EXPR_RTL (exp);
3896 if (temp == 0 || GET_CODE (temp) != MEM)
3898 return change_address (temp, mode,
3899 fix_lexical_addr (XEXP (temp, 0), exp));
3901 if (SAVE_EXPR_RTL (exp) == 0)
3903 if (mode == BLKmode)
3906 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3907 MEM_IN_STRUCT_P (temp)
3908 = (TREE_CODE (type) == RECORD_TYPE
3909 || TREE_CODE (type) == UNION_TYPE
3910 || TREE_CODE (type) == QUAL_UNION_TYPE
3911 || TREE_CODE (type) == ARRAY_TYPE);
3915 enum machine_mode var_mode = mode;
3917 if (TREE_CODE (type) == INTEGER_TYPE
3918 || TREE_CODE (type) == ENUMERAL_TYPE
3919 || TREE_CODE (type) == BOOLEAN_TYPE
3920 || TREE_CODE (type) == CHAR_TYPE
3921 || TREE_CODE (type) == REAL_TYPE
3922 || TREE_CODE (type) == POINTER_TYPE
3923 || TREE_CODE (type) == OFFSET_TYPE)
3925 PROMOTE_MODE (var_mode, unsignedp, type);
3928 temp = gen_reg_rtx (var_mode);
3931 SAVE_EXPR_RTL (exp) = temp;
3932 if (!optimize && GET_CODE (temp) == REG)
3933 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3936 /* If the mode of TEMP does not match that of the expression, it
3937 must be a promoted value. We pass store_expr a SUBREG of the
3938 wanted mode but mark it so that we know that it was already
3939 extended. Note that `unsignedp' was modified above in
3942 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3944 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3945 SUBREG_PROMOTED_VAR_P (temp) = 1;
3946 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3949 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3952 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3953 must be a promoted value. We return a SUBREG of the wanted mode,
3954 but mark it so that we know that it was already extended. */
3956 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3957 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3959 enum machine_mode var_mode = mode;
3961 if (TREE_CODE (type) == INTEGER_TYPE
3962 || TREE_CODE (type) == ENUMERAL_TYPE
3963 || TREE_CODE (type) == BOOLEAN_TYPE
3964 || TREE_CODE (type) == CHAR_TYPE
3965 || TREE_CODE (type) == REAL_TYPE
3966 || TREE_CODE (type) == POINTER_TYPE
3967 || TREE_CODE (type) == OFFSET_TYPE)
3969 PROMOTE_MODE (var_mode, unsignedp, type);
3972 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3973 SUBREG_PROMOTED_VAR_P (temp) = 1;
3974 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3978 return SAVE_EXPR_RTL (exp);
3980 case PLACEHOLDER_EXPR:
3981 /* If there is an object on the head of the placeholder list,
3982 see if some object in its references is of type TYPE. For
3983 further information, see tree.def. */
3984 if (placeholder_list)
3987 tree old_list = placeholder_list;
3989 for (object = TREE_PURPOSE (placeholder_list);
3990 TREE_TYPE (object) != type
3991 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
3992 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3993 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3994 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
3995 object = TREE_OPERAND (object, 0))
3998 if (object && TREE_TYPE (object) == type)
4000 /* Expand this object skipping the list entries before
4001 it was found in case it is also a PLACEHOLDER_EXPR.
4002 In that case, we want to translate it using subsequent
4004 placeholder_list = TREE_CHAIN (placeholder_list);
4005 temp = expand_expr (object, original_target, tmode, modifier);
4006 placeholder_list = old_list;
4011 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4014 case WITH_RECORD_EXPR:
4015 /* Put the object on the placeholder list, expand our first operand,
4016 and pop the list. */
4017 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4019 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4021 placeholder_list = TREE_CHAIN (placeholder_list);
4025 expand_exit_loop_if_false (NULL_PTR,
4026 invert_truthvalue (TREE_OPERAND (exp, 0)));
4031 expand_start_loop (1);
4032 expand_expr_stmt (TREE_OPERAND (exp, 0));
4040 tree vars = TREE_OPERAND (exp, 0);
4041 int vars_need_expansion = 0;
4043 /* Need to open a binding contour here because
4044 if there are any cleanups they must be contained here. */
4045 expand_start_bindings (0);
4047 /* Mark the corresponding BLOCK for output in its proper place. */
4048 if (TREE_OPERAND (exp, 2) != 0
4049 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4050 insert_block (TREE_OPERAND (exp, 2));
4052 /* If VARS have not yet been expanded, expand them now. */
4055 if (DECL_RTL (vars) == 0)
4057 vars_need_expansion = 1;
4060 expand_decl_init (vars);
4061 vars = TREE_CHAIN (vars);
4064 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4066 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4072 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4074 emit_insns (RTL_EXPR_SEQUENCE (exp));
4075 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4076 free_temps_for_rtl_expr (exp);
4077 return RTL_EXPR_RTL (exp);
4080 /* If we don't need the result, just ensure we evaluate any
4085 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4086 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4089 /* All elts simple constants => refer to a constant in memory. But
4090 if this is a non-BLKmode mode, let it store a field at a time
4091 since that should make a CONST_INT or CONST_DOUBLE when we
4092 fold. If we are making an initializer and all operands are
4093 constant, put it in memory as well. */
4094 else if ((TREE_STATIC (exp)
4095 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4096 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4098 rtx constructor = output_constant_def (exp);
4099 if (modifier != EXPAND_CONST_ADDRESS
4100 && modifier != EXPAND_INITIALIZER
4101 && modifier != EXPAND_SUM
4102 && !memory_address_p (GET_MODE (constructor),
4103 XEXP (constructor, 0)))
4104 constructor = change_address (constructor, VOIDmode,
4105 XEXP (constructor, 0));
4111 if (target == 0 || ! safe_from_p (target, exp))
4113 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4114 target = gen_reg_rtx (mode);
4117 enum tree_code c = TREE_CODE (type);
4119 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4120 if (c == RECORD_TYPE || c == UNION_TYPE
4121 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
4122 MEM_IN_STRUCT_P (target) = 1;
4125 store_constructor (exp, target);
4131 tree exp1 = TREE_OPERAND (exp, 0);
4134 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4135 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4136 This code has the same general effect as simply doing
4137 expand_expr on the save expr, except that the expression PTR
4138 is computed for use as a memory address. This means different
4139 code, suitable for indexing, may be generated. */
4140 if (TREE_CODE (exp1) == SAVE_EXPR
4141 && SAVE_EXPR_RTL (exp1) == 0
4142 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4143 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4144 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4146 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4147 VOIDmode, EXPAND_SUM);
4148 op0 = memory_address (mode, temp);
4149 op0 = copy_all_regs (op0);
4150 SAVE_EXPR_RTL (exp1) = op0;
4154 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4155 op0 = memory_address (mode, op0);
4158 temp = gen_rtx (MEM, mode, op0);
4159 /* If address was computed by addition,
4160 mark this as an element of an aggregate. */
4161 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4162 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4163 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4164 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4165 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4166 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4167 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
4168 || (TREE_CODE (exp1) == ADDR_EXPR
4169 && (exp2 = TREE_OPERAND (exp1, 0))
4170 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4171 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4172 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4173 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
4174 MEM_IN_STRUCT_P (temp) = 1;
4175 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4176 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4177 a location is accessed through a pointer to const does not mean
4178 that the value there can never change. */
4179 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4185 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4189 tree array = TREE_OPERAND (exp, 0);
4190 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4191 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4192 tree index = TREE_OPERAND (exp, 1);
4193 tree index_type = TREE_TYPE (index);
4196 if (TREE_CODE (low_bound) != INTEGER_CST
4197 && contains_placeholder_p (low_bound))
4198 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4200 /* Optimize the special-case of a zero lower bound.
4202 We convert the low_bound to sizetype to avoid some problems
4203 with constant folding. (E.g. suppose the lower bound is 1,
4204 and its mode is QI. Without the conversion, (ARRAY
4205 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4206 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4208 But sizetype isn't quite right either (especially if
4209 the lowbound is negative). FIXME */
4211 if (! integer_zerop (low_bound))
4212 index = fold (build (MINUS_EXPR, index_type, index,
4213 convert (sizetype, low_bound)));
4215 if (TREE_CODE (index) != INTEGER_CST
4216 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4218 /* Nonconstant array index or nonconstant element size.
4219 Generate the tree for *(&array+index) and expand that,
4220 except do it in a language-independent way
4221 and don't complain about non-lvalue arrays.
4222 `mark_addressable' should already have been called
4223 for any array for which this case will be reached. */
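/* For example, a nonconstant reference  a[i]  is handled here roughly as
	*(&a + i * sizeof (a[0]))
   built from language-independent ADDR_EXPR, MULT_EXPR, PLUS_EXPR and
   INDIRECT_REF nodes and then expanded recursively.  */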
4225 /* Don't forget the const or volatile flag from the array element. */
4227 tree variant_type = build_type_variant (type,
4228 TREE_READONLY (exp),
4229 TREE_THIS_VOLATILE (exp));
4230 tree array_adr = build1 (ADDR_EXPR,
4231 build_pointer_type (variant_type), array);
4233 tree size = size_in_bytes (type);
4235 /* Convert the integer argument to a type the same size as a
4236 pointer so the multiply won't overflow spuriously. */
4237 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4238 index = convert (type_for_size (POINTER_SIZE, 0), index);
4240 if (TREE_CODE (size) != INTEGER_CST
4241 && contains_placeholder_p (size))
4242 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4244 /* Don't think the address has side effects
4245 just because the array does.
4246 (In some cases the address might have side effects,
4247 and we fail to record that fact here. However, it should not
4248 matter, since expand_expr should not care.) */
4249 TREE_SIDE_EFFECTS (array_adr) = 0;
4251 elt = build1 (INDIRECT_REF, type,
4252 fold (build (PLUS_EXPR,
4253 TYPE_POINTER_TO (variant_type),
4255 fold (build (MULT_EXPR,
4256 TYPE_POINTER_TO (variant_type),
4259 /* Volatility, etc., of the new expression is the same as the old expression. */
4261 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4262 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4263 TREE_READONLY (elt) = TREE_READONLY (exp);
4265 return expand_expr (elt, target, tmode, modifier);
4268 /* Fold an expression like: "foo"[2].
4269 This is not done in fold so it won't happen inside &. */
4271 if (TREE_CODE (array) == STRING_CST
4272 && TREE_CODE (index) == INTEGER_CST
4273 && !TREE_INT_CST_HIGH (index)
4274 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
4276 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
4278 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
4279 TREE_TYPE (exp) = integer_type_node;
4280 return expand_expr (exp, target, tmode, modifier);
4282 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
4284 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
4285 TREE_TYPE (exp) = integer_type_node;
4286 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4288 target, tmode, modifier);
4292 /* If this is a constant index into a constant array,
4293 just get the value from the array. Handle both the cases when
4294 we have an explicit constructor and when our operand is a variable
4295 that was declared const. */
4297 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4299 if (TREE_CODE (index) == INTEGER_CST
4300 && TREE_INT_CST_HIGH (index) == 0)
4302 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4304 i = TREE_INT_CST_LOW (index);
4306 elem = TREE_CHAIN (elem);
4308 return expand_expr (fold (TREE_VALUE (elem)), target,
4313 else if (optimize >= 1
4314 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4315 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4316 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4318 if (TREE_CODE (index) == INTEGER_CST
4319 && TREE_INT_CST_HIGH (index) == 0)
4321 tree init = DECL_INITIAL (array);
4323 i = TREE_INT_CST_LOW (index);
4324 if (TREE_CODE (init) == CONSTRUCTOR)
4326 tree elem = CONSTRUCTOR_ELTS (init);
4329 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4330 elem = TREE_CHAIN (elem);
4332 return expand_expr (fold (TREE_VALUE (elem)), target,
4335 else if (TREE_CODE (init) == STRING_CST
4336 && i < TREE_STRING_LENGTH (init))
4338 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4339 return convert_to_mode (mode, temp, 0);
4345 /* Treat array-ref with constant index as a component-ref. */
4349 /* If the operand is a CONSTRUCTOR, we can just extract the
4350 appropriate field if it is present. */
4351 if (code != ARRAY_REF
4352 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4356 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4357 elt = TREE_CHAIN (elt))
4358 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4359 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4363 enum machine_mode mode1;
4368 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4369 &mode1, &unsignedp, &volatilep);
4372 /* If we got back the original object, something is wrong. Perhaps
4373 we are evaluating an expression too early. In any event, don't
4374 infinitely recurse. */
4378 /* In some cases, we will be offsetting OP0's address by a constant.
4379 So get it as a sum, if possible. If we will be using it
4380 directly in an insn, we validate it. */
4381 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4383 /* If this is a constant, put it into a register if it is a
4384 legitimate constant and memory if it isn't. */
4385 if (CONSTANT_P (op0))
4387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4388 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4389 op0 = force_reg (mode, op0);
4391 op0 = validize_mem (force_const_mem (mode, op0));
4394 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4397 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4399 if (GET_CODE (op0) != MEM)
4401 op0 = change_address (op0, VOIDmode,
4402 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4403 force_reg (Pmode, offset_rtx)));
4404 /* If we have a variable offset, the known alignment
4405 is only that of the innermost structure containing the field.
4406 (Actually, we could sometimes do better by using the
4407 size of an element of the innermost array, but no need.) */
4408 if (TREE_CODE (exp) == COMPONENT_REF
4409 || TREE_CODE (exp) == BIT_FIELD_REF)
4410 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4414 /* Don't forget about volatility even if this is a bitfield. */
4415 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4417 op0 = copy_rtx (op0);
4418 MEM_VOLATILE_P (op0) = 1;
4421 /* In cases where an aligned union has an unaligned object
4422 as a field, we might be extracting a BLKmode value from
4423 an integer-mode (e.g., SImode) object. Handle this case
4424 by doing the extract into an object as wide as the field
4425 (which we know to be the width of a basic mode), then
4426 storing into memory, and changing the mode to BLKmode. */
4427 if (mode1 == VOIDmode
4428 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4429 && modifier != EXPAND_CONST_ADDRESS
4430 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4431 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4432 /* If the field isn't aligned enough to fetch as a memref,
4433 fetch it as a bit field. */
4434 || (STRICT_ALIGNMENT
4435 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4436 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4438 enum machine_mode ext_mode = mode;
4440 if (ext_mode == BLKmode)
4441 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4443 if (ext_mode == BLKmode)
4446 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4447 unsignedp, target, ext_mode, ext_mode,
4449 int_size_in_bytes (TREE_TYPE (tem)));
4450 if (mode == BLKmode)
4452 rtx new = assign_stack_temp (ext_mode,
4453 bitsize / BITS_PER_UNIT, 0);
4455 emit_move_insn (new, op0);
4456 op0 = copy_rtx (new);
4457 PUT_MODE (op0, BLKmode);
4458 MEM_IN_STRUCT_P (op0) = 1;
4464 /* Get a reference to just this component. */
4465 if (modifier == EXPAND_CONST_ADDRESS
4466 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4467 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4468 (bitpos / BITS_PER_UNIT)));
4470 op0 = change_address (op0, mode1,
4471 plus_constant (XEXP (op0, 0),
4472 (bitpos / BITS_PER_UNIT)));
4473 MEM_IN_STRUCT_P (op0) = 1;
4474 MEM_VOLATILE_P (op0) |= volatilep;
4475 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4478 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4479 convert_move (target, op0, unsignedp);
4485 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4486 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4487 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4488 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4489 MEM_IN_STRUCT_P (temp) = 1;
4490 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4491 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4492 a location is accessed through a pointer to const does not mean
4493 that the value there can never change. */
4494 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4499 /* Intended for a reference to a buffer of a file-object in Pascal.
4500 But it's not certain that a special tree code will really be
4501 necessary for these. INDIRECT_REF might work for them. */
4505 /* IN_EXPR: Inlined Pascal set IN expression.
4508 rlo = set_low - (set_low%bits_per_word);
4509 the_word = set [ (index - rlo)/bits_per_word ];
4510 bit_index = index % bits_per_word;
4511 bitmask = 1 << bit_index;
4512 return !!(the_word & bitmask); */
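/* Illustrative sketch (not compiled; assumes the set is stored as an
   array of bytes and BITS_PER_UNIT is 8) of what the expansion below
   computes for the pseudo-code above:  */
#if 0
static int
example_set_member (set, index, set_low)
     unsigned char *set;
     int index, set_low;
{
  int rlo = set_low - (set_low % 8);	/* bit 0 of the first byte */
  unsigned char the_byte = set[(index - rlo) / 8];
  int bit_index = index % 8;
  return (the_byte >> bit_index) & 1;
}
#endif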
4514 preexpand_calls (exp);
4516 tree set = TREE_OPERAND (exp, 0);
4517 tree index = TREE_OPERAND (exp, 1);
4518 tree set_type = TREE_TYPE (set);
4520 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4521 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4527 rtx diff, quo, rem, addr, bit, result;
4528 rtx setval, setaddr;
4529 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4532 target = gen_reg_rtx (mode);
4534 /* If domain is empty, answer is no. */
4535 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4538 index_val = expand_expr (index, 0, VOIDmode, 0);
4539 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4540 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4541 setval = expand_expr (set, 0, VOIDmode, 0);
4542 setaddr = XEXP (setval, 0);
4544 /* Compare index against bounds, if they are constant. */
4545 if (GET_CODE (index_val) == CONST_INT
4546 && GET_CODE (lo_r) == CONST_INT
4547 && INTVAL (index_val) < INTVAL (lo_r))
4550 if (GET_CODE (index_val) == CONST_INT
4551 && GET_CODE (hi_r) == CONST_INT
4552 && INTVAL (hi_r) < INTVAL (index_val))
4555 /* If we get here, we have to generate the code for both cases
4556 (in range and out of range). */
4558 op0 = gen_label_rtx ();
4559 op1 = gen_label_rtx ();
4561 if (! (GET_CODE (index_val) == CONST_INT
4562 && GET_CODE (lo_r) == CONST_INT))
4564 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4565 GET_MODE (index_val), 0, 0);
4566 emit_jump_insn (gen_blt (op1));
4569 if (! (GET_CODE (index_val) == CONST_INT
4570 && GET_CODE (hi_r) == CONST_INT))
4572 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4573 GET_MODE (index_val), 0, 0);
4574 emit_jump_insn (gen_bgt (op1));
4577 /* Calculate the element number of bit zero in the first word of the set. */
4579 if (GET_CODE (lo_r) == CONST_INT)
4580 rlow = GEN_INT (INTVAL (lo_r)
4581 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4583 rlow = expand_binop (index_mode, and_optab, lo_r,
4584 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4585 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4587 diff = expand_binop (index_mode, sub_optab,
4588 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4590 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4591 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4592 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4593 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4594 addr = memory_address (byte_mode,
4595 expand_binop (index_mode, add_optab,
4596 diff, setaddr, NULL_RTX, 0,
4598 /* Extract the bit we want to examine */
4599 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4600 gen_rtx (MEM, byte_mode, addr),
4601 make_tree (TREE_TYPE (index), rem),
4603 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4604 GET_MODE (target) == byte_mode ? target : 0,
4605 1, OPTAB_LIB_WIDEN);
4607 if (result != target)
4608 convert_move (target, result, 1);
4610 /* Output the code to handle the out-of-range case. */
4613 emit_move_insn (target, const0_rtx);
4618 case WITH_CLEANUP_EXPR:
4619 if (RTL_EXPR_RTL (exp) == 0)
4622 = expand_expr (TREE_OPERAND (exp, 0),
4623 target ? target : const0_rtx,
4626 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4627 /* That's it for this cleanup. */
4628 TREE_OPERAND (exp, 2) = 0;
4630 return RTL_EXPR_RTL (exp);
4633 /* Check for a built-in function. */
4634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
4642 return expand_call (exp, target, ignore);
4644 case NON_LVALUE_EXPR:
4647 case REFERENCE_EXPR:
4648 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4649 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4650 if (TREE_CODE (type) == UNION_TYPE)
4652 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4655 if (mode == BLKmode)
4657 if (TYPE_SIZE (type) == 0
4658 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4660 target = assign_stack_temp (BLKmode,
4661 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4662 + BITS_PER_UNIT - 1)
4663 / BITS_PER_UNIT, 0);
4666 target = gen_reg_rtx (mode);
4668 if (GET_CODE (target) == MEM)
4669 /* Store data into beginning of memory target. */
4670 store_expr (TREE_OPERAND (exp, 0),
4671 change_address (target, TYPE_MODE (valtype), 0), 0);
4673 else if (GET_CODE (target) == REG)
4674 /* Store this field into a union of the proper type. */
4675 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4676 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4678 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4682 /* Return the entire union. */
4685 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4686 if (GET_MODE (op0) == mode)
4688 /* If arg is a constant integer being extended from a narrower mode,
4689 we must really truncate to get the extended bits right. Otherwise
4690 (unsigned long) (unsigned char) ("\377"[0])
4691 would come out as ffffffff. */
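/* For example, with a signed target character "\377"[0] expands to the
   CONST_INT -1; when that is converted through `unsigned char' the
   masking below keeps only the low 8 bits, so
   (unsigned long) (unsigned char) ("\377"[0]) yields 255, not 0xffffffff.  */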
4692 if (GET_MODE (op0) == VOIDmode
4693 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4694 < GET_MODE_BITSIZE (mode)))
4696 /* The operand's mode must be narrower than HOST_BITS_PER_WIDE_INT. */
4697 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4699 if (width < HOST_BITS_PER_WIDE_INT)
4701 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4702 : CONST_DOUBLE_LOW (op0));
4703 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4704 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4705 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4707 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4709 op0 = GEN_INT (val);
4713 op0 = (simplify_unary_operation
4714 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4715 ? ZERO_EXTEND : SIGN_EXTEND),
4717 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4722 if (GET_MODE (op0) == VOIDmode)
4724 if (modifier == EXPAND_INITIALIZER)
4725 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4726 if (flag_force_mem && GET_CODE (op0) == MEM)
4727 op0 = copy_to_reg (op0);
4730 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4732 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4736 /* We come here from MINUS_EXPR when the second operand is a constant. */
4738 this_optab = add_optab;
4740 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4741 something else, make sure we add the register to the constant and
4742 then to the other thing. This case can occur during strength
4743 reduction and doing it this way will produce better code if the
4744 frame pointer or argument pointer is eliminated.
4746 fold-const.c will ensure that the constant is always in the inner
4747 PLUS_EXPR, so the only case we need to do anything about is if
4748 sp, ap, or fp is our second argument, in which case we must swap
4749 the innermost first argument and our second argument. */
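/* For example, a tree of the form  (X + 4) + FP  is rewritten below as
   (FP + 4) + X , so that  FP + 4  can later collapse to a single
   constant offset when the frame pointer is eliminated.  */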
4751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4753 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4754 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4755 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4756 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4758 tree t = TREE_OPERAND (exp, 1);
4760 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4761 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4764 /* If the result is to be Pmode and we are adding an integer to
4765 something, we might be forming a constant. So try to use
4766 plus_constant. If it produces a sum and we can't accept it,
4767 use force_operand. This allows P = &ARR[const] to generate
4768 efficient code on machines where a SYMBOL_REF is not a valid address.
4771 If this is an EXPAND_SUM call, always return the sum. */
4772 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4775 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4776 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4777 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4779 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4781 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4782 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4783 op1 = force_operand (op1, target);
4787 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4789 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4793 if (! CONSTANT_P (op0))
4795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4796 VOIDmode, modifier);
4797 /* Don't go to both_summands if modifier
4798 says it's not right to return a PLUS. */
4799 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4803 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4804 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4805 op0 = force_operand (op0, target);
4810 /* No sense saving up arithmetic to be done
4811 if it's all in the wrong mode to form part of an address.
4812 And force_operand won't know whether to sign-extend or zero-extend. */
4814 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4818 preexpand_calls (exp);
4819 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4826 /* Make sure any term that's a sum with a constant comes last. */
4827 if (GET_CODE (op0) == PLUS
4828 && CONSTANT_P (XEXP (op0, 1)))
4834 /* If adding to a sum including a constant,
4835 associate it to put the constant outside. */
4836 if (GET_CODE (op1) == PLUS
4837 && CONSTANT_P (XEXP (op1, 1)))
4839 rtx constant_term = const0_rtx;
4841 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4844 /* Ensure that MULT comes first if there is one. */
4845 else if (GET_CODE (op0) == MULT)
4846 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4848 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4850 /* Let's also eliminate constants from op0 if possible. */
4851 op0 = eliminate_constant_term (op0, &constant_term);
4853 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4854 their sum should be a constant. Form it into OP1, since the
4855 result we want will then be OP0 + OP1. */
4857 temp = simplify_binary_operation (PLUS, mode, constant_term,
4862 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4865 /* Put a constant term last and put a multiplication first. */
4866 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4867 temp = op1, op1 = op0, op0 = temp;
4869 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4870 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4873 /* Handle difference of two symbolic constants,
4874 for the sake of an initializer. */
4875 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4876 && really_constant_p (TREE_OPERAND (exp, 0))
4877 && really_constant_p (TREE_OPERAND (exp, 1)))
4879 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4880 VOIDmode, modifier);
4881 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4882 VOIDmode, modifier);
4883 return gen_rtx (MINUS, mode, op0, op1);
4885 /* Convert A - const to A + (-const). */
4886 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4888 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4889 fold (build1 (NEGATE_EXPR, type,
4890 TREE_OPERAND (exp, 1))));
4893 this_optab = sub_optab;
4897 preexpand_calls (exp);
4898 /* If first operand is constant, swap them.
4899 Thus the following special case checks need only
4900 check the second operand. */
4901 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4903 register tree t1 = TREE_OPERAND (exp, 0);
4904 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4905 TREE_OPERAND (exp, 1) = t1;
4908 /* Attempt to return something suitable for generating an
4909 indexed address, for machines that support that. */
4911 if (modifier == EXPAND_SUM && mode == Pmode
4912 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4913 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4917 /* Apply distributive law if OP0 is x+c. */
4918 if (GET_CODE (op0) == PLUS
4919 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4920 return gen_rtx (PLUS, mode,
4921 gen_rtx (MULT, mode, XEXP (op0, 0),
4922 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4923 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4924 * INTVAL (XEXP (op0, 1))));
4926 if (GET_CODE (op0) != REG)
4927 op0 = force_operand (op0, NULL_RTX);
4928 if (GET_CODE (op0) != REG)
4929 op0 = copy_to_mode_reg (mode, op0);
4931 return gen_rtx (MULT, mode, op0,
4932 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4938 /* Check for multiplying things that have been extended
4939 from a narrower type. If this machine supports multiplying
4940 in that narrower type with a result in the desired type,
4941 do it that way, and avoid the explicit type-conversion. */
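/* For example, on a machine where `short' is half the width of `int',
	short a, b;  int p = (int) a * (int) b;
   has both multiply operands as NOP_EXPR extensions from `short';
   if a widening multiply (smul_widen_optab / umul_widen_optab) exists,
   it is applied directly to the narrow operands below.  */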
4942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4943 && TREE_CODE (type) == INTEGER_TYPE
4944 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4945 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4946 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4947 && int_fits_type_p (TREE_OPERAND (exp, 1),
4948 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4949 /* Don't use a widening multiply if a shift will do. */
4950 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4951 > HOST_BITS_PER_WIDE_INT)
4952 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4954 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4955 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4957 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4958 /* If both operands are extended, they must either both
4959 be zero-extended or both be sign-extended. */
4960 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4962 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4964 enum machine_mode innermode
4965 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4966 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4967 ? umul_widen_optab : smul_widen_optab);
4968 if (mode == GET_MODE_WIDER_MODE (innermode)
4969 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4971 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4972 NULL_RTX, VOIDmode, 0);
4973 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4977 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4978 NULL_RTX, VOIDmode, 0);
4982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4984 return expand_mult (mode, op0, op1, target, unsignedp);
4986 case TRUNC_DIV_EXPR:
4987 case FLOOR_DIV_EXPR:
4989 case ROUND_DIV_EXPR:
4990 case EXACT_DIV_EXPR:
4991 preexpand_calls (exp);
4992 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4994 /* Possible optimization: compute the dividend with EXPAND_SUM
4995 then, if the divisor is constant, we can optimize the case
4996 where some terms of the dividend have coeffs divisible by it. */
4997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4998 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4999 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5002 this_optab = flodiv_optab;
5005 case TRUNC_MOD_EXPR:
5006 case FLOOR_MOD_EXPR:
5008 case ROUND_MOD_EXPR:
5009 preexpand_calls (exp);
5010 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5013 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5014 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5016 case FIX_ROUND_EXPR:
5017 case FIX_FLOOR_EXPR:
5019 abort (); /* Not used for C. */
5021 case FIX_TRUNC_EXPR:
5022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5024 target = gen_reg_rtx (mode);
5025 expand_fix (target, op0, unsignedp);
5029 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5031 target = gen_reg_rtx (mode);
5032 /* expand_float can't figure out what to do if FROM has VOIDmode.
5033 So give it the correct mode. With -O, cse will optimize this. */
5034 if (GET_MODE (op0) == VOIDmode)
5035 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5037 expand_float (target, op0,
5038 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5042 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5043 temp = expand_unop (mode, neg_optab, op0, target, 0);
5049 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5051 /* Handle complex values specially. */
5053 enum machine_mode opmode
5054 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5056 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5057 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5058 return expand_complex_abs (opmode, op0, target, unsignedp);
5061 /* Unsigned abs is simply the operand. Testing here means we don't
5062 risk generating incorrect code below. */
5063 if (TREE_UNSIGNED (type))
5066 /* First try to do it with a special abs instruction. */
5067 temp = expand_unop (mode, abs_optab, op0, target, 0);
5071 /* If this machine has expensive jumps, we can do integer absolute
5072 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5073 where W is the width of MODE. */
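/* Illustrative sketch (not compiled; assumes a 32-bit int and an
   arithmetic right shift of signed values) of the branch-free sequence
   generated below:  */
#if 0
static int
example_abs (x)
     int x;
{
  int t = x >> 31;		/* 0 if x >= 0, -1 if x < 0 */
  return (x ^ t) - t;		/* abs (x), except for INT_MIN */
}
#endif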
5075 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5077 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5078 size_int (GET_MODE_BITSIZE (mode) - 1),
5081 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5084 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5091 /* If that does not win, use conditional jump and negate. */
5092 target = original_target;
5093 temp = gen_label_rtx ();
5094 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5095 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5096 || (GET_CODE (target) == REG
5097 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5098 target = gen_reg_rtx (mode);
5099 emit_move_insn (target, op0);
5100 emit_cmp_insn (target,
5101 expand_expr (convert (type, integer_zero_node),
5102 NULL_RTX, VOIDmode, 0),
5103 GE, NULL_RTX, mode, 0, 0);
5105 emit_jump_insn (gen_bge (temp));
5106 op0 = expand_unop (mode, neg_optab, target, target, 0);
5108 emit_move_insn (target, op0);
5115 target = original_target;
5116 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5117 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5118 || (GET_CODE (target) == REG
5119 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5120 target = gen_reg_rtx (mode);
5121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5122 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5124 /* First try to do it with a special MIN or MAX instruction.
5125 If that does not win, use a conditional jump to select the proper
5127 this_optab = (TREE_UNSIGNED (type)
5128 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5129 : (code == MIN_EXPR ? smin_optab : smax_optab));
5131 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5137 emit_move_insn (target, op0);
5138 op0 = gen_label_rtx ();
5139 /* If this mode is an integer too wide to compare properly,
5140 compare word by word. Rely on cse to optimize constant cases. */
5141 if (GET_MODE_CLASS (mode) == MODE_INT
5142 && !can_compare_p (mode))
5144 if (code == MAX_EXPR)
5145 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
5147 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5148 emit_move_insn (target, op1);
5152 if (code == MAX_EXPR)
5153 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5154 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5155 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5157 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5158 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5159 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5160 if (temp == const0_rtx)
5161 emit_move_insn (target, op1);
5162 else if (temp != const_true_rtx)
5164 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5165 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5168 emit_move_insn (target, op1);
5174 /* ??? Can optimize when the operand of this is a bitwise operation,
5175 by using a different bitwise operation. */
5177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5178 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5185 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5190 /* ??? Can optimize bitwise operations with one arg constant.
5191 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5192 and (a bitwise1 b) bitwise2 b (etc)
5193 but that is probably not worth while. */
5195 /* BIT_AND_EXPR is for bitwise anding.
5196 TRUTH_AND_EXPR is for anding two boolean values
5197 when we want in all cases to compute both of them.
5198 In general it is fastest to do TRUTH_AND_EXPR by
5199 computing both operands as actual zero-or-1 values
5200 and then bitwise anding. In cases where there cannot
5201 be any side effects, better code would be made by
5202 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5203 but the question is how to recognize those cases. */
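/* For example, a front end may use TRUTH_AND_EXPR for  p && q  when
   both operands are free of side effects; the expansion below then
   simply bitwise-ANDs the two 0-or-1 operand values, avoiding a
   conditional branch.  */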
5205 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5206 the operands. If so, don't use our target. */
5207 case TRUTH_AND_EXPR:
5208 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5211 this_optab = and_optab;
5214 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5216 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5219 this_optab = ior_optab;
5222 case TRUTH_XOR_EXPR:
5223 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5226 this_optab = xor_optab;
5233 preexpand_calls (exp);
5234 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5236 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5237 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5240 /* Could determine the answer when only additive constants differ.
5241 Also, the addition of one can be handled by changing the condition. */
5248 preexpand_calls (exp);
5249 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5252 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5253 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5255 && GET_CODE (original_target) == REG
5256 && (GET_MODE (original_target)
5257 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5259 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5260 if (temp != original_target)
5261 temp = copy_to_reg (temp);
5262 op1 = gen_label_rtx ();
5263 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5264 GET_MODE (temp), unsignedp, 0);
5265 emit_jump_insn (gen_beq (op1));
5266 emit_move_insn (temp, const1_rtx);
5270 /* If no set-flag instruction, must generate a conditional
5271 store into a temporary variable. Drop through
5272 and handle this like && and ||. */
5274 case TRUTH_ANDIF_EXPR:
5275 case TRUTH_ORIF_EXPR:
5277 && (target == 0 || ! safe_from_p (target, exp)
5278 /* Make sure we don't have a hard reg (such as function's return
5279 value) live across basic blocks, if not optimizing. */
5280 || (!optimize && GET_CODE (target) == REG
5281 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5282 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5285 emit_clr_insn (target);
5287 op1 = gen_label_rtx ();
5288 jumpifnot (exp, op1);
5291 emit_0_to_1_insn (target);
5294 return ignore ? const0_rtx : target;
5296 case TRUTH_NOT_EXPR:
5297 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5298 /* The parser is careful to generate TRUTH_NOT_EXPR
5299 only with operands that are always zero or one. */
5300 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5301 target, 1, OPTAB_LIB_WIDEN);
5307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5309 return expand_expr (TREE_OPERAND (exp, 1),
5310 (ignore ? const0_rtx : target),
5315 /* Note that COND_EXPRs whose type is a structure or union
5316 are required to be constructed to contain assignments of
5317 a temporary variable, so that we can evaluate them here
5318 for side effect only. If type is void, we must do likewise. */
5320 /* If an arm of the branch requires a cleanup,
5321 only that cleanup is performed. */
5324 tree binary_op = 0, unary_op = 0;
5325 tree old_cleanups = cleanups_this_call;
5326 cleanups_this_call = 0;
5328 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5329 convert it to our mode, if necessary. */
5330 if (integer_onep (TREE_OPERAND (exp, 1))
5331 && integer_zerop (TREE_OPERAND (exp, 2))
5332 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5336 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5341 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5342 if (GET_MODE (op0) == mode)
5345 target = gen_reg_rtx (mode);
5346 convert_move (target, op0, unsignedp);
5350 /* If we are not to produce a result, we have no target. Otherwise,
5351 if a target was specified use it; it will not be used as an
5352 intermediate target unless it is safe. If no target, use a
5357 else if (original_target
5358 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5359 temp = original_target;
5360 else if (mode == BLKmode)
5362 if (TYPE_SIZE (type) == 0
5363 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5366 temp = assign_stack_temp (BLKmode,
5367 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5368 + BITS_PER_UNIT - 1)
5369 / BITS_PER_UNIT, 0);
5370 MEM_IN_STRUCT_P (temp)
5371 = (TREE_CODE (type) == RECORD_TYPE
5372 || TREE_CODE (type) == UNION_TYPE
5373 || TREE_CODE (type) == QUAL_UNION_TYPE
5374 || TREE_CODE (type) == ARRAY_TYPE);
5377 temp = gen_reg_rtx (mode);
5379 /* Check for X ? A + B : A. If we have this, we can copy
5380 A to the output and conditionally add B. Similarly for unary
5381 operations. Don't do this if X has side-effects because
5382 those side effects might affect A or B and the "?" operation is
5383 a sequence point in ANSI. (We test for side effects later.) */
5385 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5386 && operand_equal_p (TREE_OPERAND (exp, 2),
5387 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5388 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5389 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5390 && operand_equal_p (TREE_OPERAND (exp, 1),
5391 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5392 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5393 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5394 && operand_equal_p (TREE_OPERAND (exp, 2),
5395 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5396 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5397 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5398 && operand_equal_p (TREE_OPERAND (exp, 1),
5399 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5400 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5402 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5403 operation, do this as A + (X != 0). Similarly for other simple
5404 binary operators. */
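/* For example,  r = x ? a + 1 : a  with X a comparison is expanded
   below as roughly  r = a + (x != 0) , using the 0-or-1 result of a
   store-flag insn for X instead of a branch.  */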
5405 if (temp && singleton && binary_op
5406 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5407 && (TREE_CODE (binary_op) == PLUS_EXPR
5408 || TREE_CODE (binary_op) == MINUS_EXPR
5409 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5410 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5411 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5412 && integer_onep (TREE_OPERAND (binary_op, 1))
5413 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5416 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5417 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5418 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5419 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5422 /* If we had X ? A : A + 1, do this as A + (X == 0).
5424 We have to invert the truth value here and then put it
5425 back later if do_store_flag fails. We cannot simply copy
5426 TREE_OPERAND (exp, 0) to another variable and modify that
5427 because invert_truthvalue can modify the tree pointed to by its argument. */
5429 if (singleton == TREE_OPERAND (exp, 1))
5430 TREE_OPERAND (exp, 0)
5431 = invert_truthvalue (TREE_OPERAND (exp, 0));
5433 result = do_store_flag (TREE_OPERAND (exp, 0),
5434 (safe_from_p (temp, singleton)
5436 mode, BRANCH_COST <= 1);
5440 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5441 return expand_binop (mode, boptab, op1, result, temp,
5442 unsignedp, OPTAB_LIB_WIDEN);
5444 else if (singleton == TREE_OPERAND (exp, 1))
5445 TREE_OPERAND (exp, 0)
5446 = invert_truthvalue (TREE_OPERAND (exp, 0));
5450 op0 = gen_label_rtx ();
5452 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5456 /* If the target conflicts with the other operand of the
5457 binary op, we can't use it. Also, we can't use the target
5458 if it is a hard register, because evaluating the condition
5459 might clobber it. */
5461 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5462 || (GET_CODE (temp) == REG
5463 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5464 temp = gen_reg_rtx (mode);
5465 store_expr (singleton, temp, 0);
5468 expand_expr (singleton,
5469 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5470 if (cleanups_this_call)
5472 sorry ("aggregate value in COND_EXPR");
5473 cleanups_this_call = 0;
5475 if (singleton == TREE_OPERAND (exp, 1))
5476 jumpif (TREE_OPERAND (exp, 0), op0);
5478 jumpifnot (TREE_OPERAND (exp, 0), op0);
5480 if (binary_op && temp == 0)
5481 /* Just touch the other operand. */
5482 expand_expr (TREE_OPERAND (binary_op, 1),
5483 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5485 store_expr (build (TREE_CODE (binary_op), type,
5486 make_tree (type, temp),
5487 TREE_OPERAND (binary_op, 1)),
5490 store_expr (build1 (TREE_CODE (unary_op), type,
5491 make_tree (type, temp)),
5496 /* This is now done in jump.c and is better done there because it
5497 produces shorter register lifetimes. */
5499 /* Check for both possibilities either constants or variables
5500 in registers (but not the same as the target!). If so, can
5501 save branches by assigning one, branching, and assigning the other. */
5503 else if (temp && GET_MODE (temp) != BLKmode
5504 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5505 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5506 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5507 && DECL_RTL (TREE_OPERAND (exp, 1))
5508 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5509 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5510 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5511 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5512 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5513 && DECL_RTL (TREE_OPERAND (exp, 2))
5514 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5515 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5517 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5518 temp = gen_reg_rtx (mode);
5519 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5520 jumpifnot (TREE_OPERAND (exp, 0), op0);
5521 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5525 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5526 comparison operator. If we have one of these cases, set the
5527 output to A, branch on A (cse will merge these two references),
5528 then set the output to FOO. */
5530 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5531 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5532 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5533 TREE_OPERAND (exp, 1), 0)
5534 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5535 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5537 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5538 temp = gen_reg_rtx (mode);
5539 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5540 jumpif (TREE_OPERAND (exp, 0), op0);
5541 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5545 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5546 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5547 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5548 TREE_OPERAND (exp, 2), 0)
5549 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5550 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5552 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5553 temp = gen_reg_rtx (mode);
5554 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5555 jumpifnot (TREE_OPERAND (exp, 0), op0);
5556 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5561 op1 = gen_label_rtx ();
5562 jumpifnot (TREE_OPERAND (exp, 0), op0);
5564 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5566 expand_expr (TREE_OPERAND (exp, 1),
5567 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5568 if (cleanups_this_call)
5570 sorry ("aggregate value in COND_EXPR");
5571 cleanups_this_call = 0;
5575 emit_jump_insn (gen_jump (op1));
5579 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5581 expand_expr (TREE_OPERAND (exp, 2),
5582 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5585 if (cleanups_this_call)
5587 sorry ("aggregate value in COND_EXPR");
5588 cleanups_this_call = 0;
5594 cleanups_this_call = old_cleanups;
5600 /* Something needs to be initialized, but we didn't know
5601 where that thing was when building the tree. For example,
5602 it could be the return value of a function, or a parameter
5603 to a function which lays down in the stack, or a temporary
5604 variable which must be passed by reference.
5606 We guarantee that the expression will either be constructed
5607 or copied into our original target. */
5609 tree slot = TREE_OPERAND (exp, 0);
5612 if (TREE_CODE (slot) != VAR_DECL)
5617 if (DECL_RTL (slot) != 0)
5619 target = DECL_RTL (slot);
4620 /* If we have already expanded the slot, don't do it again. */
5622 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5627 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5628 /* All temp slots at this level must not conflict. */
5629 preserve_temp_slots (target);
5630 DECL_RTL (slot) = target;
5633 /* We set IGNORE when we know that we're already
5634 doing this for a cleanup. */
5637 /* Since SLOT is not known to the called function
5638 to belong to its stack frame, we must build an explicit
5639 cleanup. This case occurs when we must build up a reference
5640 to pass the reference as an argument. In this case,
5641 it is very likely that such a reference need not be
5644 if (TREE_OPERAND (exp, 2) == 0)
5645 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5646 if (TREE_OPERAND (exp, 2))
5647 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5648 cleanups_this_call);
5653 /* This case does occur, when expanding a parameter which
5654 needs to be constructed on the stack. The target
5655 is the actual stack address that we want to initialize.
5656 The function we call will perform the cleanup in this case. */
5658 /* If we have already assigned it space, use that space,
5659 not the target that we were passed in, as our target
5660 parameter is only a hint. */
5661 if (DECL_RTL (slot) != 0)
5663 target = DECL_RTL (slot);
5664 /* If we have already expanded the slot, don't do it again. */
5666 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5670 DECL_RTL (slot) = target;
5673 exp1 = TREE_OPERAND (exp, 1);
5674 /* Mark it as expanded. */
5675 TREE_OPERAND (exp, 1) = NULL_TREE;
5677 return expand_expr (exp1, target, tmode, modifier);
5682 tree lhs = TREE_OPERAND (exp, 0);
5683 tree rhs = TREE_OPERAND (exp, 1);
5684 tree noncopied_parts = 0;
5685 tree lhs_type = TREE_TYPE (lhs);
5687 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5688 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5689 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5690 TYPE_NONCOPIED_PARTS (lhs_type));
5691 while (noncopied_parts != 0)
5693 expand_assignment (TREE_VALUE (noncopied_parts),
5694 TREE_PURPOSE (noncopied_parts), 0, 0);
5695 noncopied_parts = TREE_CHAIN (noncopied_parts);
5702 /* If lhs is complex, expand calls in rhs before computing it.
5703 That's so we don't compute a pointer and save it over a call.
5704 If lhs is simple, compute it first so we can give it as a
5705 target if the rhs is just a call. This avoids an extra temp and copy
5706 and that prevents a partial-subsumption which makes bad code.
5707 Actually we could treat component_ref's of vars like vars. */
5709 tree lhs = TREE_OPERAND (exp, 0);
5710 tree rhs = TREE_OPERAND (exp, 1);
5711 tree noncopied_parts = 0;
5712 tree lhs_type = TREE_TYPE (lhs);
5716 if (TREE_CODE (lhs) != VAR_DECL
5717 && TREE_CODE (lhs) != RESULT_DECL
5718 && TREE_CODE (lhs) != PARM_DECL)
5719 preexpand_calls (exp);
5721 /* Check for |= or &= of a bitfield of size one into another bitfield
5722 of size 1. In this case, (unless we need the result of the
5723 assignment) we can do this more efficiently with a
5724 test followed by an assignment, if necessary.
5726 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5727 things change so we do, this code should be enhanced to
5730 && TREE_CODE (lhs) == COMPONENT_REF
5731 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5732 || TREE_CODE (rhs) == BIT_AND_EXPR)
5733 && TREE_OPERAND (rhs, 0) == lhs
5734 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5735 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5736 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5738 rtx label = gen_label_rtx ();
5740 do_jump (TREE_OPERAND (rhs, 1),
5741 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5742 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5743 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5744 (TREE_CODE (rhs) == BIT_IOR_EXPR
5746 : integer_zero_node)),
5748 do_pending_stack_adjust ();
5753 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5754 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5755 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5756 TYPE_NONCOPIED_PARTS (lhs_type));
5758 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5759 while (noncopied_parts != 0)
5761 expand_assignment (TREE_PURPOSE (noncopied_parts),
5762 TREE_VALUE (noncopied_parts), 0, 0);
5763 noncopied_parts = TREE_CHAIN (noncopied_parts);
5768 case PREINCREMENT_EXPR:
5769 case PREDECREMENT_EXPR:
5770 return expand_increment (exp, 0);
5772 case POSTINCREMENT_EXPR:
5773 case POSTDECREMENT_EXPR:
5774 /* Faster to treat as pre-increment if result is not used. */
5775 return expand_increment (exp, ! ignore);
5778 /* Are we taking the address of a nested function? */
5779 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5780 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5782 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5783 op0 = force_operand (op0, target);
5787 /* We make sure to pass const0_rtx down if we came in with
5788 ignore set, to avoid doing the cleanups twice for something. */
5789 op0 = expand_expr (TREE_OPERAND (exp, 0),
5790 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5791 (modifier == EXPAND_INITIALIZER
5792 ? modifier : EXPAND_CONST_ADDRESS));
5794 /* We would like the object in memory. If it is a constant,
5795 we can have it be statically allocated into memory. For
5796 a non-constant (REG or SUBREG), we need to allocate some
5797 memory and store the value into it. */
5799 if (CONSTANT_P (op0))
5800 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5803 /* These cases happen in Fortran. Is that legitimate?
5804 Should Fortran work in another way?
5805 Do they happen in C? */
5806 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5807 || GET_CODE (op0) == CONCAT)
5809 /* The value is in a register (or SUBREG or CONCAT); copy it into a stack temporary so that we have a memory address for it. */
5811 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5812 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5814 = assign_stack_temp (inner_mode,
5815 int_size_in_bytes (inner_type), 1);
5817 emit_move_insn (memloc, op0);
5821 if (GET_CODE (op0) != MEM)
5824 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5825 return XEXP (op0, 0);
5826 op0 = force_operand (XEXP (op0, 0), target);
5828 if (flag_force_addr && GET_CODE (op0) != REG)
5829 return force_reg (Pmode, op0);
5832 case ENTRY_VALUE_EXPR:
5835 /* COMPLEX type for Extended Pascal & Fortran */
5838 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5842 /* Get the rtx code of the operands. */
5843 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5844 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5847 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5849 prev = get_last_insn ();
5851 /* Tell flow that the whole of the destination is being set. */
5852 if (GET_CODE (target) == REG)
5853 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5855 /* Move the real (op0) and imaginary (op1) parts to their location. */
5856 emit_move_insn (gen_realpart (mode, target), op0);
5857 emit_move_insn (gen_imagpart (mode, target), op1);
5859 /* Complex construction should appear as a single unit. */
5860 if (GET_CODE (target) != CONCAT)
5861 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5862 each with a separate pseudo as destination.
5863 It's not correct for flow to treat them as a unit. */
5870 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5871 return gen_realpart (mode, op0);
5874 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5875 return gen_imagpart (mode, op0);
5879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5883 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5886 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5888 prev = get_last_insn ();
5890 /* Tell flow that the whole of the destination is being set. */
5891 if (GET_CODE (target) == REG)
5892 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5894 /* Store the realpart and the negated imagpart to target. */
5895 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5897 imag_t = gen_imagpart (mode, target);
5898 temp = expand_unop (mode, neg_optab,
5899 gen_imagpart (mode, op0), imag_t, 0);
5901 emit_move_insn (imag_t, temp);
5903 /* Conjugate should appear as a single unit */
5904 if (GET_CODE (target) != CONCAT)
5905 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5906 each with a separate pseudo as destination.
5907 It's not correct for flow to treat them as a unit. */
5914 op0 = CONST0_RTX (tmode);
5920 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5923 /* Here to do an ordinary binary operator, generating an instruction
5924 from the optab already placed in `this_optab'. */
5926 preexpand_calls (exp);
5927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5932 temp = expand_binop (mode, this_optab, op0, op1, target,
5933 unsignedp, OPTAB_LIB_WIDEN);
5940 /* Emit bytecode to evaluate the given expression EXP to the stack. */
5942 bc_expand_expr (exp)
5945 enum tree_code code;
5948 struct binary_operator *binoptab;
5949 struct unary_operator *unoptab;
5950 struct increment_operator *incroptab;
5951 struct bc_label *lab, *lab1;
5952 enum bytecode_opcode opcode;
5955 code = TREE_CODE (exp);
5961 if (DECL_RTL (exp) == 0)
5963 error_with_decl (exp, "prior parameter's size depends on `%s'");
5967 bc_load_parmaddr (DECL_RTL (exp));
5968 bc_load_memory (TREE_TYPE (exp), exp);
5974 if (DECL_RTL (exp) == 0)
5978 if (BYTECODE_LABEL (DECL_RTL (exp)))
5979 bc_load_externaddr (DECL_RTL (exp));
5981 bc_load_localaddr (DECL_RTL (exp));
5983 if (TREE_PUBLIC (exp))
5984 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5985 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5987 bc_load_localaddr (DECL_RTL (exp));
5989 bc_load_memory (TREE_TYPE (exp), exp);
5994 #ifdef DEBUG_PRINT_CODE
5995 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5997 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
5999 : TYPE_MODE (TREE_TYPE (exp)))],
6000 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6006 #ifdef DEBUG_PRINT_CODE
6007 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
6009 /* FIX THIS: find a better way to pass real_cst's. -bson */
6010 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6011 (double) TREE_REAL_CST (exp));
6020 /* We build a call description vector describing the type of
6021 the return value and of the arguments; this call vector,
6022 together with a pointer to a location for the return value
6023 and the base of the argument list, is passed to the low
6024 level machine dependent call subroutine, which is responsible
6025 for putting the arguments wherever real functions expect
6026 them, as well as getting the return value back. */
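/* Judging from the tree_cons calls below, the call description vector
   appears to be laid out roughly as
	{ nargs, return type code, return size,
	  arg1 type code, arg1 size, ..., argN type code, argN size }
   since tree_cons prepends and the arguments are scanned in reverse.  */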
6028 tree calldesc = 0, arg;
6032 /* Push the evaluated args on the evaluation stack in reverse
6033 order. Also make an entry for each arg in the calldesc
6034 vector while we're at it. */
6036 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6038 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6041 bc_expand_expr (TREE_VALUE (arg));
6043 calldesc = tree_cons ((tree) 0,
6044 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6046 calldesc = tree_cons ((tree) 0,
6047 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6051 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6053 /* Allocate a location for the return value and push its
6054 address on the evaluation stack. Also make an entry
6055 at the front of the calldesc for the return value type. */
6057 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6058 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6059 bc_load_localaddr (retval);
6061 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6062 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6064 /* Prepend the argument count. */
6065 calldesc = tree_cons ((tree) 0,
6066 build_int_2 (nargs, 0),
6069 /* Push the address of the call description vector on the stack. */
6070 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6071 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6072 build_index_type (build_int_2 (nargs * 2, 0)));
6073 r = output_constant_def (calldesc);
6074 bc_load_externaddr (r);
6076 /* Push the address of the function to be called. */
6077 bc_expand_expr (TREE_OPERAND (exp, 0));
6079 /* Call the function, popping its address and the calldesc vector
6080 address off the evaluation stack in the process. */
6081 bc_emit_instruction (call);
6083 /* Pop the arguments off the stack. */
6084 bc_adjust_stack (nargs);
6086 /* Load the return value onto the stack. */
6087 bc_load_localaddr (retval);
6088 bc_load_memory (type, TREE_OPERAND (exp, 0));
6094 if (!SAVE_EXPR_RTL (exp))
6096 /* First time around: copy to local variable */
6097 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6098 TYPE_ALIGN (TREE_TYPE(exp)));
6099 bc_expand_expr (TREE_OPERAND (exp, 0));
6100 bc_emit_instruction (duplicate);
6102 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6103 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6107 /* Consecutive reference: use saved copy */
6108 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6109 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6114 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6115 how are they handled instead? */
6118 TREE_USED (exp) = 1;
6119 bc_expand_expr (STMT_BODY (exp));
6126 bc_expand_expr (TREE_OPERAND (exp, 0));
6127 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6132 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6137 bc_expand_address (TREE_OPERAND (exp, 0));
6142 bc_expand_expr (TREE_OPERAND (exp, 0));
6143 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6148 bc_expand_expr (bc_canonicalize_array_ref (exp));
6153 bc_expand_component_address (exp);
6155 /* If we have a bitfield, generate a proper load */
6156 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6161 bc_expand_expr (TREE_OPERAND (exp, 0));
6162 bc_emit_instruction (drop);
6163 bc_expand_expr (TREE_OPERAND (exp, 1));
6168 bc_expand_expr (TREE_OPERAND (exp, 0));
6169 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6170 lab = bc_get_bytecode_label ();
6171 bc_emit_bytecode (xjumpifnot);
6172 bc_emit_bytecode_labelref (lab);
6174 #ifdef DEBUG_PRINT_CODE
6175 fputc ('\n', stderr);
6177 bc_expand_expr (TREE_OPERAND (exp, 1));
6178 lab1 = bc_get_bytecode_label ();
6179 bc_emit_bytecode (jump);
6180 bc_emit_bytecode_labelref (lab1);
6182 #ifdef DEBUG_PRINT_CODE
6183 fputc ('\n', stderr);
6186 bc_emit_bytecode_labeldef (lab);
6187 bc_expand_expr (TREE_OPERAND (exp, 2));
6188 bc_emit_bytecode_labeldef (lab1);
6191 case TRUTH_ANDIF_EXPR:
6193 opcode = xjumpifnot;
6196 case TRUTH_ORIF_EXPR:
6203 binoptab = optab_plus_expr;
6208 binoptab = optab_minus_expr;
6213 binoptab = optab_mult_expr;
6216 case TRUNC_DIV_EXPR:
6217 case FLOOR_DIV_EXPR:
6219 case ROUND_DIV_EXPR:
6220 case EXACT_DIV_EXPR:
6222 binoptab = optab_trunc_div_expr;
6225 case TRUNC_MOD_EXPR:
6226 case FLOOR_MOD_EXPR:
6228 case ROUND_MOD_EXPR:
6230 binoptab = optab_trunc_mod_expr;
6233 case FIX_ROUND_EXPR:
6234 case FIX_FLOOR_EXPR:
6236 abort (); /* Not used for C. */
6238 case FIX_TRUNC_EXPR:
6245 abort (); /* FIXME */
6249 binoptab = optab_rdiv_expr;
6254 binoptab = optab_bit_and_expr;
6259 binoptab = optab_bit_ior_expr;
6264 binoptab = optab_bit_xor_expr;
6269 binoptab = optab_lshift_expr;
6274 binoptab = optab_rshift_expr;
6277 case TRUTH_AND_EXPR:
6279 binoptab = optab_truth_and_expr;
6284 binoptab = optab_truth_or_expr;
6289 binoptab = optab_lt_expr;
6294 binoptab = optab_le_expr;
6299 binoptab = optab_ge_expr;
6304 binoptab = optab_gt_expr;
6309 binoptab = optab_eq_expr;
6314 binoptab = optab_ne_expr;
6319 unoptab = optab_negate_expr;
6324 unoptab = optab_bit_not_expr;
6327 case TRUTH_NOT_EXPR:
6329 unoptab = optab_truth_not_expr;
6332 case PREDECREMENT_EXPR:
6334 incroptab = optab_predecrement_expr;
6337 case PREINCREMENT_EXPR:
6339 incroptab = optab_preincrement_expr;
6342 case POSTDECREMENT_EXPR:
6344 incroptab = optab_postdecrement_expr;
6347 case POSTINCREMENT_EXPR:
6349 incroptab = optab_postincrement_expr;
6354 bc_expand_constructor (exp);
6364 tree vars = TREE_OPERAND (exp, 0);
6365 int vars_need_expansion = 0;
6367 /* Need to open a binding contour here because
6368 if there are any cleanups they must be contained here. */
6369 expand_start_bindings (0);
6371 /* Mark the corresponding BLOCK for output. */
6372 if (TREE_OPERAND (exp, 2) != 0)
6373 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6375 /* If VARS have not yet been expanded, expand them now. */
6378 if (DECL_RTL (vars) == 0)
6380 vars_need_expansion = 1;
6381 bc_expand_decl (vars, 0);
6383 bc_expand_decl_init (vars);
6384 vars = TREE_CHAIN (vars);
6387 bc_expand_expr (TREE_OPERAND (exp, 1));
6389 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6399 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6400 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6406 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6412 bc_expand_expr (TREE_OPERAND (exp, 0));
6413 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6414 lab = bc_get_bytecode_label ();
6416 bc_emit_instruction (duplicate);
6417 bc_emit_bytecode (opcode);
6418 bc_emit_bytecode_labelref (lab);
6420 #ifdef DEBUG_PRINT_CODE
6421 fputc ('\n', stderr);
6424 bc_emit_instruction (drop);
6426 bc_expand_expr (TREE_OPERAND (exp, 1));
6427 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6428 bc_emit_bytecode_labeldef (lab);
6434 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6436 /* Push the quantum. */
6437 bc_expand_expr (TREE_OPERAND (exp, 1));
6439 /* Convert it to the lvalue's type. */
6440 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6442 /* Push the address of the lvalue */
6443 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6445 /* Perform actual increment */
6446 bc_expand_increment (incroptab, type);
6450 /* Return the alignment in bits of EXP, a pointer valued expression.
6451 But don't return more than MAX_ALIGN no matter what.
6452 The alignment returned is, by default, the alignment of the thing that
6453 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6455 Otherwise, look at the expression to see if we can do better, i.e., if the
6456 expression is actually pointing at an object whose alignment is tighter. */
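/* For example (illustrative only): for an argument `&c' where `c' is a
   char, the result is BITS_PER_UNIT; for `&d' where `d' is a declared
   double, the result is at least the alignment `d' was declared with,
   capped at MAX_ALIGN.  */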
6459 get_pointer_alignment (exp, max_align)
6463 unsigned align, inner;
6465 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6468 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6469 align = MIN (align, max_align);
6473 switch (TREE_CODE (exp))
6477 case NON_LVALUE_EXPR:
6478 exp = TREE_OPERAND (exp, 0);
6479 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6481 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6482 inner = MIN (inner, max_align);
6483 align = MAX (align, inner);
6487 /* If sum of pointer + int, restrict our maximum alignment to that
6488 imposed by the integer. If not, we can't do any better than
6490 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6493 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6498 exp = TREE_OPERAND (exp, 0);
6502 /* See what we are pointing at and look at its alignment. */
6503 exp = TREE_OPERAND (exp, 0);
6504 if (TREE_CODE (exp) == FUNCTION_DECL)
6505 align = MAX (align, FUNCTION_BOUNDARY);
6506 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6507 align = MAX (align, DECL_ALIGN (exp));
6508 #ifdef CONSTANT_ALIGNMENT
6509 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6510 align = CONSTANT_ALIGNMENT (exp, align);
6512 return MIN (align, max_align);
6520 /* Return the tree node and offset if a given argument corresponds to
6521 a string constant. */
6524 string_constant (arg, ptr_offset)
6530 if (TREE_CODE (arg) == ADDR_EXPR
6531 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6533 *ptr_offset = integer_zero_node;
6534 return TREE_OPERAND (arg, 0);
6536 else if (TREE_CODE (arg) == PLUS_EXPR)
6538 tree arg0 = TREE_OPERAND (arg, 0);
6539 tree arg1 = TREE_OPERAND (arg, 1);
6544 if (TREE_CODE (arg0) == ADDR_EXPR
6545 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6548 return TREE_OPERAND (arg0, 0);
6550 else if (TREE_CODE (arg1) == ADDR_EXPR
6551 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6554 return TREE_OPERAND (arg1, 0);
6561 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6562 way, because it could contain a zero byte in the middle.
6563 TREE_STRING_LENGTH is the size of the character array, not the string.
6565 Unfortunately, string_constant can't access the values of const char
6566 arrays with initializers, so neither can we do so here. */
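/* For example (illustrative only): for the argument "hello" this yields
   size_int (5); for "hello" + 1 it yields size_int (4); for anything that
   does not resolve to a string constant it yields 0, meaning the length
   is not known at compile time.  */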
6576 src = string_constant (src, &offset_node);
6579 max = TREE_STRING_LENGTH (src);
6580 ptr = TREE_STRING_POINTER (src);
6581 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6583 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6584 compute the offset to the following null if we don't know where to
6585 start searching for it. */
6587 for (i = 0; i < max; i++)
6590 /* We don't know the starting offset, but we do know that the string
6591 has no internal zero bytes. We can assume that the offset falls
6592 within the bounds of the string; otherwise, the programmer deserves
6593 what he gets. Subtract the offset from the length of the string,
6595 /* This would perhaps not be valid if we were dealing with named
6596 arrays in addition to literal string constants. */
6597 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6600 /* We have a known offset into the string. Start searching there for
6601 a null character. */
6602 if (offset_node == 0)
6606 /* Did we get a long long offset? If so, punt. */
6607 if (TREE_INT_CST_HIGH (offset_node) != 0)
6609 offset = TREE_INT_CST_LOW (offset_node);
6611 /* If the offset is known to be out of bounds, warn, and call strlen at
6613 if (offset < 0 || offset > max)
6615 warning ("offset outside bounds of constant string");
6618 /* Use strlen to search for the first zero byte. Since any strings
6619 constructed with build_string will have nulls appended, we win even
6620 if we get handed something like (char[4])"abcd".
6622 Since OFFSET is our starting index into the string, no further
6623 calculation is needed. */
6624 return size_int (strlen (ptr + offset));
6627 /* Expand an expression EXP that calls a built-in function,
6628 with result going to TARGET if that's convenient
6629 (and in mode MODE if that's convenient).
6630 SUBTARGET may be used as the target for computing one of EXP's operands.
6631 IGNORE is nonzero if the value is to be ignored. */
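/* For example (illustrative only): a __builtin_strlen call on a string
   constant expands to the constant length computed by c_strlen above,
   and __builtin_alloca expands directly to a stack adjustment; builtins
   that cannot be open-coded fall through to an ordinary library call.  */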
6634 expand_builtin (exp, target, subtarget, mode, ignore)
6638 enum machine_mode mode;
6641 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6642 tree arglist = TREE_OPERAND (exp, 1);
6645 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6646 optab builtin_optab;
6648 switch (DECL_FUNCTION_CODE (fndecl))
6653 /* build_function_call changes these into ABS_EXPR. */
6658 case BUILT_IN_FSQRT:
6659 /* If not optimizing, call the library function. */
6664 /* Arg could be wrong type if user redeclared this fcn wrong. */
6665 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6668 /* Stabilize and compute the argument. */
6669 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6670 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6672 exp = copy_node (exp);
6673 arglist = copy_node (arglist);
6674 TREE_OPERAND (exp, 1) = arglist;
6675 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6677 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6679 /* Make a suitable register to place result in. */
6680 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6685 switch (DECL_FUNCTION_CODE (fndecl))
6688 builtin_optab = sin_optab; break;
6690 builtin_optab = cos_optab; break;
6691 case BUILT_IN_FSQRT:
6692 builtin_optab = sqrt_optab; break;
6697 /* Compute into TARGET.
6698 Set TARGET to wherever the result comes back. */
6699 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6700 builtin_optab, op0, target, 0);
6702 /* If we were unable to expand via the builtin, stop the
6703 sequence (without outputting the insns) and break, causing
6704 a call to the library function. */
6711 /* Check the results by default. But if flag_fast_math is turned on,
6712 then assume sqrt will always be called with valid arguments. */
6714 if (! flag_fast_math)
6716 /* Don't define the builtin FP instructions
6717 if your machine is not IEEE. */
6718 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6721 lab1 = gen_label_rtx ();
6723 /* Test the result; if it is NaN, set errno=EDOM because
6724 the argument was not in the domain. */
6725 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6726 emit_jump_insn (gen_beq (lab1));
6730 #ifdef GEN_ERRNO_RTX
6731 rtx errno_rtx = GEN_ERRNO_RTX;
6734 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6737 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6740 /* We can't set errno=EDOM directly; let the library call do it.
6741 Pop the arguments right away in case the call gets deleted. */
6743 expand_call (exp, target, 0);
6750 /* Output the entire sequence. */
6751 insns = get_insns ();
6757 /* __builtin_apply_args returns block of memory allocated on
6758 the stack into which is stored the arg pointer, structure
6759 value address, static chain, and all the registers that might
6760 possibly be used in performing a function call. The code is
6761 moved to the start of the function so the incoming values are
6763 case BUILT_IN_APPLY_ARGS:
6764 /* Don't do __builtin_apply_args more than once in a function.
6765 Save the result of the first call and reuse it. */
6766 if (apply_args_value != 0)
6767 return apply_args_value;
6769 /* When this function is called, it means that registers must be
6770 saved on entry to this function. So we migrate the
6771 call to the first insn of this function. */
6776 temp = expand_builtin_apply_args ();
6780 apply_args_value = temp;
6782 /* Put the sequence after the NOTE that starts the function.
6783 If this is inside a SEQUENCE, make the outer-level insn
6784 chain current, so the code is placed at the start of the
6786 push_topmost_sequence ();
6787 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6788 pop_topmost_sequence ();
6792 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6793 FUNCTION with a copy of the parameters described by
6794 ARGUMENTS, and ARGSIZE. It returns a block of memory
6795 allocated on the stack into which is stored all the registers
6796 that might possibly be used for returning the result of a
6797 function. ARGUMENTS is the value returned by
6798 __builtin_apply_args. ARGSIZE is the number of bytes of
6799 arguments that must be copied. ??? How should this value be
6800 computed? We'll also need a safe worst case value for varargs
6802 case BUILT_IN_APPLY:
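      /* Typical use, as a sketch (not taken from these sources), in a
	 generic forwarding function:

	     void *args = __builtin_apply_args ();
	     void *result = __builtin_apply (f, args, 64);
	     __builtin_return (result);

	 where 64 stands for an assumed worst-case number of bytes of
	 arguments.  */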
6804 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6805 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6806 || TREE_CHAIN (arglist) == 0
6807 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6808 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6809 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6817 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6818 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6820 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6823 /* __builtin_return (RESULT) causes the function to return the
6824 value described by RESULT. RESULT is address of the block of
6825 memory returned by __builtin_apply. */
6826 case BUILT_IN_RETURN:
6828 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6829 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6830 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6831 NULL_RTX, VOIDmode, 0));
6834 case BUILT_IN_SAVEREGS:
6835 /* Don't do __builtin_saveregs more than once in a function.
6836 Save the result of the first call and reuse it. */
6837 if (saveregs_value != 0)
6838 return saveregs_value;
6840 /* When this function is called, it means that registers must be
6841 saved on entry to this function. So we migrate the
6842 call to the first insn of this function. */
6845 rtx valreg, saved_valreg;
6847 /* Now really call the function. `expand_call' does not call
6848 expand_builtin, so there is no danger of infinite recursion here. */
6851 #ifdef EXPAND_BUILTIN_SAVEREGS
6852 /* Do whatever the machine needs done in this case. */
6853 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6855 /* The register where the function returns its value
6856 is likely to have something else in it, such as an argument.
6857 So preserve that register around the call. */
6858 if (value_mode != VOIDmode)
6860 valreg = hard_libcall_value (value_mode);
6861 saved_valreg = gen_reg_rtx (value_mode);
6862 emit_move_insn (saved_valreg, valreg);
6865 /* Generate the call, putting the value in a pseudo. */
6866 temp = expand_call (exp, target, ignore);
6868 if (value_mode != VOIDmode)
6869 emit_move_insn (valreg, saved_valreg);
6875 saveregs_value = temp;
6877 /* Put the sequence after the NOTE that starts the function.
6878 If this is inside a SEQUENCE, make the outer-level insn
6879 chain current, so the code is placed at the start of the
6881 push_topmost_sequence ();
6882 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6883 pop_topmost_sequence ();
6887 /* __builtin_args_info (N) returns word N of the arg space info
6888 for the current function. The number and meanings of words
6889 are controlled by the definition of CUMULATIVE_ARGS. */
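    /* For example (a sketch; the layout is machine-dependent): on a
       target whose CUMULATIVE_ARGS is a single int counting argument
       registers used, `__builtin_args_info (0)' expands to that count
       as a compile-time constant.  */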
6890 case BUILT_IN_ARGS_INFO:
6892 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6894 int *word_ptr = (int *) &current_function_args_info;
6895 tree type, elts, result;
6897 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6898 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6899 __FILE__, __LINE__);
6903 tree arg = TREE_VALUE (arglist);
6904 if (TREE_CODE (arg) != INTEGER_CST)
6905 error ("argument of `__builtin_args_info' must be constant");
6908 int wordnum = TREE_INT_CST_LOW (arg);
6910 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6911 error ("argument of `__builtin_args_info' out of range");
6913 return GEN_INT (word_ptr[wordnum]);
6917 error ("missing argument in `__builtin_args_info'");
6922 for (i = 0; i < nwords; i++)
6923 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6925 type = build_array_type (integer_type_node,
6926 build_index_type (build_int_2 (nwords, 0)));
6927 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6928 TREE_CONSTANT (result) = 1;
6929 TREE_STATIC (result) = 1;
6930 result = build (INDIRECT_REF, build_pointer_type (type), result);
6931 TREE_CONSTANT (result) = 1;
6932 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6936 /* Return the address of the first anonymous stack arg. */
6937 case BUILT_IN_NEXT_ARG:
6939 tree fntype = TREE_TYPE (current_function_decl);
6940 if (!(TYPE_ARG_TYPES (fntype) != 0
6941 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6942 != void_type_node)))
6944 error ("`va_start' used in function with fixed args");
6949 return expand_binop (Pmode, add_optab,
6950 current_function_internal_arg_pointer,
6951 current_function_arg_offset_rtx,
6952 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6954 case BUILT_IN_CLASSIFY_TYPE:
6957 tree type = TREE_TYPE (TREE_VALUE (arglist));
6958 enum tree_code code = TREE_CODE (type);
6959 if (code == VOID_TYPE)
6960 return GEN_INT (void_type_class);
6961 if (code == INTEGER_TYPE)
6962 return GEN_INT (integer_type_class);
6963 if (code == CHAR_TYPE)
6964 return GEN_INT (char_type_class);
6965 if (code == ENUMERAL_TYPE)
6966 return GEN_INT (enumeral_type_class);
6967 if (code == BOOLEAN_TYPE)
6968 return GEN_INT (boolean_type_class);
6969 if (code == POINTER_TYPE)
6970 return GEN_INT (pointer_type_class);
6971 if (code == REFERENCE_TYPE)
6972 return GEN_INT (reference_type_class);
6973 if (code == OFFSET_TYPE)
6974 return GEN_INT (offset_type_class);
6975 if (code == REAL_TYPE)
6976 return GEN_INT (real_type_class);
6977 if (code == COMPLEX_TYPE)
6978 return GEN_INT (complex_type_class);
6979 if (code == FUNCTION_TYPE)
6980 return GEN_INT (function_type_class);
6981 if (code == METHOD_TYPE)
6982 return GEN_INT (method_type_class);
6983 if (code == RECORD_TYPE)
6984 return GEN_INT (record_type_class);
6985 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6986 return GEN_INT (union_type_class);
6987 if (code == ARRAY_TYPE)
6988 return GEN_INT (array_type_class);
6989 if (code == STRING_TYPE)
6990 return GEN_INT (string_type_class);
6991 if (code == SET_TYPE)
6992 return GEN_INT (set_type_class);
6993 if (code == FILE_TYPE)
6994 return GEN_INT (file_type_class);
6995 if (code == LANG_TYPE)
6996 return GEN_INT (lang_type_class);
6998 return GEN_INT (no_type_class);
7000 case BUILT_IN_CONSTANT_P:
7004 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7005 ? const1_rtx : const0_rtx);
7007 case BUILT_IN_FRAME_ADDRESS:
7008 /* The argument must be a nonnegative integer constant.
7009 It counts the number of frames to scan up the stack.
7010 The value is the address of that frame. */
7011 case BUILT_IN_RETURN_ADDRESS:
7012 /* The argument must be a nonnegative integer constant.
7013 It counts the number of frames to scan up the stack.
7014 The value is the return address saved in that frame. */
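      /* For example (illustrative only): `__builtin_return_address (0)'
	 yields the return address of the current function, while
	 `__builtin_frame_address (1)' yields the frame address of the
	 caller, found by following the dynamic chain once.  */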
7016 /* Warning about missing arg was already issued. */
7018 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7020 error ("invalid arg to `__builtin_return_address'");
7023 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
7025 error ("invalid arg to `__builtin_return_address'");
7030 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7031 rtx tem = frame_pointer_rtx;
7034 /* Some machines need special handling before we can access arbitrary
7035 frames. For example, on the sparc, we must first flush all
7036 register windows to the stack. */
7037 #ifdef SETUP_FRAME_ADDRESSES
7038 SETUP_FRAME_ADDRESSES ();
7041 /* On the sparc, the return address is not in the frame, it is
7042 in a register. There is no way to access it off of the current
7043 frame pointer, but it can be accessed off the previous frame
7044 pointer by reading the value from the register window save
7046 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7047 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7051 /* Scan back COUNT frames to the specified frame. */
7052 for (i = 0; i < count; i++)
7054 /* Assume the dynamic chain pointer is in the word that
7055 the frame address points to, unless otherwise specified. */
7056 #ifdef DYNAMIC_CHAIN_ADDRESS
7057 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7059 tem = memory_address (Pmode, tem);
7060 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7063 /* For __builtin_frame_address, return what we've got. */
7064 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7067 /* For __builtin_return_address,
7068 get the return address from that frame. */
7069 #ifdef RETURN_ADDR_RTX
7070 return RETURN_ADDR_RTX (count, tem);
7072 tem = memory_address (Pmode,
7073 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7074 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7078 case BUILT_IN_ALLOCA:
7080 /* Arg could be non-integer if user redeclared this fcn wrong. */
7081 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7083 current_function_calls_alloca = 1;
7084 /* Compute the argument. */
7085 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7087 /* Allocate the desired space. */
7088 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7090 /* Record the new stack level for nonlocal gotos. */
7091 if (nonlocal_goto_handler_slot != 0)
7092 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7096 /* If not optimizing, call the library function. */
7101 /* Arg could be non-integer if user redeclared this fcn wrong. */
7102 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7105 /* Compute the argument. */
7106 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7107 /* Compute ffs, into TARGET if possible.
7108 Set TARGET to wherever the result comes back. */
7109 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7110 ffs_optab, op0, target, 1);
7115 case BUILT_IN_STRLEN:
7116 /* If not optimizing, call the library function. */
7121 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7122 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7126 tree src = TREE_VALUE (arglist);
7127 tree len = c_strlen (src);
7130 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7132 rtx result, src_rtx, char_rtx;
7133 enum machine_mode insn_mode = value_mode, char_mode;
7134 enum insn_code icode;
7136 /* If the length is known, just return it. */
7138 return expand_expr (len, target, mode, 0);
7140 /* If SRC is not a pointer type, don't do this operation inline. */
7144 /* Call a function if we can't compute strlen in the right mode. */
7146 while (insn_mode != VOIDmode)
7148 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7149 if (icode != CODE_FOR_nothing)
7152 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7154 if (insn_mode == VOIDmode)
7157 /* Make a place to write the result of the instruction. */
7160 && GET_CODE (result) == REG
7161 && GET_MODE (result) == insn_mode
7162 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7163 result = gen_reg_rtx (insn_mode);
7165 /* Make sure the operands are acceptable to the predicates. */
7167 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7168 result = gen_reg_rtx (insn_mode);
7170 src_rtx = memory_address (BLKmode,
7171 expand_expr (src, NULL_RTX, Pmode,
7173 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7174 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7176 char_rtx = const0_rtx;
7177 char_mode = insn_operand_mode[(int)icode][2];
7178 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7179 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7181 emit_insn (GEN_FCN (icode) (result,
7182 gen_rtx (MEM, BLKmode, src_rtx),
7183 char_rtx, GEN_INT (align)));
7185 /* Return the value in the proper mode for this function. */
7186 if (GET_MODE (result) == value_mode)
7188 else if (target != 0)
7190 convert_move (target, result, 0);
7194 return convert_to_mode (value_mode, result, 0);
7197 case BUILT_IN_STRCPY:
7198 /* If not optimizing, call the library function. */
7203 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7204 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7205 || TREE_CHAIN (arglist) == 0
7206 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7210 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7215 len = size_binop (PLUS_EXPR, len, integer_one_node);
7217 chainon (arglist, build_tree_list (NULL_TREE, len));
7221 case BUILT_IN_MEMCPY:
7222 /* If not optimizing, call the library function. */
7227 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7228 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7229 || TREE_CHAIN (arglist) == 0
7230 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7231 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7232 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7236 tree dest = TREE_VALUE (arglist);
7237 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7238 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7241 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7243 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7244 rtx dest_rtx, dest_mem, src_mem;
7246 /* If either SRC or DEST is not a pointer type, don't do
7247 this operation in-line. */
7248 if (src_align == 0 || dest_align == 0)
7250 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7251 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7255 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7256 dest_mem = gen_rtx (MEM, BLKmode,
7257 memory_address (BLKmode, dest_rtx));
7258 src_mem = gen_rtx (MEM, BLKmode,
7259 memory_address (BLKmode,
7260 expand_expr (src, NULL_RTX,
7264 /* Copy word part most expediently. */
7265 emit_block_move (dest_mem, src_mem,
7266 expand_expr (len, NULL_RTX, VOIDmode, 0),
7267 MIN (src_align, dest_align));
7271 /* These comparison functions need an instruction that returns an actual
7272 index. An ordinary compare that just sets the condition codes
7274 #ifdef HAVE_cmpstrsi
7275 case BUILT_IN_STRCMP:
7276 /* If not optimizing, call the library function. */
7281 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7282 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7283 || TREE_CHAIN (arglist) == 0
7284 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7286 else if (!HAVE_cmpstrsi)
7289 tree arg1 = TREE_VALUE (arglist);
7290 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7294 len = c_strlen (arg1);
7296 len = size_binop (PLUS_EXPR, integer_one_node, len);
7297 len2 = c_strlen (arg2);
7299 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7301 /* If we don't have a constant length for the first, use the length
7302 of the second, if we know it. We don't require a constant for
7303 this case; some cost analysis could be done if both are available
7304 but neither is constant. For now, assume they're equally cheap.
7306 If both strings have constant lengths, use the smaller. This
7307 could arise if optimization results in strcpy being called with
7308 two fixed strings, or if the code was machine-generated. We should
7309 add some code to the `memcmp' handler below to deal with such
7310 situations, someday. */
7311 if (!len || TREE_CODE (len) != INTEGER_CST)
7318 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7320 if (tree_int_cst_lt (len2, len))
7324 chainon (arglist, build_tree_list (NULL_TREE, len));
7328 case BUILT_IN_MEMCMP:
7329 /* If not optimizing, call the library function. */
7334 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7335 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7336 || TREE_CHAIN (arglist) == 0
7337 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7338 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7339 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7341 else if (!HAVE_cmpstrsi)
7344 tree arg1 = TREE_VALUE (arglist);
7345 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7346 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7350 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7352 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7353 enum machine_mode insn_mode
7354 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7356 /* If we don't have POINTER_TYPE, call the function. */
7357 if (arg1_align == 0 || arg2_align == 0)
7359 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7360 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7364 /* Make a place to write the result of the instruction. */
7367 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7368 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7369 result = gen_reg_rtx (insn_mode);
7371 emit_insn (gen_cmpstrsi (result,
7372 gen_rtx (MEM, BLKmode,
7373 expand_expr (arg1, NULL_RTX, Pmode,
7375 gen_rtx (MEM, BLKmode,
7376 expand_expr (arg2, NULL_RTX, Pmode,
7378 expand_expr (len, NULL_RTX, VOIDmode, 0),
7379 GEN_INT (MIN (arg1_align, arg2_align))));
7381 /* Return the value in the proper mode for this function. */
7382 mode = TYPE_MODE (TREE_TYPE (exp));
7383 if (GET_MODE (result) == mode)
7385 else if (target != 0)
7387 convert_move (target, result, 0);
7391 return convert_to_mode (mode, result, 0);
7394 case BUILT_IN_STRCMP:
7395 case BUILT_IN_MEMCMP:
7399 default: /* just do library call, if unknown builtin */
7400 error ("built-in function `%s' not currently supported",
7401 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7404 /* The switch statement above can drop through to cause the function
7405 to be called normally. */
7407 return expand_call (exp, target, ignore);
7410 /* Built-in functions to perform an untyped call and return. */
7412 /* For each register that may be used for calling a function, this
7413 gives a mode used to copy the register's value. VOIDmode indicates
7414 the register is not used for calling a function. If the machine
7415 has register windows, this gives only the outbound registers.
7416 INCOMING_REGNO gives the corresponding inbound register. */
7417 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7419 /* For each register that may be used for returning values, this gives
7420 a mode used to copy the register's value. VOIDmode indicates the
7421 register is not used for returning values. If the machine has
7422 register windows, this gives only the outbound registers.
7423 INCOMING_REGNO gives the corresponding inbound register. */
7424 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7426 /* For each register that may be used for calling a function, this
7427 gives the offset of that register into the block returned by
7428 __builtin_apply_args. 0 indicates that the register is not
7429 used for calling a function. */
7430 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7432 /* Return the offset of register REGNO into the block returned by
7433 __builtin_apply_args. This is not declared static, since it is
7434 needed in objc-act.c. */
7437 apply_args_register_offset (regno)
7442 /* Arguments are always put in outgoing registers (in the argument
7443 block) if such make sense. */
7444 #ifdef OUTGOING_REGNO
7445 regno = OUTGOING_REGNO(regno);
7447 return apply_args_reg_offset[regno];
7450 /* Return the size required for the block returned by __builtin_apply_args,
7451 and initialize apply_args_mode. */
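/* The block laid out here is, roughly: the incoming arg pointer (Pmode),
   then the structure value address if struct_value_rtx is set, then the
   value of every register for which FUNCTION_ARG_REGNO_P is true, each
   padded out to the alignment of the mode chosen for it below.  */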
7456 static int size = -1;
7458 enum machine_mode mode;
7460 /* The values computed by this function never change. */
7463 /* The first value is the incoming arg-pointer. */
7464 size = GET_MODE_SIZE (Pmode);
7466 /* The second value is the structure value address unless this is
7467 passed as an "invisible" first argument. */
7468 if (struct_value_rtx)
7469 size += GET_MODE_SIZE (Pmode);
7471 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7472 if (FUNCTION_ARG_REGNO_P (regno))
7474 /* Search for the proper mode for copying this register's
7475 value. I'm not sure this is right, but it works so far. */
7476 enum machine_mode best_mode = VOIDmode;
7478 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7480 mode = GET_MODE_WIDER_MODE (mode))
7481 if (HARD_REGNO_MODE_OK (regno, mode)
7482 && HARD_REGNO_NREGS (regno, mode) == 1)
7485 if (best_mode == VOIDmode)
7486 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7488 mode = GET_MODE_WIDER_MODE (mode))
7489 if (HARD_REGNO_MODE_OK (regno, mode)
7490 && (mov_optab->handlers[(int) mode].insn_code
7491 != CODE_FOR_nothing))
7495 if (mode == VOIDmode)
7498 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7499 if (size % align != 0)
7500 size = CEIL (size, align) * align;
7501 apply_args_reg_offset[regno] = size;
7502 size += GET_MODE_SIZE (mode);
7503 apply_args_mode[regno] = mode;
7507 apply_args_mode[regno] = VOIDmode;
7508 apply_args_reg_offset[regno] = 0;
7514 /* Return the size required for the block returned by __builtin_apply,
7515 and initialize apply_result_mode. */
7518 apply_result_size ()
7520 static int size = -1;
7522 enum machine_mode mode;
7524 /* The values computed by this function never change. */
7529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7530 if (FUNCTION_VALUE_REGNO_P (regno))
7532 /* Search for the proper mode for copying this register's
7533 value. I'm not sure this is right, but it works so far. */
7534 enum machine_mode best_mode = VOIDmode;
7536 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7538 mode = GET_MODE_WIDER_MODE (mode))
7539 if (HARD_REGNO_MODE_OK (regno, mode))
7542 if (best_mode == VOIDmode)
7543 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7545 mode = GET_MODE_WIDER_MODE (mode))
7546 if (HARD_REGNO_MODE_OK (regno, mode)
7547 && (mov_optab->handlers[(int) mode].insn_code
7548 != CODE_FOR_nothing))
7552 if (mode == VOIDmode)
7555 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7556 if (size % align != 0)
7557 size = CEIL (size, align) * align;
7558 size += GET_MODE_SIZE (mode);
7559 apply_result_mode[regno] = mode;
7562 apply_result_mode[regno] = VOIDmode;
7564 /* Allow targets that use untyped_call and untyped_return to override
7565 the size so that machine-specific information can be stored here. */
7566 #ifdef APPLY_RESULT_SIZE
7567 size = APPLY_RESULT_SIZE;
7573 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7574 /* Create a vector describing the result block RESULT. If SAVEP is true,
7575 the result block is used to save the values; otherwise it is used to
7576 restore the values. */
7579 result_vector (savep, result)
7583 int regno, size, align, nelts;
7584 enum machine_mode mode;
7586 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7589 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7590 if ((mode = apply_result_mode[regno]) != VOIDmode)
7592 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7593 if (size % align != 0)
7594 size = CEIL (size, align) * align;
7595 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7596 mem = change_address (result, mode,
7597 plus_constant (XEXP (result, 0), size));
7598 savevec[nelts++] = (savep
7599 ? gen_rtx (SET, VOIDmode, mem, reg)
7600 : gen_rtx (SET, VOIDmode, reg, mem));
7601 size += GET_MODE_SIZE (mode);
7603 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7605 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7607 /* Save the state required to perform an untyped call with the same
7608 arguments as were passed to the current function. */
7611 expand_builtin_apply_args ()
7614 int size, align, regno;
7615 enum machine_mode mode;
7617 /* Create a block where the arg-pointer, structure value address,
7618 and argument registers can be saved. */
7619 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7621 /* Walk past the arg-pointer and structure value address. */
7622 size = GET_MODE_SIZE (Pmode);
7623 if (struct_value_rtx)
7624 size += GET_MODE_SIZE (Pmode);
7626 /* Save each register used in calling a function to the block. */
7627 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7628 if ((mode = apply_args_mode[regno]) != VOIDmode)
7630 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7631 if (size % align != 0)
7632 size = CEIL (size, align) * align;
7633 emit_move_insn (change_address (registers, mode,
7634 plus_constant (XEXP (registers, 0),
7636 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7637 size += GET_MODE_SIZE (mode);
7640 /* Save the arg pointer to the block. */
7641 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7642 copy_to_reg (virtual_incoming_args_rtx));
7643 size = GET_MODE_SIZE (Pmode);
7645 /* Save the structure value address unless this is passed as an
7646 "invisible" first argument. */
7647 if (struct_value_incoming_rtx)
7649 emit_move_insn (change_address (registers, Pmode,
7650 plus_constant (XEXP (registers, 0),
7652 copy_to_reg (struct_value_incoming_rtx));
7653 size += GET_MODE_SIZE (Pmode);
7656 /* Return the address of the block. */
7657 return copy_addr_to_reg (XEXP (registers, 0));
7660 /* Perform an untyped call and save the state required to perform an
7661 untyped return of whatever value was returned by the given function. */
7664 expand_builtin_apply (function, arguments, argsize)
7665 rtx function, arguments, argsize;
7667 int size, align, regno;
7668 enum machine_mode mode;
7669 rtx incoming_args, result, reg, dest, call_insn;
7670 rtx old_stack_level = 0;
7673 /* Create a block where the return registers can be saved. */
7674 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7676 /* ??? The argsize value should be adjusted here. */
7678 /* Fetch the arg pointer from the ARGUMENTS block. */
7679 incoming_args = gen_reg_rtx (Pmode);
7680 emit_move_insn (incoming_args,
7681 gen_rtx (MEM, Pmode, arguments));
7682 #ifndef STACK_GROWS_DOWNWARD
7683 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7684 incoming_args, 0, OPTAB_LIB_WIDEN);
7687 /* Perform postincrements before actually calling the function. */
7690 /* Push a new argument block and copy the arguments. */
7691 do_pending_stack_adjust ();
7692 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7694 /* Push a block of memory onto the stack to store the memory arguments.
7695 Save the address in a register, and copy the memory arguments. ??? I
7696 haven't figured out how the calling convention macros affect this,
7697 but it's likely that the source and/or destination addresses in
7698 the block copy will need updating in machine specific ways. */
7699 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7700 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7701 gen_rtx (MEM, BLKmode, incoming_args),
7703 PARM_BOUNDARY / BITS_PER_UNIT);
7705 /* Refer to the argument block. */
7707 arguments = gen_rtx (MEM, BLKmode, arguments);
7709 /* Walk past the arg-pointer and structure value address. */
7710 size = GET_MODE_SIZE (Pmode);
7711 if (struct_value_rtx)
7712 size += GET_MODE_SIZE (Pmode);
7714 /* Restore each of the registers previously saved. Make USE insns
7715 for each of these registers for use in making the call. */
7716 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7717 if ((mode = apply_args_mode[regno]) != VOIDmode)
7719 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7720 if (size % align != 0)
7721 size = CEIL (size, align) * align;
7722 reg = gen_rtx (REG, mode, regno);
7723 emit_move_insn (reg,
7724 change_address (arguments, mode,
7725 plus_constant (XEXP (arguments, 0),
7728 push_to_sequence (use_insns);
7729 emit_insn (gen_rtx (USE, VOIDmode, reg));
7730 use_insns = get_insns ();
7732 size += GET_MODE_SIZE (mode);
7735 /* Restore the structure value address unless this is passed as an
7736 "invisible" first argument. */
7737 size = GET_MODE_SIZE (Pmode);
7738 if (struct_value_rtx)
7740 rtx value = gen_reg_rtx (Pmode);
7741 emit_move_insn (value,
7742 change_address (arguments, Pmode,
7743 plus_constant (XEXP (arguments, 0),
7745 emit_move_insn (struct_value_rtx, value);
7746 if (GET_CODE (struct_value_rtx) == REG)
7748 push_to_sequence (use_insns);
7749 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7750 use_insns = get_insns ();
7753 size += GET_MODE_SIZE (Pmode);
7756 /* All arguments and registers used for the call are set up by now! */
7757 function = prepare_call_address (function, NULL_TREE, &use_insns);
7759 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7760 and we don't want to load it into a register as an optimization,
7761 because prepare_call_address already did it if it should be done. */
7762 if (GET_CODE (function) != SYMBOL_REF)
7763 function = memory_address (FUNCTION_MODE, function);
7765 /* Generate the actual call instruction and save the return value. */
7766 #ifdef HAVE_untyped_call
7767 if (HAVE_untyped_call)
7768 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7769 result, result_vector (1, result)));
7772 #ifdef HAVE_call_value
7773 if (HAVE_call_value)
7777 /* Locate the unique return register. It is not possible to
7778 express a call that sets more than one return register using
7779 call_value; use untyped_call for that. In fact, untyped_call
7780 only needs to save the return registers in the given block. */
7781 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7782 if ((mode = apply_result_mode[regno]) != VOIDmode)
7785 abort (); /* HAVE_untyped_call required. */
7786 valreg = gen_rtx (REG, mode, regno);
7789 emit_call_insn (gen_call_value (valreg,
7790 gen_rtx (MEM, FUNCTION_MODE, function),
7791 const0_rtx, NULL_RTX, const0_rtx));
7793 emit_move_insn (change_address (result, GET_MODE (valreg),
7801 /* Find the CALL insn we just emitted and write the USE insns before it. */
7802 for (call_insn = get_last_insn ();
7803 call_insn && GET_CODE (call_insn) != CALL_INSN;
7804 call_insn = PREV_INSN (call_insn))
7810 /* Put the USE insns before the CALL. */
7811 emit_insns_before (use_insns, call_insn);
7813 /* Restore the stack. */
7814 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7816 /* Return the address of the result block. */
7817 return copy_addr_to_reg (XEXP (result, 0));
7820 /* Perform an untyped return. */
7823 expand_builtin_return (result)
7826 int size, align, regno;
7827 enum machine_mode mode;
7831 apply_result_size ();
7832 result = gen_rtx (MEM, BLKmode, result);
7834 #ifdef HAVE_untyped_return
7835 if (HAVE_untyped_return)
7837 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7843 /* Restore the return value and note that each value is used. */
7845 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7846 if ((mode = apply_result_mode[regno]) != VOIDmode)
7848 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7849 if (size % align != 0)
7850 size = CEIL (size, align) * align;
7851 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7852 emit_move_insn (reg,
7853 change_address (result, mode,
7854 plus_constant (XEXP (result, 0),
7857 push_to_sequence (use_insns);
7858 emit_insn (gen_rtx (USE, VOIDmode, reg));
7859 use_insns = get_insns ();
7861 size += GET_MODE_SIZE (mode);
7864 /* Put the USE insns before the return. */
7865 emit_insns (use_insns);
7867 /* Return whatever values were restored by jumping directly to the end
7869 expand_null_return ();
7872 /* Expand code for a post- or pre- increment or decrement
7873 and return the RTX for the result.
7874 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
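/* For example (illustrative only): for `i++' used as a value the RTX
   returned is the old value of `i' (the increment itself may be queued),
   while for `++i' the RTX returned is the incremented value.  */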
7877 expand_increment (exp, post)
7881 register rtx op0, op1;
7882 register rtx temp, value;
7883 register tree incremented = TREE_OPERAND (exp, 0);
7884 optab this_optab = add_optab;
7886 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7887 int op0_is_copy = 0;
7888 int single_insn = 0;
7889 /* 1 means we can't store into OP0 directly,
7890 because it is a subreg narrower than a word,
7891 and we don't dare clobber the rest of the word. */
7894 if (output_bytecode)
7896 bc_expand_expr (exp);
7900 /* Stabilize any component ref that might need to be
7901 evaluated more than once below. */
7903 || TREE_CODE (incremented) == BIT_FIELD_REF
7904 || (TREE_CODE (incremented) == COMPONENT_REF
7905 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7906 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7907 incremented = stabilize_reference (incremented);
7908 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7909 ones into save exprs so that they don't accidentally get evaluated
7910 more than once by the code below. */
7911 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7912 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7913 incremented = save_expr (incremented);
7915 /* Compute the operands as RTX.
7916 Note whether OP0 is the actual lvalue or a copy of it:
7917 I believe it is a copy iff it is a register or subreg
7918 and insns were generated in computing it. */
7920 temp = get_last_insn ();
7921 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7923 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7924 in place but instead must do sign- or zero-extension during assignment,
7925 so we copy it into a new register and let the code below use it as
7928 Note that we can safely modify this SUBREG since it is known not to be
7929 shared (it was made by the expand_expr call above). */
7931 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7932 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7933 else if (GET_CODE (op0) == SUBREG
7934 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7937 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7938 && temp != get_last_insn ());
7939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7941 /* Decide whether incrementing or decrementing. */
7942 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7943 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7944 this_optab = sub_optab;
7946 /* Convert decrement by a constant into a negative increment. */
7947 if (this_optab == sub_optab
7948 && GET_CODE (op1) == CONST_INT)
7950 op1 = GEN_INT (- INTVAL (op1));
7951 this_optab = add_optab;
7954 /* For a preincrement, see if we can do this with a single instruction. */
7957 icode = (int) this_optab->handlers[(int) mode].insn_code;
7958 if (icode != (int) CODE_FOR_nothing
7959 /* Make sure that OP0 is valid for operands 0 and 1
7960 of the insn we want to queue. */
7961 && (*insn_operand_predicate[icode][0]) (op0, mode)
7962 && (*insn_operand_predicate[icode][1]) (op0, mode)
7963 && (*insn_operand_predicate[icode][2]) (op1, mode))
7967 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7968 then we cannot just increment OP0. We must therefore contrive to
7969 increment the original value. Then, for postincrement, we can return
7970 OP0 since it is a copy of the old value. For preincrement, expand here
7971 unless we can do it with a single insn.
7973 Likewise if storing directly into OP0 would clobber high bits
7974 we need to preserve (bad_subreg). */
7975 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7977 /* This is the easiest way to increment the value wherever it is.
7978 Problems with multiple evaluation of INCREMENTED are prevented
7979 because either (1) it is a component_ref or preincrement,
7980 in which case it was stabilized above, or (2) it is an array_ref
7981 with constant index in an array in a register, which is
7982 safe to reevaluate. */
7983 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7984 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7985 ? MINUS_EXPR : PLUS_EXPR),
7988 TREE_OPERAND (exp, 1));
7989 temp = expand_assignment (incremented, newexp, ! post, 0);
7990 return post ? op0 : temp;
7995 /* We have a true reference to the value in OP0.
7996 If there is an insn to add or subtract in this mode, queue it.
7997 Queueing the increment insn avoids the register shuffling
7998 that often results if we must increment now and first save
7999 the old value for subsequent use. */
8001 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8002 op0 = stabilize (op0);
8005 icode = (int) this_optab->handlers[(int) mode].insn_code;
8006 if (icode != (int) CODE_FOR_nothing
8007 /* Make sure that OP0 is valid for operands 0 and 1
8008 of the insn we want to queue. */
8009 && (*insn_operand_predicate[icode][0]) (op0, mode)
8010 && (*insn_operand_predicate[icode][1]) (op0, mode))
8012 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8013 op1 = force_reg (mode, op1);
8015 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8019 /* Preincrement, or we can't increment with one simple insn. */
8021 /* Save a copy of the value before inc or dec, to return it later. */
8022 temp = value = copy_to_reg (op0);
8024 /* Arrange to return the incremented value. */
8025 /* Copy the rtx because expand_binop will protect from the queue,
8026 and the results of that would be invalid for us to return
8027 if our caller does emit_queue before using our result. */
8028 temp = copy_rtx (value = op0);
8030 /* Increment however we can. */
8031 op1 = expand_binop (mode, this_optab, value, op1, op0,
8032 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8033 /* Make sure the value is stored into OP0. */
8035 emit_move_insn (op0, op1);
8040 /* Expand all function calls contained within EXP, innermost ones first.
8041 But don't look within expressions that have sequence points.
8042 For each CALL_EXPR, record the rtx for its value
8043 in the CALL_EXPR_RTL field. */
8046 preexpand_calls (exp)
8049 register int nops, i;
8050 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8052 if (! do_preexpand_calls)
8055 /* Only expressions and references can contain calls. */
8057 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8060 switch (TREE_CODE (exp))
8063 /* Do nothing if already expanded. */
8064 if (CALL_EXPR_RTL (exp) != 0)
8067 /* Do nothing to built-in functions. */
8068 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8069 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8070 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8071 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8076 case TRUTH_ANDIF_EXPR:
8077 case TRUTH_ORIF_EXPR:
8078 /* If we find one of these, then we can be sure
8079 the adjust will be done for it (since it makes jumps).
8080 Do it now, so that if this is inside an argument
8081 of a function, we don't get the stack adjustment
8082 after some other args have already been pushed. */
8083 do_pending_stack_adjust ();
8088 case WITH_CLEANUP_EXPR:
8092 if (SAVE_EXPR_RTL (exp) != 0)
8096 nops = tree_code_length[(int) TREE_CODE (exp)];
8097 for (i = 0; i < nops; i++)
8098 if (TREE_OPERAND (exp, i) != 0)
8100 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8101 if (type == 'e' || type == '<' || type == '1' || type == '2'
8103 preexpand_calls (TREE_OPERAND (exp, i));
8107 /* At the start of a function, record that we have no previously-pushed
8108 arguments waiting to be popped. */
8111 init_pending_stack_adjust ()
8113 pending_stack_adjust = 0;
8116 /* When exiting from function, if safe, clear out any pending stack adjust
8117 so the adjustment won't get done. */
8120 clear_pending_stack_adjust ()
8122 #ifdef EXIT_IGNORE_STACK
8123 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8124 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8125 && ! flag_inline_functions)
8126 pending_stack_adjust = 0;
8130 /* Pop any previously-pushed arguments that have not been popped yet. */
8133 do_pending_stack_adjust ()
8135 if (inhibit_defer_pop == 0)
8137 if (pending_stack_adjust != 0)
8138 adjust_stack (GEN_INT (pending_stack_adjust));
8139 pending_stack_adjust = 0;
8143 /* Expand all cleanups up to OLD_CLEANUPS.
8144 Needed here, and also for language-dependent calls. */
8147 expand_cleanups_to (old_cleanups)
8150 while (cleanups_this_call != old_cleanups)
8152 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8153 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8157 /* Expand conditional expressions. */
8159 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8160 LABEL is an rtx of code CODE_LABEL, in this function and all the
8164 jumpifnot (exp, label)
8168 do_jump (exp, label, NULL_RTX);
8171 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8178 do_jump (exp, NULL_RTX, label);
8181 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8182 the result is zero, or IF_TRUE_LABEL if the result is one.
8183 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8184 meaning fall through in that case.
8186 do_jump always does any pending stack adjust except when it does not
8187 actually perform a jump. An example where there is no jump
8188 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8190 This function is responsible for optimizing cases such as
8191 &&, || and comparison operators in EXP. */
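/* For example (illustrative only): for EXP of the form `a && b', this
   emits a test of `a' that jumps to IF_FALSE_LABEL when `a' is zero,
   followed by a test of `b'; `b' is never evaluated when `a' is zero.  */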
8194 do_jump (exp, if_false_label, if_true_label)
8196 rtx if_false_label, if_true_label;
8198 register enum tree_code code = TREE_CODE (exp);
8199 /* Some cases need to create a label to jump to
8200 in order to properly fall through.
8201 These cases set DROP_THROUGH_LABEL nonzero. */
8202 rtx drop_through_label = 0;
8216 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8222 /* This is not true with #pragma weak */
8224 /* The address of something can never be zero. */
8226 emit_jump (if_true_label);
8231 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8232 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8233 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8236 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
8238 if ((TYPE_PRECISION (TREE_TYPE (exp))
8239 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8241 case NON_LVALUE_EXPR:
8242 case REFERENCE_EXPR:
8247 /* These cannot change zero->non-zero or vice versa. */
8248 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8252 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8253 a test, and can be longer if the test is eliminated. */
8255 /* Reduce to minus. */
8256 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8257 TREE_OPERAND (exp, 0),
8258 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8259 TREE_OPERAND (exp, 1))));
8260 /* Process as MINUS. */
8264 /* Non-zero iff operands of minus differ. */
8265 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8266 TREE_OPERAND (exp, 0),
8267 TREE_OPERAND (exp, 1)),
8272 /* If we are AND'ing with a small constant, do this comparison in the
8273 smallest type that fits. If the machine doesn't have comparisons
8274 that small, it will be converted back to the wider comparison.
8275 This helps if we are testing the sign bit of a narrower object.
8276 combine can't do this for us because it can't know whether a
8277 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8279 if (! SLOW_BYTE_ACCESS
8280 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8281 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8282 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8283 && (type = type_for_size (i + 1, 1)) != 0
8284 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8285 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8286 != CODE_FOR_nothing))
8288 do_jump (convert (type, exp), if_false_label, if_true_label);
8293 case TRUTH_NOT_EXPR:
8294 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8297 case TRUTH_ANDIF_EXPR:
8298 if (if_false_label == 0)
8299 if_false_label = drop_through_label = gen_label_rtx ();
8300 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8301 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8304 case TRUTH_ORIF_EXPR:
8305 if (if_true_label == 0)
8306 if_true_label = drop_through_label = gen_label_rtx ();
8307 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8308 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8313 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8317 do_pending_stack_adjust ();
8318 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8325 int bitsize, bitpos, unsignedp;
8326 enum machine_mode mode;
8331 /* Get description of this reference. We don't actually care
8332 about the underlying object here. */
8333 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8334 &mode, &unsignedp, &volatilep);
8336 type = type_for_size (bitsize, unsignedp);
8337 if (! SLOW_BYTE_ACCESS
8338 && type != 0 && bitsize >= 0
8339 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8340 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8341 != CODE_FOR_nothing))
8343 do_jump (convert (type, exp), if_false_label, if_true_label);
8350 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8351 if (integer_onep (TREE_OPERAND (exp, 1))
8352 && integer_zerop (TREE_OPERAND (exp, 2)))
8353 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8355 else if (integer_zerop (TREE_OPERAND (exp, 1))
8356 && integer_onep (TREE_OPERAND (exp, 2)))
8357 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8361 register rtx label1 = gen_label_rtx ();
8362 drop_through_label = gen_label_rtx ();
8363 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8364 /* Now the THEN-expression. */
8365 do_jump (TREE_OPERAND (exp, 1),
8366 if_false_label ? if_false_label : drop_through_label,
8367 if_true_label ? if_true_label : drop_through_label);
8368 /* In case the do_jump just above never jumps. */
8369 do_pending_stack_adjust ();
8370 emit_label (label1);
8371 /* Now the ELSE-expression. */
8372 do_jump (TREE_OPERAND (exp, 2),
8373 if_false_label ? if_false_label : drop_through_label,
8374 if_true_label ? if_true_label : drop_through_label);
8379 if (integer_zerop (TREE_OPERAND (exp, 1)))
8380 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8381 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8384 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8385 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8386 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8387 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8389 comparison = compare (exp, EQ, EQ);
8393 if (integer_zerop (TREE_OPERAND (exp, 1)))
8394 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8395 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8398 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8399 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8400 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8401 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8403 comparison = compare (exp, NE, NE);
8407 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8409 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8410 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8412 comparison = compare (exp, LT, LTU);
8416 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8418 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8419 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8421 comparison = compare (exp, LE, LEU);
8425 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8427 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8428 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8430 comparison = compare (exp, GT, GTU);
8434 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8436 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8437 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8439 comparison = compare (exp, GE, GEU);
8444 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8446 /* This is not needed any more and causes poor code since it causes
8447 comparisons and tests from non-SI objects to have different code sequences. */
8449 /* Copy to register to avoid generating bad insns by cse
8450 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8451 if (!cse_not_expected && GET_CODE (temp) == MEM)
8452 temp = copy_to_reg (temp);
8454 do_pending_stack_adjust ();
8455 if (GET_CODE (temp) == CONST_INT)
8456 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8457 else if (GET_CODE (temp) == LABEL_REF)
8458 comparison = const_true_rtx;
8459 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8460 && !can_compare_p (GET_MODE (temp)))
8461 /* Note swapping the labels gives us not-equal. */
8462 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8463 else if (GET_MODE (temp) != VOIDmode)
8464 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8465 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8466 GET_MODE (temp), NULL_RTX, 0);
8471 /* Do any postincrements in the expression that was tested. */
8474 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8475 straight into a conditional jump instruction as the jump condition.
8476 Otherwise, all the work has been done already. */
8478 if (comparison == const_true_rtx)
8481 emit_jump (if_true_label);
8483 else if (comparison == const0_rtx)
8486 emit_jump (if_false_label);
8488 else if (comparison)
8489 do_jump_for_compare (comparison, if_false_label, if_true_label);
8491 if (drop_through_label)
8493 /* If do_jump produces code that might be jumped around,
8494 do any stack adjusts from that code, before the place
8495 where control merges in. */
8496 do_pending_stack_adjust ();
8497 emit_label (drop_through_label);
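/* Illustrative sketch, not part of the original source: the jump structure
   produced by the TRUTH_ANDIF_EXPR / TRUTH_ORIF_EXPR cases of do_jump
   above, written as ordinary C with gotos.  Operand 1 is reached only when
   operand 0 did not already decide the result.  */

static int
sketch_jump_andif (int a, int b)
{
  /* `a && b': a false first operand falls straight to the false label.  */
  if (!a)
    goto false_label;
  if (!b)
    goto false_label;
  return 1;
 false_label:
  return 0;
}

static int
sketch_jump_orif (int a, int b)
{
  /* `a || b': a true first operand jumps straight to the true label.  */
  if (a)
    goto true_label;
  if (b)
    goto true_label;
  return 0;
 true_label:
  return 1;
}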
8501 /* Given a comparison expression EXP for values too wide to be compared
8502 with one insn, test the comparison and jump to the appropriate label.
8503 The code of EXP is ignored; we always test GT if SWAP is 0,
8504 and LT if SWAP is 1. */
8507 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8510 rtx if_false_label, if_true_label;
8512 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8513 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8514 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8515 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8516 rtx drop_through_label = 0;
8517 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8520 if (! if_true_label || ! if_false_label)
8521 drop_through_label = gen_label_rtx ();
8522 if (! if_true_label)
8523 if_true_label = drop_through_label;
8524 if (! if_false_label)
8525 if_false_label = drop_through_label;
8527 /* Compare a word at a time, high order first. */
8528 for (i = 0; i < nwords; i++)
8531 rtx op0_word, op1_word;
8533 if (WORDS_BIG_ENDIAN)
8535 op0_word = operand_subword_force (op0, i, mode);
8536 op1_word = operand_subword_force (op1, i, mode);
8540 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8541 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8544 /* All but high-order word must be compared as unsigned. */
8545 comp = compare_from_rtx (op0_word, op1_word,
8546 (unsignedp || i > 0) ? GTU : GT,
8547 unsignedp, word_mode, NULL_RTX, 0);
8548 if (comp == const_true_rtx)
8549 emit_jump (if_true_label);
8550 else if (comp != const0_rtx)
8551 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8553 /* Consider lower words only if these are equal. */
8554 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8556 if (comp == const_true_rtx)
8557 emit_jump (if_false_label);
8558 else if (comp != const0_rtx)
8559 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8563 emit_jump (if_false_label);
8564 if (drop_through_label)
8565 emit_label (drop_through_label);
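/* Illustrative sketch, not part of the original source: the word-at-a-time
   comparison the loop above performs, done here on a value split into
   NWORDS machine words, most significant word first.  Only when a word
   pair is equal do the lower words matter.  (In the real code the
   high-order word is compared signed for signed operands; this sketch
   shows the unsigned case only.)  */

static int
sketch_multiword_gtu (const unsigned long *op0, const unsigned long *op1,
                      int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)        /* high-order word first */
    {
      if (op0[i] > op1[i])
        return 1;                     /* decided: greater */
      if (op0[i] != op1[i])
        return 0;                     /* decided: less */
      /* Words equal; consider the lower words.  */
    }
  return 0;                           /* all words equal: not greater */
}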
8568 /* Compare OP0 with OP1, word at a time, in mode MODE.
8569 UNSIGNEDP says to do unsigned comparison.
8570 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8573 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8574 enum machine_mode mode;
8577 rtx if_false_label, if_true_label;
8579 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8580 rtx drop_through_label = 0;
8583 if (! if_true_label || ! if_false_label)
8584 drop_through_label = gen_label_rtx ();
8585 if (! if_true_label)
8586 if_true_label = drop_through_label;
8587 if (! if_false_label)
8588 if_false_label = drop_through_label;
8590 /* Compare a word at a time, high order first. */
8591 for (i = 0; i < nwords; i++)
8594 rtx op0_word, op1_word;
8596 if (WORDS_BIG_ENDIAN)
8598 op0_word = operand_subword_force (op0, i, mode);
8599 op1_word = operand_subword_force (op1, i, mode);
8603 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8604 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8607 /* All but high-order word must be compared as unsigned. */
8608 comp = compare_from_rtx (op0_word, op1_word,
8609 (unsignedp || i > 0) ? GTU : GT,
8610 unsignedp, word_mode, NULL_RTX, 0);
8611 if (comp == const_true_rtx)
8612 emit_jump (if_true_label);
8613 else if (comp != const0_rtx)
8614 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8616 /* Consider lower words only if these are equal. */
8617 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8619 if (comp == const_true_rtx)
8620 emit_jump (if_false_label);
8621 else if (comp != const0_rtx)
8622 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8626 emit_jump (if_false_label);
8627 if (drop_through_label)
8628 emit_label (drop_through_label);
8631 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8632 with one insn, test the comparison and jump to the appropriate label. */
8635 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8637 rtx if_false_label, if_true_label;
8639 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8640 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8642 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8644 rtx drop_through_label = 0;
8646 if (! if_false_label)
8647 drop_through_label = if_false_label = gen_label_rtx ();
8649 for (i = 0; i < nwords; i++)
8651 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8652 operand_subword_force (op1, i, mode),
8653 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8654 word_mode, NULL_RTX, 0);
8655 if (comp == const_true_rtx)
8656 emit_jump (if_false_label);
8657 else if (comp != const0_rtx)
8658 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8662 emit_jump (if_true_label);
8663 if (drop_through_label)
8664 emit_label (drop_through_label);
8667 /* Jump according to whether OP0 is 0.
8668 We assume that OP0 has an integer mode that is too wide
8669 for the available compare insns. */
8672 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8674 rtx if_false_label, if_true_label;
8676 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8678 rtx drop_through_label = 0;
8680 if (! if_false_label)
8681 drop_through_label = if_false_label = gen_label_rtx ();
8683 for (i = 0; i < nwords; i++)
8685 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8687 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8688 if (comp == const_true_rtx)
8689 emit_jump (if_false_label);
8690 else if (comp != const0_rtx)
8691 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8695 emit_jump (if_true_label);
8696 if (drop_through_label)
8697 emit_label (drop_through_label);
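/* Illustrative sketch, not part of the original source: the word-by-word
   zero test made above.  OP0 is "zero" only if every word is zero; any
   nonzero word sends control to the false label at once.  */

static int
sketch_multiword_is_zero (const unsigned long *op0, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != 0)
      return 0;          /* corresponds to the jump to if_false_label */
  return 1;              /* fell through every test: the value is zero */
}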
8700 /* Given a comparison expression in rtl form, output conditional branches to
8701 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8704 do_jump_for_compare (comparison, if_false_label, if_true_label)
8705 rtx comparison, if_false_label, if_true_label;
8709 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8710 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8715 emit_jump (if_false_label);
8717 else if (if_false_label)
8720 rtx prev = get_last_insn ();
8724 prev = PREV_INSN (prev);
8726 /* Output the branch with the opposite condition. Then try to invert
8727 what is generated. If more than one insn is a branch, or if the
8728 branch is not the last insn written, abort. If we can't invert
8729 the branch, make a true label, redirect this jump to that,
8730 emit a jump to the false label and define the true label. */
8732 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8733 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8737 /* Here we get the insn before what was just emitted.
8738 On some machines, emitting the branch can discard
8739 the previous compare insn and emit a replacement. */
8741 /* If there's only one preceding insn... */
8742 insn = get_insns ();
8744 insn = NEXT_INSN (prev);
8746 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8747 if (GET_CODE (insn) == JUMP_INSN)
8754 if (branch != get_last_insn ())
8757 if (! invert_jump (branch, if_false_label))
8759 if_true_label = gen_label_rtx ();
8760 redirect_jump (branch, if_true_label);
8761 emit_jump (if_false_label);
8762 emit_label (if_true_label);
8767 /* Generate code for a comparison expression EXP
8768 (including code to compute the values to be compared)
8769 and set (CC0) according to the result.
8770 SIGNED_CODE should be the rtx operation for this comparison for
8771 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8773 We force a stack adjustment unless there are currently
8774 things pushed on the stack that aren't yet used. */
8777 compare (exp, signed_code, unsigned_code)
8779 enum rtx_code signed_code, unsigned_code;
8782 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8784 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8785 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8786 register enum machine_mode mode = TYPE_MODE (type);
8787 int unsignedp = TREE_UNSIGNED (type);
8788 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8790 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8792 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8793 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8796 /* Like compare but expects the values to compare as two rtx's.
8797 The decision as to signed or unsigned comparison must be made by the caller.
8799 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
8802 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8803 size of MODE should be used. */
8806 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8807 register rtx op0, op1;
8810 enum machine_mode mode;
8816 /* If one operand is constant, make it the second one. Only do this
8817 if the other operand is not constant as well. */
8819 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8820 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8825 code = swap_condition (code);
8830 op0 = force_not_mem (op0);
8831 op1 = force_not_mem (op1);
8834 do_pending_stack_adjust ();
8836 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8837 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8841 /* There's no need to do this now that combine.c can eliminate lots of
8842 sign extensions. This can be less efficient in certain cases on other machines. */
8845 /* If this is a signed equality comparison, we can do it as an
8846 unsigned comparison since zero-extension is cheaper than sign
8847 extension and comparisons with zero are done as unsigned. This is
8848 the case even on machines that can do fast sign extension, since
8849 zero-extension is easier to combine with other operations than
8850 sign-extension is. If we are comparing against a constant, we must
8851 convert it to what it would look like unsigned. */
8852 if ((code == EQ || code == NE) && ! unsignedp
8853 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8855 if (GET_CODE (op1) == CONST_INT
8856 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8857 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8862 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8864 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
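/* Illustrative sketch, not part of the original source: why an EQ/NE test
   may be done unsigned, as the comment above describes.  Equality only
   looks at the bit pattern, so a signed constant merely has to be reduced
   to the mode's width first.  Shown here for an 8-bit mode; the masking
   mirrors the GET_MODE_MASK step above.  */

static int
sketch_eq_as_unsigned (signed char x, long signed_const)
{
  unsigned char ux = (unsigned char) x;
  /* Convert the constant to what it looks like unsigned in QImode:
     e.g. -1 becomes 0xff.  */
  unsigned char uconst = (unsigned char) (signed_const & 0xff);

  /* Same answer as x == signed_const for any constant that fits in
     signed char.  */
  return ux == uconst;
}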
8867 /* Generate code to calculate EXP using a store-flag instruction
8868 and return an rtx for the result. EXP is either a comparison
8869 or a TRUTH_NOT_EXPR whose operand is a comparison.
8871 If TARGET is nonzero, store the result there if convenient.
8873 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
8876 Return zero if there is no suitable set-flag instruction
8877 available on this machine.
8879 Once expand_expr has been called on the arguments of the comparison,
8880 we are committed to doing the store flag, since it is not safe to
8881 re-evaluate the expression. We emit the store-flag insn by calling
8882 emit_store_flag, but only expand the arguments if we have a reason
8883 to believe that emit_store_flag will be successful. If we think that
8884 it will, but it isn't, we have to simulate the store-flag with a
8885 set/jump/set sequence. */
8888 do_store_flag (exp, target, mode, only_cheap)
8891 enum machine_mode mode;
8895 tree arg0, arg1, type;
8897 enum machine_mode operand_mode;
8901 enum insn_code icode;
8902 rtx subtarget = target;
8903 rtx result, label, pattern, jump_pat;
8905 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8906 result at the end. We can't simply invert the test since it would
8907 have already been inverted if it were valid. This case occurs for
8908 some floating-point comparisons. */
8910 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8911 invert = 1, exp = TREE_OPERAND (exp, 0);
8913 arg0 = TREE_OPERAND (exp, 0);
8914 arg1 = TREE_OPERAND (exp, 1);
8915 type = TREE_TYPE (arg0);
8916 operand_mode = TYPE_MODE (type);
8917 unsignedp = TREE_UNSIGNED (type);
8919 /* We won't bother with BLKmode store-flag operations because it would mean
8920 passing a lot of information to emit_store_flag. */
8921 if (operand_mode == BLKmode)
8927 /* Get the rtx comparison code to use. We know that EXP is a comparison
8928 operation of some type. Some comparisons against 1 and -1 can be
8929 converted to comparisons with zero. Do so here so that the tests
8930 below will be aware that we have a comparison with zero. These
8931 tests will not catch constants in the first operand, but constants
8932 are rarely passed as the first operand. */
8934 switch (TREE_CODE (exp))
8943 if (integer_onep (arg1))
8944 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8946 code = unsignedp ? LTU : LT;
8949 if (! unsignedp && integer_all_onesp (arg1))
8950 arg1 = integer_zero_node, code = LT;
8952 code = unsignedp ? LEU : LE;
8955 if (! unsignedp && integer_all_onesp (arg1))
8956 arg1 = integer_zero_node, code = GE;
8958 code = unsignedp ? GTU : GT;
8961 if (integer_onep (arg1))
8962 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8964 code = unsignedp ? GEU : GE;
8970 /* Put a constant second. */
8971 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8973 tem = arg0; arg0 = arg1; arg1 = tem;
8974 code = swap_condition (code);
8977 /* If this is an equality or inequality test of a single bit, we can
8978 do this by shifting the bit being tested to the low-order bit and
8979 masking the result with the constant 1. If the condition was EQ,
8980 we xor it with 1. This does not require an scc insn and is faster
8981 than an scc insn even if we have it. */
8983 if ((code == NE || code == EQ)
8984 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8985 && integer_pow2p (TREE_OPERAND (arg0, 1))
8986 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8988 tree inner = TREE_OPERAND (arg0, 0);
8989 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8990 NULL_RTX, VOIDmode, 0)));
8993 /* If INNER is a right shift of a constant and it plus BITNUM does
8994 not overflow, adjust BITNUM and INNER. */
8996 if (TREE_CODE (inner) == RSHIFT_EXPR
8997 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
8998 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
8999 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9000 < TYPE_PRECISION (type)))
9002 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9003 inner = TREE_OPERAND (inner, 0);
9006 /* If we are going to be able to omit the AND below, we must do our
9007 operations as unsigned. If we must use the AND, we have a choice.
9008 Normally unsigned is faster, but for some machines signed is. */
9009 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9010 #ifdef LOAD_EXTEND_OP
9011 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9017 if (subtarget == 0 || GET_CODE (subtarget) != REG
9018 || GET_MODE (subtarget) != operand_mode
9019 || ! safe_from_p (subtarget, inner))
9022 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9025 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9026 size_int (bitnum), subtarget, ops_unsignedp);
9028 if (GET_MODE (op0) != mode)
9029 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9031 if ((code == EQ && ! invert) || (code == NE && invert))
9032 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9033 ops_unsignedp, OPTAB_LIB_WIDEN);
9035 /* Put the AND last so it can combine with more things. */
9036 if (bitnum != TYPE_PRECISION (type) - 1)
9037 op0 = expand_and (op0, const1_rtx, subtarget);
9042 /* Now see if we are likely to be able to do this. Return if not. */
9043 if (! can_compare_p (operand_mode))
9045 icode = setcc_gen_code[(int) code];
9046 if (icode == CODE_FOR_nothing
9047 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9049 /* We can only do this if it is one of the special cases that
9050 can be handled without an scc insn. */
9051 if ((code == LT && integer_zerop (arg1))
9052 || (! only_cheap && code == GE && integer_zerop (arg1)))
9054 else if (BRANCH_COST >= 0
9055 && ! only_cheap && (code == NE || code == EQ)
9056 && TREE_CODE (type) != REAL_TYPE
9057 && ((abs_optab->handlers[(int) operand_mode].insn_code
9058 != CODE_FOR_nothing)
9059 || (ffs_optab->handlers[(int) operand_mode].insn_code
9060 != CODE_FOR_nothing)))
9066 preexpand_calls (exp);
9067 if (subtarget == 0 || GET_CODE (subtarget) != REG
9068 || GET_MODE (subtarget) != operand_mode
9069 || ! safe_from_p (subtarget, arg1))
9072 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9073 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9076 target = gen_reg_rtx (mode);
9078 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9079 because, if emit_store_flag does anything, it will succeed and
9080 OP0 and OP1 will not be used subsequently. */
9082 result = emit_store_flag (target, code,
9083 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9084 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9085 operand_mode, unsignedp, 1);
9090 result = expand_binop (mode, xor_optab, result, const1_rtx,
9091 result, 0, OPTAB_LIB_WIDEN);
9095 /* If this failed, we have to do this with set/compare/jump/set code. */
9096 if (target == 0 || GET_CODE (target) != REG
9097 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9098 target = gen_reg_rtx (GET_MODE (target));
9100 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9101 result = compare_from_rtx (op0, op1, code, unsignedp,
9102 operand_mode, NULL_RTX, 0);
9103 if (GET_CODE (result) == CONST_INT)
9104 return (((result == const0_rtx && ! invert)
9105 || (result != const0_rtx && invert))
9106 ? const0_rtx : const1_rtx);
9108 label = gen_label_rtx ();
9109 if (bcc_gen_fctn[(int) code] == 0)
9112 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9113 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
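/* Illustrative sketch, not part of the original source: the single-bit
   store-flag trick described earlier in do_store_flag.  (x & (1 << n)) != 0
   becomes a shift and a mask; the EQ form additionally xors the result
   with 1, so no scc instruction is needed.  */

static int
sketch_bit_test_ne (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) != 0  */
  return (x >> bitnum) & 1;
}

static int
sketch_bit_test_eq (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) == 0  */
  return ((x >> bitnum) & 1) ^ 1;
}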
9119 /* Generate a tablejump instruction (used for switch statements). */
9121 #ifdef HAVE_tablejump
9123 /* INDEX is the value being switched on, with the lowest value
9124 in the table already subtracted.
9125 MODE is its expected mode (needed if INDEX is constant).
9126 RANGE is the length of the jump table.
9127 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9129 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9130 index value is out of range. */
9133 do_tablejump (index, mode, range, table_label, default_label)
9134 rtx index, range, table_label, default_label;
9135 enum machine_mode mode;
9137 register rtx temp, vector;
9139 /* Do an unsigned comparison (in the proper mode) between the index
9140 expression and the value which represents the length of the range.
9141 Since we just finished subtracting the lower bound of the range
9142 from the index expression, this comparison allows us to simultaneously
9143 check that the original index expression value is both greater than
9144 or equal to the minimum value of the range and less than or equal to
9145 the maximum value of the range. */
9147 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
9148 emit_jump_insn (gen_bltu (default_label));
9150 /* If index is in range, it must fit in Pmode.
9151 Convert to Pmode so we can index with it. */
9153 index = convert_to_mode (Pmode, index, 1);
9155 /* Don't let a MEM slip thru, because then INDEX that comes
9156 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9157 and break_out_memory_refs will go to work on it and mess it up. */
9158 #ifdef PIC_CASE_VECTOR_ADDRESS
9159 if (flag_pic && GET_CODE (index) != REG)
9160 index = copy_to_mode_reg (Pmode, index);
9163 /* If flag_force_addr were to affect this address
9164 it could interfere with the tricky assumptions made
9165 about addresses that contain label-refs,
9166 which may be valid only very near the tablejump itself. */
9167 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9168 GET_MODE_SIZE, because this indicates how large insns are. The other
9169 uses should all be Pmode, because they are addresses. This code
9170 could fail if addresses and insns are not the same size. */
9171 index = gen_rtx (PLUS, Pmode,
9172 gen_rtx (MULT, Pmode, index,
9173 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9174 gen_rtx (LABEL_REF, Pmode, table_label));
9175 #ifdef PIC_CASE_VECTOR_ADDRESS
9177 index = PIC_CASE_VECTOR_ADDRESS (index);
9180 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9181 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9182 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9183 RTX_UNCHANGING_P (vector) = 1;
9184 convert_move (temp, vector, 0);
9186 emit_jump_insn (gen_tablejump (temp, table_label));
9188 #ifndef CASE_VECTOR_PC_RELATIVE
9189 /* If we are generating PIC code or if the table is PC-relative, the
9190 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9196 #endif /* HAVE_tablejump */
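/* Illustrative sketch, not part of the original source: the two ideas used
   by do_tablejump above, written as plain C.  First, after subtracting the
   lowest case value, a single unsigned comparison checks both ends of the
   range at once.  Second, the in-range index selects an entry of a jump
   table; function pointers stand in here for the CODE_LABEL addresses.  */

static int
sketch_tablejump (long value, long low, long high,
                  int (*table[]) (void), int (*default_case) (void))
{
  unsigned long index = (unsigned long) (value - low);
  unsigned long range = (unsigned long) (high - low);

  /* A value below LOW wraps around to a huge unsigned number, so this one
     test rejects both out-of-range directions.  */
  if (index > range)
    return default_case ();

  return table[index] ();       /* indexed jump through the table */
}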
9199 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9200 to that value is on the top of the stack. The resulting type is TYPE, and
9201 the source declaration is DECL. */
9204 bc_load_memory (type, decl)
9207 enum bytecode_opcode opcode;
9210 /* Bit fields are special. We only know about signed and
9211 unsigned ints, and enums. The latter are treated as integers. */
9214 if (DECL_BIT_FIELD (decl))
9215 if (TREE_CODE (type) == ENUMERAL_TYPE
9216 || TREE_CODE (type) == INTEGER_TYPE)
9217 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9221 /* See corresponding comment in bc_store_memory(). */
9222 if (TYPE_MODE (type) == BLKmode
9223 || TYPE_MODE (type) == VOIDmode)
9226 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9228 if (opcode == neverneverland)
9231 bc_emit_bytecode (opcode);
9233 #ifdef DEBUG_PRINT_CODE
9234 fputc ('\n', stderr);
9239 /* Store the contents of the second stack slot to the address in the
9240 top stack slot. DECL is the declaration of the destination and is used
9241 to determine whether we're dealing with a bitfield. */
9244 bc_store_memory (type, decl)
9247 enum bytecode_opcode opcode;
9250 if (DECL_BIT_FIELD (decl))
9252 if (TREE_CODE (type) == ENUMERAL_TYPE
9253 || TREE_CODE (type) == INTEGER_TYPE)
9259 if (TYPE_MODE (type) == BLKmode)
9261 /* Copy structure. This expands to a block copy instruction, storeBLK.
9262 In addition to the arguments expected by the other store instructions,
9263 it also expects a type size (SImode) on top of the stack, which is the
9264 structure size in size units (usually bytes). The first two arguments
9265 are already on the stack, so we just put the size on level 1. For some
9266 languages the size may be variable, which is why we don't encode it as
9267 a storeBLK literal but rather treat it as a full-fledged expression. */
9269 bc_expand_expr (TYPE_SIZE (type));
9273 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9275 if (opcode == neverneverland)
9278 bc_emit_bytecode (opcode);
9280 #ifdef DEBUG_PRINT_CODE
9281 fputc ('\n', stderr);
9286 /* Allocate local stack space sufficient to hold a value of the given
9287 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9288 integral power of 2. A special case is locals of type VOID, which
9289 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9290 remapped into the corresponding attribute of SI. */
9293 bc_allocate_local (size, alignment)
9294 int size, alignment;
9302 /* Normalize size and alignment */
9304 size = UNITS_PER_WORD;
9306 if (alignment < BITS_PER_UNIT)
9307 byte_alignment = 1 << (INT_ALIGN - 1);
9310 byte_alignment = alignment / BITS_PER_UNIT;
9312 if (local_vars_size & (byte_alignment - 1))
9313 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9315 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9316 local_vars_size += size;
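/* Illustrative sketch, not part of the original source: the round-up done
   above when carving out local stack space.  The running offset is bumped
   to the next multiple of the (power-of-two) byte alignment before the
   slot is assigned.  */

static int
sketch_align_offset (int offset, int byte_alignment)
{
  /* Same arithmetic as above: add just enough to clear the low bits.
     E.g. offset 5 with alignment 4 becomes 8.  */
  if (offset & (byte_alignment - 1))
    offset += byte_alignment - (offset & (byte_alignment - 1));
  return offset;
}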
9322 /* Allocate variable-sized local array. Variable-sized arrays are
9323 actually pointers to the address in memory where they are stored. */
9326 bc_allocate_variable_array (size)
9330 const int ptralign = (1 << (PTR_ALIGN - 1));
9333 if (local_vars_size & ptralign)
9334 local_vars_size += ptralign - (local_vars_size & ptralign);
9336 /* Note down local space needed: pointer to block; also return
9339 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9340 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9345 /* Push the machine address for the given external variable offset. */
9347 bc_load_externaddr (externaddr)
9350 bc_emit_bytecode (constP);
9351 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9352 BYTECODE_BC_LABEL (externaddr)->offset);
9354 #ifdef DEBUG_PRINT_CODE
9355 fputc ('\n', stderr);
9364 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9370 /* Like above, but expects an IDENTIFIER. */
9372 bc_load_externaddr_id (id, offset)
9376 if (!IDENTIFIER_POINTER (id))
9379 bc_emit_bytecode (constP);
9380 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9382 #ifdef DEBUG_PRINT_CODE
9383 fputc ('\n', stderr);
9388 /* Push the machine address for the given local variable offset. */
9390 bc_load_localaddr (localaddr)
9393 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9397 /* Push the machine address for the given parameter offset.
9398 NOTE: offset is in bits. */
9400 bc_load_parmaddr (parmaddr)
9403 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9408 /* Convert a[i] into *(a + i). */
9410 bc_canonicalize_array_ref (exp)
9413 tree type = TREE_TYPE (exp);
9414 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9415 TREE_OPERAND (exp, 0));
9416 tree index = TREE_OPERAND (exp, 1);
9419 /* Convert the integer argument to a type the same size as a pointer
9420 so the multiply won't overflow spuriously. */
9422 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9423 index = convert (type_for_size (POINTER_SIZE, 0), index);
9425 /* The array address isn't volatile even if the array is.
9426 (Of course this isn't terribly relevant since the bytecode
9427 translator treats nearly everything as volatile anyway.) */
9428 TREE_THIS_VOLATILE (array_adr) = 0;
9430 return build1 (INDIRECT_REF, type,
9431 fold (build (PLUS_EXPR,
9432 TYPE_POINTER_TO (type),
9434 fold (build (MULT_EXPR,
9435 TYPE_POINTER_TO (type),
9437 size_in_bytes (type))))));
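/* Illustrative sketch, not part of the original source: the rewrite that
   bc_canonicalize_array_ref performs, shown at the C level.  An array
   reference is the same as dereferencing the array address plus the index
   scaled by the element size.  */

static int
sketch_array_ref (int *a, long i)
{
  /* a[i] and *(a + i) are one and the same.  */
  return *(a + i);
}

static int
sketch_array_ref_bytes (int *a, long i)
{
  /* The same reference spelled out with byte arithmetic: the index is
     multiplied by the element size, which is the size_in_bytes factor in
     the MULT_EXPR built above.  */
  char *base = (char *) a;
  return *(int *) (base + i * (long) sizeof (int));
}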
9441 /* Load the address of the component referenced by the given
9442 COMPONENT_REF expression.
9444 Returns innermost lvalue. */
9447 bc_expand_component_address (exp)
9451 enum machine_mode mode;
9453 HOST_WIDE_INT SIval;
9456 tem = TREE_OPERAND (exp, 1);
9457 mode = DECL_MODE (tem);
9460 /* Compute cumulative bit offset for nested component refs
9461 and array refs, and find the ultimate containing object. */
9463 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9465 if (TREE_CODE (tem) == COMPONENT_REF)
9466 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9468 if (TREE_CODE (tem) == ARRAY_REF
9469 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9470 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9472 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9473 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9474 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9479 bc_expand_expr (tem);
9482 /* For bitfields also push their offset and size */
9483 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9484 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9486 if (SIval = bitpos / BITS_PER_UNIT)
9487 bc_emit_instruction (addconstPSI, SIval);
9489 return (TREE_OPERAND (exp, 1));
9493 /* Emit code to push two SI constants */
9495 bc_push_offset_and_size (offset, size)
9496 HOST_WIDE_INT offset, size;
9498 bc_emit_instruction (constSI, offset);
9499 bc_emit_instruction (constSI, size);
9503 /* Emit byte code to push the address of the given lvalue expression to
9504 the stack. If it's a bit field, we also push offset and size info.
9506 Returns innermost component, which allows us to determine not only
9507 its type, but also whether it's a bitfield. */
9510 bc_expand_address (exp)
9514 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9518 switch (TREE_CODE (exp))
9522 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9526 return (bc_expand_component_address (exp));
9530 bc_expand_expr (TREE_OPERAND (exp, 0));
9532 /* For variable-sized types: retrieve pointer. Sometimes the
9533 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9534 also make sure we have an operand, just in case... */
9536 if (TREE_OPERAND (exp, 0)
9537 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9538 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9539 bc_emit_instruction (loadP);
9541 /* If packed, also return offset and size */
9542 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9544 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9545 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9547 return (TREE_OPERAND (exp, 0));
9551 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9552 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9557 bc_load_parmaddr (DECL_RTL (exp));
9559 /* For variable-sized types: retrieve pointer */
9560 if (TYPE_SIZE (TREE_TYPE (exp))
9561 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9562 bc_emit_instruction (loadP);
9564 /* If packed, also return offset and size */
9565 if (DECL_BIT_FIELD (exp))
9566 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9567 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9573 bc_emit_instruction (returnP);
9579 if (BYTECODE_LABEL (DECL_RTL (exp)))
9580 bc_load_externaddr (DECL_RTL (exp));
9583 if (DECL_EXTERNAL (exp))
9584 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9585 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9587 bc_load_localaddr (DECL_RTL (exp));
9589 /* For variable-sized types: retrieve pointer */
9590 if (TYPE_SIZE (TREE_TYPE (exp))
9591 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9592 bc_emit_instruction (loadP);
9594 /* If packed, also return offset and size */
9595 if (DECL_BIT_FIELD (exp))
9596 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9597 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9605 bc_emit_bytecode (constP);
9606 r = output_constant_def (exp);
9607 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9609 #ifdef DEBUG_PRINT_CODE
9610 fputc ('\n', stderr);
9621 /* Most lvalues don't have components. */
9626 /* Emit a type code to be used by the runtime support in handling
9627 parameter passing. The type code consists of the machine mode
9628 plus the minimal alignment shifted left 8 bits. */
9631 bc_runtime_type_code (type)
9636 switch (TREE_CODE (type))
9646 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
9658 return build_int_2 (val, 0);
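/* Illustrative sketch, not part of the original source: the packing the
   comment above describes.  The low 8 bits carry the machine mode and the
   bits above it the minimal alignment, so both fit in a single integer.
   The decoding side is an assumption here (it supposes the mode fits in
   one byte); only the packing appears in the code above.  */

static int
sketch_pack_type_code (int mode, int align)
{
  return mode | (align << 8);
}

static int
sketch_type_code_mode (int code)
{
  return code & 0xff;          /* recover the mode */
}

static int
sketch_type_code_align (int code)
{
  return code >> 8;            /* recover the alignment */
}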
9662 /* Generate constructor label */
9664 bc_gen_constr_label ()
9666 static int label_counter;
9667 static char label[20];
9669 sprintf (label, "*LR%d", label_counter++);
9671 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9675 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9676 expand the constructor data as static data, and push a pointer to it.
9677 The pointer is put in the pointer table and is retrieved by a constP
9678 bytecode instruction. We then loop and store each constructor member in
9679 the corresponding component. Finally, we return the original pointer on the stack. */
9683 bc_expand_constructor (constr)
9687 HOST_WIDE_INT ptroffs;
9691 /* Literal constructors are handled as constants, whereas
9692 non-literals are evaluated and stored element by element
9693 into the data segment. */
9695 /* Allocate space in proper segment and push pointer to space on stack.
9698 l = bc_gen_constr_label ();
9700 if (TREE_CONSTANT (constr))
9704 bc_emit_const_labeldef (l);
9705 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
9711 bc_emit_data_labeldef (l);
9712 bc_output_data_constructor (constr);
9716 /* Add reference to pointer table and recall pointer to stack;
9717 this code is common for both types of constructors: literals
9718 and non-literals. */
9720 ptroffs = bc_define_pointer (l);
9721 bc_emit_instruction (constP, ptroffs);
9723 /* This is all that has to be done if it's a literal. */
9724 if (TREE_CONSTANT (constr))
9728 /* At this point, we have the pointer to the structure on top of the stack.
9729 Generate sequences of store_memory calls for the constructor. */
9731 /* constructor type is structure */
9732 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9736 /* If the constructor has fewer fields than the structure,
9737 clear the whole structure first. */
9739 if (list_length (CONSTRUCTOR_ELTS (constr))
9740 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9742 bc_emit_instruction (duplicate);
9743 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9744 bc_emit_instruction (clearBLK);
9747 /* Store each element of the constructor into the corresponding field of the structure. */
9750 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9752 register tree field = TREE_PURPOSE (elt);
9753 register enum machine_mode mode;
9758 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9759 mode = DECL_MODE (field);
9760 unsignedp = TREE_UNSIGNED (field);
9762 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9764 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9765 /* The alignment of TARGET is
9766 at least what its type requires. */
9768 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9769 int_size_in_bytes (TREE_TYPE (constr)));
9774 /* Constructor type is array */
9775 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9779 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9780 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9781 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9782 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9784 /* If the constructor has fewer elements than the array,
9785 clear the whole array first. */
9787 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9789 bc_emit_instruction (duplicate);
9790 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9791 bc_emit_instruction (clearBLK);
9795 /* Store each element of the constructor into the corresponding
9796 element of TARGET, determined by counting the elements. */
9798 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9800 elt = TREE_CHAIN (elt), i++)
9802 register enum machine_mode mode;
9807 mode = TYPE_MODE (elttype);
9808 bitsize = GET_MODE_BITSIZE (mode);
9809 unsignedp = TREE_UNSIGNED (elttype);
9811 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9812 /* * TYPE_SIZE_UNIT (elttype) */ );
9814 bc_store_field (elt, bitsize, bitpos, mode,
9815 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9816 /* The alignment of TARGET is
9817 at least what its type requires. */
9819 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9820 int_size_in_bytes (TREE_TYPE (constr)));
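/* Illustrative sketch, not part of the original source: the strategy used
   above for constructors that do not mention every member.  The whole
   object is cleared first (the clearBLK step), and then each explicitly
   given member is stored over the zeroed block, much like a partial
   aggregate initializer in C.  The structure and values are made up.  */

struct sketch_point { int x, y, z; };

static void
sketch_partial_constructor (struct sketch_point *p)
{
  char *bytes = (char *) p;
  unsigned long i;

  /* Clear the whole structure first ...  */
  for (i = 0; i < sizeof *p; i++)
    bytes[i] = 0;

  /* ... then store the members the constructor actually supplies.  */
  p->x = 1;
  p->y = 2;      /* z keeps the cleared value 0 */
}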
9827 /* Store the value of EXP (an expression tree) into member FIELD of
9828 structure at address on stack, which has type TYPE, mode MODE and
9829 occupies BITSIZE bits, starting BITPOS bits from the beginning of the structure.
9832 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9833 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9836 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9837 value_mode, unsignedp, align, total_size)
9838 int bitsize, bitpos;
9839 enum machine_mode mode;
9840 tree field, exp, type;
9841 enum machine_mode value_mode;
9847 /* Expand expression and copy pointer */
9848 bc_expand_expr (exp);
9849 bc_emit_instruction (over);
9852 /* If the component is a bit field, we cannot use addressing to access
9853 it. Use bit-field techniques to store in it. */
9855 if (DECL_BIT_FIELD (field))
9857 bc_store_bit_field (bitpos, bitsize, unsignedp);
9863 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9865 /* Advance pointer to the desired member */
9867 bc_emit_instruction (addconstPSI, offset);
9870 bc_store_memory (type, field);
9875 /* Store SI/SU in bitfield */
9877 bc_store_bit_field (offset, size, unsignedp)
9878 int offset, size, unsignedp;
9880 /* Push bitfield offset and size */
9881 bc_push_offset_and_size (offset, size);
9884 bc_emit_instruction (sstoreBI);
9888 /* Load SI/SU from bitfield */
9890 bc_load_bit_field (offset, size, unsignedp)
9891 int offset, size, unsignedp;
9893 /* Push bitfield offset and size */
9894 bc_push_offset_and_size (offset, size);
9896 /* Load: sign-extend if signed, else zero-extend */
9897 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
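/* Illustrative sketch, not part of the original source: what the
   zxloadBI / sxloadBI pair above amounts to when a SIZE-bit field at bit
   OFFSET is pulled out of a word -- zero-extend for unsigned fields,
   sign-extend for signed ones.  Assumes 0 < SIZE < the number of bits in
   a long.  */

static long
sketch_load_bit_field (unsigned long word, int offset, int size, int unsignedp)
{
  unsigned long mask = (1UL << size) - 1;
  unsigned long field = (word >> offset) & mask;

  if (unsignedp)
    return (long) field;                  /* zero-extend */

  /* Sign-extend: if the field's top bit is set, fill the upper bits.  */
  if (field & (1UL << (size - 1)))
    field |= ~mask;
  return (long) field;
}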
9901 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9902 (adjust stack pointer upwards), negative means add that number of
9903 levels (adjust the stack pointer downwards). Only positive values
9904 normally make sense. */
9907 bc_adjust_stack (nlevels)
9916 bc_emit_instruction (drop);
9919 bc_emit_instruction (drop);
9924 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9925 stack_depth -= nlevels;
9928 #if defined (VALIDATE_STACK_FOR_BC)
9929 VALIDATE_STACK_FOR_BC ();