1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-flags.h"
29 #include "insn-codes.h"
31 #include "insn-config.h"
34 #include "typeclass.h"
37 #include "bc-opcode.h"
38 #include "bc-typecd.h"
/* CEIL(a, b): integer division of A by B rounded up rather than
   down.  Callers use it on non-negative sizes and counts (e.g. how
   many words of size B are needed to hold A units).  */
#define CEIL(a,b) (((a) + (b) - 1) / (b))
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
68 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
70 /* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
78 /* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero.  */
81 int do_preexpand_calls = 1;
83 /* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85 int pending_stack_adjust;
87 /* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well.  */
91 int inhibit_defer_pop;
93 /* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call.  */
95 tree cleanups_this_call;
97 /* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs returned.  */
100 static rtx saveregs_value;
102 /* Similarly for __builtin_apply_args. */
103 static rtx apply_args_value;
105 /* This structure is used by move_by_pieces to describe the move to be performed.  */
108 struct move_by_pieces
117 int explicit_inc_from;
123 /* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
128 extern int stack_depth;
129 extern int max_stack_depth;
130 extern struct obstack permanent_obstack;
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int queued_subexp_p PROTO((rtx));
135 static void init_queue PROTO((void));
136 static void move_by_pieces PROTO((rtx, rtx, int, int));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
140 static void group_insns PROTO((rtx));
141 static void store_constructor PROTO((tree, rtx));
142 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
143 enum machine_mode, int, int, int));
144 static tree save_noncopied_parts PROTO((tree, tree));
145 static tree init_noncopied_parts PROTO((tree, tree));
146 static int safe_from_p PROTO((rtx, tree));
147 static int fixed_type_p PROTO((tree));
148 static int get_pointer_alignment PROTO((tree, unsigned));
149 static tree string_constant PROTO((tree, tree *));
150 static tree c_strlen PROTO((tree));
151 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
152 static int apply_args_size PROTO((void));
153 static int apply_result_size PROTO((void));
154 static rtx result_vector PROTO((int, rtx));
155 static rtx expand_builtin_apply_args PROTO((void));
156 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157 static void expand_builtin_return PROTO((rtx));
158 static rtx expand_increment PROTO((tree, int));
159 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160 tree bc_runtime_type_code PROTO((tree));
161 rtx bc_allocate_local PROTO((int, int));
162 void bc_store_memory PROTO((tree, tree));
163 tree bc_expand_component_address PROTO((tree));
164 tree bc_expand_address PROTO((tree));
165 void bc_expand_constructor PROTO((tree));
166 void bc_adjust_stack PROTO((int));
167 tree bc_canonicalize_array_ref PROTO((tree));
168 void bc_load_memory PROTO((tree, tree));
169 void bc_load_externaddr PROTO((rtx));
170 void bc_load_externaddr_id PROTO((tree, int));
171 void bc_load_localaddr PROTO((rtx));
172 void bc_load_parmaddr PROTO((rtx));
173 static void preexpand_calls PROTO((tree));
174 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
175 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
176 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
182 /* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
/* NOTE(review): indexed by (int) machine mode.  These appear to be
   filled in once per compilation by the init code below, which probes
   recog () with (set (reg) (mem)) and (set (mem) (reg)) patterns for
   each mode -- confirm against the complete init_expr_once.  */
186 static char direct_load[NUM_MACHINE_MODES];
187 static char direct_store[NUM_MACHINE_MODES];
189 /* MOVE_RATIO is the number of move instructions that is better than a libcall.  */
193 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 /* A value of around 6 would minimize code size; infinity would minimize execution time.  */
198 #define MOVE_RATIO 15
202 /* This array records the insn_code of insns to perform block moves. */
203 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
207 #ifndef SLOW_UNALIGNED_ACCESS
208 #define SLOW_UNALIGNED_ACCESS 0
211 /* Register mappings for target machines without register windows. */
212 #ifndef INCOMING_REGNO
213 #define INCOMING_REGNO(OUT) (OUT)
215 #ifndef OUTGOING_REGNO
216 #define OUTGOING_REGNO(IN) (IN)
219 /* Maps used to convert modes to const, load, and store bytecodes. */
220 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
224 /* Initialize maps used to convert modes to const, load, and store bytecodes.  */
227 bc_init_mode_to_opcode_maps ()
231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
236 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
241 #include "modemap.def"
245 /* This is run once per compilation to set up which modes can be used
246 directly in memory and to initialize the block move optab.  */
252 enum machine_mode mode;
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 if (! HARD_REGNO_MODE_OK (regno, mode))
285 reg = gen_rtx (REG, mode, regno);
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
312 /* This is run at the start of compiling a function. */
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
323 apply_args_value = 0;
327 /* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
334 /* Instead of saving the postincrement queue, empty it. */
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
341 p->apply_args_value = apply_args_value;
342 p->forced_labels = forced_labels;
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
348 apply_args_value = 0;
352 /* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
356 restore_expr_status (p)
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
363 apply_args_value = p->apply_args_value;
364 forced_labels = p->forced_labels;
367 /* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
370 static rtx pending_chain;
372 /* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
380 enqueue_insn (var, body)
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
384 var, NULL_RTX, NULL_RTX, body, pending_chain);
385 return pending_chain;
388 /* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
404 protect_from_queue (x, modify)
408 register RTX_CODE code = GET_CODE (x);
410 #if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
418 /* A special hack for read access to (MEM (QUEUED ...))
419 to facilitate use of autoincrement.
420 Make a copy of the contents of the memory location
421 rather than a copy of the address, but not
422 if the value is of mode BLKmode. */
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 register rtx y = XEXP (x, 0);
427 XEXP (x, 0) = QUEUED_VAR (y);
430 register rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, x),
437 /* Otherwise, recursively protect the subexpressions of all
438 the kinds of rtx's that can contain a QUEUED. */
441 rtx tem = protect_from_queue (XEXP (x, 0), 0);
442 if (tem != XEXP (x, 0))
448 else if (code == PLUS || code == MULT)
450 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
451 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
452 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 /* If the increment has not happened, use the variable itself. */
462 if (QUEUED_INSN (x) == 0)
463 return QUEUED_VAR (x);
464 /* If the increment has happened and a pre-increment copy exists, use that copy.  */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
473 return QUEUED_COPY (x);
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
485 register enum rtx_code code = GET_CODE (x);
491 return queued_subexp_p (XEXP (x, 0));
495 return queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1));
501 /* Perform all the pending incrementations. */
507 while (p = pending_chain)
509 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
510 pending_chain = QUEUED_NEXT (p);
521 /* Copy data from FROM to TO, where the machine modes are not the same.
522 Both modes may be integer, or both may be floating.
523 UNSIGNEDP should be nonzero if FROM is an unsigned type.
524 This causes zero-extension instead of sign-extension. */
527 convert_move (to, from, unsignedp)
528 register rtx to, from;
531 enum machine_mode to_mode = GET_MODE (to);
532 enum machine_mode from_mode = GET_MODE (from);
533 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
534 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
538 /* rtx code for making an equivalent value. */
539 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
541 to = protect_from_queue (to, 1);
542 from = protect_from_queue (from, 0);
544 if (to_real != from_real)
547 /* If FROM is a SUBREG that indicates that we have already done at least
548 the required extension, strip it.  We don't handle such SUBREGs as TO here.  */
551 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
552 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
553 >= GET_MODE_SIZE (to_mode))
554 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
555 from = gen_lowpart (to_mode, from), from_mode = to_mode;
557 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
560 if (to_mode == from_mode
561 || (from_mode == VOIDmode && CONSTANT_P (from)))
563 emit_move_insn (to, from);
#ifdef HAVE_extendqfhf2
      /* QF -> HF extension.  The availability test and insn code must
	 match the #ifdef guard above: the original tested
	 HAVE_extendqfsf2 and emitted CODE_FOR_extendqfsf2 (a
	 copy-paste slip from the QF -> SF case that follows), which
	 would emit an SFmode result for an HFmode destination.  */
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
578 #ifdef HAVE_extendqfsf2
579 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
581 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
585 #ifdef HAVE_extendqfdf2
586 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
588 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
592 #ifdef HAVE_extendqfxf2
593 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
595 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
599 #ifdef HAVE_extendqftf2
600 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
602 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
607 #ifdef HAVE_extendhfsf2
608 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
610 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
614 #ifdef HAVE_extendhfdf2
615 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
617 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
621 #ifdef HAVE_extendhfxf2
622 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
624 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
628 #ifdef HAVE_extendhftf2
629 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
631 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
636 #ifdef HAVE_extendsfdf2
637 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
639 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
643 #ifdef HAVE_extendsfxf2
644 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
646 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
650 #ifdef HAVE_extendsftf2
651 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
653 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
657 #ifdef HAVE_extenddfxf2
658 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
660 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
664 #ifdef HAVE_extenddftf2
665 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
667 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
672 #ifdef HAVE_trunchfqf2
673 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
675 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncsfqf2
680 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
682 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
686 #ifdef HAVE_truncdfqf2
687 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
689 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
693 #ifdef HAVE_truncxfqf2
694 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
696 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
700 #ifdef HAVE_trunctfqf2
701 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
703 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
707 #ifdef HAVE_truncsfhf2
708 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
710 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
714 #ifdef HAVE_truncdfhf2
715 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
717 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
721 #ifdef HAVE_truncxfhf2
722 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
724 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
728 #ifdef HAVE_trunctfhf2
729 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
731 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
778 libcall = extendsfdf2_libfunc;
782 libcall = extendsfxf2_libfunc;
786 libcall = extendsftf2_libfunc;
795 libcall = truncdfsf2_libfunc;
799 libcall = extenddfxf2_libfunc;
803 libcall = extenddftf2_libfunc;
812 libcall = truncxfsf2_libfunc;
816 libcall = truncxfdf2_libfunc;
825 libcall = trunctfsf2_libfunc;
829 libcall = trunctfdf2_libfunc;
835 if (libcall == (rtx) 0)
836 /* This conversion is not implemented yet. */
839 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
841 emit_move_insn (to, value);
845 /* Now both modes are integers. */
847 /* Handle expanding beyond a word. */
848 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
849 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
856 enum machine_mode lowpart_mode;
857 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
859 /* Try converting directly if the insn is supported. */
860 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
863 /* If FROM is a SUBREG, put it into a register. Do this
864 so that we always generate the same set of insns for
865 better cse'ing; if an intermediate assignment occurred,
866 we won't be doing the operation directly on the SUBREG. */
867 if (optimize > 0 && GET_CODE (from) == SUBREG)
868 from = force_reg (from_mode, from);
869 emit_unop_insn (code, to, from, equiv_code);
872 /* Next, try converting via full word. */
873 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
874 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
875 != CODE_FOR_nothing))
877 if (GET_CODE (to) == REG)
878 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
879 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
880 emit_unop_insn (code, to,
881 gen_lowpart (word_mode, to), equiv_code);
885 /* No special multiword conversion insn; do it by hand. */
888 /* Get a copy of FROM widened to a word, if necessary. */
889 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
890 lowpart_mode = word_mode;
892 lowpart_mode = from_mode;
894 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
896 lowpart = gen_lowpart (lowpart_mode, to);
897 emit_move_insn (lowpart, lowfrom);
899 /* Compute the value to put in each remaining word. */
901 fill_value = const0_rtx;
906 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
907 && STORE_FLAG_VALUE == -1)
909 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
911 fill_value = gen_reg_rtx (word_mode);
912 emit_insn (gen_slt (fill_value));
918 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
919 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
921 fill_value = convert_to_mode (word_mode, fill_value, 1);
925 /* Fill the remaining words. */
926 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
928 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
929 rtx subword = operand_subword (to, index, 1, to_mode);
934 if (fill_value != subword)
935 emit_move_insn (subword, fill_value);
938 insns = get_insns ();
941 emit_no_conflict_block (insns, to, from, NULL_RTX,
942 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
946 /* Truncating multi-word to a word or less. */
947 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
948 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
950 if (!((GET_CODE (from) == MEM
951 && ! MEM_VOLATILE_P (from)
952 && direct_load[(int) to_mode]
953 && ! mode_dependent_address_p (XEXP (from, 0)))
954 || GET_CODE (from) == REG
955 || GET_CODE (from) == SUBREG))
956 from = force_reg (from_mode, from);
957 convert_move (to, gen_lowpart (word_mode, from), 0);
961 /* Handle pointer conversion */ /* SPEE 900220 */
962 if (to_mode == PSImode)
964 if (from_mode != SImode)
965 from = convert_to_mode (SImode, from, unsignedp);
967 #ifdef HAVE_truncsipsi
970 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
973 #endif /* HAVE_truncsipsi */
977 if (from_mode == PSImode)
979 if (to_mode != SImode)
981 from = convert_to_mode (SImode, from, unsignedp);
986 #ifdef HAVE_extendpsisi
987 if (HAVE_extendpsisi)
989 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
992 #endif /* HAVE_extendpsisi */
997 /* Now follow all the conversions between integers
998 no more than a word long. */
1000 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1001 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1002 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1003 GET_MODE_BITSIZE (from_mode)))
1005 if (!((GET_CODE (from) == MEM
1006 && ! MEM_VOLATILE_P (from)
1007 && direct_load[(int) to_mode]
1008 && ! mode_dependent_address_p (XEXP (from, 0)))
1009 || GET_CODE (from) == REG
1010 || GET_CODE (from) == SUBREG))
1011 from = force_reg (from_mode, from);
1012 emit_move_insn (to, gen_lowpart (to_mode, from));
1016 /* Handle extension. */
1017 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1019 /* Convert directly if that works. */
1020 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1021 != CODE_FOR_nothing)
1023 /* If FROM is a SUBREG, put it into a register. Do this
1024 so that we always generate the same set of insns for
1025 better cse'ing; if an intermediate assignment occurred,
1026 we won't be doing the operation directly on the SUBREG. */
1027 if (optimize > 0 && GET_CODE (from) == SUBREG)
1028 from = force_reg (from_mode, from);
1029 emit_unop_insn (code, to, from, equiv_code);
1034 enum machine_mode intermediate;
1036 /* Search for a mode to convert via. */
1037 for (intermediate = from_mode; intermediate != VOIDmode;
1038 intermediate = GET_MODE_WIDER_MODE (intermediate))
1039 if ((can_extend_p (to_mode, intermediate, unsignedp)
1040 != CODE_FOR_nothing)
1041 && (can_extend_p (intermediate, from_mode, unsignedp)
1042 != CODE_FOR_nothing))
1044 convert_move (to, convert_to_mode (intermediate, from,
1045 unsignedp), unsignedp);
1049 /* No suitable intermediate mode. */
1054 /* Support special truncate insns for certain modes. */
1056 if (from_mode == DImode && to_mode == SImode)
1058 #ifdef HAVE_truncdisi2
1059 if (HAVE_truncdisi2)
1061 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1065 convert_move (to, force_reg (from_mode, from), unsignedp);
1069 if (from_mode == DImode && to_mode == HImode)
1071 #ifdef HAVE_truncdihi2
1072 if (HAVE_truncdihi2)
1074 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1078 convert_move (to, force_reg (from_mode, from), unsignedp);
1082 if (from_mode == DImode && to_mode == QImode)
1084 #ifdef HAVE_truncdiqi2
1085 if (HAVE_truncdiqi2)
1087 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1091 convert_move (to, force_reg (from_mode, from), unsignedp);
1095 if (from_mode == SImode && to_mode == HImode)
1097 #ifdef HAVE_truncsihi2
1098 if (HAVE_truncsihi2)
1100 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1108 if (from_mode == SImode && to_mode == QImode)
1110 #ifdef HAVE_truncsiqi2
1111 if (HAVE_truncsiqi2)
1113 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 if (from_mode == HImode && to_mode == QImode)
1123 #ifdef HAVE_trunchiqi2
1124 if (HAVE_trunchiqi2)
1126 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 /* Handle truncation of volatile memrefs, and so on;
1135 the things that couldn't be truncated directly,
1136 and for which there was no special instruction. */
1137 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1139 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1140 emit_move_insn (to, temp);
1144 /* Mode combination is not recognized. */
1148 /* Return an rtx for a value that would result
1149 from converting X to mode MODE.
1150 Both X and MODE may be floating, or both integer.
1151 UNSIGNEDP is nonzero if X is an unsigned value.
1152 This can be done by referring to a part of X in place
1153 or by copying to a new temporary with conversion.
1155 This function *must not* call protect_from_queue
1156 except when putting X into an insn (in which case convert_move does it). */
1159 convert_to_mode (mode, x, unsignedp)
1160 enum machine_mode mode;
1164 return convert_modes (mode, VOIDmode, x, unsignedp);
1167 /* Return an rtx for a value that would result
1168 from converting X from mode OLDMODE to mode MODE.
1169 Both modes may be floating, or both integer.
1170 UNSIGNEDP is nonzero if X is an unsigned value.
1172 This can be done by referring to a part of X in place
1173 or by copying to a new temporary with conversion.
1175 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1177 This function *must not* call protect_from_queue
1178 except when putting X into an insn (in which case convert_move does it). */
1181 convert_modes (mode, oldmode, x, unsignedp)
1182 enum machine_mode mode, oldmode;
1188 /* If FROM is a SUBREG that indicates that we have already done at least
1189 the required extension, strip it. */
1191 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1192 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1193 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1194 x = gen_lowpart (mode, x);
1196 if (GET_MODE (x) != VOIDmode)
1197 oldmode = GET_MODE (x);
1199 if (mode == oldmode)
1202 /* There is one case that we must handle specially: If we are converting
1203 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1204 we are to interpret the constant as unsigned, gen_lowpart will do
1205 the wrong if the constant appears negative. What we want to do is
1206 make the high-order word of the constant zero, not all ones. */
1208 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1209 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1210 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1211 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1213 /* We can do this with a gen_lowpart if both desired and current modes
1214 are integer, and this is either a constant integer, a register, or a
1215 non-volatile MEM. Except for the constant case where MODE is no
1216 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1218 if ((GET_CODE (x) == CONST_INT
1219 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1220 || (GET_MODE_CLASS (mode) == MODE_INT
1221 && GET_MODE_CLASS (oldmode) == MODE_INT
1222 && (GET_CODE (x) == CONST_DOUBLE
1223 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1224 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1225 && direct_load[(int) mode])
1226 || GET_CODE (x) == REG)))))
1228 /* ?? If we don't know OLDMODE, we have to assume here that
1229 X does not need sign- or zero-extension. This may not be
1230 the case, but it's the best we can do. */
1231 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1232 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1234 HOST_WIDE_INT val = INTVAL (x);
1235 int width = GET_MODE_BITSIZE (oldmode);
1237 /* We must sign or zero-extend in this case. Start by
1238 zero-extending, then sign extend if we need to. */
1239 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1241 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1242 val |= (HOST_WIDE_INT) (-1) << width;
1244 return GEN_INT (val);
1247 return gen_lowpart (mode, x);
1250 temp = gen_reg_rtx (mode);
1251 convert_move (temp, x, unsignedp);
1255 /* Generate several move instructions to copy LEN bytes
1256 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1257 The caller must pass FROM and TO
1258 through protect_from_queue before calling.
1259 ALIGN (in bytes) is maximum alignment we can assume. */
1262 move_by_pieces (to, from, len, align)
1266 struct move_by_pieces data;
1267 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
/* MOVE_MAX is the widest single move the target supports, so any
   usable mode has size < max_size. */
1268 int max_size = MOVE_MAX + 1;
1271 data.to_addr = to_addr;
1272 data.from_addr = from_addr;
/* autinc_to / autinc_from record whether each address is already an
   auto-increment/decrement form, in which case explicit offsets must
   not be added to it. */
1276 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1277 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1279 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1280 || GET_CODE (from_addr) == POST_INC
1281 || GET_CODE (from_addr) == POST_DEC);
/* explicit_inc_* convention: -1 = emit an explicit pre-decrement add
   before each piece, +1 = explicit post-increment add after, 0 = none. */
1283 data.explicit_inc_from = 0;
1284 data.explicit_inc_to = 0;
/* Copy from high addresses downward when the destination address
   decrements, so the pieces land in the correct order. */
1286 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1287 if (data.reverse) data.offset = len;
1290 /* If copying requires more than two move insns,
1291 copy addresses to registers (to make displacements shorter)
1292 and use post-increment if available. */
1293 if (!(data.autinc_from && data.autinc_to)
1294 && move_by_pieces_ninsns (len, align) > 2)
1296 #ifdef HAVE_PRE_DECREMENT
1297 if (data.reverse && ! data.autinc_from)
/* Reverse copy: start one past the end and pre-decrement down. */
1299 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1300 data.autinc_from = 1;
1301 data.explicit_inc_from = -1;
1304 #ifdef HAVE_POST_INCREMENT
1305 if (! data.autinc_from)
1307 data.from_addr = copy_addr_to_reg (from_addr);
1308 data.autinc_from = 1;
1309 data.explicit_inc_from = 1;
/* No auto-inc available: a constant address still benefits from being
   in a register so displacements stay short. */
1312 if (!data.autinc_from && CONSTANT_P (from_addr))
1313 data.from_addr = copy_addr_to_reg (from_addr);
1314 #ifdef HAVE_PRE_DECREMENT
1315 if (data.reverse && ! data.autinc_to)
1317 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1319 data.explicit_inc_to = -1;
1322 #ifdef HAVE_POST_INCREMENT
1323 if (! data.reverse && ! data.autinc_to)
1325 data.to_addr = copy_addr_to_reg (to_addr);
1327 data.explicit_inc_to = 1;
1330 if (!data.autinc_to && CONSTANT_P (to_addr))
1331 data.to_addr = copy_addr_to_reg (to_addr);
/* NOTE(review): the statement guarded here (presumably widening ALIGN
   when unaligned access is cheap) was lost in extraction -- confirm
   against the full file. */
1334 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1335 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1338 /* First move what we can in the largest integer mode, then go to
1339 successively smaller modes. */
1341 while (max_size > 1)
1343 enum machine_mode mode = VOIDmode, tmode;
1344 enum insn_code icode;
/* Select the widest integer mode strictly narrower than max_size. */
1346 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1347 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1348 if (GET_MODE_SIZE (tmode) < max_size)
1351 if (mode == VOIDmode)
1354 icode = mov_optab->handlers[(int) mode].insn_code;
/* Only use this mode if the target has a move pattern for it and the
   known alignment suffices for a MODE-sized access. */
1355 if (icode != CODE_FOR_nothing
1356 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1357 GET_MODE_SIZE (mode)))
1358 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1360 max_size = GET_MODE_SIZE (mode);
1363 /* The code above should have handled everything. */
1368 /* Return number of insns required to move L bytes by pieces.
1369 ALIGN (in bytes) is maximum alignment we can assume. */
1372 move_by_pieces_ninsns (l, align)
1376 register int n_insns = 0;
1377 int max_size = MOVE_MAX + 1;
/* NOTE(review): the body of this guard (presumably raising the
   effective ALIGN) was lost in extraction; this must stay in lockstep
   with the identical test in move_by_pieces so the estimate matches
   the insns that function will actually emit. */
1379 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1380 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1383 while (max_size > 1)
1385 enum machine_mode mode = VOIDmode, tmode;
1386 enum insn_code icode;
/* Same mode-selection loop as move_by_pieces: widest integer mode
   strictly narrower than max_size. */
1388 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1389 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1390 if (GET_MODE_SIZE (tmode) < max_size)
1393 if (mode == VOIDmode)
1396 icode = mov_optab->handlers[(int) mode].insn_code;
1397 if (icode != CODE_FOR_nothing
1398 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1399 GET_MODE_SIZE (mode)))
/* Count how many MODE-sized moves fit, then carry the remainder
   down to the next narrower mode. */
1400 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1402 max_size = GET_MODE_SIZE (mode);
1408 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1409 with move instructions for mode MODE. GENFUN is the gen_... function
1410 to make a move insn for that mode. DATA has all the other info. */
1413 move_by_pieces_1 (genfun, mode, data)
1415 enum machine_mode mode;
1416 struct move_by_pieces *data;
1418 register int size = GET_MODE_SIZE (mode);
1419 register rtx to1, from1;
/* Emit one MODE-sized move per iteration until fewer than SIZE bytes
   remain; the caller then retries with a narrower mode. */
1421 while (data->len >= size)
/* When copying backwards, step the offset down before the move. */
1423 if (data->reverse) data->offset -= size;
/* For an auto-inc address, wrap it in a MEM directly (the address
   updates itself); otherwise address the block at the current offset. */
1425 to1 = (data->autinc_to
1426 ? gen_rtx (MEM, mode, data->to_addr)
1427 : change_address (data->to, mode,
1428 plus_constant (data->to_addr, data->offset)));
1431 ? gen_rtx (MEM, mode, data->from_addr)
1432 : change_address (data->from, mode,
1433 plus_constant (data->from_addr, data->offset)));
1435 #ifdef HAVE_PRE_DECREMENT
/* explicit_inc_* < 0 means we must emit the decrement ourselves,
   before the move uses the address. */
1436 if (data->explicit_inc_to < 0)
1437 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1438 if (data->explicit_inc_from < 0)
1439 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1442 emit_insn ((*genfun) (to1, from1));
1443 #ifdef HAVE_POST_INCREMENT
/* explicit_inc_* > 0: emit the increment after the move. */
1444 if (data->explicit_inc_to > 0)
1445 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1446 if (data->explicit_inc_from > 0)
1447 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1450 if (! data->reverse) data->offset += size;
1456 /* Emit code to move a block Y to a block X.
1457 This may be done with string-move instructions,
1458 with multiple scalar move instructions, or with a library call.
1460 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1462 SIZE is an rtx that says how long they are.
1463 ALIGN is the maximum alignment we can assume they have,
1464 measured in bytes.
1464+ Strategy, in order: (1) piecewise scalar moves when the count is a
1464+ small constant, (2) a target movstrM string-move pattern, (3) a call
1464+ to memcpy/bcopy. */
1467 emit_block_move (x, y, size, align)
/* Sanity checks: both operands must really be BLKmode MEMs. */
1472 if (GET_MODE (x) != BLKmode)
1475 if (GET_MODE (y) != BLKmode)
1478 x = protect_from_queue (x, 1);
1479 y = protect_from_queue (y, 0);
1480 size = protect_from_queue (size, 0);
1482 if (GET_CODE (x) != MEM)
1484 if (GET_CODE (y) != MEM)
/* Small constant-size copy: cheaper as individual moves than as a
   string insn or a call (MOVE_RATIO is the target's threshold). */
1489 if (GET_CODE (size) == CONST_INT
1490 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1491 move_by_pieces (x, y, INTVAL (size), align);
1494 /* Try the most limited insn first, because there's no point
1495 including more than one in the machine description unless
1496 the more limited one has some advantage. */
1498 rtx opalign = GEN_INT (align);
1499 enum machine_mode mode;
1501 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1502 mode = GET_MODE_WIDER_MODE (mode))
1504 enum insn_code code = movstr_optab[(int) mode];
1506 if (code != CODE_FOR_nothing
1507 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1508 here because if SIZE is less than the mode mask, as it is
1509 returned by the macro, it will definitely be less than the
1510 actual mode mask. */
1511 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
/* Operands 0/1 (dest/src) and 3 (alignment) must satisfy the
   pattern's predicates, when it has any. */
1512 && (insn_operand_predicate[(int) code][0] == 0
1513 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1514 && (insn_operand_predicate[(int) code][1] == 0
1515 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1516 && (insn_operand_predicate[(int) code][3] == 0
1517 || (*insn_operand_predicate[(int) code][3]) (opalign,
1521 rtx last = get_last_insn ();
/* The length operand (operand 2) in the pattern's own mode. */
1524 op2 = convert_to_mode (mode, size, 1);
1525 if (insn_operand_predicate[(int) code][2] != 0
1526 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1527 op2 = copy_to_mode_reg (mode, op2);
1529 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed; discard any insns it emitted and try
   the next wider movstr pattern. */
1536 delete_insns_since (last);
1540 #ifdef TARGET_MEM_FUNCTIONS
/* Fallback library call.  Note memcpy takes (dest, src, n) while
   bcopy takes (src, dest, n) -- argument order differs. */
1541 emit_library_call (memcpy_libfunc, 0,
1542 VOIDmode, 3, XEXP (x, 0), Pmode,
1544 convert_to_mode (TYPE_MODE (sizetype), size,
1545 TREE_UNSIGNED (sizetype)),
1546 TYPE_MODE (sizetype));
1548 emit_library_call (bcopy_libfunc, 0,
1549 VOIDmode, 3, XEXP (y, 0), Pmode,
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
1558 /* Copy all or part of a value X into registers starting at REGNO.
1559 The number of registers to be filled is NREGS. */
1562 move_block_to_reg (regno, x, nregs, mode)
1566 enum machine_mode mode;
/* A constant the target cannot move directly is spilled to the
   constant pool and loaded from memory instead. */
1571 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1572 x = validize_mem (force_const_mem (mode, x));
1574 /* See if the machine can do this with a load multiple insn. */
1575 #ifdef HAVE_load_multiple
1576 if (HAVE_load_multiple)
1578 last = get_last_insn ();
1579 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
/* load_multiple failed to expand: remove its partial output and
   fall through to the word-at-a-time loop. */
1587 delete_insns_since (last);
/* Fallback: one word_mode move per register. */
1591 for (i = 0; i < nregs; i++)
1592 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1593 operand_subword_force (x, i, mode));
1596 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1597 The number of registers to be filled is NREGS. SIZE indicates the number
1598 of bytes in the object X. */
1602 move_block_from_reg (regno, x, nregs, size)
1611 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1612 to the left before storing to memory. */
1613 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1615 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Left-justify: shift the value up so its SIZE significant bytes
   occupy the most-significant end of the word. */
1621 shift = expand_shift (LSHIFT_EXPR, word_mode,
1622 gen_rtx (REG, word_mode, regno),
1623 build_int_2 ((UNITS_PER_WORD - size)
1624 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1625 emit_move_insn (tem, shift);
1629 /* See if the machine can do this with a store multiple insn. */
1630 #ifdef HAVE_store_multiple
1631 if (HAVE_store_multiple)
1633 last = get_last_insn ();
1634 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* store_multiple failed to expand: undo and fall through to the
   word-at-a-time loop. */
1642 delete_insns_since (last);
/* Fallback: store each register into the corresponding word of X. */
1646 for (i = 0; i < nregs; i++)
1648 rtx tem = operand_subword (x, i, 1, BLKmode);
1653 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1657 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1660 use_regs (regno, nregs)
/* Emit a USE per register so flow analysis keeps them live (e.g. for
   argument registers at a call). */
1666 for (i = 0; i < nregs; i++)
1667 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1670 /* Mark the instructions since PREV as a libcall block.
1671 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.
1671+ NOTE(review): the function definition line itself was lost in this
1671+ extract; the statements below appear to be its body -- confirm the
1671+ signature (presumably taking PREV) against the full file. */
1680 /* Find the instructions to mark */
/* PREV == 0 means the block starts at the very first insn emitted. */
1682 insn_first = NEXT_INSN (prev);
1684 insn_first = get_insns ();
1686 insn_last = get_last_insn ();
/* Cross-link the two ends: REG_RETVAL on the last insn points at the
   first, REG_LIBCALL on the first points at the last. */
1688 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1689 REG_NOTES (insn_last));
1691 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1692 REG_NOTES (insn_first));
1695 /* Write zeros through the storage of OBJECT.
1696 If OBJECT has BLKmode, SIZE is its length in bytes. */
1699 clear_storage (object, size)
/* A block is cleared with a library call (memset or bzero); anything
   narrower is just a move of zero. */
1703 if (GET_MODE (object) == BLKmode)
1705 #ifdef TARGET_MEM_FUNCTIONS
/* memset (addr, 0, size). */
1706 emit_library_call (memset_libfunc, 0,
1708 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1709 GEN_INT (size), Pmode);
/* bzero (addr, size). */
1711 emit_library_call (bzero_libfunc, 0,
1713 XEXP (object, 0), Pmode,
1714 GEN_INT (size), Pmode);
1718 emit_move_insn (object, const0_rtx);
1721 /* Generate code to copy Y into X.
1722 Both Y and X must have the same mode, except that
1723 Y can be a constant with VOIDmode.
1724 This mode cannot be BLKmode; use emit_block_move for that.
1726 Return the last instruction emitted.
1726+ This is the validating wrapper: it legitimizes operands, then
1726+ delegates the actual move to emit_move_insn_1. */
1729 emit_move_insn (x, y)
1732 enum machine_mode mode = GET_MODE (x);
1733 enum machine_mode submode;
1734 enum mode_class class = GET_MODE_CLASS (mode);
1737 x = protect_from_queue (x, 1);
1738 y = protect_from_queue (y, 0);
/* Reject mode mismatches up front (VOIDmode constants excepted). */
1740 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* A constant the target cannot move directly must come from memory. */
1743 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1744 y = force_const_mem (mode, y);
1746 /* If X or Y are memory references, verify that their addresses are valid
/* Rebuild X through change_address if its address is not valid for
   its mode (push operands are exempt -- their address is special). */
1748 if (GET_CODE (x) == MEM
1749 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1750 && ! push_operand (x, GET_MODE (x)))
1752 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1753 x = change_address (x, VOIDmode, XEXP (x, 0));
1755 if (GET_CODE (y) == MEM
1756 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1758 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1759 y = change_address (y, VOIDmode, XEXP (y, 0));
1761 if (mode == BLKmode)
1764 return emit_move_insn_1 (x, y);
1767 /* Low level part of emit_move_insn.
1768 Called just like emit_move_insn, but assumes X and Y
1769 are basically valid. */
1772 emit_move_insn_1 (x, y)
1775 enum machine_mode mode = GET_MODE (x);
1776 enum machine_mode submode;
1777 enum mode_class class = GET_MODE_CLASS (mode);
/* For a complex mode, SUBMODE is the scalar mode of one component
   (real or imaginary part). */
1780 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1781 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1782 (class == MODE_COMPLEX_INT
1783 ? MODE_INT : MODE_FLOAT),
/* Easy case: the target has a move pattern for this whole mode. */
1786 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1788 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1790 /* Expand complex moves by moving real part and imag part, if possible. */
1791 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1792 && submode != BLKmode
1793 && (mov_optab->handlers[(int) submode].insn_code
1794 != CODE_FOR_nothing))
1796 /* Don't split destination if it is a stack push. */
1797 int stack = push_operand (x, GET_MODE (x));
1798 rtx prev = get_last_insn ();
1800 /* Tell flow that the whole of the destination is being set. */
1801 if (GET_CODE (x) == REG)
1802 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1804 /* If this is a stack, push the highpart first, so it
1805 will be in the argument order.
1807 In that case, change_address is used only to convert
1808 the mode, not to change the address. */
1811 /* Note that the real part always precedes the imag part in memory
1812 regardless of machine's endianness. */
1813 #ifdef STACK_GROWS_DOWNWARD
/* Downward-growing stack: push imag first so real ends up at the
   lower address. */
1814 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1815 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1816 gen_imagpart (submode, y)));
1817 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1818 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1819 gen_realpart (submode, y)));
/* Upward-growing stack: opposite order, real part first. */
1821 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1822 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1823 gen_realpart (submode, y)));
1824 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1825 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1826 gen_imagpart (submode, y)));
/* Not a push: move each half between the corresponding subparts. */
1831 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1832 (gen_highpart (submode, x), gen_highpart (submode, y)));
1833 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1834 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1837 if (GET_CODE (x) != CONCAT)
1838 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1839 each with a separate pseudo as destination.
1840 It's not correct for flow to treat them as a unit. */
1843 return get_last_insn ();
1846 /* This will handle any multi-word mode that lacks a move_insn pattern.
1847 However, you will get better code if you define such patterns,
1848 even if they must turn into multiple assembler instructions. */
1849 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1852 rtx prev_insn = get_last_insn ();
/* One word per iteration, rounding the byte size up to whole words. */
1855 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1858 rtx xpart = operand_subword (x, i, 1, mode);
1859 rtx ypart = operand_subword (y, i, 1, mode);
1861 /* If we can't get a part of Y, put Y into memory if it is a
1862 constant. Otherwise, force it into a register. If we still
1863 can't get a part of Y, abort. */
1864 if (ypart == 0 && CONSTANT_P (y))
1866 y = force_const_mem (mode, y);
1867 ypart = operand_subword (y, i, 1, mode);
1869 else if (ypart == 0)
1870 ypart = operand_subword_force (y, i, mode);
1872 if (xpart == 0 || ypart == 0)
1875 last_insn = emit_move_insn (xpart, ypart);
1877 /* Mark these insns as a libcall block. */
1878 group_insns (prev_insn);
1886 /* Pushing data onto the stack. */
1888 /* Push a block of length SIZE (perhaps variable)
1889 and return an rtx to address the beginning of the block.
1890 Note that it is not possible for the value returned to be a QUEUED.
1891 The value may be virtual_outgoing_args_rtx.
1893 EXTRA is the number of bytes of padding to push in addition to SIZE.
1894 BELOW nonzero means this padding comes at low addresses;
1895 otherwise, the padding comes at high addresses. */
1898 push_block (size, extra, below)
/* First adjust the stack pointer by SIZE+EXTRA, choosing the cheapest
   form for how SIZE is expressed. */
1903 if (CONSTANT_P (size))
1904 anti_adjust_stack (plus_constant (size, extra));
1905 else if (GET_CODE (size) == REG && extra == 0)
1906 anti_adjust_stack (size);
/* Variable size with padding: compute SIZE+EXTRA in a register. */
1909 rtx temp = copy_to_mode_reg (Pmode, size);
1911 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1912 temp, 0, OPTAB_LIB_WIDEN);
1913 anti_adjust_stack (temp);
1916 #ifdef STACK_GROWS_DOWNWARD
/* Stack grows down: the block now starts at the outgoing-args base;
   skip EXTRA low-address padding bytes if requested. */
1917 temp = virtual_outgoing_args_rtx;
1918 if (extra != 0 && below)
1919 temp = plus_constant (temp, extra);
/* Stack grows up: the block starts SIZE (and possibly EXTRA) bytes
   below the outgoing-args base. */
1921 if (GET_CODE (size) == CONST_INT)
1922 temp = plus_constant (virtual_outgoing_args_rtx,
1923 - INTVAL (size) - (below ? 0 : extra));
1924 else if (extra != 0 && !below)
1925 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1926 negate_rtx (Pmode, plus_constant (size, extra)));
1928 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1929 negate_rtx (Pmode, size));
/* Legitimize the computed address before handing it back. */
1932 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): this return appears to be the body of gen_push_operand
   (its definition line was lost in this extract -- confirm against the
   full file).  It builds the auto-modify stack-pointer address
   (PRE_DEC or PRE_INC per STACK_PUSH_CODE) used inside a MEM for
   push instructions. */
1938 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1941 /* Generate code to push X onto the stack, assuming it has mode MODE and
1943 MODE is redundant except when X is a CONST_INT (since they don't
1945 SIZE is an rtx for the size of data to be copied (in bytes),
1946 needed only if X is BLKmode.
1948 ALIGN (in bytes) is maximum alignment we can assume.
1950 If PARTIAL and REG are both nonzero, then copy that many of the first
1951 words of X into registers starting with REG, and push the rest of X.
1952 The amount of space pushed is decreased by PARTIAL words,
1953 rounded *down* to a multiple of PARM_BOUNDARY.
1954 REG must be a hard register in this case.
1955 If REG is zero but PARTIAL is not, take any all others actions for an
1956 argument partially in registers, but do not actually load any
1959 EXTRA is the amount in bytes of extra space to leave next to this arg.
1960 This is ignored if an argument block has already been allocated.
1962 On a machine that lacks real push insns, ARGS_ADDR is the address of
1963 the bottom of the argument block for this call. We use indexing off there
1964 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1965 argument block has not been preallocated.
1967 ARGS_SO_FAR is the size of args previously pushed for this call. */
1970 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1971 args_addr, args_so_far)
1973 enum machine_mode mode;
/* Direction the stack grows; padding on the same side as growth is
   applied after the data, otherwise before. */
1984 enum direction stack_direction
1985 #ifdef STACK_GROWS_DOWNWARD
1991 /* Decide where to pad the argument: `downward' for below,
1992 `upward' for above, or `none' for don't pad it.
1993 Default is below for small data on big-endian machines; else above. */
1994 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1996 /* Invert direction if stack is post-update. */
1997 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1998 if (where_pad != none)
1999 where_pad = (where_pad == downward ? upward : downward);
2001 xinner = x = protect_from_queue (x, 0);
2003 if (mode == BLKmode)
2005 /* Copy a block into the stack, entirely or partially. */
2008 int used = partial * UNITS_PER_WORD;
2009 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2017 /* USED is now the # of bytes we need not copy to the stack
2018 because registers will take care of them. */
2021 xinner = change_address (xinner, BLKmode,
2022 plus_constant (XEXP (xinner, 0), used));
2024 /* If the partial register-part of the arg counts in its stack size,
2025 skip the part of stack space corresponding to the registers.
2026 Otherwise, start copying to the beginning of the stack space,
2027 by setting SKIP to 0. */
2028 #ifndef REG_PARM_STACK_SPACE
2034 #ifdef PUSH_ROUNDING
2035 /* Do it with several push insns if that doesn't take lots of insns
2036 and if there is no difficulty with push insns that skip bytes
2037 on the stack for alignment purposes. */
2039 && GET_CODE (size) == CONST_INT
2041 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2043 /* Here we avoid the case of a structure whose weak alignment
2044 forces many pushes of a small amount of data,
2045 and such small pushes do rounding that causes trouble. */
2046 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2047 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2048 || PUSH_ROUNDING (align) == align)
2049 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2051 /* Push padding now if padding above and stack grows down,
2052 or if padding below and stack grows up.
2053 But if space already allocated, this has already been done. */
2054 if (extra && args_addr == 0
2055 && where_pad != none && where_pad != stack_direction)
2056 anti_adjust_stack (GEN_INT (extra));
/* Piecewise push through the auto-modify stack address. */
2058 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2059 INTVAL (size) - used, align);
2062 #endif /* PUSH_ROUNDING */
2064 /* Otherwise make space on the stack and copy the data
2065 to the address of that space. */
2067 /* Deduct words put into registers from the size we must copy. */
2070 if (GET_CODE (size) == CONST_INT)
2071 size = GEN_INT (INTVAL (size) - used);
2073 size = expand_binop (GET_MODE (size), sub_optab, size,
2074 GEN_INT (used), NULL_RTX, 0,
2078 /* Get the address of the stack space.
2079 In this case, we do not deal with EXTRA separately.
2080 A single stack adjust will do. */
2083 temp = push_block (size, extra, where_pad == downward);
/* Preallocated argument block: index off ARGS_ADDR instead. */
2086 else if (GET_CODE (args_so_far) == CONST_INT)
2087 temp = memory_address (BLKmode,
2088 plus_constant (args_addr,
2089 skip + INTVAL (args_so_far)));
2091 temp = memory_address (BLKmode,
2092 plus_constant (gen_rtx (PLUS, Pmode,
2093 args_addr, args_so_far),
2096 /* TEMP is the address of the block. Copy the data there. */
2097 if (GET_CODE (size) == CONST_INT
2098 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2101 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2102 INTVAL (size), align);
2105 /* Try the most limited insn first, because there's no point
2106 including more than one in the machine description unless
2107 the more limited one has some advantage. */
2108 #ifdef HAVE_movstrqi
/* Each movstrM case requires SIZE to fit in M's signed range. */
2110 && GET_CODE (size) == CONST_INT
2111 && ((unsigned) INTVAL (size)
2112 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2114 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2115 xinner, size, GEN_INT (align));
2123 #ifdef HAVE_movstrhi
2125 && GET_CODE (size) == CONST_INT
2126 && ((unsigned) INTVAL (size)
2127 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2129 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2130 xinner, size, GEN_INT (align));
2138 #ifdef HAVE_movstrsi
2141 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2142 xinner, size, GEN_INT (align));
2150 #ifdef HAVE_movstrdi
2153 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2154 xinner, size, GEN_INT (align));
2163 #ifndef ACCUMULATE_OUTGOING_ARGS
2164 /* If the source is referenced relative to the stack pointer,
2165 copy it to another register to stabilize it. We do not need
2166 to do this if we know that we won't be changing sp. */
2168 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2169 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2170 temp = copy_to_reg (temp);
2173 /* Make inhibit_defer_pop nonzero around the library call
2174 to force it to pop the bcopy-arguments right away. */
2176 #ifdef TARGET_MEM_FUNCTIONS
/* memcpy is (dest, src, n); bcopy is (src, dest, n). */
2177 emit_library_call (memcpy_libfunc, 0,
2178 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
2181 TYPE_MODE (sizetype));
2183 emit_library_call (bcopy_libfunc, 0,
2184 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2185 convert_to_mode (TYPE_MODE (sizetype),
2186 size, TREE_UNSIGNED (sizetype)),
2187 TYPE_MODE (sizetype));
2192 else if (partial > 0)
2194 /* Scalar partly in registers. */
2196 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2199 /* # words of start of argument
2200 that we must make space for but need not store. */
2201 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2202 int args_offset = INTVAL (args_so_far);
2205 /* Push padding now if padding above and stack grows down,
2206 or if padding below and stack grows up.
2207 But if space already allocated, this has already been done. */
2208 if (extra && args_addr == 0
2209 && where_pad != none && where_pad != stack_direction)
2210 anti_adjust_stack (GEN_INT (extra));
2212 /* If we make space by pushing it, we might as well push
2213 the real data. Otherwise, we can leave OFFSET nonzero
2214 and leave the space uninitialized. */
2218 /* Now NOT_STACK gets the number of words that we don't need to
2219 allocate on the stack. */
2220 not_stack = partial - offset;
2222 /* If the partial register-part of the arg counts in its stack size,
2223 skip the part of stack space corresponding to the registers.
2224 Otherwise, start copying to the beginning of the stack space,
2225 by setting SKIP to 0. */
2226 #ifndef REG_PARM_STACK_SPACE
2232 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2233 x = validize_mem (force_const_mem (mode, x));
2235 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2236 SUBREGs of such registers are not allowed. */
2237 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2238 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2239 x = copy_to_reg (x);
2241 /* Loop over all the words allocated on the stack for this arg. */
2242 /* We can do it by words, because any scalar bigger than a word
2243 has a size a multiple of a word. */
2244 #ifndef PUSH_ARGS_REVERSED
2245 for (i = not_stack; i < size; i++)
2247 for (i = size - 1; i >= not_stack; i--)
2249 if (i >= not_stack + offset)
/* Recurse: each stack-bound word is itself pushed as a word_mode
   scalar at its computed offset. */
2250 emit_push_insn (operand_subword_force (x, i, mode),
2251 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2253 GEN_INT (args_offset + ((i - not_stack + skip)
2254 * UNITS_PER_WORD)));
/* Simple scalar case: a real push insn, or a store into the
   preallocated argument block. */
2260 /* Push padding now if padding above and stack grows down,
2261 or if padding below and stack grows up.
2262 But if space already allocated, this has already been done. */
2263 if (extra && args_addr == 0
2264 && where_pad != none && where_pad != stack_direction)
2265 anti_adjust_stack (GEN_INT (extra));
2267 #ifdef PUSH_ROUNDING
2269 addr = gen_push_operand ();
2272 if (GET_CODE (args_so_far) == CONST_INT)
2274 = memory_address (mode,
2275 plus_constant (args_addr, INTVAL (args_so_far)));
2277 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2280 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2284 /* If part should go in registers, copy that part
2285 into the appropriate registers. Do this now, at the end,
2286 since mem-to-mem copies above may do function calls. */
2287 if (partial > 0 && reg != 0)
2288 move_block_to_reg (REGNO (reg), x, partial, mode);
/* Padding on the growth side of the stack goes after the data. */
2290 if (extra && args_addr == 0 && where_pad == stack_direction)
2291 anti_adjust_stack (GEN_INT (extra));
2294 /* Expand an assignment that stores the value of FROM into TO.
2295 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2296 (This may contain a QUEUED rtx;
2297 if the value is constant, this rtx is a constant.)
2298 Otherwise, the returned value is NULL_RTX.
2300 SUGGEST_REG is no longer actually used.
2301 It used to mean, copy the value through a register
2302 and return that register, if that is possible.
2303 We now use WANT_VALUE to decide whether to do this. */
2306 expand_assignment (to, from, want_value, suggest_reg)
2311 register rtx to_rtx = 0;
2314 /* Don't crash if the lhs of the assignment was erroneous. */
2316 if (TREE_CODE (to) == ERROR_MARK)
/* Still evaluate the rhs for its side effects. */
2318 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2319 return want_value ? result : NULL_RTX;
/* Bytecode back end: emit stack-machine opcodes instead of RTL. */
2322 if (output_bytecode)
2324 tree dest_innermost;
2326 bc_expand_expr (from);
/* Duplicate the value so one copy survives the store as the
   expression's result. */
2327 bc_emit_instruction (duplicate);
2329 dest_innermost = bc_expand_address (to);
2331 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2332 take care of it here. */
2334 bc_store_memory (TREE_TYPE (to), dest_innermost);
2338 /* Assignment of a structure component needs special treatment
2339 if the structure component's rtx is not simply a MEM.
2340 Assignment of an array element at a constant index
2341 has the same problem. */
2343 if (TREE_CODE (to) == COMPONENT_REF
2344 || TREE_CODE (to) == BIT_FIELD_REF
2345 || (TREE_CODE (to) == ARRAY_REF
2346 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2347 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2349 enum machine_mode mode1;
/* Peel the reference down to the containing object, collecting the
   bit offset/size, mode, signedness, and volatility of the field. */
2359 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2360 &mode1, &unsignedp, &volatilep);
2362 /* If we are going to use store_bit_field and extract_bit_field,
2363 make sure to_rtx will be safe for multiple use. */
2365 if (mode1 == VOIDmode && want_value)
2366 tem = stabilize_reference (tem);
2368 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2369 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
/* Variable offset: fold it into the address at run time. */
2372 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2374 if (GET_CODE (to_rtx) != MEM)
2376 to_rtx = change_address (to_rtx, VOIDmode,
2377 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2378 force_reg (Pmode, offset_rtx)));
2379 /* If we have a variable offset, the known alignment
2380 is only that of the innermost structure containing the field.
2381 (Actually, we could sometimes do better by using the
2382 align of an element of the innermost array, but no need.) */
2383 if (TREE_CODE (to) == COMPONENT_REF
2384 || TREE_CODE (to) == BIT_FIELD_REF
2386 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2390 if (GET_CODE (to_rtx) == MEM)
2391 MEM_VOLATILE_P (to_rtx) = 1;
2392 #if 0 /* This was turned off because, when a field is volatile
2393 in an object which is not volatile, the object may be in a register,
2394 and then we would abort over here. */
/* Store into the (possibly bit-) field and keep the stored value
   around if the caller wants it. */
2400 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2402 /* Spurious cast makes HPUX compiler happy. */
2403 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2406 /* Required alignment of containing datum. */
2408 int_size_in_bytes (TREE_TYPE (tem)));
2409 preserve_temp_slots (result);
2413 /* If the value is meaningful, convert RESULT to the proper mode.
2414 Otherwise, return nothing. */
2415 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2416 TYPE_MODE (TREE_TYPE (from)),
2418 TREE_UNSIGNED (TREE_TYPE (to)))
2422 /* If the rhs is a function call and its value is not an aggregate,
2423 call the function before we start to compute the lhs.
2424 This is needed for correct code for cases such as
2425 val = setjmp (buf) on machines where reference to val
2426 requires loading up part of an address in a separate insn.
2428 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2429 a promoted variable where the zero- or sign- extension needs to be done.
2430 Handling this in the normal way is safe because no computation is done
2432 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2433 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2438 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2440 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2441 emit_move_insn (to_rtx, value);
2442 preserve_temp_slots (to_rtx);
2445 return want_value ? to_rtx : NULL_RTX;
2448 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2449 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2452 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2454 /* Don't move directly into a return register. */
2455 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
/* Compute into a temporary first, then move into the return reg. */
2460 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2461 emit_move_insn (to_rtx, temp);
2462 preserve_temp_slots (to_rtx);
2465 return want_value ? to_rtx : NULL_RTX;
2468 /* In case we are returning the contents of an object which overlaps
2469 the place the value is being stored, use a safe function when copying
2470 a value through a pointer into a structure value return block. */
2471 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2472 && current_function_returns_struct
2473 && !current_function_returns_pcc_struct)
2478 size = expr_size (from);
2479 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2481 #ifdef TARGET_MEM_FUNCTIONS
/* NOTE(review): memcpy does not define behavior for overlapping
   blocks; the original intent ("use a safe function") reads as if
   memmove were wanted here -- confirm against the full file. */
2482 emit_library_call (memcpy_libfunc, 0,
2483 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2484 XEXP (from_rtx, 0), Pmode,
2485 convert_to_mode (TYPE_MODE (sizetype),
2486 size, TREE_UNSIGNED (sizetype)),
2487 TYPE_MODE (sizetype));
2489 emit_library_call (bcopy_libfunc, 0,
2490 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2491 XEXP (to_rtx, 0), Pmode,
2492 convert_to_mode (TYPE_MODE (sizetype),
2493 size, TREE_UNSIGNED (sizetype)),
2494 TYPE_MODE (sizetype));
2497 preserve_temp_slots (to_rtx);
2500 return want_value ? to_rtx : NULL_RTX;
2503 /* Compute FROM and store the value in the rtx we got. */
2506 result = store_expr (from, to_rtx, want_value);
2507 preserve_temp_slots (result);
2510 return want_value ? result : NULL_RTX;
2513 /* Generate code for computing expression EXP,
2514 and storing the value into TARGET.
2515 TARGET may contain a QUEUED rtx.
2517 If WANT_VALUE is nonzero, return a copy of the value
2518 not in TARGET, so that we can be sure to use the proper
2519 value in a containing expression even if TARGET has something
2520 else stored in it. If possible, we copy the value through a pseudo
2521 and return that pseudo. Or, if the value is constant, we try to
2522 return the constant. In some cases, we return a pseudo
2523 copied *from* TARGET.
2525 If the mode is BLKmode then we may return TARGET itself.
2526 It turns out that in BLKmode it doesn't cause a problem,
2527 because C has no operators that could combine two different
2528 assignments into the same BLKmode object with different values
2529 with no sequence point. Will other languages need this to
2532 If WANT_VALUE is 0, we return NULL, to make sure
2533 to catch quickly any cases where the caller uses the value
2534 and fails to set WANT_VALUE. */
2537 store_expr (exp, target, want_value)
2539 register rtx target;
2543 int dont_return_target = 0;
2545 if (TREE_CODE (exp) == COMPOUND_EXPR)
2547 /* Perform first part of compound expression, then assign from second
2549 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2551 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2553 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2555 /* For conditional expression, get safe form of the target. Then
2556 test the condition, doing the appropriate assignment on either
2557 side. This avoids the creation of unnecessary temporaries.
2558 For non-BLKmode, it is more efficient not to do this. */
2560 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2563 target = protect_from_queue (target, 1);
2566 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2567 store_expr (TREE_OPERAND (exp, 1), target, 0);
2569 emit_jump_insn (gen_jump (lab2));
2572 store_expr (TREE_OPERAND (exp, 2), target, 0);
2576 return want_value ? target : NULL_RTX;
2578 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2579 && GET_MODE (target) != BLKmode)
2580 /* If target is in memory and caller wants value in a register instead,
2581 arrange that. Pass TARGET as target for expand_expr so that,
2582 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2583 We know expand_expr will not use the target in that case.
2584 Don't do this if TARGET is volatile because we are supposed
2585 to write it and then read it. */
2587 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2588 GET_MODE (target), 0);
2589 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2590 temp = copy_to_reg (temp);
2591 dont_return_target = 1;
2593 else if (queued_subexp_p (target))
2594 /* If target contains a postincrement, let's not risk
2595 using it as the place to generate the rhs. */
2597 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2599 /* Expand EXP into a new pseudo. */
2600 temp = gen_reg_rtx (GET_MODE (target));
2601 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2604 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2606 /* If target is volatile, ANSI requires accessing the value
2607 *from* the target, if it is accessed. So make that happen.
2608 In no case return the target itself. */
2609 if (! MEM_VOLATILE_P (target) && want_value)
2610 dont_return_target = 1;
2612 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2613 /* If this is an scalar in a register that is stored in a wider mode
2614 than the declared mode, compute the result into its declared mode
2615 and then convert to the wider mode. Our value is the computed
2618 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2620 /* If TEMP is a VOIDmode constant, use convert_modes to make
2621 sure that we properly convert it. */
2622 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2623 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2624 TYPE_MODE (TREE_TYPE (exp)), temp,
2625 SUBREG_PROMOTED_UNSIGNED_P (target));
2627 convert_move (SUBREG_REG (target), temp,
2628 SUBREG_PROMOTED_UNSIGNED_P (target));
2629 return want_value ? temp : NULL_RTX;
2633 temp = expand_expr (exp, target, GET_MODE (target), 0);
2634 /* DO return TARGET if it's a specified hardware register.
2635 expand_return relies on this.
2636 If TARGET is a volatile mem ref, either return TARGET
2637 or return a reg copied *from* TARGET; ANSI requires this.
2639 Otherwise, if TEMP is not TARGET, return TEMP
2640 if it is constant (for efficiency),
2641 or if we really want the correct value. */
2642 if (!(target && GET_CODE (target) == REG
2643 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2644 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2646 && (CONSTANT_P (temp) || want_value))
2647 dont_return_target = 1;
2650 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2651 the same as that of TARGET, adjust the constant. This is needed, for
2652 example, in case it is a CONST_DOUBLE and we want only a word-sized
2654 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2655 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2656 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2657 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2659 /* If value was not generated in the target, store it there.
2660 Convert the value to TARGET's type first if nec. */
2662 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2664 target = protect_from_queue (target, 1);
2665 if (GET_MODE (temp) != GET_MODE (target)
2666 && GET_MODE (temp) != VOIDmode)
2668 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2669 if (dont_return_target)
2671 /* In this case, we will return TEMP,
2672 so make sure it has the proper mode.
2673 But don't forget to store the value into TARGET. */
2674 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2675 emit_move_insn (target, temp);
2678 convert_move (target, temp, unsignedp);
2681 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2683 /* Handle copying a string constant into an array.
2684 The string constant may be shorter than the array.
2685 So copy just the string's actual length, and clear the rest. */
2688 /* Get the size of the data type of the string,
2689 which is actually the size of the target. */
2690 size = expr_size (exp);
2691 if (GET_CODE (size) == CONST_INT
2692 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2693 emit_block_move (target, temp, size,
2694 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2697 /* Compute the size of the data to copy from the string. */
2699 = size_binop (MIN_EXPR,
2700 make_tree (sizetype, size),
2702 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2703 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2707 /* Copy that much. */
2708 emit_block_move (target, temp, copy_size_rtx,
2709 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2711 /* Figure out how much is left in TARGET
2712 that we have to clear. */
2713 if (GET_CODE (copy_size_rtx) == CONST_INT)
2715 temp = plus_constant (XEXP (target, 0),
2716 TREE_STRING_LENGTH (exp));
2717 size = plus_constant (size,
2718 - TREE_STRING_LENGTH (exp));
2722 enum machine_mode size_mode = Pmode;
2724 temp = force_reg (Pmode, XEXP (target, 0));
2725 temp = expand_binop (size_mode, add_optab, temp,
2726 copy_size_rtx, NULL_RTX, 0,
2729 size = expand_binop (size_mode, sub_optab, size,
2730 copy_size_rtx, NULL_RTX, 0,
2733 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2734 GET_MODE (size), 0, 0);
2735 label = gen_label_rtx ();
2736 emit_jump_insn (gen_blt (label));
2739 if (size != const0_rtx)
2741 #ifdef TARGET_MEM_FUNCTIONS
2742 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2743 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2745 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2746 temp, Pmode, size, Pmode);
2753 else if (GET_MODE (temp) == BLKmode)
2754 emit_block_move (target, temp, expr_size (exp),
2755 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2757 emit_move_insn (target, temp);
2760 if (dont_return_target && GET_CODE (temp) != MEM)
2762 if (want_value && GET_MODE (target) != BLKmode)
2763 return copy_to_reg (target);
2769 /* Store the value of constructor EXP into the rtx TARGET.
2770 TARGET is either a REG or a MEM. */
/* store_constructor -- store the value of CONSTRUCTOR expression EXP
   into TARGET (a REG or MEM); see the comment block above.
   NOTE(review): gapped numbered listing -- some original lines are
   missing from this excerpt.  Code kept byte-identical.  */
2773 store_constructor (exp, target)
2777 tree type = TREE_TYPE (exp);
2779 /* We know our target cannot conflict, since safe_from_p has been called. */
2781 /* Don't try copying piece by piece into a hard register
2782 since that is vulnerable to being clobbered by EXP.
2783 Instead, construct in a pseudo register and then copy it all. */
2784 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2786 rtx temp = gen_reg_rtx (GET_MODE (target));
2787 store_constructor (exp, temp);
2788 emit_move_insn (target, temp);
/* Aggregate case: records, unions, and qualified unions share one arm;
   arrays are handled in the ARRAY_TYPE arm further down.  */
2793 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2794 || TREE_CODE (type) == QUAL_UNION_TYPE)
2798 /* Inform later passes that the whole union value is dead. */
2799 if (TREE_CODE (type) == UNION_TYPE
2800 || TREE_CODE (type) == QUAL_UNION_TYPE)
2801 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2803 /* If we are building a static constructor into a register,
2804 set the initial value as zero so we can fold the value into
2806 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2807 emit_move_insn (target, const0_rtx);
2809 /* If the constructor has fewer fields than the structure,
2810 clear the whole structure first. */
2811 else if (list_length (CONSTRUCTOR_ELTS (exp))
2812 != list_length (TYPE_FIELDS (type)))
2813 clear_storage (target, int_size_in_bytes (type));
2815 /* Inform later passes that the old value is dead. */
2816 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2818 /* Store each element of the constructor into
2819 the corresponding field of TARGET. */
2821 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2823 register tree field = TREE_PURPOSE (elt);
2824 register enum machine_mode mode;
2828 tree pos, constant = 0, offset = 0;
2829 rtx to_rtx = target;
2831 /* Just ignore missing fields.
2832 We cleared the whole structure, above,
2833 if any fields are missing. */
2837 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2838 unsignedp = TREE_UNSIGNED (field);
2839 mode = DECL_MODE (field);
2840 if (DECL_BIT_FIELD (field))
/* Split the field position into a compile-time constant part and a
   variable OFFSET part (PLUS_EXPR case).  */
2843 pos = DECL_FIELD_BITPOS (field);
2844 if (TREE_CODE (pos) == INTEGER_CST)
2846 else if (TREE_CODE (pos) == PLUS_EXPR
2847 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2848 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2853 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2859 if (contains_placeholder_p (offset))
2860 offset = build (WITH_RECORD_EXPR, sizetype,
2863 offset = size_binop (FLOOR_DIV_EXPR, offset,
2864 size_int (BITS_PER_UNIT));
2866 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2867 if (GET_CODE (to_rtx) != MEM)
2871 = change_address (to_rtx, VOIDmode,
2872 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2873 force_reg (Pmode, offset_rtx)));
2876 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2877 /* The alignment of TARGET is
2878 at least what its type requires. */
2880 TYPE_ALIGN (type) / BITS_PER_UNIT,
2881 int_size_in_bytes (type));
2884 else if (TREE_CODE (type) == ARRAY_TYPE)
2888 tree domain = TYPE_DOMAIN (type);
2889 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2890 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2891 tree elttype = TREE_TYPE (type);
2893 /* If the constructor has fewer fields than the structure,
2894 clear the whole structure first. Similarly if this is a
2895 static constructor of a non-BLKmode object. */
2897 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2898 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2899 clear_storage (target, int_size_in_bytes (type));
2901 /* Inform later passes that the old value is dead. */
2902 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2904 /* Store each element of the constructor into
2905 the corresponding element of TARGET, determined
2906 by counting the elements. */
2907 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2909 elt = TREE_CHAIN (elt), i++)
2911 register enum machine_mode mode;
2915 tree index = TREE_PURPOSE (elt);
2916 rtx xtarget = target;
2918 mode = TYPE_MODE (elttype);
2919 bitsize = GET_MODE_BITSIZE (mode);
2920 unsignedp = TREE_UNSIGNED (elttype);
2922 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2924 /* We don't currently allow variable indices in a
2925 C initializer, but let's try here to support them. */
2926 rtx pos_rtx, addr, xtarget;
2929 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2930 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2931 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2932 xtarget = change_address (target, mode, addr);
2933 store_expr (TREE_VALUE (elt), xtarget, 0);
/* Constant-index (or positional) element: bit position is either taken
   from the explicit INDEX relative to the domain minimum, or from the
   running element count I.  */
2938 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2939 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2941 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2943 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2944 /* The alignment of TARGET is
2945 at least what its type requires. */
2947 TYPE_ALIGN (type) / BITS_PER_UNIT,
2948 int_size_in_bytes (type));
2957 /* Store the value of EXP (an expression tree)
2958 into a subfield of TARGET which has mode MODE and occupies
2959 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2960 If MODE is VOIDmode, it means that we are storing into a bit-field.
2962 If VALUE_MODE is VOIDmode, return nothing in particular.
2963 UNSIGNEDP is not used in this case.
2965 Otherwise, return an rtx for the value stored. This rtx
2966 has mode VALUE_MODE if that is convenient to do.
2967 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2969 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2970 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* store_field -- store EXP into a BITSIZE-bit subfield of TARGET
   starting at BITPOS; contract is in the comment block above.
   NOTE(review): gapped numbered listing -- some original lines are
   missing from this excerpt.  Code kept byte-identical.  */
2973 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2974 unsignedp, align, total_size)
2976 int bitsize, bitpos;
2977 enum machine_mode mode;
2979 enum machine_mode value_mode;
2984 HOST_WIDE_INT width_mask = 0;
2986 if (bitsize < HOST_BITS_PER_WIDE_INT)
2987 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2989 /* If we are storing into an unaligned field of an aligned union that is
2990 in a register, we may have the mode of TARGET being an integer mode but
2991 MODE == BLKmode. In that case, get an aligned object whose size and
2992 alignment are the same as TARGET and store TARGET into it (we can avoid
2993 the store if the field being stored is the entire width of TARGET). Then
2994 call ourselves recursively to store the field into a BLKmode version of
2995 that object. Finally, load from the object into TARGET. This is not
2996 very efficient in general, but should only be slightly more expensive
2997 than the otherwise-required unaligned accesses. Perhaps this can be
2998 cleaned up later. */
3001 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3003 rtx object = assign_stack_temp (GET_MODE (target),
3004 GET_MODE_SIZE (GET_MODE (target)), 0);
3005 rtx blk_object = copy_rtx (object);
3007 PUT_MODE (blk_object, BLKmode);
3009 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3010 emit_move_insn (object, target);
3012 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3015 /* Even though we aren't returning target, we need to
3016 give it the updated value. */
3017 emit_move_insn (target, object);
3022 /* If the structure is in a register or if the component
3023 is a bit field, we cannot use addressing to access it.
3024 Use bit-field techniques or SUBREG to store in it. */
3026 if (mode == VOIDmode
3027 || (mode != BLKmode && ! direct_store[(int) mode])
3028 || GET_CODE (target) == REG
3029 || GET_CODE (target) == SUBREG
3030 /* If the field isn't aligned enough to store as an ordinary memref,
3031 store it as a bit field. */
3032 || (STRICT_ALIGNMENT
3033 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3034 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3036 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3038 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3040 if (mode != VOIDmode && mode != BLKmode
3041 && mode != TYPE_MODE (TREE_TYPE (exp)))
3042 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3044 /* Store the value in the bitfield. */
3045 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3046 if (value_mode != VOIDmode)
3048 /* The caller wants an rtx for the value. */
3049 /* If possible, avoid refetching from the bitfield itself. */
3051 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3054 enum machine_mode tmode;
/* Reuse TEMP as the returned value: either mask it down to BITSIZE
   bits (unsigned) or sign-extend it with a left/right shift pair.  */
3057 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3058 tmode = GET_MODE (temp);
3059 if (tmode == VOIDmode)
3061 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3062 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3063 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3065 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3066 NULL_RTX, value_mode, 0, align,
/* Addressable-memory path: adjust the address by BITPOS bytes and do an
   ordinary store via store_expr.  */
3073 rtx addr = XEXP (target, 0);
3076 /* If a value is wanted, it must be the lhs;
3077 so make the address stable for multiple use. */
3079 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3080 && ! CONSTANT_ADDRESS_P (addr)
3081 /* A frame-pointer reference is already stable. */
3082 && ! (GET_CODE (addr) == PLUS
3083 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3084 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3085 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3086 addr = copy_to_reg (addr);
3088 /* Now build a reference to just the desired component. */
3090 to_rtx = change_address (target, mode,
3091 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3092 MEM_IN_STRUCT_P (to_rtx) = 1;
3094 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3098 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3099 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3100 ARRAY_REFs and find the ultimate containing object, which we return.
3102 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3103 bit position, and *PUNSIGNEDP to the signedness of the field.
3104 If the position of the field is variable, we store a tree
3105 giving the variable offset (in units) in *POFFSET.
3106 This offset is in addition to the bit position.
3107 If the position is not variable, we store 0 in *POFFSET.
3109 If any of the extraction expressions is volatile,
3110 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3112 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3113 is a mode that can be used to access the field. In that case, *PBITSIZE
3116 If the field describes a variable-sized object, *PMODE is set to
3117 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3118 this case, but the address of the object can be found. */
/* get_inner_reference -- peel COMPONENT_REF/BIT_FIELD_REF/ARRAY_REF
   layers off EXP, accumulating bit position and variable offset; the
   full output contract is in the comment block above.
   NOTE(review): gapped numbered listing -- some original lines are
   missing from this excerpt.  Code kept byte-identical.  */
3121 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3122 punsignedp, pvolatilep)
3127 enum machine_mode *pmode;
3131 tree orig_exp = exp;
3133 enum machine_mode mode = VOIDmode;
3134 tree offset = integer_zero_node;
/* First, determine the size/mode/signedness of the outermost
   reference itself.  */
3136 if (TREE_CODE (exp) == COMPONENT_REF)
3138 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3139 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3140 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3141 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3143 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3145 size_tree = TREE_OPERAND (exp, 1);
3146 *punsignedp = TREE_UNSIGNED (exp);
3150 mode = TYPE_MODE (TREE_TYPE (exp));
3151 *pbitsize = GET_MODE_BITSIZE (mode);
3152 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3157 if (TREE_CODE (size_tree) != INTEGER_CST)
3158 mode = BLKmode, *pbitsize = -1;
3160 *pbitsize = TREE_INT_CST_LOW (size_tree);
3163 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3164 and find the ultimate containing object. */
3170 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3172 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3173 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3174 : TREE_OPERAND (exp, 2));
3176 /* If this field hasn't been filled in yet, don't go
3177 past it. This should only happen when folding expressions
3178 made during type construction. */
/* POS may be constant, variable, or a PLUS of the two; the constant
   part goes into *PBITPOS, the variable part into OFFSET (in units).  */
3182 if (TREE_CODE (pos) == PLUS_EXPR)
3185 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3187 constant = TREE_OPERAND (pos, 0);
3188 var = TREE_OPERAND (pos, 1);
3190 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3192 constant = TREE_OPERAND (pos, 1);
3193 var = TREE_OPERAND (pos, 0);
3198 *pbitpos += TREE_INT_CST_LOW (constant);
3199 offset = size_binop (PLUS_EXPR, offset,
3200 size_binop (FLOOR_DIV_EXPR, var,
3201 size_int (BITS_PER_UNIT)));
3203 else if (TREE_CODE (pos) == INTEGER_CST)
3204 *pbitpos += TREE_INT_CST_LOW (pos);
3207 /* Assume here that the offset is a multiple of a unit.
3208 If not, there should be an explicitly added constant. */
3209 offset = size_binop (PLUS_EXPR, offset,
3210 size_binop (FLOOR_DIV_EXPR, pos,
3211 size_int (BITS_PER_UNIT)));
3215 else if (TREE_CODE (exp) == ARRAY_REF)
3217 /* This code is based on the code in case ARRAY_REF in expand_expr
3218 below. We assume here that the size of an array element is
3219 always an integral multiple of BITS_PER_UNIT. */
3221 tree index = TREE_OPERAND (exp, 1);
3222 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3224 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3225 tree index_type = TREE_TYPE (index);
3227 if (! integer_zerop (low_bound))
3228 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3230 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3232 index = convert (type_for_size (POINTER_SIZE, 0), index);
3233 index_type = TREE_TYPE (index);
3236 index = fold (build (MULT_EXPR, index_type, index,
3237 TYPE_SIZE (TREE_TYPE (exp))));
3239 if (TREE_CODE (index) == INTEGER_CST
3240 && TREE_INT_CST_HIGH (index) == 0)
3241 *pbitpos += TREE_INT_CST_LOW (index);
3243 offset = size_binop (PLUS_EXPR, offset,
3244 size_binop (FLOOR_DIV_EXPR, index,
3245 size_int (BITS_PER_UNIT)));
/* Stop peeling at anything other than a no-op NON_LVALUE_EXPR or a
   mode-preserving NOP/CONVERT.  */
3247 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3248 && ! ((TREE_CODE (exp) == NOP_EXPR
3249 || TREE_CODE (exp) == CONVERT_EXPR)
3250 && (TYPE_MODE (TREE_TYPE (exp))
3251 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3254 /* If any reference in the chain is volatile, the effect is volatile. */
3255 if (TREE_THIS_VOLATILE (exp))
3257 exp = TREE_OPERAND (exp, 0);
3260 /* If this was a bit-field, see if there is a mode that allows direct
3261 access in case EXP is in memory. */
3262 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3264 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3265 if (mode == BLKmode)
3269 if (integer_zerop (offset))
3272 if (offset != 0 && contains_placeholder_p (offset))
3273 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3280 /* Given an rtx VALUE that may contain additions and multiplications,
3281 return an equivalent value that just refers to a register or memory.
3282 This is done by generating instructions to perform the arithmetic
3283 and returning a pseudo-register containing the value.
3285 The returned value may be a REG, SUBREG, MEM or constant. */
/* force_operand -- reduce VALUE (which may contain PLUS/MINUS/MULT) to
   a REG, SUBREG, MEM or constant by emitting the arithmetic; see the
   comment block above.
   NOTE(review): gapped numbered listing -- some original lines are
   missing from this excerpt.  Code kept byte-identical.  */
3288 force_operand (value, target)
3291 register optab binoptab = 0;
3292 /* Use a temporary to force order of execution of calls to
3296 /* Use subtarget as the target for operand 0 of a binary operation. */
3297 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3299 if (GET_CODE (value) == PLUS)
3300 binoptab = add_optab;
3301 else if (GET_CODE (value) == MINUS)
3302 binoptab = sub_optab;
3303 else if (GET_CODE (value) == MULT)
/* Multiplication is handled here directly via expand_mult.  */
3305 op2 = XEXP (value, 1);
3306 if (!CONSTANT_P (op2)
3307 && !(GET_CODE (op2) == REG && op2 != subtarget))
3309 tmp = force_operand (XEXP (value, 0), subtarget);
3310 return expand_mult (GET_MODE (value), tmp,
3311 force_operand (op2, NULL_RTX),
/* PLUS/MINUS path (binoptab set above).  */
3317 op2 = XEXP (value, 1);
3318 if (!CONSTANT_P (op2)
3319 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize subtraction of a constant into addition of its
   negation.  */
3321 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3323 binoptab = add_optab;
3324 op2 = negate_rtx (GET_MODE (value), op2);
3327 /* Check for an addition with OP2 a constant integer and our first
3328 operand a PLUS of a virtual register and something else. In that
3329 case, we want to emit the sum of the virtual register and the
3330 constant first and then add the other value. This allows virtual
3331 register instantiation to simply modify the constant rather than
3332 creating another one around this addition. */
3333 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3334 && GET_CODE (XEXP (value, 0)) == PLUS
3335 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3336 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3337 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3339 rtx temp = expand_binop (GET_MODE (value), binoptab,
3340 XEXP (XEXP (value, 0), 0), op2,
3341 subtarget, 0, OPTAB_LIB_WIDEN);
3342 return expand_binop (GET_MODE (value), binoptab, temp,
3343 force_operand (XEXP (XEXP (value, 0), 1), 0),
3344 target, 0, OPTAB_LIB_WIDEN);
3347 tmp = force_operand (XEXP (value, 0), subtarget);
3348 return expand_binop (GET_MODE (value), binoptab, tmp,
3349 force_operand (op2, NULL_RTX),
3350 target, 0, OPTAB_LIB_WIDEN);
3351 /* We give UNSIGNEDP = 0 to expand_binop
3352 because the only operations we are expanding here are signed ones. */
3357 /* Subroutine of expand_expr:
3358 save the non-copied parts (LIST) of an expr (LHS), and return a list
3359 which can restore these values to their previous values,
3360 should something modify their storage. */
/* save_noncopied_parts -- for each part in LIST, copy the current value
   of LHS's corresponding component into a stack temporary, and return a
   TREE_LIST pairing each component with an RTL_EXPR holding the saved
   value (see the comment block above).  Recurses on nested TREE_LISTs.
   NOTE(review): gapped numbered listing -- declarations and braces are
   missing from this excerpt.  Code kept byte-identical.  */
3363 save_noncopied_parts (lhs, list)
3370 for (tail = list; tail; tail = TREE_CHAIN (tail))
3371 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3372 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3375 tree part = TREE_VALUE (tail);
3376 tree part_type = TREE_TYPE (part);
3377 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3378 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3379 int_size_in_bytes (part_type), 0);
/* If the temporary's address is not directly usable, legitimize it.  */
3380 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3381 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3382 parts = tree_cons (to_be_saved,
3383 build (RTL_EXPR, part_type, NULL_TREE,
3386 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3391 /* Subroutine of expand_expr:
3392 record the non-copied parts (LIST) of an expr (LHS), and return a list
3393 which specifies the initial values of these parts. */
/* init_noncopied_parts -- for each part in LIST, build a COMPONENT_REF
   of LHS and pair it with the part's initial value (TREE_PURPOSE);
   returns the resulting TREE_LIST (see the comment block above).
   Recurses on nested TREE_LISTs, mirroring save_noncopied_parts.
   NOTE(review): gapped numbered listing -- declarations and braces are
   missing from this excerpt.  Code kept byte-identical.  */
3396 init_noncopied_parts (lhs, list)
3403 for (tail = list; tail; tail = TREE_CHAIN (tail))
3404 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3405 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3408 tree part = TREE_VALUE (tail);
3409 tree part_type = TREE_TYPE (part);
3410 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3411 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3416 /* Subroutine of expand_expr: return nonzero iff there is no way that
3417 EXP can reference X, which is being modified. */
/* safe_from_p -- return nonzero iff there is no way EXP can reference
   X, which is being modified (see the comment block above).
   NOTE(review): gapped numbered listing -- case labels and some return
   statements are missing from this excerpt.  Code kept byte-identical.  */
3420 safe_from_p (x, exp)
3430 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3431 find the underlying pseudo. */
3432 if (GET_CODE (x) == SUBREG)
3435 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3439 /* If X is a location in the outgoing argument area, it is always safe. */
3440 if (GET_CODE (x) == MEM
3441 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3442 || (GET_CODE (XEXP (x, 0)) == PLUS
3443 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch on the class of tree code first: declarations expose their
   DECL_RTL; expression classes recurse on their operands.  */
3446 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3449 exp_rtl = DECL_RTL (exp);
3456 if (TREE_CODE (exp) == TREE_LIST)
3457 return ((TREE_VALUE (exp) == 0
3458 || safe_from_p (x, TREE_VALUE (exp)))
3459 && (TREE_CHAIN (exp) == 0
3460 || safe_from_p (x, TREE_CHAIN (exp))));
3465 return safe_from_p (x, TREE_OPERAND (exp, 0));
3469 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3470 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3474 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3475 the expression. If it is set, we conflict iff we are that rtx or
3476 both are in memory. Otherwise, we check all operands of the
3477 expression recursively. */
3479 switch (TREE_CODE (exp))
3482 return (staticp (TREE_OPERAND (exp, 0))
3483 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3486 if (GET_CODE (x) == MEM)
3491 exp_rtl = CALL_EXPR_RTL (exp);
3494 /* Assume that the call will clobber all hard registers and
3496 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3497 || GET_CODE (x) == MEM)
3504 exp_rtl = RTL_EXPR_RTL (exp);
3506 /* We don't know what this can modify. */
3511 case WITH_CLEANUP_EXPR:
3512 exp_rtl = RTL_EXPR_RTL (exp);
3516 exp_rtl = SAVE_EXPR_RTL (exp);
3520 /* The only operand we look at is operand 1. The rest aren't
3521 part of the expression. */
3522 return safe_from_p (x, TREE_OPERAND (exp, 1));
3524 case METHOD_CALL_EXPR:
3525 /* This takes a rtx argument, but shouldn't appear here. */
3529 /* If we have an rtx, we do not need to scan our operands. */
/* Generic fallback: recurse over every operand of EXP.  */
3533 nops = tree_code_length[(int) TREE_CODE (exp)];
3534 for (i = 0; i < nops; i++)
3535 if (TREE_OPERAND (exp, i) != 0
3536 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3540 /* If we have an rtl, find any enclosed object. Then see if we conflict
3544 if (GET_CODE (exp_rtl) == SUBREG)
3546 exp_rtl = SUBREG_REG (exp_rtl);
3547 if (GET_CODE (exp_rtl) == REG
3548 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3552 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3553 are memory and EXP is not readonly. */
3554 return ! (rtx_equal_p (x, exp_rtl)
3555 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3556 && ! TREE_READONLY (exp)));
3559 /* If we reach here, it is safe. */
3563 /* Subroutine of expand_expr: return nonzero iff EXP is an
3564 expression whose type is statically determinable. */
3570 if (TREE_CODE (exp) == PARM_DECL
3571 || TREE_CODE (exp) == VAR_DECL
3572 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3573 || TREE_CODE (exp) == COMPONENT_REF
3574 || TREE_CODE (exp) == ARRAY_REF)
3579 /* expand_expr: generate code for computing expression EXP.
3580 An rtx for the computed value is returned. The value is never null.
3581 In the case of a void EXP, const0_rtx is returned.
3583 The value may be stored in TARGET if TARGET is nonzero.
3584 TARGET is just a suggestion; callers must assume that
3585 the rtx returned may not be the same as TARGET.
3587 If TARGET is CONST0_RTX, it means that the value will be ignored.
3589 If TMODE is not VOIDmode, it suggests generating the
3590 result in mode TMODE. But this is done only when convenient.
3591 Otherwise, TMODE is ignored and the value generated in its natural mode.
3592 TMODE is just a suggestion; callers must assume that
3593 the rtx returned may not have mode TMODE.
3595 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3596 with a constant address even if that address is not normally legitimate.
3597 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3599 If MODIFIER is EXPAND_SUM then when EXP is an addition
3600 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3601 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3602 products as above, or REG or MEM, or constant.
3603 Ordinarily in such cases we would output mul or add instructions
3604 and then return a pseudo reg containing the sum.
3606 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3607 it also marks a label as absolutely required (it can't be dead).
3608 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3609 This is used for outputting expressions used in initializers. */
3612 expand_expr (exp, target, tmode, modifier)
3615 enum machine_mode tmode;
3616 enum expand_modifier modifier;
3618 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3619 This is static so it will be accessible to our recursive callees. */
3620 static tree placeholder_list = 0;
3621 register rtx op0, op1, temp;
3622 tree type = TREE_TYPE (exp);
3623 int unsignedp = TREE_UNSIGNED (type);
3624 register enum machine_mode mode = TYPE_MODE (type);
3625 register enum tree_code code = TREE_CODE (exp);
3627 /* Use subtarget as the target for operand 0 of a binary operation. */
3628 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3629 rtx original_target = target;
3630 /* Maybe defer this until sure not doing bytecode? */
3631 int ignore = (target == const0_rtx
3632 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3633 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3634 || code == COND_EXPR)
3635 && TREE_CODE (type) == VOID_TYPE));
3639 if (output_bytecode)
3641 bc_expand_expr (exp);
3645 /* Don't use hard regs as subtargets, because the combiner
3646 can only handle pseudo regs. */
3647 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3649 /* Avoid subtargets inside loops,
3650 since they hide some invariant expressions. */
3651 if (preserve_subexpressions_p ())
3654 /* If we are going to ignore this result, we need only do something
3655 if there is a side-effect somewhere in the expression. If there
3656 is, short-circuit the most common cases here. Note that we must
3657 not call expand_expr with anything but const0_rtx in case this
3658 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3662 if (! TREE_SIDE_EFFECTS (exp))
3665 /* Ensure we reference a volatile object even if value is ignored. */
3666 if (TREE_THIS_VOLATILE (exp)
3667 && TREE_CODE (exp) != FUNCTION_DECL
3668 && mode != VOIDmode && mode != BLKmode)
3670 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3671 if (GET_CODE (temp) == MEM)
3672 temp = copy_to_reg (temp);
3676 if (TREE_CODE_CLASS (code) == '1')
3677 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3678 VOIDmode, modifier);
3679 else if (TREE_CODE_CLASS (code) == '2'
3680 || TREE_CODE_CLASS (code) == '<')
3682 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3683 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3686 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3687 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3688 /* If the second operand has no side effects, just evaluate
3690 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3691 VOIDmode, modifier);
3696 /* If will do cse, generate all results into pseudo registers
3697 since 1) that allows cse to find more things
3698 and 2) otherwise cse could produce an insn the machine
3701 if (! cse_not_expected && mode != BLKmode && target
3702 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3709 tree function = decl_function_context (exp);
3710 /* Handle using a label in a containing function. */
3711 if (function != current_function_decl && function != 0)
3713 struct function *p = find_function_data (function);
3714 /* Allocate in the memory associated with the function
3715 that the label is in. */
3716 push_obstacks (p->function_obstack,
3717 p->function_maybepermanent_obstack);
3719 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3720 label_rtx (exp), p->forced_labels);
3723 else if (modifier == EXPAND_INITIALIZER)
3724 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3725 label_rtx (exp), forced_labels);
3726 temp = gen_rtx (MEM, FUNCTION_MODE,
3727 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3728 if (function != current_function_decl && function != 0)
3729 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3734 if (DECL_RTL (exp) == 0)
3736 error_with_decl (exp, "prior parameter's size depends on `%s'");
3737 return CONST0_RTX (mode);
3741 /* If a static var's type was incomplete when the decl was written,
3742 but the type is complete now, lay out the decl now. */
3743 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3744 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3746 push_obstacks_nochange ();
3747 end_temporary_allocation ();
3748 layout_decl (exp, 0);
3749 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3754 if (DECL_RTL (exp) == 0)
3756 /* Ensure variable marked as used even if it doesn't go through
3757 a parser. If it hasn't been used yet, write out an external
3759 if (! TREE_USED (exp))
3761 assemble_external (exp);
3762 TREE_USED (exp) = 1;
3765 /* Handle variables inherited from containing functions. */
3766 context = decl_function_context (exp);
3768 /* We treat inline_function_decl as an alias for the current function
3769 because that is the inline function whose vars, types, etc.
3770 are being merged into the current function.
3771 See expand_inline_function. */
3772 if (context != 0 && context != current_function_decl
3773 && context != inline_function_decl
3774 /* If var is static, we don't need a static chain to access it. */
3775 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3776 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3780 /* Mark as non-local and addressable. */
3781 DECL_NONLOCAL (exp) = 1;
3782 mark_addressable (exp);
3783 if (GET_CODE (DECL_RTL (exp)) != MEM)
3785 addr = XEXP (DECL_RTL (exp), 0);
3786 if (GET_CODE (addr) == MEM)
3787 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3789 addr = fix_lexical_addr (addr, exp);
3790 return change_address (DECL_RTL (exp), mode, addr);
3793 /* This is the case of an array whose size is to be determined
3794 from its initializer, while the initializer is still being parsed.
3796 if (GET_CODE (DECL_RTL (exp)) == MEM
3797 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3798 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3799 XEXP (DECL_RTL (exp), 0));
3800 if (GET_CODE (DECL_RTL (exp)) == MEM
3801 && modifier != EXPAND_CONST_ADDRESS
3802 && modifier != EXPAND_SUM
3803 && modifier != EXPAND_INITIALIZER)
3805 /* DECL_RTL probably contains a constant address.
3806 On RISC machines where a constant address isn't valid,
3807 make some insns to get that address into a register. */
3808 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3810 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3811 return change_address (DECL_RTL (exp), VOIDmode,
3812 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3815 /* If the mode of DECL_RTL does not match that of the decl, it
3816 must be a promoted value. We return a SUBREG of the wanted mode,
3817 but mark it so that we know that it was already extended. */
3819 if (GET_CODE (DECL_RTL (exp)) == REG
3820 && GET_MODE (DECL_RTL (exp)) != mode)
3822 enum machine_mode decl_mode = DECL_MODE (exp);
3824 /* Get the signedness used for this variable. Ensure we get the
3825 same mode we got when the variable was declared. */
3827 PROMOTE_MODE (decl_mode, unsignedp, type);
3829 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3832 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3833 SUBREG_PROMOTED_VAR_P (temp) = 1;
3834 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3838 return DECL_RTL (exp);
3841 return immed_double_const (TREE_INT_CST_LOW (exp),
3842 TREE_INT_CST_HIGH (exp),
3846 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3849 /* If optimized, generate immediate CONST_DOUBLE
3850 which will be turned into memory by reload if necessary.
3852 We used to force a register so that loop.c could see it. But
3853 this does not allow gen_* patterns to perform optimizations with
3854 the constants. It also produces two insns in cases like "x = 1.0;".
3855 On most machines, floating-point constants are not permitted in
3856 many insns, so we'd end up copying it to a register in any case.
3858 Now, we do the copying in expand_binop, if appropriate. */
3859 return immed_real_const (exp);
3863 if (! TREE_CST_RTL (exp))
3864 output_constant_def (exp);
3866 /* TREE_CST_RTL probably contains a constant address.
3867 On RISC machines where a constant address isn't valid,
3868 make some insns to get that address into a register. */
3869 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3870 && modifier != EXPAND_CONST_ADDRESS
3871 && modifier != EXPAND_INITIALIZER
3872 && modifier != EXPAND_SUM
3873 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3874 return change_address (TREE_CST_RTL (exp), VOIDmode,
3875 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3876 return TREE_CST_RTL (exp);
3879 context = decl_function_context (exp);
3880 /* We treat inline_function_decl as an alias for the current function
3881 because that is the inline function whose vars, types, etc.
3882 are being merged into the current function.
3883 See expand_inline_function. */
3884 if (context == current_function_decl || context == inline_function_decl)
3887 /* If this is non-local, handle it. */
3890 temp = SAVE_EXPR_RTL (exp);
3891 if (temp && GET_CODE (temp) == REG)
3893 put_var_into_stack (exp);
3894 temp = SAVE_EXPR_RTL (exp);
3896 if (temp == 0 || GET_CODE (temp) != MEM)
3898 return change_address (temp, mode,
3899 fix_lexical_addr (XEXP (temp, 0), exp));
3901 if (SAVE_EXPR_RTL (exp) == 0)
3903 if (mode == BLKmode)
3906 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3907 MEM_IN_STRUCT_P (temp)
3908 = (TREE_CODE (type) == RECORD_TYPE
3909 || TREE_CODE (type) == UNION_TYPE
3910 || TREE_CODE (type) == QUAL_UNION_TYPE
3911 || TREE_CODE (type) == ARRAY_TYPE);
3915 enum machine_mode var_mode = mode;
3917 if (TREE_CODE (type) == INTEGER_TYPE
3918 || TREE_CODE (type) == ENUMERAL_TYPE
3919 || TREE_CODE (type) == BOOLEAN_TYPE
3920 || TREE_CODE (type) == CHAR_TYPE
3921 || TREE_CODE (type) == REAL_TYPE
3922 || TREE_CODE (type) == POINTER_TYPE
3923 || TREE_CODE (type) == OFFSET_TYPE)
3925 PROMOTE_MODE (var_mode, unsignedp, type);
3928 temp = gen_reg_rtx (var_mode);
3931 SAVE_EXPR_RTL (exp) = temp;
3932 if (!optimize && GET_CODE (temp) == REG)
3933 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3936 /* If the mode of TEMP does not match that of the expression, it
3937 must be a promoted value. We pass store_expr a SUBREG of the
3938 wanted mode but mark it so that we know that it was already
3939 extended. Note that `unsignedp' was modified above in
3942 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3944 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3945 SUBREG_PROMOTED_VAR_P (temp) = 1;
3946 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3949 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3952 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3953 must be a promoted value. We return a SUBREG of the wanted mode,
3954 but mark it so that we know that it was already extended. */
3956 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3957 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3959 enum machine_mode var_mode = mode;
3961 if (TREE_CODE (type) == INTEGER_TYPE
3962 || TREE_CODE (type) == ENUMERAL_TYPE
3963 || TREE_CODE (type) == BOOLEAN_TYPE
3964 || TREE_CODE (type) == CHAR_TYPE
3965 || TREE_CODE (type) == REAL_TYPE
3966 || TREE_CODE (type) == POINTER_TYPE
3967 || TREE_CODE (type) == OFFSET_TYPE)
3969 PROMOTE_MODE (var_mode, unsignedp, type);
3972 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3973 SUBREG_PROMOTED_VAR_P (temp) = 1;
3974 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3978 return SAVE_EXPR_RTL (exp);
3980 case PLACEHOLDER_EXPR:
3981 /* If there is an object on the head of the placeholder list,
3982 see if some object in its references is of type TYPE. For
3983 further information, see tree.def. */
3984 if (placeholder_list)
3987 tree old_list = placeholder_list;
3989 for (object = TREE_PURPOSE (placeholder_list);
3990 TREE_TYPE (object) != type
3991 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
3992 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3993 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3994 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
3995 object = TREE_OPERAND (object, 0))
3998 if (object && TREE_TYPE (object) == type)
4000 /* Expand this object skipping the list entries before
4001 it was found in case it is also a PLACEHOLDER_EXPR.
4002 In that case, we want to translate it using subsequent
4004 placeholder_list = TREE_CHAIN (placeholder_list);
4005 temp = expand_expr (object, original_target, tmode, modifier);
4006 placeholder_list = old_list;
4011 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
4014 case WITH_RECORD_EXPR:
4015 /* Put the object on the placeholder list, expand our first operand,
4016 and pop the list. */
4017 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4019 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4021 placeholder_list = TREE_CHAIN (placeholder_list);
4025 expand_exit_loop_if_false (NULL_PTR,
4026 invert_truthvalue (TREE_OPERAND (exp, 0)));
4031 expand_start_loop (1);
4032 expand_expr_stmt (TREE_OPERAND (exp, 0));
4040 tree vars = TREE_OPERAND (exp, 0);
4041 int vars_need_expansion = 0;
4043 /* Need to open a binding contour here because
4044 if there are any cleanups they must be contained here. */
4045 expand_start_bindings (0);
4047 /* Mark the corresponding BLOCK for output in its proper place. */
4048 if (TREE_OPERAND (exp, 2) != 0
4049 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4050 insert_block (TREE_OPERAND (exp, 2));
4052 /* If VARS have not yet been expanded, expand them now. */
4055 if (DECL_RTL (vars) == 0)
4057 vars_need_expansion = 1;
4060 expand_decl_init (vars);
4061 vars = TREE_CHAIN (vars);
4064 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4066 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4072 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4074 emit_insns (RTL_EXPR_SEQUENCE (exp));
4075 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4076 free_temps_for_rtl_expr (exp);
4077 return RTL_EXPR_RTL (exp);
4080 /* If we don't need the result, just ensure we evaluate any
4085 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4086 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4089 /* All elts simple constants => refer to a constant in memory. But
4090 if this is a non-BLKmode mode, let it store a field at a time
4091 since that should make a CONST_INT or CONST_DOUBLE when we
4092 fold. If we are making an initializer and all operands are
4093 constant, put it in memory as well. */
4094 else if ((TREE_STATIC (exp)
4095 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4096 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4098 rtx constructor = output_constant_def (exp);
4099 if (modifier != EXPAND_CONST_ADDRESS
4100 && modifier != EXPAND_INITIALIZER
4101 && modifier != EXPAND_SUM
4102 && !memory_address_p (GET_MODE (constructor),
4103 XEXP (constructor, 0)))
4104 constructor = change_address (constructor, VOIDmode,
4105 XEXP (constructor, 0));
4111 if (target == 0 || ! safe_from_p (target, exp))
4113 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4114 target = gen_reg_rtx (mode);
4117 enum tree_code c = TREE_CODE (type);
4119 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4120 if (c == RECORD_TYPE || c == UNION_TYPE
4121 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
4122 MEM_IN_STRUCT_P (target) = 1;
4125 store_constructor (exp, target);
4131 tree exp1 = TREE_OPERAND (exp, 0);
4134 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4135 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4136 This code has the same general effect as simply doing
4137 expand_expr on the save expr, except that the expression PTR
4138 is computed for use as a memory address. This means different
4139 code, suitable for indexing, may be generated. */
4140 if (TREE_CODE (exp1) == SAVE_EXPR
4141 && SAVE_EXPR_RTL (exp1) == 0
4142 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4143 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4144 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4146 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4147 VOIDmode, EXPAND_SUM);
4148 op0 = memory_address (mode, temp);
4149 op0 = copy_all_regs (op0);
4150 SAVE_EXPR_RTL (exp1) = op0;
4154 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4155 op0 = memory_address (mode, op0);
4158 temp = gen_rtx (MEM, mode, op0);
4159 /* If address was computed by addition,
4160 mark this as an element of an aggregate. */
4161 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4162 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4163 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4164 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4165 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4166 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4167 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
4168 || (TREE_CODE (exp1) == ADDR_EXPR
4169 && (exp2 = TREE_OPERAND (exp1, 0))
4170 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4171 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4172 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4173 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
4174 MEM_IN_STRUCT_P (temp) = 1;
4175 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4176 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4177 a location is accessed through a pointer to const does not mean
4178 that the value there can never change. */
4179 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4185 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4189 tree array = TREE_OPERAND (exp, 0);
4190 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4191 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4192 tree index = TREE_OPERAND (exp, 1);
4193 tree index_type = TREE_TYPE (index);
4196 if (TREE_CODE (low_bound) != INTEGER_CST
4197 && contains_placeholder_p (low_bound))
4198 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4200 /* Optimize the special-case of a zero lower bound.
4202 We convert the low_bound to sizetype to avoid some problems
4203 with constant folding. (E.g. suppose the lower bound is 1,
4204 and its mode is QI. Without the conversion, (ARRAY
4205 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4206 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4208 But sizetype isn't quite right either (especially if
4209 the lowbound is negative). FIXME */
4211 if (! integer_zerop (low_bound))
4212 index = fold (build (MINUS_EXPR, index_type, index,
4213 convert (sizetype, low_bound)));
4215 if (TREE_CODE (index) != INTEGER_CST
4216 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4218 /* Nonconstant array index or nonconstant element size.
4219 Generate the tree for *(&array+index) and expand that,
4220 except do it in a language-independent way
4221 and don't complain about non-lvalue arrays.
4222 `mark_addressable' should already have been called
4223 for any array for which this case will be reached. */
4225 /* Don't forget the const or volatile flag from the array
4227 tree variant_type = build_type_variant (type,
4228 TREE_READONLY (exp),
4229 TREE_THIS_VOLATILE (exp));
4230 tree array_adr = build1 (ADDR_EXPR,
4231 build_pointer_type (variant_type), array);
4233 tree size = size_in_bytes (type);
4235 /* Convert the integer argument to a type the same size as a
4236 pointer so the multiply won't overflow spuriously. */
4237 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4238 index = convert (type_for_size (POINTER_SIZE, 0), index);
4240 if (TREE_CODE (size) != INTEGER_CST
4241 && contains_placeholder_p (size))
4242 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4244 /* Don't think the address has side effects
4245 just because the array does.
4246 (In some cases the address might have side effects,
4247 and we fail to record that fact here. However, it should not
4248 matter, since expand_expr should not care.) */
4249 TREE_SIDE_EFFECTS (array_adr) = 0;
4251 elt = build1 (INDIRECT_REF, type,
4252 fold (build (PLUS_EXPR,
4253 TYPE_POINTER_TO (variant_type),
4255 fold (build (MULT_EXPR,
4256 TYPE_POINTER_TO (variant_type),
4259 /* Volatility, etc., of new expression is same as old
4261 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4262 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4263 TREE_READONLY (elt) = TREE_READONLY (exp);
4265 return expand_expr (elt, target, tmode, modifier);
4268 /* Fold an expression like: "foo"[2].
4269 This is not done in fold so it won't happen inside &. */
4271 if (TREE_CODE (array) == STRING_CST
4272 && TREE_CODE (index) == INTEGER_CST
4273 && !TREE_INT_CST_HIGH (index)
4274 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
4276 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
4278 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
4279 TREE_TYPE (exp) = integer_type_node;
4280 return expand_expr (exp, target, tmode, modifier);
4282 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
4284 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
4285 TREE_TYPE (exp) = integer_type_node;
4286 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4288 target, tmode, modifier);
4292 /* If this is a constant index into a constant array,
4293 just get the value from the array. Handle both the cases when
4294 we have an explicit constructor and when our operand is a variable
4295 that was declared const. */
4297 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4299 if (TREE_CODE (index) == INTEGER_CST
4300 && TREE_INT_CST_HIGH (index) == 0)
4302 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4304 i = TREE_INT_CST_LOW (index);
4306 elem = TREE_CHAIN (elem);
4308 return expand_expr (fold (TREE_VALUE (elem)), target,
4313 else if (optimize >= 1
4314 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4315 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4316 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4318 if (TREE_CODE (index) == INTEGER_CST
4319 && TREE_INT_CST_HIGH (index) == 0)
4321 tree init = DECL_INITIAL (array);
4323 i = TREE_INT_CST_LOW (index);
4324 if (TREE_CODE (init) == CONSTRUCTOR)
4326 tree elem = CONSTRUCTOR_ELTS (init);
4329 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4330 elem = TREE_CHAIN (elem);
4332 return expand_expr (fold (TREE_VALUE (elem)), target,
4335 else if (TREE_CODE (init) == STRING_CST
4336 && i < TREE_STRING_LENGTH (init))
4338 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4339 return convert_to_mode (mode, temp, 0);
4345 /* Treat array-ref with constant index as a component-ref. */
4349 /* If the operand is a CONSTRUCTOR, we can just extract the
4350 appropriate field if it is present. */
4351 if (code != ARRAY_REF
4352 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4356 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4357 elt = TREE_CHAIN (elt))
4358 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4359 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4363 enum machine_mode mode1;
4368 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4369 &mode1, &unsignedp, &volatilep);
4372 /* If we got back the original object, something is wrong. Perhaps
4373 we are evaluating an expression too early. In any event, don't
4374 infinitely recurse. */
4378 /* In some cases, we will be offsetting OP0's address by a constant.
4379 So get it as a sum, if possible. If we will be using it
4380 directly in an insn, we validate it. */
4381 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4383 /* If this is a constant, put it into a register if it is a
4384 legitimate constant and memory if it isn't. */
4385 if (CONSTANT_P (op0))
4387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4388 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4389 op0 = force_reg (mode, op0);
4391 op0 = validize_mem (force_const_mem (mode, op0));
4394 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4397 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4399 if (GET_CODE (op0) != MEM)
4401 op0 = change_address (op0, VOIDmode,
4402 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4403 force_reg (Pmode, offset_rtx)));
4404 /* If we have a variable offset, the known alignment
4405 is only that of the innermost structure containing the field.
4406 (Actually, we could sometimes do better by using the
4407 size of an element of the innermost array, but no need.) */
4408 if (TREE_CODE (exp) == COMPONENT_REF
4409 || TREE_CODE (exp) == BIT_FIELD_REF)
4410 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4414 /* Don't forget about volatility even if this is a bitfield. */
4415 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4417 op0 = copy_rtx (op0);
4418 MEM_VOLATILE_P (op0) = 1;
4421 /* In cases where an aligned union has an unaligned object
4422 as a field, we might be extracting a BLKmode value from
4423 an integer-mode (e.g., SImode) object. Handle this case
4424 by doing the extract into an object as wide as the field
4425 (which we know to be the width of a basic mode), then
4426 storing into memory, and changing the mode to BLKmode. */
4427 if (mode1 == VOIDmode
4428 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4429 && modifier != EXPAND_CONST_ADDRESS
4430 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4431 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4432 /* If the field isn't aligned enough to fetch as a memref,
4433 fetch it as a bit field. */
4434 || (STRICT_ALIGNMENT
4435 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4436 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4438 enum machine_mode ext_mode = mode;
4440 if (ext_mode == BLKmode)
4441 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4443 if (ext_mode == BLKmode)
4446 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4447 unsignedp, target, ext_mode, ext_mode,
4449 int_size_in_bytes (TREE_TYPE (tem)));
4450 if (mode == BLKmode)
4452 rtx new = assign_stack_temp (ext_mode,
4453 bitsize / BITS_PER_UNIT, 0);
4455 emit_move_insn (new, op0);
4456 op0 = copy_rtx (new);
4457 PUT_MODE (op0, BLKmode);
4458 MEM_IN_STRUCT_P (op0) = 1;
4464 /* Get a reference to just this component. */
4465 if (modifier == EXPAND_CONST_ADDRESS
4466 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4467 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4468 (bitpos / BITS_PER_UNIT)));
4470 op0 = change_address (op0, mode1,
4471 plus_constant (XEXP (op0, 0),
4472 (bitpos / BITS_PER_UNIT)));
4473 MEM_IN_STRUCT_P (op0) = 1;
4474 MEM_VOLATILE_P (op0) |= volatilep;
4475 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4478 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4479 convert_move (target, op0, unsignedp);
4485 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4486 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4487 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4488 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4489 MEM_IN_STRUCT_P (temp) = 1;
4490 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4491 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4492 a location is accessed through a pointer to const does not mean
4493 that the value there can never change. */
4494 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4499 /* Intended for a reference to a buffer of a file-object in Pascal.
4500 But it's not certain that a special tree code will really be
4501 necessary for these. INDIRECT_REF might work for them. */
4505 /* IN_EXPR: Inlined pascal set IN expression.
4508 rlo = set_low - (set_low%bits_per_word);
4509 the_word = set [ (index - rlo)/bits_per_word ];
4510 bit_index = index % bits_per_word;
4511 bitmask = 1 << bit_index;
4512 return !!(the_word & bitmask); */
4514 preexpand_calls (exp);
4516 tree set = TREE_OPERAND (exp, 0);
4517 tree index = TREE_OPERAND (exp, 1);
4518 tree set_type = TREE_TYPE (set);
4520 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4521 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4527 rtx diff, quo, rem, addr, bit, result;
4528 rtx setval, setaddr;
4529 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4532 target = gen_reg_rtx (mode);
4534 /* If domain is empty, answer is no. */
4535 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4538 index_val = expand_expr (index, 0, VOIDmode, 0);
4539 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4540 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4541 setval = expand_expr (set, 0, VOIDmode, 0);
4542 setaddr = XEXP (setval, 0);
4544 /* Compare index against bounds, if they are constant. */
4545 if (GET_CODE (index_val) == CONST_INT
4546 && GET_CODE (lo_r) == CONST_INT
4547 && INTVAL (index_val) < INTVAL (lo_r))
4550 if (GET_CODE (index_val) == CONST_INT
4551 && GET_CODE (hi_r) == CONST_INT
4552 && INTVAL (hi_r) < INTVAL (index_val))
4555 /* If we get here, we have to generate the code for both cases
4556 (in range and out of range). */
4558 op0 = gen_label_rtx ();
4559 op1 = gen_label_rtx ();
4561 if (! (GET_CODE (index_val) == CONST_INT
4562 && GET_CODE (lo_r) == CONST_INT))
4564 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4565 GET_MODE (index_val), 0, 0);
4566 emit_jump_insn (gen_blt (op1));
4569 if (! (GET_CODE (index_val) == CONST_INT
4570 && GET_CODE (hi_r) == CONST_INT))
4572 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4573 GET_MODE (index_val), 0, 0);
4574 emit_jump_insn (gen_bgt (op1));
4577 /* Calculate the element number of bit zero in the first word
4579 if (GET_CODE (lo_r) == CONST_INT)
4580 rlow = GEN_INT (INTVAL (lo_r)
4581 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4583 rlow = expand_binop (index_mode, and_optab, lo_r,
4584 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4585 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4587 diff = expand_binop (index_mode, sub_optab,
4588 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4590 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4591 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4592 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4593 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4594 addr = memory_address (byte_mode,
4595 expand_binop (index_mode, add_optab,
4596 diff, setaddr, NULL_RTX, 0,
4598 /* Extract the bit we want to examine */
4599 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4600 gen_rtx (MEM, byte_mode, addr),
4601 make_tree (TREE_TYPE (index), rem),
4603 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4604 GET_MODE (target) == byte_mode ? target : 0,
4605 1, OPTAB_LIB_WIDEN);
4607 if (result != target)
4608 convert_move (target, result, 1);
4610 /* Output the code to handle the out-of-range case. */
4613 emit_move_insn (target, const0_rtx);
4618 case WITH_CLEANUP_EXPR:
4619 if (RTL_EXPR_RTL (exp) == 0)
4622 = expand_expr (TREE_OPERAND (exp, 0),
4623 target ? target : const0_rtx,
4626 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4627 /* That's it for this cleanup. */
4628 TREE_OPERAND (exp, 2) = 0;
4630 return RTL_EXPR_RTL (exp);
4633 /* Check for a built-in function. */
4634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
4642 return expand_call (exp, target, ignore);
4644 case NON_LVALUE_EXPR:
4647 case REFERENCE_EXPR:
4648 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4649 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4650 if (TREE_CODE (type) == UNION_TYPE)
4652 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4655 if (mode == BLKmode)
4657 if (TYPE_SIZE (type) == 0
4658 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4660 target = assign_stack_temp (BLKmode,
4661 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4662 + BITS_PER_UNIT - 1)
4663 / BITS_PER_UNIT, 0);
4666 target = gen_reg_rtx (mode);
4668 if (GET_CODE (target) == MEM)
4669 /* Store data into beginning of memory target. */
4670 store_expr (TREE_OPERAND (exp, 0),
4671 change_address (target, TYPE_MODE (valtype), 0), 0);
4673 else if (GET_CODE (target) == REG)
4674 /* Store this field into a union of the proper type. */
4675 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4676 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4678 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4682 /* Return the entire union. */
4685 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4686 if (GET_MODE (op0) == mode)
4688 /* If arg is a constant integer being extended from a narrower mode,
4689 we must really truncate to get the extended bits right. Otherwise
4690 (unsigned long) (unsigned char) ("\377"[0])
4691 would come out as ffffffff. */
4692 if (GET_MODE (op0) == VOIDmode
4693 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4694 < GET_MODE_BITSIZE (mode)))
4696 /* MODE must be narrower than HOST_BITS_PER_INT. */
4697 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4699 if (width < HOST_BITS_PER_WIDE_INT)
4701 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4702 : CONST_DOUBLE_LOW (op0));
4703 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4704 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4705 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4707 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4709 op0 = GEN_INT (val);
4713 op0 = (simplify_unary_operation
4714 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4715 ? ZERO_EXTEND : SIGN_EXTEND),
4717 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4722 if (GET_MODE (op0) == VOIDmode)
4724 if (modifier == EXPAND_INITIALIZER)
4725 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4726 if (flag_force_mem && GET_CODE (op0) == MEM)
4727 op0 = copy_to_reg (op0);
4730 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4732 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4736 /* We come here from MINUS_EXPR when the second operand is a constant. */
4738 this_optab = add_optab;
4740 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4741 something else, make sure we add the register to the constant and
4742 then to the other thing. This case can occur during strength
4743 reduction and doing it this way will produce better code if the
4744 frame pointer or argument pointer is eliminated.
4746 fold-const.c will ensure that the constant is always in the inner
4747 PLUS_EXPR, so the only case we need to do anything about is if
4748 sp, ap, or fp is our second argument, in which case we must swap
4749 the innermost first argument and our second argument. */
4751 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4753 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4754 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4755 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4756 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4758 tree t = TREE_OPERAND (exp, 1);
4760 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4761 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4764 /* If the result is to be Pmode and we are adding an integer to
4765 something, we might be forming a constant. So try to use
4766 plus_constant. If it produces a sum and we can't accept it,
4767 use force_operand. This allows P = &ARR[const] to generate
4768 efficient code on machines where a SYMBOL_REF is not a valid
4771 If this is an EXPAND_SUM call, always return the sum. */
4772 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4775 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4776 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4777 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4779 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4781 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4782 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4783 op1 = force_operand (op1, target);
4787 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4789 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4791 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4793 if (! CONSTANT_P (op0))
4795 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4796 VOIDmode, modifier);
4797 /* Don't go to both_summands if modifier
4798 says it's not right to return a PLUS. */
4799 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4803 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4804 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4805 op0 = force_operand (op0, target);
4810 /* No sense saving up arithmetic to be done
4811 if it's all in the wrong mode to form part of an address.
4812 And force_operand won't know whether to sign-extend or
4814 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4818 preexpand_calls (exp);
4819 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4822 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4823 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4826 /* Make sure any term that's a sum with a constant comes last. */
4827 if (GET_CODE (op0) == PLUS
4828 && CONSTANT_P (XEXP (op0, 1)))
4834 /* If adding to a sum including a constant,
4835 associate it to put the constant outside. */
4836 if (GET_CODE (op1) == PLUS
4837 && CONSTANT_P (XEXP (op1, 1)))
4839 rtx constant_term = const0_rtx;
4841 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4844 /* Ensure that MULT comes first if there is one. */
4845 else if (GET_CODE (op0) == MULT)
4846 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4848 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4850 /* Let's also eliminate constants from op0 if possible. */
4851 op0 = eliminate_constant_term (op0, &constant_term);
4853 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4854 their sum should be a constant. Form it into OP1, since the
4855 result we want will then be OP0 + OP1. */
4857 temp = simplify_binary_operation (PLUS, mode, constant_term,
4862 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4865 /* Put a constant term last and put a multiplication first. */
4866 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4867 temp = op1, op1 = op0, op0 = temp;
4869 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4870 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4873 /* Handle difference of two symbolic constants,
4874 for the sake of an initializer. */
4875 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4876 && really_constant_p (TREE_OPERAND (exp, 0))
4877 && really_constant_p (TREE_OPERAND (exp, 1)))
4879 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4880 VOIDmode, modifier);
4881 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4882 VOIDmode, modifier);
4883 return gen_rtx (MINUS, mode, op0, op1);
4885 /* Convert A - const to A + (-const). */
4886 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4888 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4889 fold (build1 (NEGATE_EXPR, type,
4890 TREE_OPERAND (exp, 1))));
4893 this_optab = sub_optab;
4897 preexpand_calls (exp);
4898 /* If first operand is constant, swap them.
4899 Thus the following special case checks need only
4900 check the second operand. */
4901 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4903 register tree t1 = TREE_OPERAND (exp, 0);
4904 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4905 TREE_OPERAND (exp, 1) = t1;
4908 /* Attempt to return something suitable for generating an
4909 indexed address, for machines that support that. */
4911 if (modifier == EXPAND_SUM && mode == Pmode
4912 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4913 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4915 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4917 /* Apply distributive law if OP0 is x+c. */
4918 if (GET_CODE (op0) == PLUS
4919 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4920 return gen_rtx (PLUS, mode,
4921 gen_rtx (MULT, mode, XEXP (op0, 0),
4922 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4923 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4924 * INTVAL (XEXP (op0, 1))));
4926 if (GET_CODE (op0) != REG)
4927 op0 = force_operand (op0, NULL_RTX);
4928 if (GET_CODE (op0) != REG)
4929 op0 = copy_to_mode_reg (mode, op0);
4931 return gen_rtx (MULT, mode, op0,
4932 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4935 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4938 /* Check for multiplying things that have been extended
4939 from a narrower type. If this machine supports multiplying
4940 in that narrower type with a result in the desired type,
4941 do it that way, and avoid the explicit type-conversion. */
4942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4943 && TREE_CODE (type) == INTEGER_TYPE
4944 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4945 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4946 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4947 && int_fits_type_p (TREE_OPERAND (exp, 1),
4948 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4949 /* Don't use a widening multiply if a shift will do. */
4950 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4951 > HOST_BITS_PER_WIDE_INT)
4952 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4954 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4955 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4957 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4958 /* If both operands are extended, they must either both
4959 be zero-extended or both be sign-extended. */
4960 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4962 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4964 enum machine_mode innermode
4965 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4966 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4967 ? umul_widen_optab : smul_widen_optab);
4968 if (mode == GET_MODE_WIDER_MODE (innermode)
4969 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4971 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4972 NULL_RTX, VOIDmode, 0);
4973 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4977 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4978 NULL_RTX, VOIDmode, 0);
4982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4984 return expand_mult (mode, op0, op1, target, unsignedp);
4986 case TRUNC_DIV_EXPR:
4987 case FLOOR_DIV_EXPR:
4989 case ROUND_DIV_EXPR:
4990 case EXACT_DIV_EXPR:
4991 preexpand_calls (exp);
4992 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4994 /* Possible optimization: compute the dividend with EXPAND_SUM
4995 then if the divisor is constant can optimize the case
4996 where some terms of the dividend have coeffs divisible by it. */
4997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4998 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4999 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5002 this_optab = flodiv_optab;
5005 case TRUNC_MOD_EXPR:
5006 case FLOOR_MOD_EXPR:
5008 case ROUND_MOD_EXPR:
5009 preexpand_calls (exp);
5010 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5013 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5014 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5016 case FIX_ROUND_EXPR:
5017 case FIX_FLOOR_EXPR:
5019 abort (); /* Not used for C. */
5021 case FIX_TRUNC_EXPR:
5022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5024 target = gen_reg_rtx (mode);
5025 expand_fix (target, op0, unsignedp);
5029 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5031 target = gen_reg_rtx (mode);
5032 /* expand_float can't figure out what to do if FROM has VOIDmode.
5033 So give it the correct mode. With -O, cse will optimize this. */
5034 if (GET_MODE (op0) == VOIDmode)
5035 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5037 expand_float (target, op0,
5038 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5042 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5043 temp = expand_unop (mode, neg_optab, op0, target, 0);
5049 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5051 /* Handle complex values specially. */
5053 enum machine_mode opmode
5054 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5056 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5057 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5058 return expand_complex_abs (opmode, op0, target, unsignedp);
5061 /* Unsigned abs is simply the operand. Testing here means we don't
5062 risk generating incorrect code below. */
5063 if (TREE_UNSIGNED (type))
5066 /* First try to do it with a special abs instruction. */
5067 temp = expand_unop (mode, abs_optab, op0, target, 0);
5071 /* If this machine has expensive jumps, we can do integer absolute
5072 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5073 where W is the width of MODE. */
5075 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5077 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5078 size_int (GET_MODE_BITSIZE (mode) - 1),
5081 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5084 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5091 /* If that does not win, use conditional jump and negate. */
5092 target = original_target;
5093 temp = gen_label_rtx ();
5094 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5095 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5096 || (GET_CODE (target) == REG
5097 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5098 target = gen_reg_rtx (mode);
5099 emit_move_insn (target, op0);
5100 emit_cmp_insn (target,
5101 expand_expr (convert (type, integer_zero_node),
5102 NULL_RTX, VOIDmode, 0),
5103 GE, NULL_RTX, mode, 0, 0);
5105 emit_jump_insn (gen_bge (temp));
5106 op0 = expand_unop (mode, neg_optab, target, target, 0);
5108 emit_move_insn (target, op0);
5115 target = original_target;
5116 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5117 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5118 || (GET_CODE (target) == REG
5119 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5120 target = gen_reg_rtx (mode);
5121 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5122 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5124 /* First try to do it with a special MIN or MAX instruction.
5125 If that does not win, use a conditional jump to select the proper
5127 this_optab = (TREE_UNSIGNED (type)
5128 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5129 : (code == MIN_EXPR ? smin_optab : smax_optab));
5131 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5137 emit_move_insn (target, op0);
5138 op0 = gen_label_rtx ();
5139 /* If this mode is an integer too wide to compare properly,
5140 compare word by word. Rely on cse to optimize constant cases. */
5141 if (GET_MODE_CLASS (mode) == MODE_INT
5142 && !can_compare_p (mode))
5144 if (code == MAX_EXPR)
5145 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
5147 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5148 emit_move_insn (target, op1);
5152 if (code == MAX_EXPR)
5153 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5154 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5155 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5157 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5158 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5159 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5160 if (temp == const0_rtx)
5161 emit_move_insn (target, op1);
5162 else if (temp != const_true_rtx)
5164 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5165 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5168 emit_move_insn (target, op1);
5174 /* ??? Can optimize when the operand of this is a bitwise operation,
5175 by using a different bitwise operation. */
5177 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5178 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5185 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5190 /* ??? Can optimize bitwise operations with one arg constant.
5191 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5192 and (a bitwise1 b) bitwise2 b (etc)
5193 but that is probably not worth while. */
5195 /* BIT_AND_EXPR is for bitwise anding.
5196 TRUTH_AND_EXPR is for anding two boolean values
5197 when we want in all cases to compute both of them.
5198 In general it is fastest to do TRUTH_AND_EXPR by
5199 computing both operands as actual zero-or-1 values
5200 and then bitwise anding. In cases where there cannot
5201 be any side effects, better code would be made by
5202 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5203 but the question is how to recognize those cases. */
5205 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5206    the operands.  If so, don't use our target.  */
5207 case TRUTH_AND_EXPR:
5208 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5211 this_optab = and_optab;
5214 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5216 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5219 this_optab = ior_optab;
5222 case TRUTH_XOR_EXPR:
5223 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5226 this_optab = xor_optab;
5233 preexpand_calls (exp);
5234 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5236 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5237 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5240 /* Could determine the answer when only additive constants differ.
5241 Also, the addition of one can be handled by changing the condition. */
5248 preexpand_calls (exp);
5249 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5252 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5253 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5255 && GET_CODE (original_target) == REG
5256 && (GET_MODE (original_target)
5257 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5259 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5260 if (temp != original_target)
5261 temp = copy_to_reg (temp);
5262 op1 = gen_label_rtx ();
5263 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5264 GET_MODE (temp), unsignedp, 0);
5265 emit_jump_insn (gen_beq (op1));
5266 emit_move_insn (temp, const1_rtx);
5270 /* If no set-flag instruction, must generate a conditional
5271 store into a temporary variable. Drop through
5272 and handle this like && and ||. */
5274 case TRUTH_ANDIF_EXPR:
5275 case TRUTH_ORIF_EXPR:
5277 && (target == 0 || ! safe_from_p (target, exp)
5278 /* Make sure we don't have a hard reg (such as function's return
5279 value) live across basic blocks, if not optimizing. */
5280 || (!optimize && GET_CODE (target) == REG
5281 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5282 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5285 emit_clr_insn (target);
5287 op1 = gen_label_rtx ();
5288 jumpifnot (exp, op1);
5291 emit_0_to_1_insn (target);
5294 return ignore ? const0_rtx : target;
5296 case TRUTH_NOT_EXPR:
5297 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5298 /* The parser is careful to generate TRUTH_NOT_EXPR
5299 only with operands that are always zero or one. */
5300 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5301 target, 1, OPTAB_LIB_WIDEN);
5307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5309 return expand_expr (TREE_OPERAND (exp, 1),
5310 (ignore ? const0_rtx : target),
5315 /* Note that COND_EXPRs whose type is a structure or union
5316 are required to be constructed to contain assignments of
5317 a temporary variable, so that we can evaluate them here
5318 for side effect only. If type is void, we must do likewise. */
5320 /* If an arm of the branch requires a cleanup,
5321 only that cleanup is performed. */
5324 tree binary_op = 0, unary_op = 0;
5325 tree old_cleanups = cleanups_this_call;
5326 cleanups_this_call = 0;
5328 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5329 convert it to our mode, if necessary. */
5330 if (integer_onep (TREE_OPERAND (exp, 1))
5331 && integer_zerop (TREE_OPERAND (exp, 2))
5332 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5336 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5341 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5342 if (GET_MODE (op0) == mode)
5345 target = gen_reg_rtx (mode);
5346 convert_move (target, op0, unsignedp);
5350 /* If we are not to produce a result, we have no target. Otherwise,
5351 if a target was specified use it; it will not be used as an
5352 intermediate target unless it is safe. If no target, use a
5357 else if (original_target
5358 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5359 temp = original_target;
5360 else if (mode == BLKmode)
5362 if (TYPE_SIZE (type) == 0
5363 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5366 temp = assign_stack_temp (BLKmode,
5367 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5368 + BITS_PER_UNIT - 1)
5369 / BITS_PER_UNIT, 0);
5370 MEM_IN_STRUCT_P (temp)
5371 = (TREE_CODE (type) == RECORD_TYPE
5372 || TREE_CODE (type) == UNION_TYPE
5373 || TREE_CODE (type) == QUAL_UNION_TYPE
5374 || TREE_CODE (type) == ARRAY_TYPE);
5377 temp = gen_reg_rtx (mode);
5379 /* Check for X ? A + B : A. If we have this, we can copy
5380 A to the output and conditionally add B. Similarly for unary
5381 operations. Don't do this if X has side-effects because
5382 those side effects might affect A or B and the "?" operation is
5383 a sequence point in ANSI. (We test for side effects later.) */
5385 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5386 && operand_equal_p (TREE_OPERAND (exp, 2),
5387 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5388 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5389 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5390 && operand_equal_p (TREE_OPERAND (exp, 1),
5391 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5392 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5393 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5394 && operand_equal_p (TREE_OPERAND (exp, 2),
5395 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5396 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5397 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5398 && operand_equal_p (TREE_OPERAND (exp, 1),
5399 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5400 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5402 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5403 operation, do this as A + (X != 0). Similarly for other simple
5404 binary operators. */
5405 if (temp && singleton && binary_op
5406 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5407 && (TREE_CODE (binary_op) == PLUS_EXPR
5408 || TREE_CODE (binary_op) == MINUS_EXPR
5409 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5410 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5411 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5412 && integer_onep (TREE_OPERAND (binary_op, 1))
5413 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5416 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5417 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5418 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5419 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5422 /* If we had X ? A : A + 1, do this as A + (X == 0).
5424 We have to invert the truth value here and then put it
5425 back later if do_store_flag fails. We cannot simply copy
5426 TREE_OPERAND (exp, 0) to another variable and modify that
5427 because invert_truthvalue can modify the tree pointed to
5429 if (singleton == TREE_OPERAND (exp, 1))
5430 TREE_OPERAND (exp, 0)
5431 = invert_truthvalue (TREE_OPERAND (exp, 0));
5433 result = do_store_flag (TREE_OPERAND (exp, 0),
5434 (safe_from_p (temp, singleton)
5436 mode, BRANCH_COST <= 1);
5440 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5441 return expand_binop (mode, boptab, op1, result, temp,
5442 unsignedp, OPTAB_LIB_WIDEN);
5444 else if (singleton == TREE_OPERAND (exp, 1))
5445 TREE_OPERAND (exp, 0)
5446 = invert_truthvalue (TREE_OPERAND (exp, 0));
5450 op0 = gen_label_rtx ();
5452 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5456 /* If the target conflicts with the other operand of the
5457 binary op, we can't use it. Also, we can't use the target
5458 if it is a hard register, because evaluating the condition
5459 might clobber it. */
5461 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5462 || (GET_CODE (temp) == REG
5463 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5464 temp = gen_reg_rtx (mode);
5465 store_expr (singleton, temp, 0);
5468 expand_expr (singleton,
5469 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5470 if (cleanups_this_call)
5472 sorry ("aggregate value in COND_EXPR");
5473 cleanups_this_call = 0;
5475 if (singleton == TREE_OPERAND (exp, 1))
5476 jumpif (TREE_OPERAND (exp, 0), op0);
5478 jumpifnot (TREE_OPERAND (exp, 0), op0);
5480 if (binary_op && temp == 0)
5481 /* Just touch the other operand. */
5482 expand_expr (TREE_OPERAND (binary_op, 1),
5483 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5485 store_expr (build (TREE_CODE (binary_op), type,
5486 make_tree (type, temp),
5487 TREE_OPERAND (binary_op, 1)),
5490 store_expr (build1 (TREE_CODE (unary_op), type,
5491 make_tree (type, temp)),
5496 /* This is now done in jump.c and is better done there because it
5497 produces shorter register lifetimes. */
5499 /* Check for both possibilities either constants or variables
5500 in registers (but not the same as the target!). If so, can
5501 save branches by assigning one, branching, and assigning the
5503 else if (temp && GET_MODE (temp) != BLKmode
5504 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5505 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5506 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5507 && DECL_RTL (TREE_OPERAND (exp, 1))
5508 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5509 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5510 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5511 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5512 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5513 && DECL_RTL (TREE_OPERAND (exp, 2))
5514 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5515 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5517 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5518 temp = gen_reg_rtx (mode);
5519 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5520 jumpifnot (TREE_OPERAND (exp, 0), op0);
5521 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5525 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5526 comparison operator. If we have one of these cases, set the
5527 output to A, branch on A (cse will merge these two references),
5528 then set the output to FOO. */
5530 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5531 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5532 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5533 TREE_OPERAND (exp, 1), 0)
5534 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5535 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5537 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5538 temp = gen_reg_rtx (mode);
5539 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5540 jumpif (TREE_OPERAND (exp, 0), op0);
5541 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5545 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5546 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5547 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5548 TREE_OPERAND (exp, 2), 0)
5549 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5550 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5552 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5553 temp = gen_reg_rtx (mode);
5554 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5555 jumpifnot (TREE_OPERAND (exp, 0), op0);
5556 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5561 op1 = gen_label_rtx ();
5562 jumpifnot (TREE_OPERAND (exp, 0), op0);
5564 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5566 expand_expr (TREE_OPERAND (exp, 1),
5567 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5568 if (cleanups_this_call)
5570 sorry ("aggregate value in COND_EXPR");
5571 cleanups_this_call = 0;
5575 emit_jump_insn (gen_jump (op1));
5579 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5581 expand_expr (TREE_OPERAND (exp, 2),
5582 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5585 if (cleanups_this_call)
5587 sorry ("aggregate value in COND_EXPR");
5588 cleanups_this_call = 0;
5594 cleanups_this_call = old_cleanups;
5600 /* Something needs to be initialized, but we didn't know
5601 where that thing was when building the tree. For example,
5602 it could be the return value of a function, or a parameter
5603 to a function which lays down in the stack, or a temporary
5604 variable which must be passed by reference.
5606 We guarantee that the expression will either be constructed
5607 or copied into our original target. */
5609 tree slot = TREE_OPERAND (exp, 0);
5612 if (TREE_CODE (slot) != VAR_DECL)
5617 if (DECL_RTL (slot) != 0)
5619 target = DECL_RTL (slot);
5620 /* If we have already expanded the slot, so don't do
5622 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5627 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5628 /* All temp slots at this level must not conflict. */
5629 preserve_temp_slots (target);
5630 DECL_RTL (slot) = target;
5633 /* We set IGNORE when we know that we're already
5634 doing this for a cleanup. */
5637 /* Since SLOT is not known to the called function
5638 to belong to its stack frame, we must build an explicit
5639 cleanup. This case occurs when we must build up a reference
5640 to pass the reference as an argument. In this case,
5641 it is very likely that such a reference need not be
5644 if (TREE_OPERAND (exp, 2) == 0)
5645 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5646 if (TREE_OPERAND (exp, 2))
5647 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5648 cleanups_this_call);
5653 /* This case does occur, when expanding a parameter which
5654 needs to be constructed on the stack. The target
5655 is the actual stack address that we want to initialize.
5656 The function we call will perform the cleanup in this case. */
5658 /* If we have already assigned it space, use that space,
5659 not target that we were passed in, as our target
5660 parameter is only a hint. */
5661 if (DECL_RTL (slot) != 0)
5663 target = DECL_RTL (slot);
5664 /* If we have already expanded the slot, so don't do
5666 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5670 DECL_RTL (slot) = target;
5673 exp1 = TREE_OPERAND (exp, 1);
5674 /* Mark it as expanded. */
5675 TREE_OPERAND (exp, 1) = NULL_TREE;
5677 return expand_expr (exp1, target, tmode, modifier);
5682 tree lhs = TREE_OPERAND (exp, 0);
5683 tree rhs = TREE_OPERAND (exp, 1);
5684 tree noncopied_parts = 0;
5685 tree lhs_type = TREE_TYPE (lhs);
5687 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5688 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5689 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5690 TYPE_NONCOPIED_PARTS (lhs_type));
5691 while (noncopied_parts != 0)
5693 expand_assignment (TREE_VALUE (noncopied_parts),
5694 TREE_PURPOSE (noncopied_parts), 0, 0);
5695 noncopied_parts = TREE_CHAIN (noncopied_parts);
5702 /* If lhs is complex, expand calls in rhs before computing it.
5703 That's so we don't compute a pointer and save it over a call.
5704 If lhs is simple, compute it first so we can give it as a
5705 target if the rhs is just a call. This avoids an extra temp and copy
5706 and that prevents a partial-subsumption which makes bad code.
5707 Actually we could treat component_ref's of vars like vars. */
5709 tree lhs = TREE_OPERAND (exp, 0);
5710 tree rhs = TREE_OPERAND (exp, 1);
5711 tree noncopied_parts = 0;
5712 tree lhs_type = TREE_TYPE (lhs);
5716 if (TREE_CODE (lhs) != VAR_DECL
5717 && TREE_CODE (lhs) != RESULT_DECL
5718 && TREE_CODE (lhs) != PARM_DECL)
5719 preexpand_calls (exp);
5721 /* Check for |= or &= of a bitfield of size one into another bitfield
5722 of size 1. In this case, (unless we need the result of the
5723 assignment) we can do this more efficiently with a
5724 test followed by an assignment, if necessary.
5726 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5727 things change so we do, this code should be enhanced to
5730 && TREE_CODE (lhs) == COMPONENT_REF
5731 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5732 || TREE_CODE (rhs) == BIT_AND_EXPR)
5733 && TREE_OPERAND (rhs, 0) == lhs
5734 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5735 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5736 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5738 rtx label = gen_label_rtx ();
5740 do_jump (TREE_OPERAND (rhs, 1),
5741 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5742 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5743 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5744 (TREE_CODE (rhs) == BIT_IOR_EXPR
5746 : integer_zero_node)),
5748 do_pending_stack_adjust ();
5753 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5754 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5755 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5756 TYPE_NONCOPIED_PARTS (lhs_type));
5758 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5759 while (noncopied_parts != 0)
5761 expand_assignment (TREE_PURPOSE (noncopied_parts),
5762 TREE_VALUE (noncopied_parts), 0, 0);
5763 noncopied_parts = TREE_CHAIN (noncopied_parts);
5768 case PREINCREMENT_EXPR:
5769 case PREDECREMENT_EXPR:
5770 return expand_increment (exp, 0);
5772 case POSTINCREMENT_EXPR:
5773 case POSTDECREMENT_EXPR:
5774 /* Faster to treat as pre-increment if result is not used. */
5775 return expand_increment (exp, ! ignore);
5778 /* Are we taking the address of a nested function? */
5779 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5780 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5782 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5783 op0 = force_operand (op0, target);
5787 /* We make sure to pass const0_rtx down if we came in with
5788 ignore set, to avoid doing the cleanups twice for something. */
5789 op0 = expand_expr (TREE_OPERAND (exp, 0),
5790 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5791 (modifier == EXPAND_INITIALIZER
5792 ? modifier : EXPAND_CONST_ADDRESS));
5794 /* We would like the object in memory. If it is a constant,
5795 we can have it be statically allocated into memory. For
5796 a non-constant (REG or SUBREG), we need to allocate some
5797 memory and store the value into it. */
5799 if (CONSTANT_P (op0))
5800 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5803 /* These cases happen in Fortran. Is that legitimate?
5804 Should Fortran work in another way?
5805 Do they happen in C? */
5806 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5807 || GET_CODE (op0) == CONCAT)
5809 /* If this object is in a register, it must be not
5811 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5812 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5814 = assign_stack_temp (inner_mode,
5815 int_size_in_bytes (inner_type), 1);
5817 emit_move_insn (memloc, op0);
5821 if (GET_CODE (op0) != MEM)
5824 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5825 return XEXP (op0, 0);
5826 op0 = force_operand (XEXP (op0, 0), target);
5828 if (flag_force_addr && GET_CODE (op0) != REG)
5829 return force_reg (Pmode, op0);
5832 case ENTRY_VALUE_EXPR:
5835 /* COMPLEX type for Extended Pascal & Fortran */
5838 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5842 /* Get the rtx code of the operands. */
5843 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5844 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5847 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5849 prev = get_last_insn ();
5851 /* Tell flow that the whole of the destination is being set. */
5852 if (GET_CODE (target) == REG)
5853 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5855 /* Move the real (op0) and imaginary (op1) parts to their location. */
5856 emit_move_insn (gen_realpart (mode, target), op0);
5857 emit_move_insn (gen_imagpart (mode, target), op1);
5859 /* Complex construction should appear as a single unit. */
5860 if (GET_CODE (target) != CONCAT)
5861 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5862 each with a separate pseudo as destination.
5863 It's not correct for flow to treat them as a unit. */
5870 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5871 return gen_realpart (mode, op0);
5874 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5875 return gen_imagpart (mode, op0);
5879 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5883 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5886 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5888 prev = get_last_insn ();
5890 /* Tell flow that the whole of the destination is being set. */
5891 if (GET_CODE (target) == REG)
5892 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5894 /* Store the realpart and the negated imagpart to target. */
5895 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5897 imag_t = gen_imagpart (mode, target);
5898 temp = expand_unop (mode, neg_optab,
5899 gen_imagpart (mode, op0), imag_t, 0);
5901 emit_move_insn (imag_t, temp);
5903 /* Conjugate should appear as a single unit */
5904 if (GET_CODE (target) != CONCAT)
5905 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5906 each with a separate pseudo as destination.
5907 It's not correct for flow to treat them as a unit. */
5914 op0 = CONST0_RTX (tmode);
5920 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5923 /* Here to do an ordinary binary operator, generating an instruction
5924 from the optab already placed in `this_optab'. */
5926 preexpand_calls (exp);
5927 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5929 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5932 temp = expand_binop (mode, this_optab, op0, op1, target,
5933 unsignedp, OPTAB_LIB_WIDEN);
5940 /* Emit bytecode to evaluate the given expression EXP to the stack. */
/* NOTE(review): this listing is elided -- the embedded numbers are the
   original file's line numbers and many intermediate lines (case labels,
   braces, returns) are missing.  The comments below describe only what the
   visible lines establish; case-label attributions are inferred from the
   operations performed and should be confirmed against the full source.  */
5942 bc_expand_expr (exp)
/* Locals: the tree code of EXP plus operator-table pointers and bytecode
   labels used by the big dispatch switch below.  */
5945 enum tree_code code;
5948 struct binary_operator *binoptab;
5949 struct unary_operator *unoptab;
5950 struct increment_operator *incroptab;
5951 struct bc_label *lab, *lab1;
5952 enum bytecode_opcode opcode;
5955 code = TREE_CODE (exp);
/* Presumably the PARM_DECL case: a parameter with no DECL_RTL means an
   earlier parameter's size depended on it -- report and bail.  Otherwise
   push the parameter's address, then load its value onto the stack.  */
5961 if (DECL_RTL (exp) == 0)
5963 error_with_decl (exp, "prior parameter's size depends on `%s'");
5967 bc_load_parmaddr (DECL_RTL (exp));
5968 bc_load_memory (TREE_TYPE (exp), exp);
/* Presumably VAR_DECL / FUNCTION_DECL handling: choose between an
   external address (BYTECODE_LABEL set), a local address, or -- for
   public decls with no RTL yet -- an address by assembler name.  */
5974 if (DECL_RTL (exp) == 0)
5978 if (BYTECODE_LABEL (DECL_RTL (exp)))
5979 bc_load_externaddr (DECL_RTL (exp));
5981 bc_load_localaddr (DECL_RTL (exp));
5983 if (TREE_PUBLIC (exp))
5984 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5985 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5987 bc_load_localaddr (DECL_RTL (exp));
5989 bc_load_memory (TREE_TYPE (exp), exp);
/* INTEGER_CST: emit a load-constant instruction selected by the mode of
   the constant (bit-field decls use a different mode lookup).  */
5994 #ifdef DEBUG_PRINT_CODE
5995 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5997 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
5999 : TYPE_MODE (TREE_TYPE (exp)))],
6000 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
/* REAL_CST: same idea for floating constants.  */
6006 #ifdef DEBUG_PRINT_CODE
6007 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6009 /* FIX THIS: find a better way to pass real_cst's. -bson */
6010 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6011 (double) TREE_REAL_CST (exp));
/* CALL_EXPR: */
6020 /* We build a call description vector describing the type of
6021 the return value and of the arguments; this call vector,
6022 together with a pointer to a location for the return value
6023 and the base of the argument list, is passed to the low
6024 level machine dependent call subroutine, which is responsible
6025 for putting the arguments wherever real functions expect
6026 them, as well as getting the return value back. */
6028 tree calldesc = 0, arg;
6032 /* Push the evaluated args on the evaluation stack in reverse
6033 order. Also make an entry for each arg in the calldesc
6034 vector while we're at it. */
/* The argument list is reversed in place, walked, then reversed back
   (line 6051) so the tree is left unmodified.  */
6036 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6038 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6041 bc_expand_expr (TREE_VALUE (arg));
6043 calldesc = tree_cons ((tree) 0,
6044 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6046 calldesc = tree_cons ((tree) 0,
6047 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6051 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6053 /* Allocate a location for the return value and push its
6054 address on the evaluation stack. Also make an entry
6055 at the front of the calldesc for the return value type. */
/* Triple TREE_TYPE: operand 0 is a pointer to the function, so this is
   pointer -> function type -> return type.  */
6057 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6058 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6059 bc_load_localaddr (retval);
6061 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6062 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6064 /* Prepend the argument count. */
6065 calldesc = tree_cons ((tree) 0,
6066 build_int_2 (nargs, 0),
6069 /* Push the address of the call description vector on the stack. */
6070 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6071 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6072 build_index_type (build_int_2 (nargs * 2, 0)));
6073 r = output_constant_def (calldesc);
6074 bc_load_externaddr (r);
6076 /* Push the address of the function to be called. */
6077 bc_expand_expr (TREE_OPERAND (exp, 0));
6079 /* Call the function, popping its address and the calldesc vector
6080 address off the evaluation stack in the process. */
6081 bc_emit_instruction (call);
6083 /* Pop the arguments off the stack. */
6084 bc_adjust_stack (nargs);
6086 /* Load the return value onto the stack. */
6087 bc_load_localaddr (retval);
6088 bc_load_memory (type, TREE_OPERAND (exp, 0));
/* SAVE_EXPR: evaluate once, cache the value in a bytecode local.  */
6094 if (!SAVE_EXPR_RTL (exp))
6096 /* First time around: copy to local variable */
6097 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6098 TYPE_ALIGN (TREE_TYPE(exp)));
6099 bc_expand_expr (TREE_OPERAND (exp, 0));
/* `duplicate' keeps a copy on the stack as the expression's value while
   the other copy is stored into the cache slot.  */
6100 bc_emit_instruction (duplicate);
6102 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6103 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6107 /* Consecutive reference: use saved copy */
6108 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6109 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6114 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6115 how are they handled instead? */
6118 TREE_USED (exp) = 1;
6119 bc_expand_expr (STMT_BODY (exp));
/* Conversion (NOP/CONVERT, presumably): expand operand then emit the
   type conversion.  */
6126 bc_expand_expr (TREE_OPERAND (exp, 0));
6127 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
/* MODIFY_EXPR: delegate to the generic assignment expander.  */
6132 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
/* ADDR_EXPR: push the operand's address.  */
6137 bc_expand_address (TREE_OPERAND (exp, 0));
/* INDIRECT_REF: evaluate the pointer, then load through it.  */
6142 bc_expand_expr (TREE_OPERAND (exp, 0));
6143 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* ARRAY_REF: re-expand in canonical (pointer arithmetic) form.  */
6148 bc_expand_expr (bc_canonicalize_array_ref (exp));
/* COMPONENT_REF: compute the member address, then load (the load helper
   handles bit-fields -- see comment at 6155).  */
6153 bc_expand_component_address (exp);
6155 /* If we have a bitfield, generate a proper load */
6156 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR: evaluate operand 0 for effect, drop its value, then
   evaluate operand 1 as the result.  */
6161 bc_expand_expr (TREE_OPERAND (exp, 0));
6162 bc_emit_instruction (drop);
6163 bc_expand_expr (TREE_OPERAND (exp, 1));
/* COND_EXPR: test, jump-if-false to LAB (else arm), fall through to the
   then arm, jump over the else arm to LAB1.  */
6168 bc_expand_expr (TREE_OPERAND (exp, 0));
6169 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6170 lab = bc_get_bytecode_label ();
6171 bc_emit_bytecode (xjumpifnot);
6172 bc_emit_bytecode_labelref (lab);
6174 #ifdef DEBUG_PRINT_CODE
6175 fputc ('\n', stderr);
6177 bc_expand_expr (TREE_OPERAND (exp, 1));
6178 lab1 = bc_get_bytecode_label ();
6179 bc_emit_bytecode (jump);
6180 bc_emit_bytecode_labelref (lab1);
6182 #ifdef DEBUG_PRINT_CODE
6183 fputc ('\n', stderr);
6186 bc_emit_bytecode_labeldef (lab);
6187 bc_expand_expr (TREE_OPERAND (exp, 2));
6188 bc_emit_bytecode_labeldef (lab1);
/* Short-circuit logicals: pick the conditional-jump opcode, then fall
   through to the shared emitter at 6412 below.  */
6191 case TRUTH_ANDIF_EXPR:
6193 opcode = xjumpifnot;
6196 case TRUTH_ORIF_EXPR:
/* Binary/unary/increment operators: each case merely selects the
   operator table entry; the shared tails at 6399/6406/6434 emit code.  */
6203 binoptab = optab_plus_expr;
6208 binoptab = optab_minus_expr;
6213 binoptab = optab_mult_expr;
6216 case TRUNC_DIV_EXPR:
6217 case FLOOR_DIV_EXPR:
6219 case ROUND_DIV_EXPR:
6220 case EXACT_DIV_EXPR:
6222 binoptab = optab_trunc_div_expr;
6225 case TRUNC_MOD_EXPR:
6226 case FLOOR_MOD_EXPR:
6228 case ROUND_MOD_EXPR:
6230 binoptab = optab_trunc_mod_expr;
6233 case FIX_ROUND_EXPR:
6234 case FIX_FLOOR_EXPR:
6236 abort (); /* Not used for C. */
6238 case FIX_TRUNC_EXPR:
6245 abort (); /* FIXME */
6249 binoptab = optab_rdiv_expr;
6254 binoptab = optab_bit_and_expr;
6259 binoptab = optab_bit_ior_expr;
6264 binoptab = optab_bit_xor_expr;
6269 binoptab = optab_lshift_expr;
6274 binoptab = optab_rshift_expr;
6277 case TRUTH_AND_EXPR:
6279 binoptab = optab_truth_and_expr;
6284 binoptab = optab_truth_or_expr;
6289 binoptab = optab_lt_expr;
6294 binoptab = optab_le_expr;
6299 binoptab = optab_ge_expr;
6304 binoptab = optab_gt_expr;
6309 binoptab = optab_eq_expr;
6314 binoptab = optab_ne_expr;
6319 unoptab = optab_negate_expr;
6324 unoptab = optab_bit_not_expr;
6327 case TRUTH_NOT_EXPR:
6329 unoptab = optab_truth_not_expr;
6332 case PREDECREMENT_EXPR:
6334 incroptab = optab_predecrement_expr;
6337 case PREINCREMENT_EXPR:
6339 incroptab = optab_preincrement_expr;
6342 case POSTDECREMENT_EXPR:
6344 incroptab = optab_postdecrement_expr;
6347 case POSTINCREMENT_EXPR:
6349 incroptab = optab_postincrement_expr;
/* CONSTRUCTOR: */
6354 bc_expand_constructor (exp);
/* BIND_EXPR (presumably): open a binding contour, expand the decls and
   their initializers, expand the body, then close the contour.  */
6364 tree vars = TREE_OPERAND (exp, 0);
6365 int vars_need_expansion = 0;
6367 /* Need to open a binding contour here because
6368 if there are any cleanups they most be contained here. */
6369 expand_start_bindings (0);
6371 /* Mark the corresponding BLOCK for output. */
6372 if (TREE_OPERAND (exp, 2) != 0)
6373 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6375 /* If VARS have not yet been expanded, expand them now. */
6378 if (DECL_RTL (vars) == 0)
6380 vars_need_expansion = 1;
6381 bc_expand_decl (vars, 0);
6383 bc_expand_decl_init (vars);
6384 vars = TREE_CHAIN (vars);
6387 bc_expand_expr (TREE_OPERAND (exp, 1));
6389 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
/* Shared tail: binary operators funnel here with BINOPTAB set.  */
6399 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6400 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
/* Shared tail: unary operators funnel here with UNOPTAB set.  */
6406 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* Shared tail for ANDIF/ORIF: evaluate op0 as a truth value, duplicate
   it, conditionally jump (OPCODE) past op1; otherwise drop the copy and
   evaluate op1.  Either way one truth value remains on the stack.  */
6412 bc_expand_expr (TREE_OPERAND (exp, 0));
6413 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6414 lab = bc_get_bytecode_label ();
6416 bc_emit_instruction (duplicate);
6417 bc_emit_bytecode (opcode);
6418 bc_emit_bytecode_labelref (lab);
6420 #ifdef DEBUG_PRINT_CODE
6421 fputc ('\n', stderr);
6424 bc_emit_instruction (drop);
6426 bc_expand_expr (TREE_OPERAND (exp, 1));
6427 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6428 bc_emit_bytecode_labeldef (lab);
/* Shared tail for the four increment/decrement codes, INCROPTAB set.  */
6434 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6436 /* Push the quantum. */
6437 bc_expand_expr (TREE_OPERAND (exp, 1));
6439 /* Convert it to the lvalue's type. */
6440 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6442 /* Push the address of the lvalue */
6443 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6445 /* Perform actual increment */
6446 bc_expand_increment (incroptab, type);
6450 /* Return the alignment in bits of EXP, a pointer valued expression.
6451 But don't return more than MAX_ALIGN no matter what.
6452 The alignment returned is, by default, the alignment of the thing that
6453 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6455 Otherwise, look at the expression to see if we can do better, i.e., if the
6456 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): elided listing -- the return-type line, parameter
   declarations, and several interior lines (loop braces, returns, some
   case labels) are missing from this view.  */
6459 get_pointer_alignment (exp, max_align)
6463 unsigned align, inner;
/* Non-pointer expressions carry no alignment information.  */
6465 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the alignment of the pointed-to type, clamped to MAX_ALIGN.  */
6468 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6469 align = MIN (align, max_align);
/* Walk down through conversions and arithmetic, tightening ALIGN where
   the expression proves a stronger guarantee.  */
6473 switch (TREE_CODE (exp))
/* Conversions (NOP/CONVERT, presumably) and NON_LVALUE_EXPR: look at the
   inner pointer; keep the larger of the two alignments.  */
6477 case NON_LVALUE_EXPR:
6478 exp = TREE_OPERAND (exp, 0);
6479 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6481 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6482 inner = MIN (inner, max_align);
6483 align = MAX (align, inner);
/* PLUS_EXPR, presumably: */
6487 /* If sum of pointer + int, restrict our maximum alignment to that
6488 imposed by the integer. If not, we can't do any better than
6490 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Shrink MAX_ALIGN until the constant offset is a multiple of it
   (loop body elided in this view).  */
6493 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6498 exp = TREE_OPERAND (exp, 0);
/* ADDR_EXPR, presumably: */
6502 /* See what we are pointing at and look at its alignment. */
6503 exp = TREE_OPERAND (exp, 0);
6504 if (TREE_CODE (exp) == FUNCTION_DECL)
6505 align = MAX (align, FUNCTION_BOUNDARY);
/* 'd' = declaration class of tree codes; 'c' = constant class.  */
6506 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6507 align = MAX (align, DECL_ALIGN (exp));
6508 #ifdef CONSTANT_ALIGNMENT
6509 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6510 align = CONSTANT_ALIGNMENT (exp, align);
6512 return MIN (align, max_align);
6520 /* Return the tree node and offset if a given argument corresponds to
6521 a string constant. */
/* NOTE(review): elided listing -- the return-type line, parameter
   declarations, and the lines that set *PTR_OFFSET in the PLUS_EXPR arms
   (original lines ~6540-6553) are missing from this view.  On success
   the STRING_CST node is returned and *PTR_OFFSET receives the byte
   offset into it; the failure return is also elided here.  */
6524 string_constant (arg, ptr_offset)
/* Direct case: ARG is `&"literal"' -- offset is zero.  */
6530 if (TREE_CODE (arg) == ADDR_EXPR
6531 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6533 *ptr_offset = integer_zero_node;
6534 return TREE_OPERAND (arg, 0);
/* Sum case: `&"literal" + offset' with the string on either side.  */
6536 else if (TREE_CODE (arg) == PLUS_EXPR)
6538 tree arg0 = TREE_OPERAND (arg, 0);
6539 tree arg1 = TREE_OPERAND (arg, 1);
6544 if (TREE_CODE (arg0) == ADDR_EXPR
6545 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6548 return TREE_OPERAND (arg0, 0);
6550 else if (TREE_CODE (arg1) == ADDR_EXPR
6551 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6554 return TREE_OPERAND (arg1, 0);
6561 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6562 way, because it could contain a zero byte in the middle.
6563 TREE_STRING_LENGTH is the size of the character array, not the string.
6565 Unfortunately, string_constant can't access the values of const char
6566 arrays with initializers, so neither can we do so here. */
/* NOTE(review): elided listing -- the function definition line (the name
   is presumably c_strlen), its parameter declarations, local declarations
   for OFFSET_NODE/OFFSET/MAX/PTR/I, and several interior lines (failure
   returns, loop bodies) are missing from this view.  Apparent contract:
   given a tree SRC, return a tree for strlen(SRC) if it can be computed
   at compile time, else (presumably) 0.  */
6576 src = string_constant (src, &offset_node);
/* MAX is the size of the character array; PTR its bytes.  */
6579 max = TREE_STRING_LENGTH (src);
6580 ptr = TREE_STRING_POINTER (src);
/* Non-constant offset: we can still answer if the array has no interior
   NUL, since then length = array size - offset.  */
6581 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6583 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6584 compute the offset to the following null if we don't know where to
6585 start searching for it. */
6587 for (i = 0; i < max; i++)
6590 /* We don't know the starting offset, but we do know that the string
6591 has no internal zero bytes. We can assume that the offset falls
6592 within the bounds of the string; otherwise, the programmer deserves
6593 what he gets. Subtract the offset from the length of the string,
6595 /* This would perhaps not be valid if we were dealing with named
6596 arrays in addition to literal string constants. */
6597 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6600 /* We have a known offset into the string. Start searching there for
6601 a null character. */
6602 if (offset_node == 0)
6606 /* Did we get a long long offset? If so, punt. */
6607 if (TREE_INT_CST_HIGH (offset_node) != 0)
6609 offset = TREE_INT_CST_LOW (offset_node);
6611 /* If the offset is known to be out of bounds, warn, and call strlen at
6613 if (offset < 0 || offset > max)
6615 warning ("offset outside bounds of constant string");
6618 /* Use strlen to search for the first zero byte. Since any strings
6619 constructed with build_string will have nulls appended, we win even
6620 if we get handed something like (char[4])"abcd".
6622 Since OFFSET is our starting index into the string, no further
6623 calculation is needed. */
6624 return size_int (strlen (ptr + offset));
6627 /* Expand an expression EXP that calls a built-in function,
6628 with result going to TARGET if that's convenient
6629 (and in mode MODE if that's convenient).
6630 SUBTARGET may be used as the target for computing one of EXP's operands.
6631 IGNORE is nonzero if the value is to be ignored. */
6634 expand_builtin (exp, target, subtarget, mode, ignore)
6638 enum machine_mode mode;
6641 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6642 tree arglist = TREE_OPERAND (exp, 1);
6645 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6646 optab builtin_optab;
6648 switch (DECL_FUNCTION_CODE (fndecl))
6653 /* build_function_call changes these into ABS_EXPR. */
6658 case BUILT_IN_FSQRT:
6659 /* If not optimizing, call the library function. */
6664 /* Arg could be wrong type if user redeclared this fcn wrong. */
6665 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6668 /* Stabilize and compute the argument. */
6669 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6670 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6672 exp = copy_node (exp);
6673 arglist = copy_node (arglist);
6674 TREE_OPERAND (exp, 1) = arglist;
6675 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6677 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6679 /* Make a suitable register to place result in. */
6680 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6685 switch (DECL_FUNCTION_CODE (fndecl))
6688 builtin_optab = sin_optab; break;
6690 builtin_optab = cos_optab; break;
6691 case BUILT_IN_FSQRT:
6692 builtin_optab = sqrt_optab; break;
6697 /* Compute into TARGET.
6698 Set TARGET to wherever the result comes back. */
6699 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6700 builtin_optab, op0, target, 0);
6702 /* If we were unable to expand via the builtin, stop the
6703 sequence (without outputting the insns) and break, causing
6704 a call the the library function. */
6711 /* Check the results by default. But if flag_fast_math is turned on,
6712 then assume sqrt will always be called with valid arguments. */
6714 if (! flag_fast_math)
6716 /* Don't define the builtin FP instructions
6717 if your machine is not IEEE. */
6718 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6721 lab1 = gen_label_rtx ();
6723 /* Test the result; if it is NaN, set errno=EDOM because
6724 the argument was not in the domain. */
6725 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6726 emit_jump_insn (gen_beq (lab1));
6730 #ifdef GEN_ERRNO_RTX
6731 rtx errno_rtx = GEN_ERRNO_RTX;
6734 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6737 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6740 /* We can't set errno=EDOM directly; let the library call do it.
6741 Pop the arguments right away in case the call gets deleted. */
6743 expand_call (exp, target, 0);
6750 /* Output the entire sequence. */
6751 insns = get_insns ();
6757 /* __builtin_apply_args returns block of memory allocated on
6758 the stack into which is stored the arg pointer, structure
6759 value address, static chain, and all the registers that might
6760 possibly be used in performing a function call. The code is
6761 moved to the start of the function so the incoming values are
6763 case BUILT_IN_APPLY_ARGS:
6764 /* Don't do __builtin_apply_args more than once in a function.
6765 Save the result of the first call and reuse it. */
6766 if (apply_args_value != 0)
6767 return apply_args_value;
6769 /* When this function is called, it means that registers must be
6770 saved on entry to this function. So we migrate the
6771 call to the first insn of this function. */
6776 temp = expand_builtin_apply_args ();
6780 apply_args_value = temp;
6782 /* Put the sequence after the NOTE that starts the function.
6783 If this is inside a SEQUENCE, make the outer-level insn
6784 chain current, so the code is placed at the start of the
6786 push_topmost_sequence ();
6787 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6788 pop_topmost_sequence ();
6792 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6793 FUNCTION with a copy of the parameters described by
6794 ARGUMENTS, and ARGSIZE. It returns a block of memory
6795 allocated on the stack into which is stored all the registers
6796 that might possibly be used for returning the result of a
6797 function. ARGUMENTS is the value returned by
6798 __builtin_apply_args. ARGSIZE is the number of bytes of
6799 arguments that must be copied. ??? How should this value be
6800 computed? We'll also need a safe worst case value for varargs
6802 case BUILT_IN_APPLY:
6804 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6805 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6806 || TREE_CHAIN (arglist) == 0
6807 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6808 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6809 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6817 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6818 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6820 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6823 /* __builtin_return (RESULT) causes the function to return the
6824 value described by RESULT. RESULT is address of the block of
6825 memory returned by __builtin_apply. */
6826 case BUILT_IN_RETURN:
6828 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6829 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6830 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6831 NULL_RTX, VOIDmode, 0));
6834 case BUILT_IN_SAVEREGS:
6835 /* Don't do __builtin_saveregs more than once in a function.
6836 Save the result of the first call and reuse it. */
6837 if (saveregs_value != 0)
6838 return saveregs_value;
6840 /* When this function is called, it means that registers must be
6841 saved on entry to this function. So we migrate the
6842 call to the first insn of this function. */
6845 rtx valreg, saved_valreg;
6847 /* Now really call the function. `expand_call' does not call
6848 expand_builtin, so there is no danger of infinite recursion here. */
6851 #ifdef EXPAND_BUILTIN_SAVEREGS
6852 /* Do whatever the machine needs done in this case. */
6853 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6855 /* The register where the function returns its value
6856 is likely to have something else in it, such as an argument.
6857 So preserve that register around the call. */
6858 if (value_mode != VOIDmode)
6860 valreg = hard_libcall_value (value_mode);
6861 saved_valreg = gen_reg_rtx (value_mode);
6862 emit_move_insn (saved_valreg, valreg);
6865 /* Generate the call, putting the value in a pseudo. */
6866 temp = expand_call (exp, target, ignore);
6868 if (value_mode != VOIDmode)
6869 emit_move_insn (valreg, saved_valreg);
6875 saveregs_value = temp;
6877 /* Put the sequence after the NOTE that starts the function.
6878 If this is inside a SEQUENCE, make the outer-level insn
6879 chain current, so the code is placed at the start of the
6881 push_topmost_sequence ();
6882 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6883 pop_topmost_sequence ();
6887 /* __builtin_args_info (N) returns word N of the arg space info
6888 for the current function. The number and meanings of words
6889 is controlled by the definition of CUMULATIVE_ARGS. */
6890 case BUILT_IN_ARGS_INFO:
6892 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6894 int *word_ptr = (int *) ¤t_function_args_info;
6895 tree type, elts, result;
6897 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6898 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6899 __FILE__, __LINE__);
6903 tree arg = TREE_VALUE (arglist);
6904 if (TREE_CODE (arg) != INTEGER_CST)
6905 error ("argument of `__builtin_args_info' must be constant");
6908 int wordnum = TREE_INT_CST_LOW (arg);
6910 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6911 error ("argument of `__builtin_args_info' out of range");
6913 return GEN_INT (word_ptr[wordnum]);
6917 error ("missing argument in `__builtin_args_info'");
6922 for (i = 0; i < nwords; i++)
6923 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6925 type = build_array_type (integer_type_node,
6926 build_index_type (build_int_2 (nwords, 0)));
6927 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6928 TREE_CONSTANT (result) = 1;
6929 TREE_STATIC (result) = 1;
6930 result = build (INDIRECT_REF, build_pointer_type (type), result);
6931 TREE_CONSTANT (result) = 1;
6932 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6936 /* Return the address of the first anonymous stack arg. */
6937 case BUILT_IN_NEXT_ARG:
6940 tree fntype = TREE_TYPE (current_function_decl);
6941 tree fnargs = DECL_ARGUMENTS (current_function_decl);
6942 if (!(TYPE_ARG_TYPES (fntype) != 0
6943 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6946 && (parm = tree_last (fnargs)) != 0
6948 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
6949 "__builtin_va_alist"))))
6951 error ("`va_start' used in function with fixed args");
6956 return expand_binop (Pmode, add_optab,
6957 current_function_internal_arg_pointer,
6958 current_function_arg_offset_rtx,
6959 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6961 case BUILT_IN_CLASSIFY_TYPE:
6964 tree type = TREE_TYPE (TREE_VALUE (arglist));
6965 enum tree_code code = TREE_CODE (type);
6966 if (code == VOID_TYPE)
6967 return GEN_INT (void_type_class);
6968 if (code == INTEGER_TYPE)
6969 return GEN_INT (integer_type_class);
6970 if (code == CHAR_TYPE)
6971 return GEN_INT (char_type_class);
6972 if (code == ENUMERAL_TYPE)
6973 return GEN_INT (enumeral_type_class);
6974 if (code == BOOLEAN_TYPE)
6975 return GEN_INT (boolean_type_class);
6976 if (code == POINTER_TYPE)
6977 return GEN_INT (pointer_type_class);
6978 if (code == REFERENCE_TYPE)
6979 return GEN_INT (reference_type_class);
6980 if (code == OFFSET_TYPE)
6981 return GEN_INT (offset_type_class);
6982 if (code == REAL_TYPE)
6983 return GEN_INT (real_type_class);
6984 if (code == COMPLEX_TYPE)
6985 return GEN_INT (complex_type_class);
6986 if (code == FUNCTION_TYPE)
6987 return GEN_INT (function_type_class);
6988 if (code == METHOD_TYPE)
6989 return GEN_INT (method_type_class);
6990 if (code == RECORD_TYPE)
6991 return GEN_INT (record_type_class);
6992 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6993 return GEN_INT (union_type_class);
6994 if (code == ARRAY_TYPE)
6995 return GEN_INT (array_type_class);
6996 if (code == STRING_TYPE)
6997 return GEN_INT (string_type_class);
6998 if (code == SET_TYPE)
6999 return GEN_INT (set_type_class);
7000 if (code == FILE_TYPE)
7001 return GEN_INT (file_type_class);
7002 if (code == LANG_TYPE)
7003 return GEN_INT (lang_type_class);
7005 return GEN_INT (no_type_class);
7007 case BUILT_IN_CONSTANT_P:
7011 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7012 ? const1_rtx : const0_rtx);
7014 case BUILT_IN_FRAME_ADDRESS:
7015 /* The argument must be a nonnegative integer constant.
7016 It counts the number of frames to scan up the stack.
7017 The value is the address of that frame. */
7018 case BUILT_IN_RETURN_ADDRESS:
7019 /* The argument must be a nonnegative integer constant.
7020 It counts the number of frames to scan up the stack.
7021 The value is the return address saved in that frame. */
7023 /* Warning about missing arg was already issued. */
7025 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7027 error ("invalid arg to `__builtin_return_address'");
7030 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
7032 error ("invalid arg to `__builtin_return_address'");
7037 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7038 rtx tem = frame_pointer_rtx;
7041 /* Some machines need special handling before we can access arbitrary
7042 frames. For example, on the sparc, we must first flush all
7043 register windows to the stack. */
7044 #ifdef SETUP_FRAME_ADDRESSES
7045 SETUP_FRAME_ADDRESSES ();
7048 /* On the sparc, the return address is not in the frame, it is
7049 in a register. There is no way to access it off of the current
7050 frame pointer, but it can be accessed off the previous frame
7051 pointer by reading the value from the register window save
7053 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7054 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7058 /* Scan back COUNT frames to the specified frame. */
7059 for (i = 0; i < count; i++)
7061 /* Assume the dynamic chain pointer is in the word that
7062 the frame address points to, unless otherwise specified. */
7063 #ifdef DYNAMIC_CHAIN_ADDRESS
7064 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7066 tem = memory_address (Pmode, tem);
7067 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7070 /* For __builtin_frame_address, return what we've got. */
7071 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7074 /* For __builtin_return_address,
7075 Get the return address from that frame. */
7076 #ifdef RETURN_ADDR_RTX
7077 return RETURN_ADDR_RTX (count, tem);
7079 tem = memory_address (Pmode,
7080 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7081 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7085 case BUILT_IN_ALLOCA:
7087 /* Arg could be non-integer if user redeclared this fcn wrong. */
7088 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7090 current_function_calls_alloca = 1;
7091 /* Compute the argument. */
7092 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7094 /* Allocate the desired space. */
7095 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7097 /* Record the new stack level for nonlocal gotos. */
7098 if (nonlocal_goto_handler_slot != 0)
7099 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7103 /* If not optimizing, call the library function. */
7108 /* Arg could be non-integer if user redeclared this fcn wrong. */
7109 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7112 /* Compute the argument. */
7113 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7114 /* Compute ffs, into TARGET if possible.
7115 Set TARGET to wherever the result comes back. */
7116 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7117 ffs_optab, op0, target, 1);
7122 case BUILT_IN_STRLEN:
7123 /* If not optimizing, call the library function. */
7128 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7129 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7133 tree src = TREE_VALUE (arglist);
7134 tree len = c_strlen (src);
7137 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7139 rtx result, src_rtx, char_rtx;
7140 enum machine_mode insn_mode = value_mode, char_mode;
7141 enum insn_code icode;
7143 /* If the length is known, just return it. */
7145 return expand_expr (len, target, mode, 0);
7147 /* If SRC is not a pointer type, don't do this operation inline. */
7151 /* Call a function if we can't compute strlen in the right mode. */
7153 while (insn_mode != VOIDmode)
7155 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7156 if (icode != CODE_FOR_nothing)
7159 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7161 if (insn_mode == VOIDmode)
7164 /* Make a place to write the result of the instruction. */
7167 && GET_CODE (result) == REG
7168 && GET_MODE (result) == insn_mode
7169 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7170 result = gen_reg_rtx (insn_mode);
7172 /* Make sure the operands are acceptable to the predicates. */
7174 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7175 result = gen_reg_rtx (insn_mode);
7177 src_rtx = memory_address (BLKmode,
7178 expand_expr (src, NULL_RTX, Pmode,
7180 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7181 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7183 char_rtx = const0_rtx;
7184 char_mode = insn_operand_mode[(int)icode][2];
7185 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7186 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7188 emit_insn (GEN_FCN (icode) (result,
7189 gen_rtx (MEM, BLKmode, src_rtx),
7190 char_rtx, GEN_INT (align)));
7192 /* Return the value in the proper mode for this function. */
7193 if (GET_MODE (result) == value_mode)
7195 else if (target != 0)
7197 convert_move (target, result, 0);
7201 return convert_to_mode (value_mode, result, 0);
7204 case BUILT_IN_STRCPY:
7205 /* If not optimizing, call the library function. */
7210 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7211 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7212 || TREE_CHAIN (arglist) == 0
7213 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7217 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7222 len = size_binop (PLUS_EXPR, len, integer_one_node);
7224 chainon (arglist, build_tree_list (NULL_TREE, len));
7228 case BUILT_IN_MEMCPY:
7229 /* If not optimizing, call the library function. */
7234 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7235 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7236 || TREE_CHAIN (arglist) == 0
7237 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7238 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7239 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7243 tree dest = TREE_VALUE (arglist);
7244 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7245 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7248 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7250 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7251 rtx dest_rtx, dest_mem, src_mem;
7253 /* If either SRC or DEST is not a pointer type, don't do
7254 this operation in-line. */
7255 if (src_align == 0 || dest_align == 0)
7257 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7258 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7262 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7263 dest_mem = gen_rtx (MEM, BLKmode,
7264 memory_address (BLKmode, dest_rtx));
7265 src_mem = gen_rtx (MEM, BLKmode,
7266 memory_address (BLKmode,
7267 expand_expr (src, NULL_RTX,
7271 /* Copy word part most expediently. */
7272 emit_block_move (dest_mem, src_mem,
7273 expand_expr (len, NULL_RTX, VOIDmode, 0),
7274 MIN (src_align, dest_align));
7278 /* These comparison functions need an instruction that returns an actual
7279 index. An ordinary compare that just sets the condition codes
7281 #ifdef HAVE_cmpstrsi
7282 case BUILT_IN_STRCMP:
7283 /* If not optimizing, call the library function. */
7288 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7289 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7290 || TREE_CHAIN (arglist) == 0
7291 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7293 else if (!HAVE_cmpstrsi)
7296 tree arg1 = TREE_VALUE (arglist);
7297 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7301 len = c_strlen (arg1);
7303 len = size_binop (PLUS_EXPR, integer_one_node, len);
7304 len2 = c_strlen (arg2);
7306 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7308 /* If we don't have a constant length for the first, use the length
7309 of the second, if we know it. We don't require a constant for
7310 this case; some cost analysis could be done if both are available
7311 but neither is constant. For now, assume they're equally cheap.
7313 If both strings have constant lengths, use the smaller. This
7314 could arise if optimization results in strcpy being called with
7315 two fixed strings, or if the code was machine-generated. We should
7316 add some code to the `memcmp' handler below to deal with such
7317 situations, someday. */
7318 if (!len || TREE_CODE (len) != INTEGER_CST)
7325 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7327 if (tree_int_cst_lt (len2, len))
7331 chainon (arglist, build_tree_list (NULL_TREE, len));
7335 case BUILT_IN_MEMCMP:
7336 /* If not optimizing, call the library function. */
7341 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7342 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7343 || TREE_CHAIN (arglist) == 0
7344 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7345 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7346 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7348 else if (!HAVE_cmpstrsi)
7351 tree arg1 = TREE_VALUE (arglist);
7352 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7353 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7357 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7359 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7360 enum machine_mode insn_mode
7361 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7363 /* If we don't have POINTER_TYPE, call the function. */
7364 if (arg1_align == 0 || arg2_align == 0)
7366 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7367 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7371 /* Make a place to write the result of the instruction. */
7374 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7375 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7376 result = gen_reg_rtx (insn_mode);
7378 emit_insn (gen_cmpstrsi (result,
7379 gen_rtx (MEM, BLKmode,
7380 expand_expr (arg1, NULL_RTX, Pmode,
7382 gen_rtx (MEM, BLKmode,
7383 expand_expr (arg2, NULL_RTX, Pmode,
7385 expand_expr (len, NULL_RTX, VOIDmode, 0),
7386 GEN_INT (MIN (arg1_align, arg2_align))));
7388 /* Return the value in the proper mode for this function. */
7389 mode = TYPE_MODE (TREE_TYPE (exp));
7390 if (GET_MODE (result) == mode)
7392 else if (target != 0)
7394 convert_move (target, result, 0);
7398 return convert_to_mode (mode, result, 0);
7401 case BUILT_IN_STRCMP:
7402 case BUILT_IN_MEMCMP:
7406 default: /* just do library call, if unknown builtin */
7407 error ("built-in function `%s' not currently supported",
7408 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7411 /* The switch statement above can drop through to cause the function
7412 to be called normally. */
7414 return expand_call (exp, target, ignore);
7417 /* Built-in functions to perform an untyped call and return. */
7419 /* For each register that may be used for calling a function, this
7420 gives a mode used to copy the register's value. VOIDmode indicates
7421 the register is not used for calling a function. If the machine
7422 has register windows, this gives only the outbound registers.
7423 INCOMING_REGNO gives the corresponding inbound register. */
7424 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7426 /* For each register that may be used for returning values, this gives
7427 a mode used to copy the register's value. VOIDmode indicates the
7428 register is not used for returning values. If the machine has
7429 register windows, this gives only the outbound registers.
7430 INCOMING_REGNO gives the corresponding inbound register. */
7431 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7433 /* For each register that may be used for calling a function, this
7434 gives the offset of that register into the block returned by
7435 __builtin_apply_args. 0 indicates that the register is not
7436 used for calling a function. */
7437 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7439 /* Return the offset of register REGNO into the block returned by
7440 __builtin_apply_args. This is not declared static, since it is
7441 needed in objc-act.c. */
7444 apply_args_register_offset (regno)
7449 /* Arguments are always put in outgoing registers (in the argument
7450 block) if such make sense. */
7451 #ifdef OUTGOING_REGNO
/* Map an incoming register number to its outgoing counterpart on
   targets (e.g. register-window machines) that distinguish them,
   since apply_args_reg_offset is indexed by outgoing register.  */
7452 regno = OUTGOING_REGNO(regno);
/* Offset 0 means the register is not used for argument passing
   (see the comment on apply_args_reg_offset above).  */
7454 return apply_args_reg_offset[regno];
7457 /* Return the size required for the block returned by __builtin_apply_args,
7458 and initialize apply_args_mode. */
/* Memoized: SIZE survives across calls; the scan below runs once.  */
7463 static int size = -1;
7465 enum machine_mode mode;
7467 /* The values computed by this function never change. */
7470 /* The first value is the incoming arg-pointer. */
7471 size = GET_MODE_SIZE (Pmode);
7473 /* The second value is the structure value address unless this is
7474 passed as an "invisible" first argument. */
7475 if (struct_value_rtx)
7476 size += GET_MODE_SIZE (Pmode);
/* For each hard register usable for argument passing, find a mode
   wide enough to copy its full value, and lay it out in the block.  */
7478 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7479 if (FUNCTION_ARG_REGNO_P (regno))
7481 /* Search for the proper mode for copying this register's
7482 value. I'm not sure this is right, but it works so far. */
7483 enum machine_mode best_mode = VOIDmode;
/* Try integer modes first, keeping the widest one that fits in a
   single hard register.  */
7485 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7487 mode = GET_MODE_WIDER_MODE (mode))
7488 if (HARD_REGNO_MODE_OK (regno, mode)
7489 && HARD_REGNO_NREGS (regno, mode) == 1)
/* No integer mode worked; fall back to float modes that have a
   usable move pattern.  */
7492 if (best_mode == VOIDmode)
7493 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7495 mode = GET_MODE_WIDER_MODE (mode))
7496 if (HARD_REGNO_MODE_OK (regno, mode)
7497 && (mov_optab->handlers[(int) mode].insn_code
7498 != CODE_FOR_nothing))
7502 if (mode == VOIDmode)
/* Round SIZE up to the mode's alignment before recording this
   register's slot offset.  */
7505 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7506 if (size % align != 0)
7507 size = CEIL (size, align) * align;
7508 apply_args_reg_offset[regno] = size;
7509 size += GET_MODE_SIZE (mode);
7510 apply_args_mode[regno] = mode;
/* Register not used for argument passing: mark both tables.  */
7514 apply_args_mode[regno] = VOIDmode;
7515 apply_args_reg_offset[regno] = 0;
7521 /* Return the size required for the block returned by __builtin_apply,
7522 and initialize apply_result_mode. */
7525 apply_result_size ()
/* Memoized: SIZE survives across calls; the scan below runs once.  */
7527 static int size = -1;
7529 enum machine_mode mode;
7531 /* The values computed by this function never change. */
/* For each hard register that can hold a function's return value,
   pick a copy mode and accumulate the block layout, mirroring the
   scheme used in apply_args_size above.  */
7536 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7537 if (FUNCTION_VALUE_REGNO_P (regno))
7539 /* Search for the proper mode for copying this register's
7540 value. I'm not sure this is right, but it works so far. */
7541 enum machine_mode best_mode = VOIDmode;
7543 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7545 mode = GET_MODE_WIDER_MODE (mode))
7546 if (HARD_REGNO_MODE_OK (regno, mode))
/* No integer mode worked; try float modes with a move pattern.  */
7549 if (best_mode == VOIDmode)
7550 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7552 mode = GET_MODE_WIDER_MODE (mode))
7553 if (HARD_REGNO_MODE_OK (regno, mode)
7554 && (mov_optab->handlers[(int) mode].insn_code
7555 != CODE_FOR_nothing))
7559 if (mode == VOIDmode)
/* Align SIZE for this mode, then reserve its slot.  */
7562 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7563 if (size % align != 0)
7564 size = CEIL (size, align) * align;
7565 size += GET_MODE_SIZE (mode);
7566 apply_result_mode[regno] = mode;
7569 apply_result_mode[regno] = VOIDmode;
7571 /* Allow targets that use untyped_call and untyped_return to override
7572 the size so that machine-specific information can be stored here. */
7573 #ifdef APPLY_RESULT_SIZE
7574 size = APPLY_RESULT_SIZE;
7580 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7581 /* Create a vector describing the result block RESULT. If SAVEP is true,
7582 the result block is used to save the values; otherwise it is used to
7583 restore the values. */
7586 result_vector (savep, result)
7590 int regno, size, align, nelts;
7591 enum machine_mode mode;
/* Worst case: one SET per hard register.  alloca keeps this on the
   stack for the duration of this call only.  */
7593 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7596 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7597 if ((mode = apply_result_mode[regno]) != VOIDmode)
/* Keep SIZE in sync with the layout chosen by apply_result_size.  */
7599 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7600 if (size % align != 0)
7601 size = CEIL (size, align) * align;
/* When saving, the value lives in the incoming register; when
   restoring, it goes back into the outgoing register.  */
7602 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7603 mem = change_address (result, mode,
7604 plus_constant (XEXP (result, 0), size));
7605 savevec[nelts++] = (savep
7606 ? gen_rtx (SET, VOIDmode, mem, reg)
7607 : gen_rtx (SET, VOIDmode, reg, mem))
7608 size += GET_MODE_SIZE (mode);
7610 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7612 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7614 /* Save the state required to perform an untyped call with the same
7615 arguments as were passed to the current function. */
7618 expand_builtin_apply_args ()
7621 int size, align, regno;
7622 enum machine_mode mode;
7624 /* Create a block where the arg-pointer, structure value address,
7625 and argument registers can be saved. */
7626 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7628 /* Walk past the arg-pointer and structure value address. */
7629 size = GET_MODE_SIZE (Pmode);
7630 if (struct_value_rtx)
7631 size += GET_MODE_SIZE (Pmode);
7633 /* Save each register used in calling a function to the block. */
/* Layout must match apply_args_size exactly, so replay the same
   align-then-advance bookkeeping here.  */
7634 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7635 if ((mode = apply_args_mode[regno]) != VOIDmode)
7637 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7638 if (size % align != 0)
7639 size = CEIL (size, align) * align;
7640 emit_move_insn (change_address (registers, mode,
7641 plus_constant (XEXP (registers, 0),
7643 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7644 size += GET_MODE_SIZE (mode);
7647 /* Save the arg pointer to the block. */
7648 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7649 copy_to_reg (virtual_incoming_args_rtx));
7650 size = GET_MODE_SIZE (Pmode);
7652 /* Save the structure value address unless this is passed as an
7653 "invisible" first argument. */
7654 if (struct_value_incoming_rtx)
7656 emit_move_insn (change_address (registers, Pmode,
7657 plus_constant (XEXP (registers, 0),
7659 copy_to_reg (struct_value_incoming_rtx));
7660 size += GET_MODE_SIZE (Pmode);
7663 /* Return the address of the block. */
7664 return copy_addr_to_reg (XEXP (registers, 0));
7667 /* Perform an untyped call and save the state required to perform an
7668 untyped return of whatever value was returned by the given function. */
7671 expand_builtin_apply (function, arguments, argsize)
7672 rtx function, arguments, argsize;
7674 int size, align, regno;
7675 enum machine_mode mode;
7676 rtx incoming_args, result, reg, dest, call_insn;
7677 rtx old_stack_level = 0;
7680 /* Create a block where the return registers can be saved. */
7681 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7683 /* ??? The argsize value should be adjusted here. */
7685 /* Fetch the arg pointer from the ARGUMENTS block. */
7686 incoming_args = gen_reg_rtx (Pmode);
7687 emit_move_insn (incoming_args,
7688 gen_rtx (MEM, Pmode, arguments));
7689 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the saved arg pointer is past the args;
   back it up by ARGSIZE to find the start of the block.  */
7690 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7691 incoming_args, 0, OPTAB_LIB_WIDEN);
7694 /* Perform postincrements before actually calling the function. */
7697 /* Push a new argument block and copy the arguments. */
7698 do_pending_stack_adjust ();
/* Save the stack level so it can be restored after the call.  */
7699 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7701 /* Push a block of memory onto the stack to store the memory arguments.
7702 Save the address in a register, and copy the memory arguments. ??? I
7703 haven't figured out how the calling convention macros affect this,
7704 but it's likely that the source and/or destination addresses in
7705 the block copy will need updating in machine specific ways. */
7706 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7707 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7708 gen_rtx (MEM, BLKmode, incoming_args),
7710 PARM_BOUNDARY / BITS_PER_UNIT);
7712 /* Refer to the argument block. */
7714 arguments = gen_rtx (MEM, BLKmode, arguments);
7716 /* Walk past the arg-pointer and structure value address. */
7717 size = GET_MODE_SIZE (Pmode);
7718 if (struct_value_rtx)
7719 size += GET_MODE_SIZE (Pmode);
7721 /* Restore each of the registers previously saved. Make USE insns
7722 for each of these registers for use in making the call. */
/* Offsets here must mirror the layout written by
   expand_builtin_apply_args.  */
7723 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7724 if ((mode = apply_args_mode[regno]) != VOIDmode)
7726 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7727 if (size % align != 0)
7728 size = CEIL (size, align) * align;
7729 reg = gen_rtx (REG, mode, regno);
7730 emit_move_insn (reg,
7731 change_address (arguments, mode,
7732 plus_constant (XEXP (arguments, 0),
7735 push_to_sequence (use_insns);
7736 emit_insn (gen_rtx (USE, VOIDmode, reg));
7737 use_insns = get_insns ();
7739 size += GET_MODE_SIZE (mode);
7742 /* Restore the structure value address unless this is passed as an
7743 "invisible" first argument. */
7744 size = GET_MODE_SIZE (Pmode);
7745 if (struct_value_rtx)
7747 rtx value = gen_reg_rtx (Pmode);
7748 emit_move_insn (value,
7749 change_address (arguments, Pmode,
7750 plus_constant (XEXP (arguments, 0),
7752 emit_move_insn (struct_value_rtx, value);
/* Only a register destination needs a USE for the call.  */
7753 if (GET_CODE (struct_value_rtx) == REG)
7755 push_to_sequence (use_insns);
7756 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7757 use_insns = get_insns ();
7760 size += GET_MODE_SIZE (Pmode);
7763 /* All arguments and registers used for the call are set up by now! */
7764 function = prepare_call_address (function, NULL_TREE, &use_insns);
7766 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7767 and we don't want to load it into a register as an optimization,
7768 because prepare_call_address already did it if it should be done. */
7769 if (GET_CODE (function) != SYMBOL_REF)
7770 function = memory_address (FUNCTION_MODE, function);
7772 /* Generate the actual call instruction and save the return value. */
7773 #ifdef HAVE_untyped_call
7774 if (HAVE_untyped_call)
7775 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7776 result, result_vector (1, result)));
7779 #ifdef HAVE_call_value
7780 if (HAVE_call_value)
7784 /* Locate the unique return register. It is not possible to
7785 express a call that sets more than one return register using
7786 call_value; use untyped_call for that. In fact, untyped_call
7787 only needs to save the return registers in the given block. */
7788 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7789 if ((mode = apply_result_mode[regno]) != VOIDmode)
7792 abort (); /* HAVE_untyped_call required. */
7793 valreg = gen_rtx (REG, mode, regno);
7796 emit_call_insn (gen_call_value (valreg,
7797 gen_rtx (MEM, FUNCTION_MODE, function),
7798 const0_rtx, NULL_RTX, const0_rtx));
7800 emit_move_insn (change_address (result, GET_MODE (valreg),
7808 /* Find the CALL insn we just emitted and write the USE insns before it. */
7809 for (call_insn = get_last_insn ();
7810 call_insn && GET_CODE (call_insn) != CALL_INSN;
7811 call_insn = PREV_INSN (call_insn))
7817 /* Put the USE insns before the CALL. */
7818 emit_insns_before (use_insns, call_insn);
7820 /* Restore the stack. */
7821 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7823 /* Return the address of the result block. */
7824 return copy_addr_to_reg (XEXP (result, 0));
7827 /* Perform an untyped return. */
7830 expand_builtin_return (result)
7833 int size, align, regno;
7834 enum machine_mode mode;
/* Called for side effect: makes sure apply_result_mode is set up.  */
7838 apply_result_size ();
7839 result = gen_rtx (MEM, BLKmode, result);
7841 #ifdef HAVE_untyped_return
7842 if (HAVE_untyped_return)
7844 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7850 /* Restore the return value and note that each value is used. */
/* Walk the saved-result block using the same layout that
   apply_result_size computed.  */
7852 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7853 if ((mode = apply_result_mode[regno]) != VOIDmode)
7855 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7856 if (size % align != 0)
7857 size = CEIL (size, align) * align;
7858 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7859 emit_move_insn (reg,
7860 change_address (result, mode,
7861 plus_constant (XEXP (result, 0),
7864 push_to_sequence (use_insns);
7865 emit_insn (gen_rtx (USE, VOIDmode, reg));
7866 use_insns = get_insns ();
7868 size += GET_MODE_SIZE (mode);
7871 /* Put the USE insns before the return. */
7872 emit_insns (use_insns);
7874 /* Return whatever value was restored by jumping directly to the end
7876 expand_null_return ();
7879 /* Expand code for a post- or pre- increment or decrement
7880 and return the RTX for the result.
7881 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
7884 expand_increment (exp, post)
7888 register rtx op0, op1;
7889 register rtx temp, value;
7890 register tree incremented = TREE_OPERAND (exp, 0);
7891 optab this_optab = add_optab;
7893 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7894 int op0_is_copy = 0;
7895 int single_insn = 0;
7896 /* 1 means we can't store into OP0 directly,
7897 because it is a subreg narrower than a word,
7898 and we don't dare clobber the rest of the word. */
/* Bytecode back end handles the whole expression itself.  */
7901 if (output_bytecode)
7903 bc_expand_expr (exp);
7907 /* Stabilize any component ref that might need to be
7908 evaluated more than once below. */
7910 || TREE_CODE (incremented) == BIT_FIELD_REF
7911 || (TREE_CODE (incremented) == COMPONENT_REF
7912 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7913 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7914 incremented = stabilize_reference (incremented);
7915 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7916 ones into save exprs so that they don't accidentally get evaluated
7917 more than once by the code below. */
7918 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7919 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7920 incremented = save_expr (incremented);
7922 /* Compute the operands as RTX.
7923 Note whether OP0 is the actual lvalue or a copy of it:
7924 I believe it is a copy iff it is a register or subreg
7925 and insns were generated in computing it. */
7927 temp = get_last_insn ();
7928 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7930 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7931 in place but instead must do sign- or zero-extension during assignment,
7932 so we copy it into a new register and let the code below use it as
7935 Note that we can safely modify this SUBREG since it is known not to be
7936 shared (it was made by the expand_expr call above). */
7938 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7939 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7940 else if (GET_CODE (op0) == SUBREG
7941 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
/* Heuristic described at line 7924: a (sub)reg is a copy iff
   expanding INCREMENTED emitted any insns.  */
7944 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7945 && temp != get_last_insn ());
7946 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7948 /* Decide whether incrementing or decrementing. */
7949 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7950 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7951 this_optab = sub_optab;
7953 /* Convert decrement by a constant into a negative increment. */
7954 if (this_optab == sub_optab
7955 && GET_CODE (op1) == CONST_INT)
7957 op1 = GEN_INT (- INTVAL (op1));
7958 this_optab = add_optab;
7961 /* For a preincrement, see if we can do this with a single instruction. */
7964 icode = (int) this_optab->handlers[(int) mode].insn_code;
7965 if (icode != (int) CODE_FOR_nothing
7966 /* Make sure that OP0 is valid for operands 0 and 1
7967 of the insn we want to queue. */
7968 && (*insn_operand_predicate[icode][0]) (op0, mode)
7969 && (*insn_operand_predicate[icode][1]) (op0, mode)
7970 && (*insn_operand_predicate[icode][2]) (op1, mode))
7974 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7975 then we cannot just increment OP0. We must therefore contrive to
7976 increment the original value. Then, for postincrement, we can return
7977 OP0 since it is a copy of the old value. For preincrement, expand here
7978 unless we can do it with a single insn.
7980 Likewise if storing directly into OP0 would clobber high bits
7981 we need to preserve (bad_subreg). */
7982 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7984 /* This is the easiest way to increment the value wherever it is.
7985 Problems with multiple evaluation of INCREMENTED are prevented
7986 because either (1) it is a component_ref or preincrement,
7987 in which case it was stabilized above, or (2) it is an array_ref
7988 with constant index in an array in a register, which is
7989 safe to reevaluate. */
7990 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7991 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7992 ? MINUS_EXPR : PLUS_EXPR),
7995 TREE_OPERAND (exp, 1));
7996 temp = expand_assignment (incremented, newexp, ! post, 0);
/* Postincrement returns the saved old value; preincrement returns
   the assignment result.  */
7997 return post ? op0 : temp;
8002 /* We have a true reference to the value in OP0.
8003 If there is an insn to add or subtract in this mode, queue it.
8004 Queueing the increment insn avoids the register shuffling
8005 that often results if we must increment now and first save
8006 the old value for subsequent use. */
8008 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8009 op0 = stabilize (op0);
8012 icode = (int) this_optab->handlers[(int) mode].insn_code;
8013 if (icode != (int) CODE_FOR_nothing
8014 /* Make sure that OP0 is valid for operands 0 and 1
8015 of the insn we want to queue. */
8016 && (*insn_operand_predicate[icode][0]) (op0, mode)
8017 && (*insn_operand_predicate[icode][1]) (op0, mode)
8019 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8020 op1 = force_reg (mode, op1);
8022 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8026 /* Preincrement, or we can't increment with one simple insn. */
8028 /* Save a copy of the value before inc or dec, to return it later. */
8029 temp = value = copy_to_reg (op0);
8031 /* Arrange to return the incremented value. */
8032 /* Copy the rtx because expand_binop will protect from the queue,
8033 and the results of that would be invalid for us to return
8034 if our caller does emit_queue before using our result. */
8035 temp = copy_rtx (value = op0);
8037 /* Increment however we can. */
8038 op1 = expand_binop (mode, this_optab, value, op1, op0,
8039 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8040 /* Make sure the value is stored into OP0. */
/* expand_binop may not have targeted OP0; emit an explicit store.  */
8042 emit_move_insn (op0, op1);
8047 /* Expand all function calls contained within EXP, innermost ones first.
8048 But don't look within expressions that have sequence points.
8049 For each CALL_EXPR, record the rtx for its value
8050 in the CALL_EXPR_RTL field. */
8053 preexpand_calls (exp)
8056 register int nops, i;
8057 int type = TREE_CODE_CLASS (TREE_CODE (exp));
/* Global flag gates the whole pre-expansion pass.  */
8059 if (! do_preexpand_calls)
8062 /* Only expressions and references can contain calls. */
8064 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8067 switch (TREE_CODE (exp))
8070 /* Do nothing if already expanded. */
8071 if (CALL_EXPR_RTL (exp) != 0)
8074 /* Do nothing to built-in functions. */
8075 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8076 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8077 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8078 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8083 case TRUTH_ANDIF_EXPR:
8084 case TRUTH_ORIF_EXPR:
8085 /* If we find one of these, then we can be sure
8086 the adjust will be done for it (since it makes jumps).
8087 Do it now, so that if this is inside an argument
8088 of a function, we don't get the stack adjustment
8089 after some other args have already been pushed. */
8090 do_pending_stack_adjust ();
8095 case WITH_CLEANUP_EXPR:
8099 if (SAVE_EXPR_RTL (exp) != 0)
/* Default: recurse into each operand of EXP that can itself
   contain a call (same class test as at function entry).  */
8103 nops = tree_code_length[(int) TREE_CODE (exp)];
8104 for (i = 0; i < nops; i++)
8105 if (TREE_OPERAND (exp, i) != 0)
8107 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8108 if (type == 'e' || type == '<' || type == '1' || type == '2'
8110 preexpand_calls (TREE_OPERAND (exp, i));
8114 /* At the start of a function, record that we have no previously-pushed
8115 arguments waiting to be popped. */
/* Reset the pending stack adjustment counter; called at the start of a
   function so no stale adjustment carries over.  */
8118 init_pending_stack_adjust ()
8120 pending_stack_adjust = 0;
8123 /* When exiting from function, if safe, clear out any pending stack adjust
8124 so the adjustment won't get done. */
/* At function exit, discard any pending stack adjustment when the target
   says the exit code ignores the stack pointer (EXIT_IGNORE_STACK) and the
   function cannot be inlined (inlining would re-expose the imbalance).
   NOTE(review): the matching `#endif' for EXIT_IGNORE_STACK is not visible
   in this extract -- verify it is present in the original.  */
8127 clear_pending_stack_adjust ()
8129 #ifdef EXIT_IGNORE_STACK
8130 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8131 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8132 && ! flag_inline_functions)
8133 pending_stack_adjust = 0;
8137 /* Pop any previously-pushed arguments that have not been popped yet. */
/* Emit any deferred stack-pointer adjustment now, unless popping is
   currently inhibited (inhibit_defer_pop nonzero), and clear the counter.  */
8140 do_pending_stack_adjust ()
8142 if (inhibit_defer_pop == 0)
8144 if (pending_stack_adjust != 0)
8145 adjust_stack (GEN_INT (pending_stack_adjust));
8146 pending_stack_adjust = 0;
8150 /* Expand all cleanups up to OLD_CLEANUPS.
8151 Needed here, and also for language-dependent calls. */
/* Expand (emit code for) every cleanup on the cleanups_this_call list,
   newest first, until the list is back to OLD_CLEANUPS.  Each expanded
   cleanup is popped from the list.  */
8154 expand_cleanups_to (old_cleanups)
8157 while (cleanups_this_call != old_cleanups)
8159 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8160 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8164 /* Expand conditional expressions. */
8166 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8167 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Emit code to evaluate EXP and jump to LABEL if the value is zero
   (thin wrapper: LABEL is do_jump's false target).  */
8171 jumpifnot (exp, label)
8175 do_jump (exp, label, NULL_RTX);
8178 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the `jumpif (exp, label)' definition header appears to be
   missing from this extract; the statement below is its body (LABEL is
   do_jump's true target).  Verify against the original file.  */
8185 do_jump (exp, NULL_RTX, label);
8188 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8189 the result is zero, or IF_TRUE_LABEL if the result is one.
8190 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8191 meaning fall through in that case.
8193 do_jump always does any pending stack adjust except when it does not
8194 actually perform a jump. An example where there is no jump
8195 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8197 This function is responsible for optimizing cases such as
8198 &&, || and comparison operators in EXP. */
8201 do_jump (exp, if_false_label, if_true_label)
8203 rtx if_false_label, if_true_label;
8205 register enum tree_code code = TREE_CODE (exp);
8206 /* Some cases need to create a label to jump to
8207 in order to properly fall through.
8208 These cases set DROP_THROUGH_LABEL nonzero. */
8209 rtx drop_through_label = 0;
8223 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8229 /* This is not true with #pragma weak */
8231 /* The address of something can never be zero. */
8233 emit_jump (if_true_label);
8238 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8239 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8240 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8243 /* If we are narrowing the operand, we have to do the compare in the
8245 if ((TYPE_PRECISION (TREE_TYPE (exp))
8246 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8248 case NON_LVALUE_EXPR:
8249 case REFERENCE_EXPR:
8254 /* These cannot change zero->non-zero or vice versa. */
8255 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8259 /* This is never less insns than evaluating the PLUS_EXPR followed by
8260 a test and can be longer if the test is eliminated. */
8262 /* Reduce to minus. */
8263 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8264 TREE_OPERAND (exp, 0),
8265 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8266 TREE_OPERAND (exp, 1))));
8267 /* Process as MINUS. */
8271 /* Non-zero iff operands of minus differ. */
8272 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8273 TREE_OPERAND (exp, 0),
8274 TREE_OPERAND (exp, 1)),
8279 /* If we are AND'ing with a small constant, do this comparison in the
8280 smallest type that fits. If the machine doesn't have comparisons
8281 that small, it will be converted back to the wider comparison.
8282 This helps if we are testing the sign bit of a narrower object.
8283 combine can't do this for us because it can't know whether a
8284 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8286 if (! SLOW_BYTE_ACCESS
8287 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8288 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8289 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8290 && (type = type_for_size (i + 1, 1)) != 0
8291 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8292 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8293 != CODE_FOR_nothing))
8295 do_jump (convert (type, exp), if_false_label, if_true_label);
8300 case TRUTH_NOT_EXPR:
8301 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8304 case TRUTH_ANDIF_EXPR:
8305 if (if_false_label == 0)
8306 if_false_label = drop_through_label = gen_label_rtx ();
8307 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8308 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8311 case TRUTH_ORIF_EXPR:
8312 if (if_true_label == 0)
8313 if_true_label = drop_through_label = gen_label_rtx ();
8314 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8315 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8320 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8324 do_pending_stack_adjust ();
8325 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8332 int bitsize, bitpos, unsignedp;
8333 enum machine_mode mode;
8338 /* Get description of this reference. We don't actually care
8339 about the underlying object here. */
8340 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8341 &mode, &unsignedp, &volatilep);
8343 type = type_for_size (bitsize, unsignedp);
8344 if (! SLOW_BYTE_ACCESS
8345 && type != 0 && bitsize >= 0
8346 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8347 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8348 != CODE_FOR_nothing))
8350 do_jump (convert (type, exp), if_false_label, if_true_label);
8357 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8358 if (integer_onep (TREE_OPERAND (exp, 1))
8359 && integer_zerop (TREE_OPERAND (exp, 2)))
8360 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8362 else if (integer_zerop (TREE_OPERAND (exp, 1))
8363 && integer_onep (TREE_OPERAND (exp, 2)))
8364 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8368 register rtx label1 = gen_label_rtx ();
8369 drop_through_label = gen_label_rtx ();
8370 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8371 /* Now the THEN-expression. */
8372 do_jump (TREE_OPERAND (exp, 1),
8373 if_false_label ? if_false_label : drop_through_label,
8374 if_true_label ? if_true_label : drop_through_label);
8375 /* In case the do_jump just above never jumps. */
8376 do_pending_stack_adjust ();
8377 emit_label (label1);
8378 /* Now the ELSE-expression. */
8379 do_jump (TREE_OPERAND (exp, 2),
8380 if_false_label ? if_false_label : drop_through_label,
8381 if_true_label ? if_true_label : drop_through_label);
8386 if (integer_zerop (TREE_OPERAND (exp, 1)))
8387 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8388 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8391 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8392 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8393 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8394 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8396 comparison = compare (exp, EQ, EQ);
8400 if (integer_zerop (TREE_OPERAND (exp, 1)))
8401 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8402 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8405 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8406 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8407 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8408 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8410 comparison = compare (exp, NE, NE);
8414 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8416 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8417 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8419 comparison = compare (exp, LT, LTU);
8423 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8425 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8426 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8428 comparison = compare (exp, LE, LEU);
8432 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8434 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8435 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8437 comparison = compare (exp, GT, GTU);
8441 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8443 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8444 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8446 comparison = compare (exp, GE, GEU);
8451 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8453 /* This is not needed any more and causes poor code since it causes
8454 comparisons and tests from non-SI objects to have different code
8456 /* Copy to register to avoid generating bad insns by cse
8457 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8458 if (!cse_not_expected && GET_CODE (temp) == MEM)
8459 temp = copy_to_reg (temp);
8461 do_pending_stack_adjust ();
8462 if (GET_CODE (temp) == CONST_INT)
8463 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8464 else if (GET_CODE (temp) == LABEL_REF)
8465 comparison = const_true_rtx;
8466 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8467 && !can_compare_p (GET_MODE (temp)))
8468 /* Note swapping the labels gives us not-equal. */
8469 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8470 else if (GET_MODE (temp) != VOIDmode)
8471 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8472 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8473 GET_MODE (temp), NULL_RTX, 0);
8478 /* Do any postincrements in the expression that was tested. */
8481 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8482 straight into a conditional jump instruction as the jump condition.
8483 Otherwise, all the work has been done already. */
8485 if (comparison == const_true_rtx)
8488 emit_jump (if_true_label);
8490 else if (comparison == const0_rtx)
8493 emit_jump (if_false_label);
8495 else if (comparison)
8496 do_jump_for_compare (comparison, if_false_label, if_true_label);
8498 if (drop_through_label)
8500 /* If do_jump produces code that might be jumped around,
8501 do any stack adjusts from that code, before the place
8502 where control merges in. */
8503 do_pending_stack_adjust ();
8504 emit_label (drop_through_label);
8508 /* Given a comparison expression EXP for values too wide to be compared
8509 with one insn, test the comparison and jump to the appropriate label.
8510 The code of EXP is ignored; we always test GT if SWAP is 0,
8511 and LT if SWAP is 1. */
/* Jump for a greater-than comparison of values too wide for one compare
   insn: test word by word, high-order word first.  SWAP nonzero swaps the
   operands (giving LT instead of GT).
   NOTE(review): braces, `else', loop-variable declarations (i, comp) and
   one argument line of the second compare_from_rtx call are missing from
   this extract -- verify against the original.  */
8514 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8517 rtx if_false_label, if_true_label;
8519 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8520 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8521 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8522 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8523 rtx drop_through_label = 0;
8524 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* Supply a local label for whichever target the caller omitted.  */
8527 if (! if_true_label || ! if_false_label)
8528 drop_through_label = gen_label_rtx ();
8529 if (! if_true_label)
8530 if_true_label = drop_through_label;
8531 if (! if_false_label)
8532 if_false_label = drop_through_label;
8534 /* Compare a word at a time, high order first. */
8535 for (i = 0; i < nwords; i++)
8538 rtx op0_word, op1_word;
8540 if (WORDS_BIG_ENDIAN)
8542 op0_word = operand_subword_force (op0, i, mode);
8543 op1_word = operand_subword_force (op1, i, mode);
8547 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8548 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8551 /* All but high-order word must be compared as unsigned. */
8552 comp = compare_from_rtx (op0_word, op1_word,
8553 (unsignedp || i > 0) ? GTU : GT,
8554 unsignedp, word_mode, NULL_RTX, 0);
8555 if (comp == const_true_rtx)
8556 emit_jump (if_true_label);
8557 else if (comp != const0_rtx)
8558 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8560 /* Consider lower words only if these are equal. */
8561 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8563 if (comp == const_true_rtx)
8564 emit_jump (if_false_label);
8565 else if (comp != const0_rtx)
8566 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the relation does not hold.  */
8570 emit_jump (if_false_label);
8571 if (drop_through_label)
8572 emit_label (drop_through_label);
8575 /* Compare OP0 with OP1, word at a time, in mode MODE.
8576 UNSIGNEDP says to do unsigned comparison.
8577 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* rtx-level variant of do_jump_by_parts_greater: compare OP0 with OP1 in
   MODE a word at a time, high-order word first; UNSIGNEDP selects GTU/GT
   for the high word.  Jump to IF_TRUE_LABEL if OP0 is greater, else to
   IF_FALSE_LABEL.
   NOTE(review): braces, `else', and declarations of i/comp/op0/op1 appear
   to be missing from this extract -- verify against the original.  */
8580 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8581 enum machine_mode mode;
8584 rtx if_false_label, if_true_label;
8586 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8587 rtx drop_through_label = 0;
/* Supply a local label for whichever target the caller omitted.  */
8590 if (! if_true_label || ! if_false_label)
8591 drop_through_label = gen_label_rtx ();
8592 if (! if_true_label)
8593 if_true_label = drop_through_label;
8594 if (! if_false_label)
8595 if_false_label = drop_through_label;
8597 /* Compare a word at a time, high order first. */
8598 for (i = 0; i < nwords; i++)
8601 rtx op0_word, op1_word;
8603 if (WORDS_BIG_ENDIAN)
8605 op0_word = operand_subword_force (op0, i, mode);
8606 op1_word = operand_subword_force (op1, i, mode);
8610 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8611 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8614 /* All but high-order word must be compared as unsigned. */
8615 comp = compare_from_rtx (op0_word, op1_word,
8616 (unsignedp || i > 0) ? GTU : GT,
8617 unsignedp, word_mode, NULL_RTX, 0);
8618 if (comp == const_true_rtx)
8619 emit_jump (if_true_label);
8620 else if (comp != const0_rtx)
8621 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8623 /* Consider lower words only if these are equal. */
8624 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8626 if (comp == const_true_rtx)
8627 emit_jump (if_false_label);
8628 else if (comp != const0_rtx)
8629 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words compared equal: the relation does not hold.  */
8633 emit_jump (if_false_label);
8634 if (drop_through_label)
8635 emit_label (drop_through_label);
8638 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8639 with one insn, test the comparison and jump to the appropriate label. */
/* Jump for an equality comparison of values too wide for one compare
   insn: compare word by word; any unequal word jumps to IF_FALSE_LABEL,
   all-equal falls through to IF_TRUE_LABEL.
   NOTE(review): braces and the declaration of i are missing from this
   extract -- verify against the original.  */
8642 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8644 rtx if_false_label, if_true_label;
8646 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8647 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8648 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8649 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8651 rtx drop_through_label = 0;
/* Unequal words fall out through this local label when the caller
   supplied no false target.  */
8653 if (! if_false_label)
8654 drop_through_label = if_false_label = gen_label_rtx ();
8656 for (i = 0; i < nwords; i++)
8658 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8659 operand_subword_force (op1, i, mode),
8660 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8661 word_mode, NULL_RTX, 0);
8662 if (comp == const_true_rtx)
8663 emit_jump (if_false_label);
8664 else if (comp != const0_rtx)
8665 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word matched: the values are equal.  */
8669 emit_jump (if_true_label);
8670 if (drop_through_label)
8671 emit_label (drop_through_label);
8674 /* Jump according to whether OP0 is 0.
8675 We assume that OP0 has an integer mode that is too wide
8676 for the available compare insns. */
/* Jump according to whether OP0 (an integer too wide for the available
   compare insns) is zero: each word is compared with zero; any nonzero
   word jumps to IF_FALSE_LABEL, all-zero falls to IF_TRUE_LABEL.
   NOTE(review): braces, the declaration of i, and part of the
   compare_from_rtx argument list (the word_mode subword index/mode) are
   missing from this extract -- verify against the original.  */
8679 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8681 rtx if_false_label, if_true_label;
8683 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8685 rtx drop_through_label = 0;
8687 if (! if_false_label)
8688 drop_through_label = if_false_label = gen_label_rtx ();
8690 for (i = 0; i < nwords; i++)
8692 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8694 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8695 if (comp == const_true_rtx)
8696 emit_jump (if_false_label);
8697 else if (comp != const0_rtx)
8698 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* Every word was zero.  */
8702 emit_jump (if_true_label);
8703 if (drop_through_label)
8704 emit_label (drop_through_label);
8707 /* Given a comparison expression in rtl form, output conditional branches to
8708 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* Emit the conditional branch(es) for COMPARISON (an rtx condition on
   cc0): branch to IF_TRUE_LABEL when given, otherwise emit the inverted
   branch to IF_FALSE_LABEL, falling back to redirect-and-jump when the
   branch cannot be inverted.
   NOTE(review): braces, `else' arms, declarations (insn, branch) and
   several statements appear to be missing from this extract -- verify
   against the original.  */
8711 do_jump_for_compare (comparison, if_false_label, if_true_label)
8712 rtx comparison, if_false_label, if_true_label;
/* Simple case: a true target exists; branch to it directly.  */
8716 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8717 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8722 emit_jump (if_false_label);
8724 else if (if_false_label)
8727 rtx prev = get_last_insn ();
8731 prev = PREV_INSN (prev);
8733 /* Output the branch with the opposite condition. Then try to invert
8734 what is generated. If more than one insn is a branch, or if the
8735 branch is not the last insn written, abort. If we can't invert
8736 the branch, make a true label, redirect this jump to that,
8737 emit a jump to the false label and define the true label. */
8739 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8740 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8744 /* Here we get the insn before what was just emitted.
8745 On some machines, emitting the branch can discard
8746 the previous compare insn and emit a replacement. */
8748 /* If there's only one preceding insn... */
8749 insn = get_insns ();
8751 insn = NEXT_INSN (prev);
/* Scan the newly-emitted insns for the (single) JUMP_INSN.  */
8753 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8754 if (GET_CODE (insn) == JUMP_INSN)
8761 if (branch != get_last_insn ())
8764 if (! invert_jump (branch, if_false_label))
/* Inversion failed: fall back to redirecting around an extra jump.  */
8766 if_true_label = gen_label_rtx ();
8767 redirect_jump (branch, if_true_label);
8768 emit_jump (if_false_label);
8769 emit_label (if_true_label);
8774 /* Generate code for a comparison expression EXP
8775 (including code to compute the values to be compared)
8776 and set (CC0) according to the result.
8777 SIGNED_CODE should be the rtx operation for this comparison for
8778 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8780 We force a stack adjustment unless there are currently
8781 things pushed on the stack that aren't yet used. */
8784 compare (exp, signed_code, unsigned_code)
8786 enum rtx_code signed_code, unsigned_code;
8789 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8791 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8792 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8793 register enum machine_mode mode = TYPE_MODE (type);
8794 int unsignedp = TREE_UNSIGNED (type);
8795 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8797 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8799 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8800 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8803 /* Like compare but expects the values to compare as two rtx's.
8804 The decision as to signed or unsigned comparison must be made by the caller.
8806 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8809 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8810 size of MODE should be used. */
/* Compare OP0 and OP1 as rtx's with condition CODE in MODE; UNSIGNEDP
   selects unsigned comparison, SIZE is the object size for BLKmode, ALIGN
   its alignment.  Emits the cmp insn and returns a (code cc0 0) rtx, or a
   constant rtx when the result folds at compile time.
   NOTE(review): several lines (remaining parameter declarations, braces,
   the operand-swap body, the force_operand/flag checks and the constant
   `return tem;') are missing from this extract -- verify against the
   original.  */
8813 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8814 register rtx op0, op1;
8817 enum machine_mode mode;
8823 /* If one operand is constant, make it the second one. Only do this
8824 if the other operand is not constant as well. */
8826 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8827 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping the operands requires swapping the condition too.  */
8832 code = swap_condition (code);
8837 op0 = force_not_mem (op0);
8838 op1 = force_not_mem (op1);
8841 do_pending_stack_adjust ();
/* Both constant: fold the comparison at compile time if possible.  */
8843 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8844 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8848 /* There's no need to do this now that combine.c can eliminate lots of
8849 sign extensions. This can be less efficient in certain cases on other
8852 /* If this is a signed equality comparison, we can do it as an
8853 unsigned comparison since zero-extension is cheaper than sign
8854 extension and comparisons with zero are done as unsigned. This is
8855 the case even on machines that can do fast sign extension, since
8856 zero-extension is easier to combine with other operations than
8857 sign-extension is. If we are comparing against a constant, we must
8858 convert it to what it would look like unsigned. */
8859 if ((code == EQ || code == NE) && ! unsignedp
8860 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8862 if (GET_CODE (op1) == CONST_INT
8863 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8864 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8869 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8871 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
8874 /* Generate code to calculate EXP using a store-flag instruction
8875 and return an rtx for the result. EXP is either a comparison
8876 or a TRUTH_NOT_EXPR whose operand is a comparison.
8878 If TARGET is nonzero, store the result there if convenient.
8880 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8883 Return zero if there is no suitable set-flag instruction
8884 available on this machine.
8886 Once expand_expr has been called on the arguments of the comparison,
8887 we are committed to doing the store flag, since it is not safe to
8888 re-evaluate the expression. We emit the store-flag insn by calling
8889 emit_store_flag, but only expand the arguments if we have a reason
8890 to believe that emit_store_flag will be successful. If we think that
8891 it will, but it isn't, we have to simulate the store-flag with a
8892 set/jump/set sequence. */
/* Expand comparison (or TRUTH_NOT_EXPR of a comparison) EXP as a
   store-flag value: the result rtx holds 1/0 for true/false.  TARGET is a
   suggested destination, MODE the result mode; ONLY_CHEAP restricts us to
   cases likely to be cheap.  Returns 0 when no suitable scc strategy
   exists; may fall back to a set/compare/jump/set sequence.
   NOTE(review): many lines (declarations of op0/op1/tem/invert/unsignedp/
   ops_unsignedp, braces, `return's, most `case' labels of the switch, and
   #else/#endif parts) are missing from this extract -- verify against the
   original before changing control flow.  */
8895 do_store_flag (exp, target, mode, only_cheap)
8898 enum machine_mode mode;
8902 tree arg0, arg1, type;
8904 enum machine_mode operand_mode;
8908 enum insn_code icode;
8909 rtx subtarget = target;
8910 rtx result, label, pattern, jump_pat;
8912 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8913 result at the end. We can't simply invert the test since it would
8914 have already been inverted if it were valid. This case occurs for
8915 some floating-point comparisons. */
8917 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8918 invert = 1, exp = TREE_OPERAND (exp, 0);
8920 arg0 = TREE_OPERAND (exp, 0);
8921 arg1 = TREE_OPERAND (exp, 1);
8922 type = TREE_TYPE (arg0);
8923 operand_mode = TYPE_MODE (type);
8924 unsignedp = TREE_UNSIGNED (type);
8926 /* We won't bother with BLKmode store-flag operations because it would mean
8927 passing a lot of information to emit_store_flag. */
8928 if (operand_mode == BLKmode)
8934 /* Get the rtx comparison code to use. We know that EXP is a comparison
8935 operation of some type. Some comparisons against 1 and -1 can be
8936 converted to comparisons with zero. Do so here so that the tests
8937 below will be aware that we have a comparison with zero. These
8938 tests will not catch constants in the first operand, but constants
8939 are rarely passed as the first operand. */
8941 switch (TREE_CODE (exp))
/* LT_EXPR: x < 1 becomes x <= 0.  */
8950 if (integer_onep (arg1))
8951 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8953 code = unsignedp ? LTU : LT;
/* LE_EXPR: x <= -1 becomes x < 0 (signed only).  */
8956 if (! unsignedp && integer_all_onesp (arg1))
8957 arg1 = integer_zero_node, code = LT;
8959 code = unsignedp ? LEU : LE;
/* GT_EXPR: x > -1 becomes x >= 0 (signed only).  */
8962 if (! unsignedp && integer_all_onesp (arg1))
8963 arg1 = integer_zero_node, code = GE;
8965 code = unsignedp ? GTU : GT;
/* GE_EXPR: x >= 1 becomes x > 0.  */
8968 if (integer_onep (arg1))
8969 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8971 code = unsignedp ? GEU : GE;
8977 /* Put a constant second. */
8978 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8980 tem = arg0; arg0 = arg1; arg1 = tem;
8981 code = swap_condition (code);
8984 /* If this is an equality or inequality test of a single bit, we can
8985 do this by shifting the bit being tested to the low-order bit and
8986 masking the result with the constant 1. If the condition was EQ,
8987 we xor it with 1. This does not require an scc insn and is faster
8988 than an scc insn even if we have it. */
8990 if ((code == NE || code == EQ)
8991 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8992 && integer_pow2p (TREE_OPERAND (arg0, 1))
8993 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8995 tree inner = TREE_OPERAND (arg0, 0);
8996 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8997 NULL_RTX, VOIDmode, 0)));
9000 /* If INNER is a right shift of a constant and it plus BITNUM does
9001 not overflow, adjust BITNUM and INNER. */
9003 if (TREE_CODE (inner) == RSHIFT_EXPR
9004 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9005 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9006 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9007 < TYPE_PRECISION (type)))
9009 bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9010 inner = TREE_OPERAND (inner, 0);
9013 /* If we are going to be able to omit the AND below, we must do our
9014 operations as unsigned. If we must use the AND, we have a choice.
9015 Normally unsigned is faster, but for some machines signed is. */
9016 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9017 #ifdef LOAD_EXTEND_OP
9018 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Only reuse SUBTARGET when it is a register of the right mode that
   expanding INNER cannot clobber.  */
9024 if (subtarget == 0 || GET_CODE (subtarget) != REG
9025 || GET_MODE (subtarget) != operand_mode
9026 || ! safe_from_p (subtarget, inner))
9029 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
9032 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9033 size_int (bitnum), subtarget, ops_unsignedp);
9035 if (GET_MODE (op0) != mode)
9036 op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* For EQ (or inverted NE) flip the low bit with XOR.  */
9038 if ((code == EQ && ! invert) || (code == NE && invert))
9039 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9040 ops_unsignedp, OPTAB_LIB_WIDEN);
9042 /* Put the AND last so it can combine with more things. */
9043 if (bitnum != TYPE_PRECISION (type) - 1)
9044 op0 = expand_and (op0, const1_rtx, subtarget);
9049 /* Now see if we are likely to be able to do this. Return if not. */
9050 if (! can_compare_p (operand_mode))
9052 icode = setcc_gen_code[(int) code];
9053 if (icode == CODE_FOR_nothing
9054 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9056 /* We can only do this if it is one of the special cases that
9057 can be handled without an scc insn. */
9058 if ((code == LT && integer_zerop (arg1))
9059 || (! only_cheap && code == GE && integer_zerop (arg1)))
9061 else if (BRANCH_COST >= 0
9062 && ! only_cheap && (code == NE || code == EQ)
9063 && TREE_CODE (type) != REAL_TYPE
9064 && ((abs_optab->handlers[(int) operand_mode].insn_code
9065 != CODE_FOR_nothing)
9066 || (ffs_optab->handlers[(int) operand_mode].insn_code
9067 != CODE_FOR_nothing)))
9073 preexpand_calls (exp);
9074 if (subtarget == 0 || GET_CODE (subtarget) != REG
9075 || GET_MODE (subtarget) != operand_mode
9076 || ! safe_from_p (subtarget, arg1))
9079 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9080 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9083 target = gen_reg_rtx (mode);
9085 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9086 because, if the emit_store_flag does anything it will succeed and
9087 OP0 and OP1 will not be used subsequently. */
9089 result = emit_store_flag (target, code,
9090 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9091 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9092 operand_mode, unsignedp, 1);
/* Invert the flag when the original expression was TRUTH_NOT_EXPR.  */
9097 result = expand_binop (mode, xor_optab, result, const1_rtx,
9098 result, 0, OPTAB_LIB_WIDEN);
9102 /* If this failed, we have to do this with set/compare/jump/set code. */
9103 if (target == 0 || GET_CODE (target) != REG
9104 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9105 target = gen_reg_rtx (GET_MODE (target));
9107 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9108 result = compare_from_rtx (op0, op1, code, unsignedp,
9109 operand_mode, NULL_RTX, 0);
/* Comparison folded to a constant: return the flag value directly.  */
9110 if (GET_CODE (result) == CONST_INT)
9111 return (((result == const0_rtx && ! invert)
9112 || (result != const0_rtx && invert))
9113 ? const0_rtx : const1_rtx);
9115 label = gen_label_rtx ();
9116 if (bcc_gen_fctn[(int) code] == 0)
9119 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9120 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9126 /* Generate a tablejump instruction (used for switch statements). */
9128 #ifdef HAVE_tablejump
9130 /* INDEX is the value being switched on, with the lowest value
9131 in the table already subtracted.
9132 MODE is its expected mode (needed if INDEX is constant).
9133 RANGE is the length of the jump table.
9134 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9136 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9137 index value is out of range. */
/* Emit a tablejump for a switch statement: INDEX (already rebased to the
   table's lowest value) in MODE is range-checked against RANGE, the table
   entry at TABLE_LABEL is loaded, and an indirect jump through it is
   emitted; out-of-range goes to DEFAULT_LABEL.
   NOTE(review): braces, some #else/#endif lines and the trailing
   emit_barrier/label statements appear to be missing from this extract --
   verify against the original.  */
9140 do_tablejump (index, mode, range, table_label, default_label)
9141 rtx index, range, table_label, default_label;
9142 enum machine_mode mode;
9144 register rtx temp, vector;
9146 /* Do an unsigned comparison (in the proper mode) between the index
9147 expression and the value which represents the length of the range.
9148 Since we just finished subtracting the lower bound of the range
9149 from the index expression, this comparison allows us to simultaneously
9150 check that the original index expression value is both greater than
9151 or equal to the minimum value of the range and less than or equal to
9152 the maximum value of the range. */
9154 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
9155 emit_jump_insn (gen_bltu (default_label));
9157 /* If index is in range, it must fit in Pmode.
9158 Convert to Pmode so we can index with it. */
9160 index = convert_to_mode (Pmode, index, 1);
9162 /* Don't let a MEM slip thru, because then INDEX that comes
9163 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9164 and break_out_memory_refs will go to work on it and mess it up. */
9165 #ifdef PIC_CASE_VECTOR_ADDRESS
9166 if (flag_pic && GET_CODE (index) != REG)
9167 index = copy_to_mode_reg (Pmode, index);
9170 /* If flag_force_addr were to affect this address
9171 it could interfere with the tricky assumptions made
9172 about addresses that contain label-refs,
9173 which may be valid only very near the tablejump itself. */
9174 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9175 GET_MODE_SIZE, because this indicates how large insns are. The other
9176 uses should all be Pmode, because they are addresses. This code
9177 could fail if addresses and insns are not the same size. */
9178 index = gen_rtx (PLUS, Pmode,
9179 gen_rtx (MULT, Pmode, index,
9180 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9181 gen_rtx (LABEL_REF, Pmode, table_label));
9182 #ifdef PIC_CASE_VECTOR_ADDRESS
9184 index = PIC_CASE_VECTOR_ADDRESS (index);
9187 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9188 temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The dispatch table is constant, so mark the load unchanging.  */
9189 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9190 RTX_UNCHANGING_P (vector) = 1;
9191 convert_move (temp, vector, 0);
9193 emit_jump_insn (gen_tablejump (temp, table_label));
9195 #ifndef CASE_VECTOR_PC_RELATIVE
9196 /* If we are generating PIC code or if the table is PC-relative, the
9197 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9203 #endif /* HAVE_tablejump */
9206 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9207 to that value is on the top of the stack. The resulting type is TYPE, and
9208 the source declaration is DECL. */
9211 bc_load_memory (type, decl)
9214 enum bytecode_opcode opcode;
9217 /* Bit fields are special. We only know about signed and
9218 unsigned ints, and enums. The latter are treated as
/* zxloadBI zero-extends (unsigned fields), sxloadBI sign-extends.  */
9221 if (DECL_BIT_FIELD (decl))
9222 if (TREE_CODE (type) == ENUMERAL_TYPE
9223 || TREE_CODE (type) == INTEGER_TYPE)
9224 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9228 /* See corresponding comment in bc_store_memory(). */
9229 if (TYPE_MODE (type) == BLKmode
9230 || TYPE_MODE (type) == VOIDmode)
/* Ordinary scalar: map the machine mode to its load opcode.  */
9233 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
/* neverneverland is the map's "no opcode for this mode" sentinel.  */
9235 if (opcode == neverneverland)
9238 bc_emit_bytecode (opcode);
9240 #ifdef DEBUG_PRINT_CODE
9241 fputc ('\n', stderr);
9246 /* Store the contents of the second stack slot to the address in the
9247 top stack slot. DECL is the declaration of the destination and is used
9248 to determine whether we're dealing with a bitfield. */
9251 bc_store_memory (type, decl)
9254 enum bytecode_opcode opcode;
/* Bit-field destinations use the bit-field store path (as in
   bc_load_memory); only enums and integer types are handled.  */
9257 if (DECL_BIT_FIELD (decl))
9259 if (TREE_CODE (type) == ENUMERAL_TYPE
9260 || TREE_CODE (type) == INTEGER_TYPE)
9266 if (TYPE_MODE (type) == BLKmode)
9268 /* Copy structure. This expands to a block copy instruction, storeBLK.
9269 In addition to the arguments expected by the other store instructions,
9270 it also expects a type size (SImode) on top of the stack, which is the
9271 structure size in size units (usually bytes). The two first arguments
9272 are already on the stack; so we just put the size on level 1. For some
9273 other languages, the size may be variable, this is why we don't encode
9274 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9276 bc_expand_expr (TYPE_SIZE (type));
/* Ordinary scalar: map the machine mode to its store opcode.  */
9280 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
/* neverneverland is the map's "no opcode for this mode" sentinel.  */
9282 if (opcode == neverneverland)
9285 bc_emit_bytecode (opcode);
9287 #ifdef DEBUG_PRINT_CODE
9288 fputc ('\n', stderr);
9293 /* Allocate local stack space sufficient to hold a value of the given
9294 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9295 integral power of 2. A special case is locals of type VOID, which
9296 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9297 remapped into the corresponding attribute of SI. */
9300 bc_allocate_local (size, alignment)
9301 int size, alignment;
9309 /* Normalize size and alignment */
/* "Voidish" size: reserve a whole word instead of zero bytes.  */
9311 size = UNITS_PER_WORD;
9313 if (alignment < BITS_PER_UNIT)
/* "Voidish" alignment: fall back on the default integer alignment.  */
9314 byte_alignment = 1 << (INT_ALIGN - 1);
9317 byte_alignment = alignment / BITS_PER_UNIT;
/* Round the running frame size up to the requested byte alignment.  */
9319 if (local_vars_size & (byte_alignment - 1))
9320 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
/* The returned rtx records this local's frame offset; then reserve
   SIZE bytes for it.  */
9322 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9323 local_vars_size += size;
9329 /* Allocate variable-sized local array. Variable-sized arrays are
9330 actually pointers to the address in memory where they are stored. */
9333 bc_allocate_variable_array (size)
9337 const int ptralign = (1 << (PTR_ALIGN - 1));
/* Align the frame slot that will hold the pointer to the array.
   NOTE(review): this masks with ptralign itself, where bc_allocate_local
   masks with (byte_alignment - 1); for power-of-two alignments > 2 the
   two are not equivalent -- verify this is intended.  */
9340 if (local_vars_size & ptralign)
9341 local_vars_size += ptralign - (local_vars_size & ptralign);
9343 /* Note down local space needed: pointer to block; also return
/* The rtx records the pointer slot's offset; reserve pointer-sized
   space for it.  */
9346 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9347 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9352 /* Push the machine address for the given external variable offset. */
9354 bc_load_externaddr (externaddr)
/* constP pushes an entry from the pointer table; it is followed by a
   label reference naming the symbol plus its byte offset.  */
9357 bc_emit_bytecode (constP);
9358 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9359 BYTECODE_BC_LABEL (externaddr)->offset);
9361 #ifdef DEBUG_PRINT_CODE
9362 fputc ('\n', stderr);
9371 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9377 /* Like above, but expects an IDENTIFIER. */
9379 bc_load_externaddr_id (id, offset)
/* A null IDENTIFIER_POINTER is invalid input here.  */
9383 if (!IDENTIFIER_POINTER (id))
9386 bc_emit_bytecode (constP);
/* The name is copied with bc_xstrdup -- presumably because the emitter
   retains the string beyond this call; confirm against the emitter.  */
9387 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9389 #ifdef DEBUG_PRINT_CODE
9390 fputc ('\n', stderr);
9395 /* Push the machine address for the given local variable offset. */
9397 bc_load_localaddr (localaddr)
/* localP pushes the address at the local's recorded byte offset
   (presumably frame-relative -- see bc_allocate_local).  */
9400 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9404 /* Push the machine address for the given parameter offset.
9405 NOTE: offset is in bits. */
9407 bc_load_parmaddr (parmaddr)
/* argP pushes a parameter address; per the comment above, the stored
   offset is in bits and the rest of this expression converts it.  */
9410 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9415 /* Convert a[i] into *(a + i). */
9417 bc_canonicalize_array_ref (exp)
9420 tree type = TREE_TYPE (exp);
/* Address of the array object, typed as pointer-to-element.  */
9421 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9422 TREE_OPERAND (exp, 0));
9423 tree index = TREE_OPERAND (exp, 1);
9426 /* Convert the integer argument to a type the same size as a pointer
9427 so the multiply won't overflow spuriously. */
9429 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9430 index = convert (type_for_size (POINTER_SIZE, 0), index);
9432 /* The array address isn't volatile even if the array is.
9433 (Of course this isn't terribly relevant since the bytecode
9434 translator treats nearly everything as volatile anyway.) */
9435 TREE_THIS_VOLATILE (array_adr) = 0;
/* Build *(a + i * sizeof (element)); the fold calls let constant
   indices collapse at tree-construction time.  */
9437 return build1 (INDIRECT_REF, type,
9438 fold (build (PLUS_EXPR,
9439 TYPE_POINTER_TO (type),
9441 fold (build (MULT_EXPR,
9442 TYPE_POINTER_TO (type),
9444 size_in_bytes (type))))));
9448 /* Load the address of the component referenced by the given
9449 COMPONENT_REF expression.
9451 Returns innermost lvalue. */
9454 bc_expand_component_address (exp)
9458 enum machine_mode mode;
9460 HOST_WIDE_INT SIval;
9463 tem = TREE_OPERAND (exp, 1);
9464 mode = DECL_MODE (tem);
9467 /* Compute cumulative bit offset for nested component refs
9468 and array refs, and find the ultimate containing object. */
9470 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
/* Each COMPONENT_REF contributes its field's bit position.  */
9472 if (TREE_CODE (tem) == COMPONENT_REF)
9473 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
/* Constant-index array refs contribute index * element size, in bits.  */
9475 if (TREE_CODE (tem) == ARRAY_REF
9476 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9477 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9479 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9480 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9481 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
/* Push the address of the ultimate containing object.  */
9486 bc_expand_expr (tem);
9489 /* For bitfields also push their offset and size */
9490 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9491 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
/* Intentional assignment inside the condition: only emit the pointer
   adjustment when the byte offset is nonzero.  */
9493 if (SIval = bitpos / BITS_PER_UNIT)
9494 bc_emit_instruction (addconstPSI, SIval);
9496 return (TREE_OPERAND (exp, 1));
9500 /* Emit code to push two SI constants */
/* Used to push the (offset, size) pair consumed by the bit-field
   load/store opcodes (see bc_load_bit_field / bc_store_bit_field).  */
9502 bc_push_offset_and_size (offset, size)
9503 HOST_WIDE_INT offset, size;
9505 bc_emit_instruction (constSI, offset);
9506 bc_emit_instruction (constSI, size);
9510 /* Emit byte code to push the address of the given lvalue expression to
9511 the stack. If it's a bit field, we also push offset and size info.
9513 Returns innermost component, which allows us to determine not only
9514 its type, but also whether it's a bitfield. */
9517 bc_expand_address (exp)
/* Null or erroneous trees have no address to push.  */
9521 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9525 switch (TREE_CODE (exp))
/* Array reference: rewrite a[i] as *(a + i) and retry.  */
9529 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
/* Component reference: delegate to the component-address expander.  */
9533 return (bc_expand_component_address (exp));
/* Dereference: evaluate the pointer operand onto the stack.  */
9537 bc_expand_expr (TREE_OPERAND (exp, 0));
9539 /* For variable-sized types: retrieve pointer. Sometimes the
9540 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9541 also make sure we have an operand, just in case... */
9543 if (TREE_OPERAND (exp, 0)
9544 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9545 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9546 bc_emit_instruction (loadP);
9548 /* If packed, also return offset and size */
9549 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9551 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9552 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9554 return (TREE_OPERAND (exp, 0));
/* External decl: address comes from its assembler name's label.  */
9558 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9559 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
/* Parameter: push the argument address recorded in DECL_RTL.  */
9564 bc_load_parmaddr (DECL_RTL (exp));
9566 /* For variable-sized types: retrieve pointer */
9567 if (TYPE_SIZE (TREE_TYPE (exp))
9568 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9569 bc_emit_instruction (loadP);
9571 /* If packed, also return offset and size */
9572 if (DECL_BIT_FIELD (exp))
9573 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9574 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9580 bc_emit_instruction (returnP);
/* Decl with a bytecode label: load through the pointer table; an
   external decl goes through its assembler name; otherwise it is a
   frame local.  */
9586 if (BYTECODE_LABEL (DECL_RTL (exp)))
9587 bc_load_externaddr (DECL_RTL (exp));
9590 if (DECL_EXTERNAL (exp))
9591 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9592 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9594 bc_load_localaddr (DECL_RTL (exp));
9596 /* For variable-sized types: retrieve pointer */
9597 if (TYPE_SIZE (TREE_TYPE (exp))
9598 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9599 bc_emit_instruction (loadP);
9601 /* If packed, also return offset and size */
9602 if (DECL_BIT_FIELD (exp))
9603 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9604 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* Constant: emit it into the constant pool and push a pointer-table
   reference to its label.  */
9612 bc_emit_bytecode (constP);
9613 r = output_constant_def (exp);
9614 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9616 #ifdef DEBUG_PRINT_CODE
9617 fputc ('\n', stderr);
9628 /* Most lvalues don't have components. */
9633 /* Emit a type code to be used by the runtime support in handling
9634 parameter passing. The type code consists of the machine mode
9635 plus the minimal alignment shifted left 8 bits. */
9638 bc_runtime_type_code (type)
9643 switch (TREE_CODE (type))
/* Pack: machine mode in the low bits, TYPE_ALIGN shifted left 8.  */
9653 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
/* Wrap the code in an INTEGER_CST tree node for the caller.  */
9665 return build_int_2 (val, 0);
9669 /* Generate constructor label */
9671 bc_gen_constr_label ()
9673 static int label_counter;
9674 static char label[20];
/* Labels are "*LR0", "*LR1", ...; the static counter keeps each one
   unique within this compilation.  */
9676 sprintf (label, "*LR%d", label_counter++);
/* Return a NUL-terminated copy on the permanent obstack, since the
   static buffer is overwritten on the next call.  */
9678 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9682 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9683 expand the constructor data as static data, and push a pointer to it.
9684 The pointer is put in the pointer table and is retrieved by a constP
9685 bytecode instruction. We then loop and store each constructor member in
9686 the corresponding component. Finally, we return the original pointer on
9690 bc_expand_constructor (constr)
9694 HOST_WIDE_INT ptroffs;
9698 /* Literal constructors are handled as constants, whereas
9699 non-literals are evaluated and stored element by element
9700 into the data segment. */
9702 /* Allocate space in proper segment and push pointer to space on stack.
9705 l = bc_gen_constr_label ();
/* Fully-constant constructors go to the const segment...  */
9707 if (TREE_CONSTANT (constr))
9711 bc_emit_const_labeldef (l);
9712 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
/* ...non-constant ones reserve space in the data segment instead.  */
9718 bc_emit_data_labeldef (l);
9719 bc_output_data_constructor (constr);
9723 /* Add reference to pointer table and recall pointer to stack;
9724 this code is common for both types of constructors: literals
9725 and non-literals. */
9727 ptroffs = bc_define_pointer (l);
9728 bc_emit_instruction (constP, ptroffs);
9730 /* This is all that has to be done if it's a literal. */
9731 if (TREE_CONSTANT (constr))
9735 /* At this point, we have the pointer to the structure on top of the stack.
9736 Generate sequences of store_memory calls for the constructor. */
9738 /* constructor type is structure */
9739 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9743 /* If the constructor has fewer fields than the structure,
9744 clear the whole structure first. */
9746 if (list_length (CONSTRUCTOR_ELTS (constr))
9747 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
/* duplicate keeps the pointer for the element stores below; clearBLK
   zeroes int_size_in_bytes bytes at that address.  */
9749 bc_emit_instruction (duplicate);
9750 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9751 bc_emit_instruction (clearBLK);
9754 /* Store each element of the constructor into the corresponding
/* Each element's TREE_PURPOSE is the FIELD_DECL, TREE_VALUE the
   initializer expression.  */
9757 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9759 register tree field = TREE_PURPOSE (elt);
9760 register enum machine_mode mode;
9765 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9766 mode = DECL_MODE (field);
9767 unsignedp = TREE_UNSIGNED (field);
9769 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9771 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9772 /* The alignment of TARGET is
9773 at least what its type requires. */
9775 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9776 int_size_in_bytes (TREE_TYPE (constr)));
9781 /* Constructor type is array */
9782 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9786 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9787 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9788 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9789 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9791 /* If the constructor has fewer fields than the structure,
9792 clear the whole structure first. */
9794 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9796 bc_emit_instruction (duplicate);
9797 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9798 bc_emit_instruction (clearBLK);
9802 /* Store each element of the constructor into the corresponding
9803 element of TARGET, determined by counting the elements. */
9805 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9807 elt = TREE_CHAIN (elt), i++)
9809 register enum machine_mode mode;
9814 mode = TYPE_MODE (elttype);
9815 bitsize = GET_MODE_BITSIZE (mode);
9816 unsignedp = TREE_UNSIGNED (elttype);
/* Array elements are positioned by index * element size (bits).  */
9818 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9819 /* * TYPE_SIZE_UNIT (elttype) */ );
9821 bc_store_field (elt, bitsize, bitpos, mode,
9822 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9823 /* The alignment of TARGET is
9824 at least what its type requires. */
9826 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9827 int_size_in_bytes (TREE_TYPE (constr)));
9834 /* Store the value of EXP (an expression tree) into member FIELD of
9835 structure at address on stack, which has type TYPE, mode MODE and
9836 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9839 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9840 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9843 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9844 value_mode, unsignedp, align, total_size)
9845 int bitsize, bitpos;
9846 enum machine_mode mode;
9847 tree field, exp, type;
9848 enum machine_mode value_mode;
9854 /* Expand expression and copy pointer */
/* 'over' duplicates the destination pointer beneath the value, so the
   pointer survives the store for the caller's next element.  */
9855 bc_expand_expr (exp);
9856 bc_emit_instruction (over);
9859 /* If the component is a bit field, we cannot use addressing to access
9860 it. Use bit-field techniques to store in it. */
9862 if (DECL_BIT_FIELD (field))
9864 bc_store_bit_field (bitpos, bitsize, unsignedp);
/* Not a bit field: advance the copied pointer to the member's byte
   offset and do an ordinary typed store.  */
9870 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9872 /* Advance pointer to the desired member */
9874 bc_emit_instruction (addconstPSI, offset);
9877 bc_store_memory (type, field);
9882 /* Store SI/SU in bitfield */
9884 bc_store_bit_field (offset, size, unsignedp)
9885 int offset, size, unsignedp;
9887 /* Push bitfield offset and size */
9888 bc_push_offset_and_size (offset, size);
/* sstoreBI consumes the offset/size pair and stores the value.
   UNSIGNEDP is unused here: stores need no sign/zero extension.  */
9891 bc_emit_instruction (sstoreBI);
9895 /* Load SI/SU from bitfield */
/* Counterpart of bc_store_bit_field; OFFSET and SIZE are in bits.  */
9897 bc_load_bit_field (offset, size, unsignedp)
9898 int offset, size, unsignedp;
9900 /* Push bitfield offset and size */
9901 bc_push_offset_and_size (offset, size);
9903 /* Load: sign-extend if signed, else zero-extend */
9904 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
9908 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9909 (adjust stack pointer upwards), negative means add that number of
9910 levels (adjust the stack pointer downwards). Only positive values
9911 normally make sense. */
9914 bc_adjust_stack (nlevels)
9923 bc_emit_instruction (drop);
9926 bc_emit_instruction (drop);
9931 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9932 stack_depth -= nlevels;
9935 #if defined (VALIDATE_STACK_FOR_BC)
9936 VALIDATE_STACK_FOR_BC ();