1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #include "insn-flags.h"
29 #include "insn-codes.h"
31 #include "insn-config.h"
34 #include "typeclass.h"
37 #include "bc-opcode.h"
38 #include "bc-typecd.h"
43 #define CEIL(x,y) (((x) + (y) - 1) / (y))
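/* Illustrative sketch (not compiled): CEIL rounds a count up to a whole
   number of chunks.  For example, with 32-bit words (UNITS_PER_WORD == 4),
   the number of words needed to hold LEN bytes could be computed as below;
   LEN is a hypothetical variable.  */
#if 0
  int len = 9;
  int nwords = CEIL (len, UNITS_PER_WORD);	/* (9 + 3) / 4 == 3 */
#endif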
45 /* Decide whether a function's arguments should be processed
46 from first to last or from last to first.
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED /* If it's last to first */
59 #ifndef STACK_PUSH_CODE
60 #ifdef STACK_GROWS_DOWNWARD
61 #define STACK_PUSH_CODE PRE_DEC
63 #define STACK_PUSH_CODE PRE_INC
67 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
68 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
70 /* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
78 /* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81 int do_preexpand_calls = 1;
83 /* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85 int pending_stack_adjust;
87 /* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well.  */
91 int inhibit_defer_pop;
93 /* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95 tree cleanups_this_call;
97 /* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs returned.  */
100 static rtx saveregs_value;
102 /* Similarly for __builtin_apply_args. */
103 static rtx apply_args_value;
105 /* This structure is used by move_by_pieces to describe the move to be generated.  */
108 struct move_by_pieces
117 int explicit_inc_from;
123 /* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
128 extern int stack_depth;
129 extern int max_stack_depth;
130 extern struct obstack permanent_obstack;
133 static rtx enqueue_insn PROTO((rtx, rtx));
134 static int queued_subexp_p PROTO((rtx));
135 static void init_queue PROTO((void));
136 static void move_by_pieces PROTO((rtx, rtx, int, int));
137 static int move_by_pieces_ninsns PROTO((unsigned int, int));
138 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
140 static void group_insns PROTO((rtx));
141 static void store_constructor PROTO((tree, rtx));
142 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
143 enum machine_mode, int, int, int));
144 static tree save_noncopied_parts PROTO((tree, tree));
145 static tree init_noncopied_parts PROTO((tree, tree));
146 static int safe_from_p PROTO((rtx, tree));
147 static int fixed_type_p PROTO((tree));
148 static int get_pointer_alignment PROTO((tree, unsigned));
149 static tree string_constant PROTO((tree, tree *));
150 static tree c_strlen PROTO((tree));
151 static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
152 static int apply_args_size PROTO((void));
153 static int apply_result_size PROTO((void));
154 static rtx result_vector PROTO((int, rtx));
155 static rtx expand_builtin_apply_args PROTO((void));
156 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157 static void expand_builtin_return PROTO((rtx));
158 static rtx expand_increment PROTO((tree, int));
159 rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160 tree bc_runtime_type_code PROTO((tree));
161 rtx bc_allocate_local PROTO((int, int));
162 void bc_store_memory PROTO((tree, tree));
163 tree bc_expand_component_address PROTO((tree));
164 tree bc_expand_address PROTO((tree));
165 void bc_expand_constructor PROTO((tree));
166 void bc_adjust_stack PROTO((int));
167 tree bc_canonicalize_array_ref PROTO((tree));
168 void bc_load_memory PROTO((tree, tree));
169 void bc_load_externaddr PROTO((rtx));
170 void bc_load_externaddr_id PROTO((tree, int));
171 void bc_load_localaddr PROTO((rtx));
172 void bc_load_parmaddr PROTO((rtx));
173 static void preexpand_calls PROTO((tree));
174 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
175 static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
176 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
182 /* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
186 static char direct_load[NUM_MACHINE_MODES];
187 static char direct_store[NUM_MACHINE_MODES];
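/* Illustrative note: convert_move below consults these tables.  For
   instance, a narrowing conversion refers to a MEM source directly in the
   narrow mode only when direct_load[(int) to_mode] is nonzero; otherwise
   the source is first copied into a register (see the force_reg calls in
   convert_move).  */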
189 /* MOVE_RATIO is the number of move instructions that is better than a block move.  */
193 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
196 /* A value of around 6 would minimize code size; infinity would minimize execution time.  */
198 #define MOVE_RATIO 15
202 /* This array records the insn_code of insns to perform block moves. */
203 enum insn_code movstr_optab[NUM_MACHINE_MODES];
205 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
207 #ifndef SLOW_UNALIGNED_ACCESS
208 #define SLOW_UNALIGNED_ACCESS 0
211 /* Register mappings for target machines without register windows. */
212 #ifndef INCOMING_REGNO
213 #define INCOMING_REGNO(OUT) (OUT)
215 #ifndef OUTGOING_REGNO
216 #define OUTGOING_REGNO(IN) (IN)
219 /* Maps used to convert modes to const, load, and store bytecodes. */
220 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
224 /* Initialize maps used to convert modes to const, load, and store bytecodes.  */
227 bc_init_mode_to_opcode_maps ()
231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
236 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
241 #include "modemap.def"
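/* Illustrative sketch (not compiled): each entry in modemap.def is expected
   to invoke DEF_MODEMAP, so the #include above expands into a series of
   assignments, one group per mode.  The opcode names shown here are
   hypothetical.  */
#if 0
  mode_to_const_map[(int) SImode] = constSI;
  mode_to_load_map[(int) SImode] = loadSI;
  mode_to_store_map[(int) SImode] = storeSI;
#endif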
245 /* This is run once per compilation to set up which modes can be used
246 directly in memory and to initialize the block move optab. */
252 enum machine_mode mode;
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 if (! HARD_REGNO_MODE_OK (regno, mode))
285 reg = gen_rtx (REG, mode, regno);
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
312 /* This is run at the start of compiling a function. */
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
323 apply_args_value = 0;
327 /* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
334 /* Instead of saving the postincrement queue, empty it.  */
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
341 p->apply_args_value = apply_args_value;
342 p->forced_labels = forced_labels;
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
348 apply_args_value = 0;
352 /* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
356 restore_expr_status (p)
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
363 apply_args_value = p->apply_args_value;
364 forced_labels = p->forced_labels;
367 /* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
370 static rtx pending_chain;
372 /* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
380 enqueue_insn (var, body)
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
384 var, NULL_RTX, NULL_RTX, body, pending_chain);
385 return pending_chain;
388 /* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
404 protect_from_queue (x, modify)
408 register RTX_CODE code = GET_CODE (x);
410 #if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
418 /* A special hack for read access to (MEM (QUEUED ...))
419 to facilitate use of autoincrement.
420 Make a copy of the contents of the memory location
421 rather than a copy of the address, but not
422 if the value is of mode BLKmode. */
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
426 register rtx y = XEXP (x, 0);
427 XEXP (x, 0) = QUEUED_VAR (y);
430 register rtx temp = gen_reg_rtx (GET_MODE (x));
431 emit_insn_before (gen_move_insn (temp, x),
437 /* Otherwise, recursively protect the subexpressions of all
438 the kinds of rtx's that can contain a QUEUED. */
441 rtx tem = protect_from_queue (XEXP (x, 0), 0);
442 if (tem != XEXP (x, 0))
448 else if (code == PLUS || code == MULT)
450 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
451 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
452 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 /* If the increment has not happened, use the variable itself. */
462 if (QUEUED_INSN (x) == 0)
463 return QUEUED_VAR (x);
464 /* If the increment has happened and a pre-increment copy exists, use that copy.  */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
473 return QUEUED_COPY (x);
476 /* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
485 register enum rtx_code code = GET_CODE (x);
491 return queued_subexp_p (XEXP (x, 0));
495 return queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1));
501 /* Perform all the pending incrementations. */
507 while (p = pending_chain)
509 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
510 pending_chain = QUEUED_NEXT (p);
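/* Illustrative sketch (not compiled) of the queueing protocol above, for a
   register VAR (hypothetical) that a POSTINCREMENT_EXPR bumps by 4.  */
#if 0
  rtx queued = enqueue_insn (var, gen_add2_insn (var, GEN_INT (4)));
  /* ... expand the enclosing expression; every use of QUEUED must pass
     through protect_from_queue before landing in an insn ... */
  rtx old = protect_from_queue (queued, 0);	/* pre-increment value of VAR */
  emit_queue ();				/* the queued add is emitted here */
#endif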
521 /* Copy data from FROM to TO, where the machine modes are not the same.
522 Both modes may be integer, or both may be floating.
523 UNSIGNEDP should be nonzero if FROM is an unsigned type.
524 This causes zero-extension instead of sign-extension. */
527 convert_move (to, from, unsignedp)
528 register rtx to, from;
531 enum machine_mode to_mode = GET_MODE (to);
532 enum machine_mode from_mode = GET_MODE (from);
533 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
534 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
538 /* rtx code for making an equivalent value. */
539 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
541 to = protect_from_queue (to, 1);
542 from = protect_from_queue (from, 0);
544 if (to_real != from_real)
547 /* If FROM is a SUBREG that indicates that we have already done at least
548 the required extension, strip it. We don't handle such SUBREGs as TO here.  */
551 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
552 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
553 >= GET_MODE_SIZE (to_mode))
554 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
555 from = gen_lowpart (to_mode, from), from_mode = to_mode;
557 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
560 if (to_mode == from_mode
561 || (from_mode == VOIDmode && CONSTANT_P (from)))
563 emit_move_insn (to, from);
571 #ifdef HAVE_extendqfhf2
572 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
574 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
578 #ifdef HAVE_extendqfsf2
579 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
581 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
585 #ifdef HAVE_extendqfdf2
586 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
588 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
592 #ifdef HAVE_extendqfxf2
593 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
595 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
599 #ifdef HAVE_extendqftf2
600 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
602 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
607 #ifdef HAVE_extendhfsf2
608 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
610 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
614 #ifdef HAVE_extendhfdf2
615 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
617 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
621 #ifdef HAVE_extendhfxf2
622 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
624 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
628 #ifdef HAVE_extendhftf2
629 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
631 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
636 #ifdef HAVE_extendsfdf2
637 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
639 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
643 #ifdef HAVE_extendsfxf2
644 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
646 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
650 #ifdef HAVE_extendsftf2
651 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
653 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
657 #ifdef HAVE_extenddfxf2
658 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
660 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
664 #ifdef HAVE_extenddftf2
665 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
667 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
672 #ifdef HAVE_trunchfqf2
673 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
675 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncsfqf2
680 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
682 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
686 #ifdef HAVE_truncdfqf2
687 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
689 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
693 #ifdef HAVE_truncxfqf2
694 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
696 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
700 #ifdef HAVE_trunctfqf2
701 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
703 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
707 #ifdef HAVE_truncsfhf2
708 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
710 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
714 #ifdef HAVE_truncdfhf2
715 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
717 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
721 #ifdef HAVE_truncxfhf2
722 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
724 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
728 #ifdef HAVE_trunctfhf2
729 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
731 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
778 libcall = extendsfdf2_libfunc;
782 libcall = extendsfxf2_libfunc;
786 libcall = extendsftf2_libfunc;
795 libcall = truncdfsf2_libfunc;
799 libcall = extenddfxf2_libfunc;
803 libcall = extenddftf2_libfunc;
812 libcall = truncxfsf2_libfunc;
816 libcall = truncxfdf2_libfunc;
825 libcall = trunctfsf2_libfunc;
829 libcall = trunctfdf2_libfunc;
835 if (libcall == (rtx) 0)
836 /* This conversion is not implemented yet. */
839 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
841 emit_move_insn (to, value);
845 /* Now both modes are integers. */
847 /* Handle expanding beyond a word. */
848 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
849 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
856 enum machine_mode lowpart_mode;
857 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
859 /* Try converting directly if the insn is supported. */
860 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
863 /* If FROM is a SUBREG, put it into a register. Do this
864 so that we always generate the same set of insns for
865 better cse'ing; if an intermediate assignment occurred,
866 we won't be doing the operation directly on the SUBREG. */
867 if (optimize > 0 && GET_CODE (from) == SUBREG)
868 from = force_reg (from_mode, from);
869 emit_unop_insn (code, to, from, equiv_code);
872 /* Next, try converting via full word. */
873 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
874 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
875 != CODE_FOR_nothing))
877 if (GET_CODE (to) == REG)
878 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
879 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
880 emit_unop_insn (code, to,
881 gen_lowpart (word_mode, to), equiv_code);
885 /* No special multiword conversion insn; do it by hand. */
888 /* Get a copy of FROM widened to a word, if necessary. */
889 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
890 lowpart_mode = word_mode;
892 lowpart_mode = from_mode;
894 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
896 lowpart = gen_lowpart (lowpart_mode, to);
897 emit_move_insn (lowpart, lowfrom);
899 /* Compute the value to put in each remaining word. */
901 fill_value = const0_rtx;
906 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
907 && STORE_FLAG_VALUE == -1)
909 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
911 fill_value = gen_reg_rtx (word_mode);
912 emit_insn (gen_slt (fill_value));
918 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
919 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
921 fill_value = convert_to_mode (word_mode, fill_value, 1);
925 /* Fill the remaining words. */
926 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
928 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
929 rtx subword = operand_subword (to, index, 1, to_mode);
934 if (fill_value != subword)
935 emit_move_insn (subword, fill_value);
938 insns = get_insns ();
941 emit_no_conflict_block (insns, to, from, NULL_RTX,
942 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
946 /* Truncating multi-word to a word or less. */
947 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
948 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
950 if (!((GET_CODE (from) == MEM
951 && ! MEM_VOLATILE_P (from)
952 && direct_load[(int) to_mode]
953 && ! mode_dependent_address_p (XEXP (from, 0)))
954 || GET_CODE (from) == REG
955 || GET_CODE (from) == SUBREG))
956 from = force_reg (from_mode, from);
957 convert_move (to, gen_lowpart (word_mode, from), 0);
961 /* Handle pointer conversion */ /* SPEE 900220 */
962 if (to_mode == PSImode)
964 if (from_mode != SImode)
965 from = convert_to_mode (SImode, from, unsignedp);
967 #ifdef HAVE_truncsipsi
970 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
973 #endif /* HAVE_truncsipsi */
977 if (from_mode == PSImode)
979 if (to_mode != SImode)
981 from = convert_to_mode (SImode, from, unsignedp);
986 #ifdef HAVE_extendpsisi
987 if (HAVE_extendpsisi)
989 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
992 #endif /* HAVE_extendpsisi */
997 /* Now follow all the conversions between integers
998 no more than a word long. */
1000 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1001 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1002 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1003 GET_MODE_BITSIZE (from_mode)))
1005 if (!((GET_CODE (from) == MEM
1006 && ! MEM_VOLATILE_P (from)
1007 && direct_load[(int) to_mode]
1008 && ! mode_dependent_address_p (XEXP (from, 0)))
1009 || GET_CODE (from) == REG
1010 || GET_CODE (from) == SUBREG))
1011 from = force_reg (from_mode, from);
1012 emit_move_insn (to, gen_lowpart (to_mode, from));
1016 /* Handle extension. */
1017 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1019 /* Convert directly if that works. */
1020 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1021 != CODE_FOR_nothing)
1023 /* If FROM is a SUBREG, put it into a register. Do this
1024 so that we always generate the same set of insns for
1025 better cse'ing; if an intermediate assignment occurred,
1026 we won't be doing the operation directly on the SUBREG. */
1027 if (optimize > 0 && GET_CODE (from) == SUBREG)
1028 from = force_reg (from_mode, from);
1029 emit_unop_insn (code, to, from, equiv_code);
1034 enum machine_mode intermediate;
1036 /* Search for a mode to convert via. */
1037 for (intermediate = from_mode; intermediate != VOIDmode;
1038 intermediate = GET_MODE_WIDER_MODE (intermediate))
1039 if ((can_extend_p (to_mode, intermediate, unsignedp)
1040 != CODE_FOR_nothing)
1041 && (can_extend_p (intermediate, from_mode, unsignedp)
1042 != CODE_FOR_nothing))
1044 convert_move (to, convert_to_mode (intermediate, from,
1045 unsignedp), unsignedp);
1049 /* No suitable intermediate mode. */
1054 /* Support special truncate insns for certain modes. */
1056 if (from_mode == DImode && to_mode == SImode)
1058 #ifdef HAVE_truncdisi2
1059 if (HAVE_truncdisi2)
1061 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1065 convert_move (to, force_reg (from_mode, from), unsignedp);
1069 if (from_mode == DImode && to_mode == HImode)
1071 #ifdef HAVE_truncdihi2
1072 if (HAVE_truncdihi2)
1074 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1078 convert_move (to, force_reg (from_mode, from), unsignedp);
1082 if (from_mode == DImode && to_mode == QImode)
1084 #ifdef HAVE_truncdiqi2
1085 if (HAVE_truncdiqi2)
1087 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1091 convert_move (to, force_reg (from_mode, from), unsignedp);
1095 if (from_mode == SImode && to_mode == HImode)
1097 #ifdef HAVE_truncsihi2
1098 if (HAVE_truncsihi2)
1100 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1104 convert_move (to, force_reg (from_mode, from), unsignedp);
1108 if (from_mode == SImode && to_mode == QImode)
1110 #ifdef HAVE_truncsiqi2
1111 if (HAVE_truncsiqi2)
1113 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1117 convert_move (to, force_reg (from_mode, from), unsignedp);
1121 if (from_mode == HImode && to_mode == QImode)
1123 #ifdef HAVE_trunchiqi2
1124 if (HAVE_trunchiqi2)
1126 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1130 convert_move (to, force_reg (from_mode, from), unsignedp);
1134 /* Handle truncation of volatile memrefs, and so on;
1135 the things that couldn't be truncated directly,
1136 and for which there was no special instruction. */
1137 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1139 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1140 emit_move_insn (to, temp);
1144 /* Mode combination is not recognized. */
1148 /* Return an rtx for a value that would result
1149 from converting X to mode MODE.
1150 Both X and MODE may be floating, or both integer.
1151 UNSIGNEDP is nonzero if X is an unsigned value.
1152 This can be done by referring to a part of X in place
1153 or by copying to a new temporary with conversion.
1155 This function *must not* call protect_from_queue
1156 except when putting X into an insn (in which case convert_move does it). */
1159 convert_to_mode (mode, x, unsignedp)
1160 enum machine_mode mode;
1164 return convert_modes (mode, VOIDmode, x, unsignedp);
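/* Illustrative sketch (not compiled): widening a QImode value into a new
   SImode pseudo with zero extension; BYTE is a hypothetical operand.  */
#if 0
  rtx byte = gen_reg_rtx (QImode);
  rtx wide = convert_to_mode (SImode, byte, 1);	/* 1 => treat BYTE as unsigned */
#endif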
1167 /* Return an rtx for a value that would result
1168 from converting X from mode OLDMODE to mode MODE.
1169 Both modes may be floating, or both integer.
1170 UNSIGNEDP is nonzero if X is an unsigned value.
1172 This can be done by referring to a part of X in place
1173 or by copying to a new temporary with conversion.
1175 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1177 This function *must not* call protect_from_queue
1178 except when putting X into an insn (in which case convert_move does it). */
1181 convert_modes (mode, oldmode, x, unsignedp)
1182 enum machine_mode mode, oldmode;
1188 /* If FROM is a SUBREG that indicates that we have already done at least
1189 the required extension, strip it. */
1191 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1192 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1193 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1194 x = gen_lowpart (mode, x);
1196 if (GET_MODE (x) != VOIDmode)
1197 oldmode = GET_MODE (x);
1199 if (mode == oldmode)
1202 /* There is one case that we must handle specially: If we are converting
1203 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1204 we are to interpret the constant as unsigned, gen_lowpart will do
1205 the wrong thing if the constant appears negative. What we want to do is
1206 make the high-order word of the constant zero, not all ones. */
1208 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1209 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1210 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1211 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1213 /* We can do this with a gen_lowpart if both desired and current modes
1214 are integer, and this is either a constant integer, a register, or a
1215 non-volatile MEM. Except for the constant case where MODE is no
1216 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1218 if ((GET_CODE (x) == CONST_INT
1219 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1220 || (GET_MODE_CLASS (mode) == MODE_INT
1221 && GET_MODE_CLASS (oldmode) == MODE_INT
1222 && (GET_CODE (x) == CONST_DOUBLE
1223 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1224 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1225 && direct_load[(int) mode])
1226 || GET_CODE (x) == REG)))))
1228 /* ?? If we don't know OLDMODE, we have to assume here that
1229 X does not need sign- or zero-extension. This may not be
1230 the case, but it's the best we can do. */
1231 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1232 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1234 HOST_WIDE_INT val = INTVAL (x);
1235 int width = GET_MODE_BITSIZE (oldmode);
1237 /* We must sign or zero-extend in this case. Start by
1238 zero-extending, then sign extend if we need to. */
1239 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1241 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1242 val |= (HOST_WIDE_INT) (-1) << width;
1244 return GEN_INT (val);
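/* Worked example (illustrative): for a QImode constant with VAL == -1 and
   WIDTH == 8, the mask above yields 0xff.  A signed conversion then sees
   bit 7 set and ORs the high bits back in, giving -1 again; an unsigned
   conversion leaves the value as 255.  */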
1247 return gen_lowpart (mode, x);
1250 temp = gen_reg_rtx (mode);
1251 convert_move (temp, x, unsignedp);
1255 /* Generate several move instructions to copy LEN bytes
1256 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1257 The caller must pass FROM and TO
1258 through protect_from_queue before calling.
1259 ALIGN (in bytes) is maximum alignment we can assume. */
1262 move_by_pieces (to, from, len, align)
1266 struct move_by_pieces data;
1267 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1268 int max_size = MOVE_MAX + 1;
1271 data.to_addr = to_addr;
1272 data.from_addr = from_addr;
1276 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1277 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1279 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1280 || GET_CODE (from_addr) == POST_INC
1281 || GET_CODE (from_addr) == POST_DEC);
1283 data.explicit_inc_from = 0;
1284 data.explicit_inc_to = 0;
1286 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1287 if (data.reverse) data.offset = len;
1290 /* If copying requires more than two move insns,
1291 copy addresses to registers (to make displacements shorter)
1292 and use post-increment if available. */
1293 if (!(data.autinc_from && data.autinc_to)
1294 && move_by_pieces_ninsns (len, align) > 2)
1296 #ifdef HAVE_PRE_DECREMENT
1297 if (data.reverse && ! data.autinc_from)
1299 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1300 data.autinc_from = 1;
1301 data.explicit_inc_from = -1;
1304 #ifdef HAVE_POST_INCREMENT
1305 if (! data.autinc_from)
1307 data.from_addr = copy_addr_to_reg (from_addr);
1308 data.autinc_from = 1;
1309 data.explicit_inc_from = 1;
1312 if (!data.autinc_from && CONSTANT_P (from_addr))
1313 data.from_addr = copy_addr_to_reg (from_addr);
1314 #ifdef HAVE_PRE_DECREMENT
1315 if (data.reverse && ! data.autinc_to)
1317 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1319 data.explicit_inc_to = -1;
1322 #ifdef HAVE_POST_INCREMENT
1323 if (! data.reverse && ! data.autinc_to)
1325 data.to_addr = copy_addr_to_reg (to_addr);
1327 data.explicit_inc_to = 1;
1330 if (!data.autinc_to && CONSTANT_P (to_addr))
1331 data.to_addr = copy_addr_to_reg (to_addr);
1334 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1335 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1338 /* First move what we can in the largest integer mode, then go to
1339 successively smaller modes. */
1341 while (max_size > 1)
1343 enum machine_mode mode = VOIDmode, tmode;
1344 enum insn_code icode;
1346 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1347 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1348 if (GET_MODE_SIZE (tmode) < max_size)
1351 if (mode == VOIDmode)
1354 icode = mov_optab->handlers[(int) mode].insn_code;
1355 if (icode != CODE_FOR_nothing
1356 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1357 GET_MODE_SIZE (mode)))
1358 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1360 max_size = GET_MODE_SIZE (mode);
1363 /* The code above should have handled everything. */
1368 /* Return number of insns required to move L bytes by pieces.
1369 ALIGN (in bytes) is maximum alignment we can assume. */
1372 move_by_pieces_ninsns (l, align)
1376 register int n_insns = 0;
1377 int max_size = MOVE_MAX + 1;
1379 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1380 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1383 while (max_size > 1)
1385 enum machine_mode mode = VOIDmode, tmode;
1386 enum insn_code icode;
1388 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1389 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1390 if (GET_MODE_SIZE (tmode) < max_size)
1393 if (mode == VOIDmode)
1396 icode = mov_optab->handlers[(int) mode].insn_code;
1397 if (icode != CODE_FOR_nothing
1398 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1399 GET_MODE_SIZE (mode)))
1400 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1402 max_size = GET_MODE_SIZE (mode);
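/* Worked example (illustrative): on a 32-bit target with MOVE_MAX == 4,
   moving L == 11 bytes at ALIGN == 4 counts 2 SImode moves, 1 HImode move
   and 1 QImode move, so the function returns 4.  */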
1408 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1409 with move instructions for mode MODE. GENFUN is the gen_... function
1410 to make a move insn for that mode. DATA has all the other info. */
1413 move_by_pieces_1 (genfun, mode, data)
1415 enum machine_mode mode;
1416 struct move_by_pieces *data;
1418 register int size = GET_MODE_SIZE (mode);
1419 register rtx to1, from1;
1421 while (data->len >= size)
1423 if (data->reverse) data->offset -= size;
1425 to1 = (data->autinc_to
1426 ? gen_rtx (MEM, mode, data->to_addr)
1427 : change_address (data->to, mode,
1428 plus_constant (data->to_addr, data->offset)));
1431 ? gen_rtx (MEM, mode, data->from_addr)
1432 : change_address (data->from, mode,
1433 plus_constant (data->from_addr, data->offset)));
1435 #ifdef HAVE_PRE_DECREMENT
1436 if (data->explicit_inc_to < 0)
1437 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1438 if (data->explicit_inc_from < 0)
1439 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1442 emit_insn ((*genfun) (to1, from1));
1443 #ifdef HAVE_POST_INCREMENT
1444 if (data->explicit_inc_to > 0)
1445 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1446 if (data->explicit_inc_from > 0)
1447 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1450 if (! data->reverse) data->offset += size;
1456 /* Emit code to move a block Y to a block X.
1457 This may be done with string-move instructions,
1458 with multiple scalar move instructions, or with a library call.
1460 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1462 SIZE is an rtx that says how long they are.
1463 ALIGN is the maximum alignment we can assume they have,
1464 measured in bytes. */
1467 emit_block_move (x, y, size, align)
1472 if (GET_MODE (x) != BLKmode)
1475 if (GET_MODE (y) != BLKmode)
1478 x = protect_from_queue (x, 1);
1479 y = protect_from_queue (y, 0);
1480 size = protect_from_queue (size, 0);
1482 if (GET_CODE (x) != MEM)
1484 if (GET_CODE (y) != MEM)
1489 if (GET_CODE (size) == CONST_INT
1490 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1491 move_by_pieces (x, y, INTVAL (size), align);
1494 /* Try the most limited insn first, because there's no point
1495 including more than one in the machine description unless
1496 the more limited one has some advantage. */
1498 rtx opalign = GEN_INT (align);
1499 enum machine_mode mode;
1501 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1502 mode = GET_MODE_WIDER_MODE (mode))
1504 enum insn_code code = movstr_optab[(int) mode];
1506 if (code != CODE_FOR_nothing
1507 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1508 here because if SIZE is less than the mode mask, as it is
1509 returned by the macro, it will definitely be less than the
1510 actual mode mask. */
1511 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
1512 && (insn_operand_predicate[(int) code][0] == 0
1513 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1514 && (insn_operand_predicate[(int) code][1] == 0
1515 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1516 && (insn_operand_predicate[(int) code][3] == 0
1517 || (*insn_operand_predicate[(int) code][3]) (opalign,
1521 rtx last = get_last_insn ();
1524 op2 = convert_to_mode (mode, size, 1);
1525 if (insn_operand_predicate[(int) code][2] != 0
1526 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1527 op2 = copy_to_mode_reg (mode, op2);
1529 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1536 delete_insns_since (last);
1540 #ifdef TARGET_MEM_FUNCTIONS
1541 emit_library_call (memcpy_libfunc, 0,
1542 VOIDmode, 3, XEXP (x, 0), Pmode,
1544 convert_to_mode (TYPE_MODE (sizetype), size,
1545 TREE_UNSIGNED (sizetype)),
1546 TYPE_MODE (sizetype));
1548 emit_library_call (bcopy_libfunc, 0,
1549 VOIDmode, 3, XEXP (y, 0), Pmode,
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
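/* Illustrative sketch (not compiled): copying a 32-byte BLKmode object
   whose operands are known to be 4-byte aligned; DEST_MEM and SRC_MEM are
   hypothetical BLKmode MEMs already passed through protect_from_queue.  */
#if 0
  emit_block_move (dest_mem, src_mem, GEN_INT (32), 4);
#endif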
1558 /* Copy all or part of a value X into registers starting at REGNO.
1559 The number of registers to be filled is NREGS. */
1562 move_block_to_reg (regno, x, nregs, mode)
1566 enum machine_mode mode;
1571 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1572 x = validize_mem (force_const_mem (mode, x));
1574 /* See if the machine can do this with a load multiple insn. */
1575 #ifdef HAVE_load_multiple
1576 last = get_last_insn ();
1577 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1585 delete_insns_since (last);
1588 for (i = 0; i < nregs; i++)
1589 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1590 operand_subword_force (x, i, mode));
1593 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1594 The number of registers to be filled is NREGS. SIZE indicates the number
1595 of bytes in the object X. */
1599 move_block_from_reg (regno, x, nregs, size)
1608 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1609 to the left before storing to memory. */
1610 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1612 rtx tem = operand_subword (x, 0, 1, BLKmode);
1618 shift = expand_shift (LSHIFT_EXPR, word_mode,
1619 gen_rtx (REG, word_mode, regno),
1620 build_int_2 ((UNITS_PER_WORD - size)
1621 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1622 emit_move_insn (tem, shift);
1626 /* See if the machine can do this with a store multiple insn. */
1627 #ifdef HAVE_store_multiple
1628 last = get_last_insn ();
1629 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1637 delete_insns_since (last);
1640 for (i = 0; i < nregs; i++)
1642 rtx tem = operand_subword (x, i, 1, BLKmode);
1647 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1651 /* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1654 use_regs (regno, nregs)
1660 for (i = 0; i < nregs; i++)
1661 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1664 /* Mark the instructions since PREV as a libcall block.
1665 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1674 /* Find the instructions to mark */
1676 insn_first = NEXT_INSN (prev);
1678 insn_first = get_insns ();
1680 insn_last = get_last_insn ();
1682 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1683 REG_NOTES (insn_last));
1685 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1686 REG_NOTES (insn_first));
1689 /* Write zeros through the storage of OBJECT.
1690 If OBJECT has BLKmode, SIZE is its length in bytes. */
1693 clear_storage (object, size)
1697 if (GET_MODE (object) == BLKmode)
1699 #ifdef TARGET_MEM_FUNCTIONS
1700 emit_library_call (memset_libfunc, 0,
1702 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1703 GEN_INT (size), Pmode);
1705 emit_library_call (bzero_libfunc, 0,
1707 XEXP (object, 0), Pmode,
1708 GEN_INT (size), Pmode);
1712 emit_move_insn (object, const0_rtx);
1715 /* Generate code to copy Y into X.
1716 Both Y and X must have the same mode, except that
1717 Y can be a constant with VOIDmode.
1718 This mode cannot be BLKmode; use emit_block_move for that.
1720 Return the last instruction emitted. */
1723 emit_move_insn (x, y)
1726 enum machine_mode mode = GET_MODE (x);
1727 enum machine_mode submode;
1728 enum mode_class class = GET_MODE_CLASS (mode);
1731 x = protect_from_queue (x, 1);
1732 y = protect_from_queue (y, 0);
1734 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1737 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1738 y = force_const_mem (mode, y);
1740 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
1742 if (GET_CODE (x) == MEM
1743 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1744 && ! push_operand (x, GET_MODE (x)))
1746 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1747 x = change_address (x, VOIDmode, XEXP (x, 0));
1749 if (GET_CODE (y) == MEM
1750 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1752 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1753 y = change_address (y, VOIDmode, XEXP (y, 0));
1755 if (mode == BLKmode)
1758 return emit_move_insn_1 (x, y);
1761 /* Low level part of emit_move_insn.
1762 Called just like emit_move_insn, but assumes X and Y
1763 are basically valid. */
1766 emit_move_insn_1 (x, y)
1769 enum machine_mode mode = GET_MODE (x);
1770 enum machine_mode submode;
1771 enum mode_class class = GET_MODE_CLASS (mode);
1774 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1775 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1776 (class == MODE_COMPLEX_INT
1777 ? MODE_INT : MODE_FLOAT),
1780 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1782 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1784 /* Expand complex moves by moving real part and imag part, if possible. */
1785 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1786 && submode != BLKmode
1787 && (mov_optab->handlers[(int) submode].insn_code
1788 != CODE_FOR_nothing))
1790 /* Don't split destination if it is a stack push. */
1791 int stack = push_operand (x, GET_MODE (x));
1792 rtx prev = get_last_insn ();
1794 /* Tell flow that the whole of the destination is being set. */
1795 if (GET_CODE (x) == REG)
1796 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1798 /* If this is a stack, push the highpart first, so it
1799 will be in the argument order.
1801 In that case, change_address is used only to convert
1802 the mode, not to change the address. */
1805 /* Note that the real part always precedes the imag part in memory
1806 regardless of machine's endianness. */
1807 #ifdef STACK_GROWS_DOWNWARD
1808 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1809 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1810 gen_imagpart (submode, y)));
1811 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1812 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1813 gen_realpart (submode, y)));
1815 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1816 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1817 gen_realpart (submode, y)));
1818 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1819 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1820 gen_imagpart (submode, y)));
1825 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1826 (gen_highpart (submode, x), gen_highpart (submode, y)));
1827 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1828 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1831 if (GET_CODE (x) != CONCAT)
1832 /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
1833 each with a separate pseudo as destination.
1834 It's not correct for flow to treat them as a unit. */
1837 return get_last_insn ();
1840 /* This will handle any multi-word mode that lacks a move_insn pattern.
1841 However, you will get better code if you define such patterns,
1842 even if they must turn into multiple assembler instructions. */
1843 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1846 rtx prev_insn = get_last_insn ();
1849 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1852 rtx xpart = operand_subword (x, i, 1, mode);
1853 rtx ypart = operand_subword (y, i, 1, mode);
1855 /* If we can't get a part of Y, put Y into memory if it is a
1856 constant. Otherwise, force it into a register. If we still
1857 can't get a part of Y, abort. */
1858 if (ypart == 0 && CONSTANT_P (y))
1860 y = force_const_mem (mode, y);
1861 ypart = operand_subword (y, i, 1, mode);
1863 else if (ypart == 0)
1864 ypart = operand_subword_force (y, i, mode);
1866 if (xpart == 0 || ypart == 0)
1869 last_insn = emit_move_insn (xpart, ypart);
1871 /* Mark these insns as a libcall block. */
1872 group_insns (prev_insn);
1880 /* Pushing data onto the stack. */
1882 /* Push a block of length SIZE (perhaps variable)
1883 and return an rtx to address the beginning of the block.
1884 Note that it is not possible for the value returned to be a QUEUED.
1885 The value may be virtual_outgoing_args_rtx.
1887 EXTRA is the number of bytes of padding to push in addition to SIZE.
1888 BELOW nonzero means this padding comes at low addresses;
1889 otherwise, the padding comes at high addresses. */
1892 push_block (size, extra, below)
1897 if (CONSTANT_P (size))
1898 anti_adjust_stack (plus_constant (size, extra));
1899 else if (GET_CODE (size) == REG && extra == 0)
1900 anti_adjust_stack (size);
1903 rtx temp = copy_to_mode_reg (Pmode, size);
1905 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1906 temp, 0, OPTAB_LIB_WIDEN);
1907 anti_adjust_stack (temp);
1910 #ifdef STACK_GROWS_DOWNWARD
1911 temp = virtual_outgoing_args_rtx;
1912 if (extra != 0 && below)
1913 temp = plus_constant (temp, extra);
1915 if (GET_CODE (size) == CONST_INT)
1916 temp = plus_constant (virtual_outgoing_args_rtx,
1917 - INTVAL (size) - (below ? 0 : extra));
1918 else if (extra != 0 && !below)
1919 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1920 negate_rtx (Pmode, plus_constant (size, extra)));
1922 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1923 negate_rtx (Pmode, size));
1926 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1932 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
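/* Illustrative note: on a machine where the stack grows downward,
   STACK_PUSH_CODE is PRE_DEC, so the value returned above is
   (pre_dec:P (reg sp)); wrapping it in a MEM of mode M and storing to that
   MEM pushes an M-mode value (see the use in emit_push_insn below).  */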
1935 /* Generate code to push X onto the stack, assuming it has mode MODE and
1937 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
1939 SIZE is an rtx for the size of data to be copied (in bytes),
1940 needed only if X is BLKmode.
1942 ALIGN (in bytes) is maximum alignment we can assume.
1944 If PARTIAL and REG are both nonzero, then copy that many of the first
1945 words of X into registers starting with REG, and push the rest of X.
1946 The amount of space pushed is decreased by PARTIAL words,
1947 rounded *down* to a multiple of PARM_BOUNDARY.
1948 REG must be a hard register in this case.
1949 If REG is zero but PARTIAL is not, take all other actions for an
1950 argument partially in registers, but do not actually load any registers.
1953 EXTRA is the amount in bytes of extra space to leave next to this arg.
1954 This is ignored if an argument block has already been allocated.
1956 On a machine that lacks real push insns, ARGS_ADDR is the address of
1957 the bottom of the argument block for this call. We use indexing off there
1958 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1959 argument block has not been preallocated.
1961 ARGS_SO_FAR is the size of args previously pushed for this call. */
1964 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1965 args_addr, args_so_far)
1967 enum machine_mode mode;
1978 enum direction stack_direction
1979 #ifdef STACK_GROWS_DOWNWARD
1985 /* Decide where to pad the argument: `downward' for below,
1986 `upward' for above, or `none' for don't pad it.
1987 Default is below for small data on big-endian machines; else above. */
1988 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1990 /* Invert direction if stack is post-update. */
1991 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1992 if (where_pad != none)
1993 where_pad = (where_pad == downward ? upward : downward);
1995 xinner = x = protect_from_queue (x, 0);
1997 if (mode == BLKmode)
1999 /* Copy a block into the stack, entirely or partially. */
2002 int used = partial * UNITS_PER_WORD;
2003 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2011 /* USED is now the # of bytes we need not copy to the stack
2012 because registers will take care of them. */
2015 xinner = change_address (xinner, BLKmode,
2016 plus_constant (XEXP (xinner, 0), used));
2018 /* If the partial register-part of the arg counts in its stack size,
2019 skip the part of stack space corresponding to the registers.
2020 Otherwise, start copying to the beginning of the stack space,
2021 by setting SKIP to 0. */
2022 #ifndef REG_PARM_STACK_SPACE
2028 #ifdef PUSH_ROUNDING
2029 /* Do it with several push insns if that doesn't take lots of insns
2030 and if there is no difficulty with push insns that skip bytes
2031 on the stack for alignment purposes. */
2033 && GET_CODE (size) == CONST_INT
2035 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2037 /* Here we avoid the case of a structure whose weak alignment
2038 forces many pushes of a small amount of data,
2039 and such small pushes do rounding that causes trouble. */
2040 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
2041 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2042 || PUSH_ROUNDING (align) == align)
2043 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2045 /* Push padding now if padding above and stack grows down,
2046 or if padding below and stack grows up.
2047 But if space already allocated, this has already been done. */
2048 if (extra && args_addr == 0
2049 && where_pad != none && where_pad != stack_direction)
2050 anti_adjust_stack (GEN_INT (extra));
2052 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2053 INTVAL (size) - used, align);
2056 #endif /* PUSH_ROUNDING */
2058 /* Otherwise make space on the stack and copy the data
2059 to the address of that space. */
2061 /* Deduct words put into registers from the size we must copy. */
2064 if (GET_CODE (size) == CONST_INT)
2065 size = GEN_INT (INTVAL (size) - used);
2067 size = expand_binop (GET_MODE (size), sub_optab, size,
2068 GEN_INT (used), NULL_RTX, 0,
2072 /* Get the address of the stack space.
2073 In this case, we do not deal with EXTRA separately.
2074 A single stack adjust will do. */
2077 temp = push_block (size, extra, where_pad == downward);
2080 else if (GET_CODE (args_so_far) == CONST_INT)
2081 temp = memory_address (BLKmode,
2082 plus_constant (args_addr,
2083 skip + INTVAL (args_so_far)));
2085 temp = memory_address (BLKmode,
2086 plus_constant (gen_rtx (PLUS, Pmode,
2087 args_addr, args_so_far),
2090 /* TEMP is the address of the block. Copy the data there. */
2091 if (GET_CODE (size) == CONST_INT
2092 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2095 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2096 INTVAL (size), align);
2099 /* Try the most limited insn first, because there's no point
2100 including more than one in the machine description unless
2101 the more limited one has some advantage. */
2102 #ifdef HAVE_movstrqi
2104 && GET_CODE (size) == CONST_INT
2105 && ((unsigned) INTVAL (size)
2106 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2108 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2109 xinner, size, GEN_INT (align));
2117 #ifdef HAVE_movstrhi
2119 && GET_CODE (size) == CONST_INT
2120 && ((unsigned) INTVAL (size)
2121 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2123 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2124 xinner, size, GEN_INT (align));
2132 #ifdef HAVE_movstrsi
2135 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2136 xinner, size, GEN_INT (align));
2144 #ifdef HAVE_movstrdi
2147 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2148 xinner, size, GEN_INT (align));
2157 #ifndef ACCUMULATE_OUTGOING_ARGS
2158 /* If the source is referenced relative to the stack pointer,
2159 copy it to another register to stabilize it. We do not need
2160 to do this if we know that we won't be changing sp. */
2162 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2163 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2164 temp = copy_to_reg (temp);
2167 /* Make inhibit_defer_pop nonzero around the library call
2168 to force it to pop the bcopy-arguments right away. */
2170 #ifdef TARGET_MEM_FUNCTIONS
2171 emit_library_call (memcpy_libfunc, 0,
2172 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2173 convert_to_mode (TYPE_MODE (sizetype),
2174 size, TREE_UNSIGNED (sizetype)),
2175 TYPE_MODE (sizetype));
2177 emit_library_call (bcopy_libfunc, 0,
2178 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2179 convert_to_mode (TYPE_MODE (sizetype),
2180 size, TREE_UNSIGNED (sizetype)),
2181 TYPE_MODE (sizetype));
2186 else if (partial > 0)
2188 /* Scalar partly in registers. */
2190 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2193 /* # words of start of argument
2194 that we must make space for but need not store. */
2195 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2196 int args_offset = INTVAL (args_so_far);
2199 /* Push padding now if padding above and stack grows down,
2200 or if padding below and stack grows up.
2201 But if space already allocated, this has already been done. */
2202 if (extra && args_addr == 0
2203 && where_pad != none && where_pad != stack_direction)
2204 anti_adjust_stack (GEN_INT (extra));
2206 /* If we make space by pushing it, we might as well push
2207 the real data. Otherwise, we can leave OFFSET nonzero
2208 and leave the space uninitialized. */
2212 /* Now NOT_STACK gets the number of words that we don't need to
2213 allocate on the stack. */
2214 not_stack = partial - offset;
2216 /* If the partial register-part of the arg counts in its stack size,
2217 skip the part of stack space corresponding to the registers.
2218 Otherwise, start copying to the beginning of the stack space,
2219 by setting SKIP to 0. */
2220 #ifndef REG_PARM_STACK_SPACE
2226 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2227 x = validize_mem (force_const_mem (mode, x));
2229 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2230 SUBREGs of such registers are not allowed. */
2231 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2232 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2233 x = copy_to_reg (x);
2235 /* Loop over all the words allocated on the stack for this arg. */
2236 /* We can do it by words, because any scalar bigger than a word
2237 has a size a multiple of a word. */
2238 #ifndef PUSH_ARGS_REVERSED
2239 for (i = not_stack; i < size; i++)
2241 for (i = size - 1; i >= not_stack; i--)
2243 if (i >= not_stack + offset)
2244 emit_push_insn (operand_subword_force (x, i, mode),
2245 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2247 GEN_INT (args_offset + ((i - not_stack + skip)
2248 * UNITS_PER_WORD)));
2254 /* Push padding now if padding above and stack grows down,
2255 or if padding below and stack grows up.
2256 But if space already allocated, this has already been done. */
2257 if (extra && args_addr == 0
2258 && where_pad != none && where_pad != stack_direction)
2259 anti_adjust_stack (GEN_INT (extra));
2261 #ifdef PUSH_ROUNDING
2263 addr = gen_push_operand ();
2266 if (GET_CODE (args_so_far) == CONST_INT)
2268 = memory_address (mode,
2269 plus_constant (args_addr, INTVAL (args_so_far)));
2271 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2274 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2278 /* If part should go in registers, copy that part
2279 into the appropriate registers. Do this now, at the end,
2280 since mem-to-mem copies above may do function calls. */
2281 if (partial > 0 && reg != 0)
2282 move_block_to_reg (REGNO (reg), x, partial, mode);
2284 if (extra && args_addr == 0 && where_pad == stack_direction)
2285 anti_adjust_stack (GEN_INT (extra));
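/* Illustrative note, not part of the original source: a sketch of the
   "scalar partly in registers" case handled above.  If a target passes
   the first argument words in registers and an argument of several words
   straddles that boundary, PARTIAL is the number of words that go in
   registers.  The push loop above never touches the first NOT_STACK
   words (and stores only words at or beyond NOT_STACK + OFFSET), pushes
   the remaining words into the stack space for the argument, and
   move_block_to_reg then copies the first PARTIAL words into the
   argument registers.  The particular word counts depend on the target's
   calling convention.  */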
2288 /* Expand an assignment that stores the value of FROM into TO.
2289 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2290 (This may contain a QUEUED rtx;
2291 if the value is constant, this rtx is a constant.)
2292 Otherwise, the returned value is NULL_RTX.
2294 SUGGEST_REG is no longer actually used.
2295 It used to mean, copy the value through a register
2296 and return that register, if that is possible.
2297 We now use WANT_VALUE to decide whether to do this. */
2300 expand_assignment (to, from, want_value, suggest_reg)
2305 register rtx to_rtx = 0;
2308 /* Don't crash if the lhs of the assignment was erroneous. */
2310 if (TREE_CODE (to) == ERROR_MARK)
2312 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2313 return want_value ? result : NULL_RTX;
2316 if (output_bytecode)
2318 tree dest_innermost;
2320 bc_expand_expr (from);
2321 bc_emit_instruction (duplicate);
2323 dest_innermost = bc_expand_address (to);
2325 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2326 take care of it here. */
2328 bc_store_memory (TREE_TYPE (to), dest_innermost);
2332 /* Assignment of a structure component needs special treatment
2333 if the structure component's rtx is not simply a MEM.
2334 Assignment of an array element at a constant index
2335 has the same problem. */
2337 if (TREE_CODE (to) == COMPONENT_REF
2338 || TREE_CODE (to) == BIT_FIELD_REF
2339 || (TREE_CODE (to) == ARRAY_REF
2340 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2341 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2343 enum machine_mode mode1;
2353 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2354 &mode1, &unsignedp, &volatilep);
2356 /* If we are going to use store_bit_field and extract_bit_field,
2357 make sure to_rtx will be safe for multiple use. */
2359 if (mode1 == VOIDmode && want_value)
2360 tem = stabilize_reference (tem);
2362 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2363 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2366 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2368 if (GET_CODE (to_rtx) != MEM)
2370 to_rtx = change_address (to_rtx, VOIDmode,
2371 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2372 force_reg (Pmode, offset_rtx)));
2373 /* If we have a variable offset, the known alignment
2374 is only that of the innermost structure containing the field.
2375 (Actually, we could sometimes do better by using the
2376 align of an element of the innermost array, but no need.) */
2377 if (TREE_CODE (to) == COMPONENT_REF
2378 || TREE_CODE (to) == BIT_FIELD_REF)
2380 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2384 if (GET_CODE (to_rtx) == MEM)
2385 MEM_VOLATILE_P (to_rtx) = 1;
2386 #if 0 /* This was turned off because, when a field is volatile
2387 in an object which is not volatile, the object may be in a register,
2388 and then we would abort over here. */
2394 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2396 /* Spurious cast makes HPUX compiler happy. */
2397 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2400 /* Required alignment of containing datum. */
2402 int_size_in_bytes (TREE_TYPE (tem)));
2403 preserve_temp_slots (result);
2407 /* If the value is meaningful, convert RESULT to the proper mode.
2408 Otherwise, return nothing. */
2409 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2410 TYPE_MODE (TREE_TYPE (from)),
2412 TREE_UNSIGNED (TREE_TYPE (to)))
2416 /* If the rhs is a function call and its value is not an aggregate,
2417 call the function before we start to compute the lhs.
2418 This is needed for correct code for cases such as
2419 val = setjmp (buf) on machines where reference to val
2420 requires loading up part of an address in a separate insn.
2422 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2423 a promoted variable where the zero- or sign- extension needs to be done.
2424 Handling this in the normal way is safe because no computation is done before the call. */
2426 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2427 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2432 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2434 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2435 emit_move_insn (to_rtx, value);
2436 preserve_temp_slots (to_rtx);
2439 return want_value ? to_rtx : NULL_RTX;
2442 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2443 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2446 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2448 /* Don't move directly into a return register. */
2449 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2454 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2455 emit_move_insn (to_rtx, temp);
2456 preserve_temp_slots (to_rtx);
2459 return want_value ? to_rtx : NULL_RTX;
2462 /* In case we are returning the contents of an object which overlaps
2463 the place the value is being stored, use a safe function when copying
2464 a value through a pointer into a structure value return block. */
2465 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2466 && current_function_returns_struct
2467 && !current_function_returns_pcc_struct)
2472 size = expr_size (from);
2473 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2475 #ifdef TARGET_MEM_FUNCTIONS
2476 emit_library_call (memcpy_libfunc, 0,
2477 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2478 XEXP (from_rtx, 0), Pmode,
2479 convert_to_mode (TYPE_MODE (sizetype),
2480 size, TREE_UNSIGNED (sizetype)),
2481 TYPE_MODE (sizetype));
2483 emit_library_call (bcopy_libfunc, 0,
2484 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2485 XEXP (to_rtx, 0), Pmode,
2486 convert_to_mode (TYPE_MODE (sizetype),
2487 size, TREE_UNSIGNED (sizetype)),
2488 TYPE_MODE (sizetype));
2491 preserve_temp_slots (to_rtx);
2494 return want_value ? to_rtx : NULL_RTX;
2497 /* Compute FROM and store the value in the rtx we got. */
2500 result = store_expr (from, to_rtx, want_value);
2501 preserve_temp_slots (result);
2504 return want_value ? result : NULL_RTX;
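/* Illustrative sketch, added commentary not present in the original
   source: an assignment that reaches the COMPONENT_REF branch of
   expand_assignment above is a bit-field store such as

	struct s { unsigned int f : 3; } x;
	x.f = n;

   get_inner_reference finds the containing object X along with the bit
   position and width of F; MODE1 comes back VOIDmode for a bit-field,
   so store_field performs the store with store_bit_field (a masked
   read-modify-write).  The names s, x, f and n are hypothetical.  */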
2507 /* Generate code for computing expression EXP,
2508 and storing the value into TARGET.
2509 TARGET may contain a QUEUED rtx.
2511 If WANT_VALUE is nonzero, return a copy of the value
2512 not in TARGET, so that we can be sure to use the proper
2513 value in a containing expression even if TARGET has something
2514 else stored in it. If possible, we copy the value through a pseudo
2515 and return that pseudo. Or, if the value is constant, we try to
2516 return the constant. In some cases, we return a pseudo
2517 copied *from* TARGET.
2519 If the mode is BLKmode then we may return TARGET itself.
2520 It turns out that in BLKmode it doesn't cause a problem,
2521 because C has no operators that could combine two different
2522 assignments into the same BLKmode object with different values
2523 with no sequence point. Will other languages need this to be changed?
2526 If WANT_VALUE is 0, we return NULL, to make sure
2527 to catch quickly any cases where the caller uses the value
2528 and fails to set WANT_VALUE. */
2531 store_expr (exp, target, want_value)
2533 register rtx target;
2537 int dont_return_target = 0;
2539 if (TREE_CODE (exp) == COMPOUND_EXPR)
2541 /* Perform first part of compound expression, then assign from second part. */
2543 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2545 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2547 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2549 /* For conditional expression, get safe form of the target. Then
2550 test the condition, doing the appropriate assignment on either
2551 side. This avoids the creation of unnecessary temporaries.
2552 For non-BLKmode, it is more efficient not to do this. */
2554 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2557 target = protect_from_queue (target, 1);
2560 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2561 store_expr (TREE_OPERAND (exp, 1), target, 0);
2563 emit_jump_insn (gen_jump (lab2));
2566 store_expr (TREE_OPERAND (exp, 2), target, 0);
2570 return want_value ? target : NULL_RTX;
2572 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2573 && GET_MODE (target) != BLKmode)
2574 /* If target is in memory and caller wants value in a register instead,
2575 arrange that. Pass TARGET as target for expand_expr so that,
2576 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2577 We know expand_expr will not use the target in that case.
2578 Don't do this if TARGET is volatile because we are supposed
2579 to write it and then read it. */
2581 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2582 GET_MODE (target), 0);
2583 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2584 temp = copy_to_reg (temp);
2585 dont_return_target = 1;
2587 else if (queued_subexp_p (target))
2588 /* If target contains a postincrement, let's not risk
2589 using it as the place to generate the rhs. */
2591 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2593 /* Expand EXP into a new pseudo. */
2594 temp = gen_reg_rtx (GET_MODE (target));
2595 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2598 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2600 /* If target is volatile, ANSI requires accessing the value
2601 *from* the target, if it is accessed. So make that happen.
2602 In no case return the target itself. */
2603 if (! MEM_VOLATILE_P (target) && want_value)
2604 dont_return_target = 1;
2606 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2607 /* If this is a scalar in a register that is stored in a wider mode
2608 than the declared mode, compute the result into its declared mode
2609 and then convert to the wider mode. Our value is the computed expression. */
2612 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2613 convert_move (SUBREG_REG (target), temp,
2614 SUBREG_PROMOTED_UNSIGNED_P (target));
2615 return want_value ? temp : NULL_RTX;
2619 temp = expand_expr (exp, target, GET_MODE (target), 0);
2620 /* DO return TARGET if it's a specified hardware register.
2621 expand_return relies on this.
2622 If TARGET is a volatile mem ref, either return TARGET
2623 or return a reg copied *from* TARGET; ANSI requires this.
2625 Otherwise, if TEMP is not TARGET, return TEMP
2626 if it is constant (for efficiency),
2627 or if we really want the correct value. */
2628 if (!(target && GET_CODE (target) == REG
2629 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2630 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2632 && (CONSTANT_P (temp) || want_value))
2633 dont_return_target = 1;
2636 /* If value was not generated in the target, store it there.
2637 Convert the value to TARGET's type first if necessary. */
2639 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2641 target = protect_from_queue (target, 1);
2642 if (GET_MODE (temp) != GET_MODE (target)
2643 && GET_MODE (temp) != VOIDmode)
2645 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2646 if (dont_return_target)
2648 /* In this case, we will return TEMP,
2649 so make sure it has the proper mode.
2650 But don't forget to store the value into TARGET. */
2651 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2652 emit_move_insn (target, temp);
2655 convert_move (target, temp, unsignedp);
2658 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2660 /* Handle copying a string constant into an array.
2661 The string constant may be shorter than the array.
2662 So copy just the string's actual length, and clear the rest. */
2665 /* Get the size of the data type of the string,
2666 which is actually the size of the target. */
2667 size = expr_size (exp);
2668 if (GET_CODE (size) == CONST_INT
2669 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2670 emit_block_move (target, temp, size,
2671 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2674 /* Compute the size of the data to copy from the string. */
2676 = size_binop (MIN_EXPR,
2677 make_tree (sizetype, size),
2679 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2680 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2684 /* Copy that much. */
2685 emit_block_move (target, temp, copy_size_rtx,
2686 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2688 /* Figure out how much is left in TARGET
2689 that we have to clear. */
2690 if (GET_CODE (copy_size_rtx) == CONST_INT)
2692 temp = plus_constant (XEXP (target, 0),
2693 TREE_STRING_LENGTH (exp));
2694 size = plus_constant (size,
2695 - TREE_STRING_LENGTH (exp));
2699 enum machine_mode size_mode = Pmode;
2701 temp = force_reg (Pmode, XEXP (target, 0));
2702 temp = expand_binop (size_mode, add_optab, temp,
2703 copy_size_rtx, NULL_RTX, 0,
2706 size = expand_binop (size_mode, sub_optab, size,
2707 copy_size_rtx, NULL_RTX, 0,
2710 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2711 GET_MODE (size), 0, 0);
2712 label = gen_label_rtx ();
2713 emit_jump_insn (gen_blt (label));
2716 if (size != const0_rtx)
2718 #ifdef TARGET_MEM_FUNCTIONS
2719 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2720 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2722 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2723 temp, Pmode, size, Pmode);
2730 else if (GET_MODE (temp) == BLKmode)
2731 emit_block_move (target, temp, expr_size (exp),
2732 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2734 emit_move_insn (target, temp);
2737 if (dont_return_target && GET_CODE (temp) != MEM)
2739 if (want_value && GET_MODE (target) != BLKmode)
2740 return copy_to_reg (target);
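/* Illustrative sketch, added commentary not present in the original
   source: the STRING_CST branch of store_expr above handles an
   initialization such as

	char buf[10] = "abc";

   The string constant occupies 4 bytes (including the terminating null),
   which is shorter than the array, so emit_block_move copies just those
   bytes and the remaining 6 bytes of BUF are cleared by the memset (or
   bzero) library call emitted above.  BUF is a hypothetical name.  */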
2746 /* Store the value of constructor EXP into the rtx TARGET.
2747 TARGET is either a REG or a MEM. */
2750 store_constructor (exp, target)
2754 tree type = TREE_TYPE (exp);
2756 /* We know our target cannot conflict, since safe_from_p has been called. */
2758 /* Don't try copying piece by piece into a hard register
2759 since that is vulnerable to being clobbered by EXP.
2760 Instead, construct in a pseudo register and then copy it all. */
2761 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2763 rtx temp = gen_reg_rtx (GET_MODE (target));
2764 store_constructor (exp, temp);
2765 emit_move_insn (target, temp);
2770 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2771 || TREE_CODE (type) == QUAL_UNION_TYPE)
2775 /* Inform later passes that the whole union value is dead. */
2776 if (TREE_CODE (type) == UNION_TYPE
2777 || TREE_CODE (type) == QUAL_UNION_TYPE)
2778 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2780 /* If we are building a static constructor into a register,
2781 set the initial value as zero so we can fold the value into a constant. */
2783 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2784 emit_move_insn (target, const0_rtx);
2786 /* If the constructor has fewer fields than the structure,
2787 clear the whole structure first. */
2788 else if (list_length (CONSTRUCTOR_ELTS (exp))
2789 != list_length (TYPE_FIELDS (type)))
2790 clear_storage (target, int_size_in_bytes (type));
2792 /* Inform later passes that the old value is dead. */
2793 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2795 /* Store each element of the constructor into
2796 the corresponding field of TARGET. */
2798 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2800 register tree field = TREE_PURPOSE (elt);
2801 register enum machine_mode mode;
2805 tree pos, constant = 0, offset = 0;
2806 rtx to_rtx = target;
2808 /* Just ignore missing fields.
2809 We cleared the whole structure, above,
2810 if any fields are missing. */
2814 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2815 unsignedp = TREE_UNSIGNED (field);
2816 mode = DECL_MODE (field);
2817 if (DECL_BIT_FIELD (field))
2820 pos = DECL_FIELD_BITPOS (field);
2821 if (TREE_CODE (pos) == INTEGER_CST)
2823 else if (TREE_CODE (pos) == PLUS_EXPR
2824 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2825 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2830 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2836 if (contains_placeholder_p (offset))
2837 offset = build (WITH_RECORD_EXPR, sizetype,
2840 offset = size_binop (FLOOR_DIV_EXPR, offset,
2841 size_int (BITS_PER_UNIT));
2843 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2844 if (GET_CODE (to_rtx) != MEM)
2848 = change_address (to_rtx, VOIDmode,
2849 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2850 force_reg (Pmode, offset_rtx)));
2853 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2854 /* The alignment of TARGET is
2855 at least what its type requires. */
2857 TYPE_ALIGN (type) / BITS_PER_UNIT,
2858 int_size_in_bytes (type));
2861 else if (TREE_CODE (type) == ARRAY_TYPE)
2865 tree domain = TYPE_DOMAIN (type);
2866 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2867 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2868 tree elttype = TREE_TYPE (type);
2870 /* If the constructor has fewer fields than the structure,
2871 clear the whole structure first. Similarly if this is a
2872 static constructor of a non-BLKmode object. */
2874 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2875 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2876 clear_storage (target, int_size_in_bytes (type));
2878 /* Inform later passes that the old value is dead. */
2879 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2881 /* Store each element of the constructor into
2882 the corresponding element of TARGET, determined
2883 by counting the elements. */
2884 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2886 elt = TREE_CHAIN (elt), i++)
2888 register enum machine_mode mode;
2892 tree index = TREE_PURPOSE (elt);
2893 rtx xtarget = target;
2895 mode = TYPE_MODE (elttype);
2896 bitsize = GET_MODE_BITSIZE (mode);
2897 unsignedp = TREE_UNSIGNED (elttype);
2899 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2901 /* We don't currently allow variable indices in a
2902 C initializer, but let's try here to support them. */
2903 rtx pos_rtx, addr, xtarget;
2906 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2907 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2908 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2909 xtarget = change_address (target, mode, addr);
2910 store_expr (TREE_VALUE (elt), xtarget, 0);
2915 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2916 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2918 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2920 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2921 /* The alignment of TARGET is
2922 at least what its type requires. */
2924 TYPE_ALIGN (type) / BITS_PER_UNIT,
2925 int_size_in_bytes (type));
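/* Illustrative sketch, added commentary not present in the original
   source: store_constructor above handles aggregate initializers such as

	struct point { int x, y, z; };
	struct point p = { 1, 2 };

   Because the constructor lists fewer elements than the type has fields,
   the whole object is first cleared with clear_storage and the elements
   that are present are then stored one at a time with store_field.
   The names point and p are hypothetical.  */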
2934 /* Store the value of EXP (an expression tree)
2935 into a subfield of TARGET which has mode MODE and occupies
2936 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2937 If MODE is VOIDmode, it means that we are storing into a bit-field.
2939 If VALUE_MODE is VOIDmode, return nothing in particular.
2940 UNSIGNEDP is not used in this case.
2942 Otherwise, return an rtx for the value stored. This rtx
2943 has mode VALUE_MODE if that is convenient to do.
2944 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2946 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2947 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2950 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2951 unsignedp, align, total_size)
2953 int bitsize, bitpos;
2954 enum machine_mode mode;
2956 enum machine_mode value_mode;
2961 HOST_WIDE_INT width_mask = 0;
2963 if (bitsize < HOST_BITS_PER_WIDE_INT)
2964 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2966 /* If we are storing into an unaligned field of an aligned union that is
2967 in a register, we may have the mode of TARGET being an integer mode but
2968 MODE == BLKmode. In that case, get an aligned object whose size and
2969 alignment are the same as TARGET and store TARGET into it (we can avoid
2970 the store if the field being stored is the entire width of TARGET). Then
2971 call ourselves recursively to store the field into a BLKmode version of
2972 that object. Finally, load from the object into TARGET. This is not
2973 very efficient in general, but should only be slightly more expensive
2974 than the otherwise-required unaligned accesses. Perhaps this can be
2975 cleaned up later. */
2978 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2980 rtx object = assign_stack_temp (GET_MODE (target),
2981 GET_MODE_SIZE (GET_MODE (target)), 0);
2982 rtx blk_object = copy_rtx (object);
2984 PUT_MODE (blk_object, BLKmode);
2986 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2987 emit_move_insn (object, target);
2989 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2992 /* Even though we aren't returning target, we need to
2993 give it the updated value. */
2994 emit_move_insn (target, object);
2999 /* If the structure is in a register or if the component
3000 is a bit field, we cannot use addressing to access it.
3001 Use bit-field techniques or SUBREG to store in it. */
3003 if (mode == VOIDmode
3004 || (mode != BLKmode && ! direct_store[(int) mode])
3005 || GET_CODE (target) == REG
3006 || GET_CODE (target) == SUBREG
3007 /* If the field isn't aligned enough to store as an ordinary memref,
3008 store it as a bit field. */
3009 || (STRICT_ALIGNMENT
3010 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3011 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3013 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3014 /* Store the value in the bitfield. */
3015 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3016 if (value_mode != VOIDmode)
3018 /* The caller wants an rtx for the value. */
3019 /* If possible, avoid refetching from the bitfield itself. */
3021 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3024 enum machine_mode tmode;
3027 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3028 tmode = GET_MODE (temp);
3029 if (tmode == VOIDmode)
3031 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3032 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3033 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3035 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3036 NULL_RTX, value_mode, 0, align,
3043 rtx addr = XEXP (target, 0);
3046 /* If a value is wanted, it must be the lhs;
3047 so make the address stable for multiple use. */
3049 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3050 && ! CONSTANT_ADDRESS_P (addr)
3051 /* A frame-pointer reference is already stable. */
3052 && ! (GET_CODE (addr) == PLUS
3053 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3054 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3055 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3056 addr = copy_to_reg (addr);
3058 /* Now build a reference to just the desired component. */
3060 to_rtx = change_address (target, mode,
3061 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3062 MEM_IN_STRUCT_P (to_rtx) = 1;
3064 return store_expr (exp, to_rtx, value_mode != VOIDmode);
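/* Illustrative note, added commentary not present in the original
   source: in store_field above, a normally aligned field of a struct in
   memory -- say an int member at byte offset 4 -- is stored through an
   ordinary MEM built by change_address at BITPOS / BITS_PER_UNIT, while
   a true bit-field, a field of an object held in a register, or (on a
   STRICT_ALIGNMENT target) an insufficiently aligned field is stored
   with store_bit_field instead.  The offsets mentioned are
   hypothetical.  */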
3068 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3069 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3070 ARRAY_REFs and find the ultimate containing object, which we return.
3072 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3073 bit position, and *PUNSIGNEDP to the signedness of the field.
3074 If the position of the field is variable, we store a tree
3075 giving the variable offset (in units) in *POFFSET.
3076 This offset is in addition to the bit position.
3077 If the position is not variable, we store 0 in *POFFSET.
3079 If any of the extraction expressions is volatile,
3080 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3082 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3083 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
3086 If the field describes a variable-sized object, *PMODE is set to
3087 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3088 this case, but the address of the object can be found. */
3091 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3092 punsignedp, pvolatilep)
3097 enum machine_mode *pmode;
3101 tree orig_exp = exp;
3103 enum machine_mode mode = VOIDmode;
3104 tree offset = integer_zero_node;
3106 if (TREE_CODE (exp) == COMPONENT_REF)
3108 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3109 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3110 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3111 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3113 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3115 size_tree = TREE_OPERAND (exp, 1);
3116 *punsignedp = TREE_UNSIGNED (exp);
3120 mode = TYPE_MODE (TREE_TYPE (exp));
3121 *pbitsize = GET_MODE_BITSIZE (mode);
3122 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3127 if (TREE_CODE (size_tree) != INTEGER_CST)
3128 mode = BLKmode, *pbitsize = -1;
3130 *pbitsize = TREE_INT_CST_LOW (size_tree);
3133 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3134 and find the ultimate containing object. */
3140 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3142 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3143 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3144 : TREE_OPERAND (exp, 2));
3146 /* If this field hasn't been filled in yet, don't go
3147 past it. This should only happen when folding expressions
3148 made during type construction. */
3152 if (TREE_CODE (pos) == PLUS_EXPR)
3155 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3157 constant = TREE_OPERAND (pos, 0);
3158 var = TREE_OPERAND (pos, 1);
3160 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3162 constant = TREE_OPERAND (pos, 1);
3163 var = TREE_OPERAND (pos, 0);
3168 *pbitpos += TREE_INT_CST_LOW (constant);
3169 offset = size_binop (PLUS_EXPR, offset,
3170 size_binop (FLOOR_DIV_EXPR, var,
3171 size_int (BITS_PER_UNIT)));
3173 else if (TREE_CODE (pos) == INTEGER_CST)
3174 *pbitpos += TREE_INT_CST_LOW (pos);
3177 /* Assume here that the offset is a multiple of a unit.
3178 If not, there should be an explicitly added constant. */
3179 offset = size_binop (PLUS_EXPR, offset,
3180 size_binop (FLOOR_DIV_EXPR, pos,
3181 size_int (BITS_PER_UNIT)));
3185 else if (TREE_CODE (exp) == ARRAY_REF)
3187 /* This code is based on the code in case ARRAY_REF in expand_expr
3188 below. We assume here that the size of an array element is
3189 always an integral multiple of BITS_PER_UNIT. */
3191 tree index = TREE_OPERAND (exp, 1);
3192 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3194 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3195 tree index_type = TREE_TYPE (index);
3197 if (! integer_zerop (low_bound))
3198 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3200 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3202 index = convert (type_for_size (POINTER_SIZE, 0), index);
3203 index_type = TREE_TYPE (index);
3206 index = fold (build (MULT_EXPR, index_type, index,
3207 TYPE_SIZE (TREE_TYPE (exp))));
3209 if (TREE_CODE (index) == INTEGER_CST
3210 && TREE_INT_CST_HIGH (index) == 0)
3211 *pbitpos += TREE_INT_CST_LOW (index);
3213 offset = size_binop (PLUS_EXPR, offset,
3214 size_binop (FLOOR_DIV_EXPR, index,
3215 size_int (BITS_PER_UNIT)));
3217 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3218 && ! ((TREE_CODE (exp) == NOP_EXPR
3219 || TREE_CODE (exp) == CONVERT_EXPR)
3220 && (TYPE_MODE (TREE_TYPE (exp))
3221 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3224 /* If any reference in the chain is volatile, the effect is volatile. */
3225 if (TREE_THIS_VOLATILE (exp))
3227 exp = TREE_OPERAND (exp, 0);
3230 /* If this was a bit-field, see if there is a mode that allows direct
3231 access in case EXP is in memory. */
3232 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3234 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3235 if (mode == BLKmode)
3239 if (integer_zerop (offset))
3242 if (offset != 0 && contains_placeholder_p (offset))
3243 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
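/* Illustrative sketch, added commentary not present in the original
   source: for a nested reference such as

	struct outer { struct inner { int a[10]; } in; } *p;
	... p->in.a[i] ...

   get_inner_reference walks the COMPONENT_REF and ARRAY_REF nodes down
   to the INDIRECT_REF of P, which it returns as the containing object;
   the constant part of the position accumulates in *PBITPOS while the
   variable part contributed by the index I comes back (in bytes) through
   *POFFSET.  The type and field names are hypothetical.  */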
3250 /* Given an rtx VALUE that may contain additions and multiplications,
3251 return an equivalent value that just refers to a register or memory.
3252 This is done by generating instructions to perform the arithmetic
3253 and returning a pseudo-register containing the value.
3255 The returned value may be a REG, SUBREG, MEM or constant. */
3258 force_operand (value, target)
3261 register optab binoptab = 0;
3262 /* Use a temporary to force order of execution of calls to `force_operand'. */
3266 /* Use subtarget as the target for operand 0 of a binary operation. */
3267 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3269 if (GET_CODE (value) == PLUS)
3270 binoptab = add_optab;
3271 else if (GET_CODE (value) == MINUS)
3272 binoptab = sub_optab;
3273 else if (GET_CODE (value) == MULT)
3275 op2 = XEXP (value, 1);
3276 if (!CONSTANT_P (op2)
3277 && !(GET_CODE (op2) == REG && op2 != subtarget))
3279 tmp = force_operand (XEXP (value, 0), subtarget);
3280 return expand_mult (GET_MODE (value), tmp,
3281 force_operand (op2, NULL_RTX),
3287 op2 = XEXP (value, 1);
3288 if (!CONSTANT_P (op2)
3289 && !(GET_CODE (op2) == REG && op2 != subtarget))
3291 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3293 binoptab = add_optab;
3294 op2 = negate_rtx (GET_MODE (value), op2);
3297 /* Check for an addition with OP2 a constant integer and our first
3298 operand a PLUS of a virtual register and something else. In that
3299 case, we want to emit the sum of the virtual register and the
3300 constant first and then add the other value. This allows virtual
3301 register instantiation to simply modify the constant rather than
3302 creating another one around this addition. */
3303 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3304 && GET_CODE (XEXP (value, 0)) == PLUS
3305 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3306 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3307 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3309 rtx temp = expand_binop (GET_MODE (value), binoptab,
3310 XEXP (XEXP (value, 0), 0), op2,
3311 subtarget, 0, OPTAB_LIB_WIDEN);
3312 return expand_binop (GET_MODE (value), binoptab, temp,
3313 force_operand (XEXP (XEXP (value, 0), 1), 0),
3314 target, 0, OPTAB_LIB_WIDEN);
3317 tmp = force_operand (XEXP (value, 0), subtarget);
3318 return expand_binop (GET_MODE (value), binoptab, tmp,
3319 force_operand (op2, NULL_RTX),
3320 target, 0, OPTAB_LIB_WIDEN);
3321 /* We give UNSIGNEDP = 0 to expand_binop
3322 because the only operations we are expanding here are signed ones. */
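/* Illustrative sketch, added commentary not present in the original
   source: given address arithmetic such as

	(plus:SI (reg:SI 70) (const_int 4))

   force_operand emits the addition and returns a pseudo register holding
   the sum, so the value can be used where only a register, memory
   reference or constant is allowed.  When operand 0 is itself a PLUS of
   a virtual register and something else, the virtual register and the
   constant are combined first, as described above, so that virtual
   register instantiation can simply adjust the constant.  Register 70
   and mode SI are hypothetical.  */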
3327 /* Subroutine of expand_expr:
3328 save the non-copied parts (LIST) of an expr (LHS), and return a list
3329 which can restore these values to their previous values,
3330 should something modify their storage. */
3333 save_noncopied_parts (lhs, list)
3340 for (tail = list; tail; tail = TREE_CHAIN (tail))
3341 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3342 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3345 tree part = TREE_VALUE (tail);
3346 tree part_type = TREE_TYPE (part);
3347 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3348 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3349 int_size_in_bytes (part_type), 0);
3350 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3351 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3352 parts = tree_cons (to_be_saved,
3353 build (RTL_EXPR, part_type, NULL_TREE,
3356 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3361 /* Subroutine of expand_expr:
3362 record the non-copied parts (LIST) of an expr (LHS), and return a list
3363 which specifies the initial values of these parts. */
3366 init_noncopied_parts (lhs, list)
3373 for (tail = list; tail; tail = TREE_CHAIN (tail))
3374 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3375 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3378 tree part = TREE_VALUE (tail);
3379 tree part_type = TREE_TYPE (part);
3380 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3381 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3386 /* Subroutine of expand_expr: return nonzero iff there is no way that
3387 EXP can reference X, which is being modified. */
3390 safe_from_p (x, exp)
3400 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3401 find the underlying pseudo. */
3402 if (GET_CODE (x) == SUBREG)
3405 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3409 /* If X is a location in the outgoing argument area, it is always safe. */
3410 if (GET_CODE (x) == MEM
3411 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3412 || (GET_CODE (XEXP (x, 0)) == PLUS
3413 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3416 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3419 exp_rtl = DECL_RTL (exp);
3426 if (TREE_CODE (exp) == TREE_LIST)
3427 return ((TREE_VALUE (exp) == 0
3428 || safe_from_p (x, TREE_VALUE (exp)))
3429 && (TREE_CHAIN (exp) == 0
3430 || safe_from_p (x, TREE_CHAIN (exp))));
3435 return safe_from_p (x, TREE_OPERAND (exp, 0));
3439 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3440 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3444 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3445 the expression. If it is set, we conflict iff we are that rtx or
3446 both are in memory. Otherwise, we check all operands of the
3447 expression recursively. */
3449 switch (TREE_CODE (exp))
3452 return (staticp (TREE_OPERAND (exp, 0))
3453 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3456 if (GET_CODE (x) == MEM)
3461 exp_rtl = CALL_EXPR_RTL (exp);
3464 /* Assume that the call will clobber all hard registers and all of memory. */
3466 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3467 || GET_CODE (x) == MEM)
3474 exp_rtl = RTL_EXPR_RTL (exp);
3476 /* We don't know what this can modify. */
3481 case WITH_CLEANUP_EXPR:
3482 exp_rtl = RTL_EXPR_RTL (exp);
3486 exp_rtl = SAVE_EXPR_RTL (exp);
3490 /* The only operand we look at is operand 1. The rest aren't
3491 part of the expression. */
3492 return safe_from_p (x, TREE_OPERAND (exp, 1));
3494 case METHOD_CALL_EXPR:
3495 /* This takes a rtx argument, but shouldn't appear here. */
3499 /* If we have an rtx, we do not need to scan our operands. */
3503 nops = tree_code_length[(int) TREE_CODE (exp)];
3504 for (i = 0; i < nops; i++)
3505 if (TREE_OPERAND (exp, i) != 0
3506 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3510 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
3514 if (GET_CODE (exp_rtl) == SUBREG)
3516 exp_rtl = SUBREG_REG (exp_rtl);
3517 if (GET_CODE (exp_rtl) == REG
3518 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3522 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3523 are memory and EXP is not readonly. */
3524 return ! (rtx_equal_p (x, exp_rtl)
3525 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3526 && ! TREE_READONLY (exp)));
3529 /* If we reach here, it is safe. */
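/* Illustrative note, added commentary not present in the original
   source: safe_from_p is what allows expand_expr to build a value
   directly in TARGET.  For instance, the CONSTRUCTOR case below tests
   `target == 0 || ! safe_from_p (target, exp)'; if the constructor's
   element expressions read the very object being overwritten,
   safe_from_p returns 0 and a fresh register or stack temporary is used
   instead of TARGET.  */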
3533 /* Subroutine of expand_expr: return nonzero iff EXP is an
3534 expression whose type is statically determinable. */
3540 if (TREE_CODE (exp) == PARM_DECL
3541 || TREE_CODE (exp) == VAR_DECL
3542 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3543 || TREE_CODE (exp) == COMPONENT_REF
3544 || TREE_CODE (exp) == ARRAY_REF)
3549 /* expand_expr: generate code for computing expression EXP.
3550 An rtx for the computed value is returned. The value is never null.
3551 In the case of a void EXP, const0_rtx is returned.
3553 The value may be stored in TARGET if TARGET is nonzero.
3554 TARGET is just a suggestion; callers must assume that
3555 the rtx returned may not be the same as TARGET.
3557 If TARGET is CONST0_RTX, it means that the value will be ignored.
3559 If TMODE is not VOIDmode, it suggests generating the
3560 result in mode TMODE. But this is done only when convenient.
3561 Otherwise, TMODE is ignored and the value generated in its natural mode.
3562 TMODE is just a suggestion; callers must assume that
3563 the rtx returned may not have mode TMODE.
3565 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3566 with a constant address even if that address is not normally legitimate.
3567 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3569 If MODIFIER is EXPAND_SUM then when EXP is an addition
3570 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3571 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3572 products as above, or REG or MEM, or constant.
3573 Ordinarily in such cases we would output mul or add instructions
3574 and then return a pseudo reg containing the sum.
3576 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3577 it also marks a label as absolutely required (it can't be dead).
3578 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3579 This is used for outputting expressions used in initializers. */
3582 expand_expr (exp, target, tmode, modifier)
3585 enum machine_mode tmode;
3586 enum expand_modifier modifier;
3588 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3589 This is static so it will be accessible to our recursive callees. */
3590 static tree placeholder_list = 0;
3591 register rtx op0, op1, temp;
3592 tree type = TREE_TYPE (exp);
3593 int unsignedp = TREE_UNSIGNED (type);
3594 register enum machine_mode mode = TYPE_MODE (type);
3595 register enum tree_code code = TREE_CODE (exp);
3597 /* Use subtarget as the target for operand 0 of a binary operation. */
3598 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3599 rtx original_target = target;
3600 /* Maybe defer this until sure not doing bytecode? */
3601 int ignore = (target == const0_rtx
3602 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3603 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3604 || code == COND_EXPR)
3605 && TREE_CODE (type) == VOID_TYPE));
3609 if (output_bytecode)
3611 bc_expand_expr (exp);
3615 /* Don't use hard regs as subtargets, because the combiner
3616 can only handle pseudo regs. */
3617 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3619 /* Avoid subtargets inside loops,
3620 since they hide some invariant expressions. */
3621 if (preserve_subexpressions_p ())
3624 /* If we are going to ignore this result, we need only do something
3625 if there is a side-effect somewhere in the expression. If there
3626 is, short-circuit the most common cases here. Note that we must
3627 not call expand_expr with anything but const0_rtx in case this
3628 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3632 if (! TREE_SIDE_EFFECTS (exp))
3635 /* Ensure we reference a volatile object even if value is ignored. */
3636 if (TREE_THIS_VOLATILE (exp)
3637 && TREE_CODE (exp) != FUNCTION_DECL
3638 && mode != VOIDmode && mode != BLKmode)
3640 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3641 if (GET_CODE (temp) == MEM)
3642 temp = copy_to_reg (temp);
3646 if (TREE_CODE_CLASS (code) == '1')
3647 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3648 VOIDmode, modifier);
3649 else if (TREE_CODE_CLASS (code) == '2'
3650 || TREE_CODE_CLASS (code) == '<')
3652 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3653 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3656 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3657 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3658 /* If the second operand has no side effects, just evaluate the first. */
3660 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3661 VOIDmode, modifier);
3666 /* If we will do cse, generate all results into pseudo registers
3667 since 1) that allows cse to find more things
3668 and 2) otherwise cse could produce an insn the machine cannot support. */
3671 if (! cse_not_expected && mode != BLKmode && target
3672 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3679 tree function = decl_function_context (exp);
3680 /* Handle using a label in a containing function. */
3681 if (function != current_function_decl && function != 0)
3683 struct function *p = find_function_data (function);
3684 /* Allocate in the memory associated with the function
3685 that the label is in. */
3686 push_obstacks (p->function_obstack,
3687 p->function_maybepermanent_obstack);
3689 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3690 label_rtx (exp), p->forced_labels);
3693 else if (modifier == EXPAND_INITIALIZER)
3694 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3695 label_rtx (exp), forced_labels);
3696 temp = gen_rtx (MEM, FUNCTION_MODE,
3697 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3698 if (function != current_function_decl && function != 0)
3699 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3704 if (DECL_RTL (exp) == 0)
3706 error_with_decl (exp, "prior parameter's size depends on `%s'");
3707 return CONST0_RTX (mode);
3711 /* If a static var's type was incomplete when the decl was written,
3712 but the type is complete now, lay out the decl now. */
3713 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3714 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3716 push_obstacks_nochange ();
3717 end_temporary_allocation ();
3718 layout_decl (exp, 0);
3719 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3724 if (DECL_RTL (exp) == 0)
3726 /* Ensure variable marked as used even if it doesn't go through
3727 a parser. If it hasn't been used yet, write out an external definition. */
3729 if (! TREE_USED (exp))
3731 assemble_external (exp);
3732 TREE_USED (exp) = 1;
3735 /* Handle variables inherited from containing functions. */
3736 context = decl_function_context (exp);
3738 /* We treat inline_function_decl as an alias for the current function
3739 because that is the inline function whose vars, types, etc.
3740 are being merged into the current function.
3741 See expand_inline_function. */
3742 if (context != 0 && context != current_function_decl
3743 && context != inline_function_decl
3744 /* If var is static, we don't need a static chain to access it. */
3745 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3746 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3750 /* Mark as non-local and addressable. */
3751 DECL_NONLOCAL (exp) = 1;
3752 mark_addressable (exp);
3753 if (GET_CODE (DECL_RTL (exp)) != MEM)
3755 addr = XEXP (DECL_RTL (exp), 0);
3756 if (GET_CODE (addr) == MEM)
3757 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3759 addr = fix_lexical_addr (addr, exp);
3760 return change_address (DECL_RTL (exp), mode, addr);
3763 /* This is the case of an array whose size is to be determined
3764 from its initializer, while the initializer is still being parsed.
3766 if (GET_CODE (DECL_RTL (exp)) == MEM
3767 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3768 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3769 XEXP (DECL_RTL (exp), 0));
3770 if (GET_CODE (DECL_RTL (exp)) == MEM
3771 && modifier != EXPAND_CONST_ADDRESS
3772 && modifier != EXPAND_SUM
3773 && modifier != EXPAND_INITIALIZER)
3775 /* DECL_RTL probably contains a constant address.
3776 On RISC machines where a constant address isn't valid,
3777 make some insns to get that address into a register. */
3778 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3780 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3781 return change_address (DECL_RTL (exp), VOIDmode,
3782 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3785 /* If the mode of DECL_RTL does not match that of the decl, it
3786 must be a promoted value. We return a SUBREG of the wanted mode,
3787 but mark it so that we know that it was already extended. */
3789 if (GET_CODE (DECL_RTL (exp)) == REG
3790 && GET_MODE (DECL_RTL (exp)) != mode)
3792 enum machine_mode decl_mode = DECL_MODE (exp);
3794 /* Get the signedness used for this variable. Ensure we get the
3795 same mode we got when the variable was declared. */
3797 PROMOTE_MODE (decl_mode, unsignedp, type);
3799 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3802 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3803 SUBREG_PROMOTED_VAR_P (temp) = 1;
3804 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3808 return DECL_RTL (exp);
3811 return immed_double_const (TREE_INT_CST_LOW (exp),
3812 TREE_INT_CST_HIGH (exp),
3816 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3819 /* If optimized, generate immediate CONST_DOUBLE
3820 which will be turned into memory by reload if necessary.
3822 We used to force a register so that loop.c could see it. But
3823 this does not allow gen_* patterns to perform optimizations with
3824 the constants. It also produces two insns in cases like "x = 1.0;".
3825 On most machines, floating-point constants are not permitted in
3826 many insns, so we'd end up copying it to a register in any case.
3828 Now, we do the copying in expand_binop, if appropriate. */
3829 return immed_real_const (exp);
3833 if (! TREE_CST_RTL (exp))
3834 output_constant_def (exp);
3836 /* TREE_CST_RTL probably contains a constant address.
3837 On RISC machines where a constant address isn't valid,
3838 make some insns to get that address into a register. */
3839 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3840 && modifier != EXPAND_CONST_ADDRESS
3841 && modifier != EXPAND_INITIALIZER
3842 && modifier != EXPAND_SUM
3843 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3844 return change_address (TREE_CST_RTL (exp), VOIDmode,
3845 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3846 return TREE_CST_RTL (exp);
3849 context = decl_function_context (exp);
3850 /* We treat inline_function_decl as an alias for the current function
3851 because that is the inline function whose vars, types, etc.
3852 are being merged into the current function.
3853 See expand_inline_function. */
3854 if (context == current_function_decl || context == inline_function_decl)
3857 /* If this is non-local, handle it. */
3860 temp = SAVE_EXPR_RTL (exp);
3861 if (temp && GET_CODE (temp) == REG)
3863 put_var_into_stack (exp);
3864 temp = SAVE_EXPR_RTL (exp);
3866 if (temp == 0 || GET_CODE (temp) != MEM)
3868 return change_address (temp, mode,
3869 fix_lexical_addr (XEXP (temp, 0), exp));
3871 if (SAVE_EXPR_RTL (exp) == 0)
3873 if (mode == BLKmode)
3876 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3877 MEM_IN_STRUCT_P (temp)
3878 = (TREE_CODE (type) == RECORD_TYPE
3879 || TREE_CODE (type) == UNION_TYPE
3880 || TREE_CODE (type) == QUAL_UNION_TYPE
3881 || TREE_CODE (type) == ARRAY_TYPE);
3885 enum machine_mode var_mode = mode;
3887 if (TREE_CODE (type) == INTEGER_TYPE
3888 || TREE_CODE (type) == ENUMERAL_TYPE
3889 || TREE_CODE (type) == BOOLEAN_TYPE
3890 || TREE_CODE (type) == CHAR_TYPE
3891 || TREE_CODE (type) == REAL_TYPE
3892 || TREE_CODE (type) == POINTER_TYPE
3893 || TREE_CODE (type) == OFFSET_TYPE)
3895 PROMOTE_MODE (var_mode, unsignedp, type);
3898 temp = gen_reg_rtx (var_mode);
3901 SAVE_EXPR_RTL (exp) = temp;
3902 if (!optimize && GET_CODE (temp) == REG)
3903 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3906 /* If the mode of TEMP does not match that of the expression, it
3907 must be a promoted value. We pass store_expr a SUBREG of the
3908 wanted mode but mark it so that we know that it was already
3909 extended. Note that `unsignedp' was modified above in this case. */
3912 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3914 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3915 SUBREG_PROMOTED_VAR_P (temp) = 1;
3916 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3919 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3922 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3923 must be a promoted value. We return a SUBREG of the wanted mode,
3924 but mark it so that we know that it was already extended. Note
3925 that `unsignedp' was modified above in this case. */
3927 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3928 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3930 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3931 SUBREG_PROMOTED_VAR_P (temp) = 1;
3932 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3936 return SAVE_EXPR_RTL (exp);
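/* Illustrative note, added commentary not present in the original
   source: a SAVE_EXPR arranges for its operand to be evaluated only
   once even though it may be used several times; for example, the GNU C
   extension `a ?: b' is believed to wrap A in a SAVE_EXPR so the
   condition and the result share one evaluation.  The first expand_expr
   of the node creates the pseudo (or stack temporary for BLKmode)
   recorded in SAVE_EXPR_RTL above; later expansions of the same node
   simply return that rtx.  */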
3938 case PLACEHOLDER_EXPR:
3939 /* If there is an object on the head of the placeholder list,
3940 see if some object in its references is of type TYPE. For
3941 further information, see tree.def. */
3942 if (placeholder_list)
3946 for (object = TREE_PURPOSE (placeholder_list);
3947 TREE_TYPE (object) != type
3948 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
3949 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3950 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3951 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
3952 object = TREE_OPERAND (object, 0))
3955 if (object && TREE_TYPE (object) == type)
3956 return expand_expr (object, original_target, tmode, modifier);
3959 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
3962 case WITH_RECORD_EXPR:
3963 /* Put the object on the placeholder list, expand our first operand,
3964 and pop the list. */
3965 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
3967 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
3969 placeholder_list = TREE_CHAIN (placeholder_list);
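/* Illustrative note, added commentary not present in the original
   source: PLACEHOLDER_EXPR and WITH_RECORD_EXPR support types whose
   sizes or field positions depend on the object itself, as can arise in
   front ends for languages like Ada.  A size or position tree may
   contain a PLACEHOLDER_EXPR standing for "the record being referenced";
   wrapping it in a WITH_RECORD_EXPR, as done in several places above,
   records which object to substitute, and the case here performs that
   substitution by searching placeholder_list for an object of the right
   type.  */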
3973 expand_exit_loop_if_false (NULL_PTR,
3974 invert_truthvalue (TREE_OPERAND (exp, 0)));
3979 expand_start_loop (1);
3980 expand_expr_stmt (TREE_OPERAND (exp, 0));
3988 tree vars = TREE_OPERAND (exp, 0);
3989 int vars_need_expansion = 0;
3991 /* Need to open a binding contour here because
3992 if there are any cleanups they must be contained here. */
3993 expand_start_bindings (0);
3995 /* Mark the corresponding BLOCK for output in its proper place. */
3996 if (TREE_OPERAND (exp, 2) != 0
3997 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3998 insert_block (TREE_OPERAND (exp, 2));
4000 /* If VARS have not yet been expanded, expand them now. */
4003 if (DECL_RTL (vars) == 0)
4005 vars_need_expansion = 1;
4008 expand_decl_init (vars);
4009 vars = TREE_CHAIN (vars);
4012 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4014 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4020 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4022 emit_insns (RTL_EXPR_SEQUENCE (exp));
4023 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4024 return RTL_EXPR_RTL (exp);
4027 /* If we don't need the result, just ensure we evaluate any subexpressions. */
4032 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4033 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4036 /* All elts simple constants => refer to a constant in memory. But
4037 if this is a non-BLKmode mode, let it store a field at a time
4038 since that should make a CONST_INT or CONST_DOUBLE when we
4039 fold. If we are making an initializer and all operands are
4040 constant, put it in memory as well. */
4041 else if ((TREE_STATIC (exp)
4042 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
4043 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4045 rtx constructor = output_constant_def (exp);
4046 if (modifier != EXPAND_CONST_ADDRESS
4047 && modifier != EXPAND_INITIALIZER
4048 && modifier != EXPAND_SUM
4049 && !memory_address_p (GET_MODE (constructor),
4050 XEXP (constructor, 0)))
4051 constructor = change_address (constructor, VOIDmode,
4052 XEXP (constructor, 0));
4058 if (target == 0 || ! safe_from_p (target, exp))
4060 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4061 target = gen_reg_rtx (mode);
4064 enum tree_code c = TREE_CODE (type);
4066 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4067 if (c == RECORD_TYPE || c == UNION_TYPE
4068 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
4069 MEM_IN_STRUCT_P (target) = 1;
4072 store_constructor (exp, target);
4078 tree exp1 = TREE_OPERAND (exp, 0);
4081 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4082 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4083 This code has the same general effect as simply doing
4084 expand_expr on the save expr, except that the expression PTR
4085 is computed for use as a memory address. This means different
4086 code, suitable for indexing, may be generated. */
4087 if (TREE_CODE (exp1) == SAVE_EXPR
4088 && SAVE_EXPR_RTL (exp1) == 0
4089 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4090 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4091 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4093 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4094 VOIDmode, EXPAND_SUM);
4095 op0 = memory_address (mode, temp);
4096 op0 = copy_all_regs (op0);
4097 SAVE_EXPR_RTL (exp1) = op0;
4101 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4102 op0 = memory_address (mode, op0);
4105 temp = gen_rtx (MEM, mode, op0);
4106 /* If address was computed by addition,
4107 mark this as an element of an aggregate. */
4108 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4109 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4110 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4111 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4112 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4113 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4114 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
4115 || (TREE_CODE (exp1) == ADDR_EXPR
4116 && (exp2 = TREE_OPERAND (exp1, 0))
4117 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4118 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4119 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4120 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
4121 MEM_IN_STRUCT_P (temp) = 1;
4122 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4123 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4124 a location is accessed through a pointer to const does not mean
4125 that the value there can never change. */
4126 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4132 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4136 tree array = TREE_OPERAND (exp, 0);
4137 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4138 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4139 tree index = TREE_OPERAND (exp, 1);
4140 tree index_type = TREE_TYPE (index);
4143 if (TREE_CODE (low_bound) != INTEGER_CST
4144 && contains_placeholder_p (low_bound))
4145 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4147 /* Optimize the special-case of a zero lower bound.
4149 We convert the low_bound to sizetype to avoid some problems
4150 with constant folding. (E.g. suppose the lower bound is 1,
4151 and its mode is QI. Without the conversion, (ARRAY
4152 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4153 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4155 But sizetype isn't quite right either (especially if
4156 the lowbound is negative). FIXME */
4158 if (! integer_zerop (low_bound))
4159 index = fold (build (MINUS_EXPR, index_type, index,
4160 convert (sizetype, low_bound)));
4162 if (TREE_CODE (index) != INTEGER_CST
4163 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4165 /* Nonconstant array index or nonconstant element size.
4166 Generate the tree for *(&array+index) and expand that,
4167 except do it in a language-independent way
4168 and don't complain about non-lvalue arrays.
4169 `mark_addressable' should already have been called
4170 for any array for which this case will be reached. */
4172 /* Don't forget the const or volatile flag from the array element. */
4174 tree variant_type = build_type_variant (type,
4175 TREE_READONLY (exp),
4176 TREE_THIS_VOLATILE (exp));
4177 tree array_adr = build1 (ADDR_EXPR,
4178 build_pointer_type (variant_type), array);
4180 tree size = size_in_bytes (type);
4182 /* Convert the integer argument to a type the same size as a
4183 pointer so the multiply won't overflow spuriously. */
4184 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4185 index = convert (type_for_size (POINTER_SIZE, 0), index);
4187 if (TREE_CODE (size) != INTEGER_CST
4188 && contains_placeholder_p (size))
4189 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4191 /* Don't think the address has side effects
4192 just because the array does.
4193 (In some cases the address might have side effects,
4194 and we fail to record that fact here. However, it should not
4195 matter, since expand_expr should not care.) */
4196 TREE_SIDE_EFFECTS (array_adr) = 0;
4198 elt = build1 (INDIRECT_REF, type,
4199 fold (build (PLUS_EXPR,
4200 TYPE_POINTER_TO (variant_type),
4202 fold (build (MULT_EXPR,
4203 TYPE_POINTER_TO (variant_type),
4206 /* Volatility, etc., of the new expression are the same as the old expression's. */
4208 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4209 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4210 TREE_READONLY (elt) = TREE_READONLY (exp);
4212 return expand_expr (elt, target, tmode, modifier);
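/* Roughly, the rewrite above turns a non-constant subscript

       array[index]

   into the equivalent of

       *(elt_type *) ((char *) &array + index * sizeof (elt_type))

   where elt_type stands for the element type (an informal sketch, not
   literal GNU CC output; the low bound has already been folded into
   INDEX at this point).  */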
4215 /* Fold an expression like: "foo"[2].
4216 This is not done in fold so it won't happen inside &. */
4218 if (TREE_CODE (array) == STRING_CST
4219 && TREE_CODE (index) == INTEGER_CST
4220 && !TREE_INT_CST_HIGH (index)
4221 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
4223 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
4225 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
4226 TREE_TYPE (exp) = integer_type_node;
4227 return expand_expr (exp, target, tmode, modifier);
4229 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
4231 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
4232 TREE_TYPE (exp) = integer_type_node;
4233 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
4235 target, tmode, modifier);
4239 /* If this is a constant index into a constant array,
4240 just get the value from the array. Handle both the cases when
4241 we have an explicit constructor and when our operand is a variable
4242 that was declared const. */
4244 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4246 if (TREE_CODE (index) == INTEGER_CST
4247 && TREE_INT_CST_HIGH (index) == 0)
4249 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4251 i = TREE_INT_CST_LOW (index);
4253 elem = TREE_CHAIN (elem);
4255 return expand_expr (fold (TREE_VALUE (elem)), target,
4260 else if (optimize >= 1
4261 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4262 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4263 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4265 if (TREE_CODE (index) == INTEGER_CST
4266 && TREE_INT_CST_HIGH (index) == 0)
4268 tree init = DECL_INITIAL (array);
4270 i = TREE_INT_CST_LOW (index);
4271 if (TREE_CODE (init) == CONSTRUCTOR)
4273 tree elem = CONSTRUCTOR_ELTS (init);
4276 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4277 elem = TREE_CHAIN (elem);
4279 return expand_expr (fold (TREE_VALUE (elem)), target,
4282 else if (TREE_CODE (init) == STRING_CST
4283 && i < TREE_STRING_LENGTH (init))
4285 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4286 return convert_to_mode (mode, temp, 0);
4292 /* Treat array-ref with constant index as a component-ref. */
4296 /* If the operand is a CONSTRUCTOR, we can just extract the
4297 appropriate field if it is present. */
4298 if (code != ARRAY_REF
4299 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4303 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4304 elt = TREE_CHAIN (elt))
4305 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4306 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4310 enum machine_mode mode1;
4315 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4316 &mode1, &unsignedp, &volatilep);
4319 /* If we got back the original object, something is wrong. Perhaps
4320 we are evaluating an expression too early. In any event, don't
4321 infinitely recurse. */
4325 /* In some cases, we will be offsetting OP0's address by a constant.
4326 So get it as a sum, if possible. If we will be using it
4327 directly in an insn, we validate it. */
4328 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4330 /* If this is a constant, put it into a register if it is a
4331 legitimate constant and memory if it isn't. */
4332 if (CONSTANT_P (op0))
4334 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4335 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4336 op0 = force_reg (mode, op0);
4338 op0 = validize_mem (force_const_mem (mode, op0));
4341 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4344 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4346 if (GET_CODE (op0) != MEM)
4348 op0 = change_address (op0, VOIDmode,
4349 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4350 force_reg (Pmode, offset_rtx)));
4351 /* If we have a variable offset, the known alignment
4352 is only that of the innermost structure containing the field.
4353 (Actually, we could sometimes do better by using the
4354 size of an element of the innermost array, but no need.) */
4355 if (TREE_CODE (exp) == COMPONENT_REF
4356 || TREE_CODE (exp) == BIT_FIELD_REF)
4357 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4361 /* Don't forget about volatility even if this is a bitfield. */
4362 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4364 op0 = copy_rtx (op0);
4365 MEM_VOLATILE_P (op0) = 1;
4368 /* In cases where an aligned union has an unaligned object
4369 as a field, we might be extracting a BLKmode value from
4370 an integer-mode (e.g., SImode) object. Handle this case
4371 by doing the extract into an object as wide as the field
4372 (which we know to be the width of a basic mode), then
4373 storing into memory, and changing the mode to BLKmode. */
4374 if (mode1 == VOIDmode
4375 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4376 && modifier != EXPAND_CONST_ADDRESS
4377 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4378 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4379 /* If the field isn't aligned enough to fetch as a memref,
4380 fetch it as a bit field. */
4381 || (STRICT_ALIGNMENT
4382 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4383 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4385 enum machine_mode ext_mode = mode;
4387 if (ext_mode == BLKmode)
4388 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4390 if (ext_mode == BLKmode)
4393 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4394 unsignedp, target, ext_mode, ext_mode,
4396 int_size_in_bytes (TREE_TYPE (tem)));
4397 if (mode == BLKmode)
4399 rtx new = assign_stack_temp (ext_mode,
4400 bitsize / BITS_PER_UNIT, 0);
4402 emit_move_insn (new, op0);
4403 op0 = copy_rtx (new);
4404 PUT_MODE (op0, BLKmode);
4405 MEM_IN_STRUCT_P (op0) = 1;
4411 /* Get a reference to just this component. */
4412 if (modifier == EXPAND_CONST_ADDRESS
4413 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4414 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4415 (bitpos / BITS_PER_UNIT)));
4417 op0 = change_address (op0, mode1,
4418 plus_constant (XEXP (op0, 0),
4419 (bitpos / BITS_PER_UNIT)));
4420 MEM_IN_STRUCT_P (op0) = 1;
4421 MEM_VOLATILE_P (op0) |= volatilep;
4422 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4425 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4426 convert_move (target, op0, unsignedp);
4432 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4433 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4434 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4435 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4436 MEM_IN_STRUCT_P (temp) = 1;
4437 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4438 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4439 a location is accessed through a pointer to const does not mean
4440 that the value there can never change. */
4441 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4446 /* Intended for a reference to a buffer of a file-object in Pascal.
4447 But it's not certain that a special tree code will really be
4448 necessary for these. INDIRECT_REF might work for them. */
4452 /* IN_EXPR: Inlined pascal set IN expression.
4455 rlo = set_low - (set_low%bits_per_word);
4456 the_word = set [ (index - rlo)/bits_per_word ];
4457 bit_index = index % bits_per_word;
4458 bitmask = 1 << bit_index;
4459 return !!(the_word & bitmask); */
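/* A freestanding C rendition of the same test (illustrative sketch only;
   the function and parameter names below are made up, and the RTL emitted
   here actually works on BITS_PER_UNIT-sized chunks, i.e. bytes, rather
   than full words):  */
#if 0
static int
sketch_in_set (unsigned char *set, int set_low, int index)
{
  int rlo = set_low - (set_low % BITS_PER_UNIT);	/* bit 0 of byte 0 */
  unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
  int bit_index = index % BITS_PER_UNIT;
  unsigned char bitmask = 1 << bit_index;
  return !!(the_word & bitmask);
}
#endif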
4461 preexpand_calls (exp);
4463 tree set = TREE_OPERAND (exp, 0);
4464 tree index = TREE_OPERAND (exp, 1);
4465 tree set_type = TREE_TYPE (set);
4467 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4468 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4474 rtx diff, quo, rem, addr, bit, result;
4475 rtx setval, setaddr;
4476 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4479 target = gen_reg_rtx (mode);
4481 /* If domain is empty, answer is no. */
4482 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4485 index_val = expand_expr (index, 0, VOIDmode, 0);
4486 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4487 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4488 setval = expand_expr (set, 0, VOIDmode, 0);
4489 setaddr = XEXP (setval, 0);
4491 /* Compare index against bounds, if they are constant. */
4492 if (GET_CODE (index_val) == CONST_INT
4493 && GET_CODE (lo_r) == CONST_INT
4494 && INTVAL (index_val) < INTVAL (lo_r))
4497 if (GET_CODE (index_val) == CONST_INT
4498 && GET_CODE (hi_r) == CONST_INT
4499 && INTVAL (hi_r) < INTVAL (index_val))
4502 /* If we get here, we have to generate the code for both cases
4503 (in range and out of range). */
4505 op0 = gen_label_rtx ();
4506 op1 = gen_label_rtx ();
4508 if (! (GET_CODE (index_val) == CONST_INT
4509 && GET_CODE (lo_r) == CONST_INT))
4511 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4512 GET_MODE (index_val), 0, 0);
4513 emit_jump_insn (gen_blt (op1));
4516 if (! (GET_CODE (index_val) == CONST_INT
4517 && GET_CODE (hi_r) == CONST_INT))
4519 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4520 GET_MODE (index_val), 0, 0);
4521 emit_jump_insn (gen_bgt (op1));
4524 /* Calculate the element number of bit zero in the first word of the set. */
4526 if (GET_CODE (lo_r) == CONST_INT)
4527 rlow = GEN_INT (INTVAL (lo_r)
4528 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4530 rlow = expand_binop (index_mode, and_optab, lo_r,
4531 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4532 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4534 diff = expand_binop (index_mode, sub_optab,
4535 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4537 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4538 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4539 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4540 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4541 addr = memory_address (byte_mode,
4542 expand_binop (index_mode, add_optab,
4543 diff, setaddr, NULL_RTX, 0,
4545 /* Extract the bit we want to examine */
4546 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4547 gen_rtx (MEM, byte_mode, addr),
4548 make_tree (TREE_TYPE (index), rem),
4550 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4551 GET_MODE (target) == byte_mode ? target : 0,
4552 1, OPTAB_LIB_WIDEN);
4554 if (result != target)
4555 convert_move (target, result, 1);
4557 /* Output the code to handle the out-of-range case. */
4560 emit_move_insn (target, const0_rtx);
4565 case WITH_CLEANUP_EXPR:
4566 if (RTL_EXPR_RTL (exp) == 0)
4569 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4571 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4572 /* That's it for this cleanup. */
4573 TREE_OPERAND (exp, 2) = 0;
4575 return RTL_EXPR_RTL (exp);
4578 /* Check for a built-in function. */
4579 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4580 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4581 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4582 return expand_builtin (exp, target, subtarget, tmode, ignore);
4583 /* If this call was expanded already by preexpand_calls,
4584 just return the result we got. */
4585 if (CALL_EXPR_RTL (exp) != 0)
4586 return CALL_EXPR_RTL (exp);
4587 return expand_call (exp, target, ignore);
4589 case NON_LVALUE_EXPR:
4592 case REFERENCE_EXPR:
4593 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4594 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4595 if (TREE_CODE (type) == UNION_TYPE)
4597 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4600 if (mode == BLKmode)
4602 if (TYPE_SIZE (type) == 0
4603 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4605 target = assign_stack_temp (BLKmode,
4606 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4607 + BITS_PER_UNIT - 1)
4608 / BITS_PER_UNIT, 0);
4611 target = gen_reg_rtx (mode);
4613 if (GET_CODE (target) == MEM)
4614 /* Store data into beginning of memory target. */
4615 store_expr (TREE_OPERAND (exp, 0),
4616 change_address (target, TYPE_MODE (valtype), 0), 0);
4618 else if (GET_CODE (target) == REG)
4619 /* Store this field into a union of the proper type. */
4620 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4621 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4623 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4627 /* Return the entire union. */
4630 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4631 if (GET_MODE (op0) == mode)
4633 /* If arg is a constant integer being extended from a narrower mode,
4634 we must really truncate to get the extended bits right. Otherwise
4635 (unsigned long) (unsigned char) ("\377"[0])
4636 would come out as ffffffff. */
4637 if (GET_MODE (op0) == VOIDmode
4638 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4639 < GET_MODE_BITSIZE (mode)))
4641 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4642 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4644 if (width < HOST_BITS_PER_WIDE_INT)
4646 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4647 : CONST_DOUBLE_LOW (op0));
4648 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4649 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4650 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4652 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4654 op0 = GEN_INT (val);
4658 op0 = (simplify_unary_operation
4659 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4660 ? ZERO_EXTEND : SIGN_EXTEND),
4662 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4667 if (GET_MODE (op0) == VOIDmode)
4669 if (modifier == EXPAND_INITIALIZER)
4670 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4671 if (flag_force_mem && GET_CODE (op0) == MEM)
4672 op0 = copy_to_reg (op0);
4675 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4677 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4681 /* We come here from MINUS_EXPR when the second operand is a constant. */
4683 this_optab = add_optab;
4685 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4686 something else, make sure we add the register to the constant and
4687 then to the other thing. This case can occur during strength
4688 reduction and doing it this way will produce better code if the
4689 frame pointer or argument pointer is eliminated.
4691 fold-const.c will ensure that the constant is always in the inner
4692 PLUS_EXPR, so the only case we need to do anything about is if
4693 sp, ap, or fp is our second argument, in which case we must swap
4694 the innermost first argument and our second argument. */
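/* Informally (not literal GNU CC output): given the source-level sum
   (i + 4) + fp, the swap below rewrites it as (fp + 4) + i, so that when
   the frame pointer is eliminated the constant sits next to the register
   being replaced and can be folded into the substituted address.  */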
4696 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4698 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4699 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4700 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4701 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4703 tree t = TREE_OPERAND (exp, 1);
4705 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4706 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4709 /* If the result is to be Pmode and we are adding an integer to
4710 something, we might be forming a constant. So try to use
4711 plus_constant. If it produces a sum and we can't accept it,
4712 use force_operand. This allows P = &ARR[const] to generate
4713 efficient code on machines where a SYMBOL_REF is not a valid address.
4716 If this is an EXPAND_SUM call, always return the sum. */
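/* For example (a sketch, not literal output): P = &ARR[3], where ARR has
   4-byte elements, can be expanded to the single constant address ARR+12
   via plus_constant, instead of emitting a separate add instruction.  */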
4717 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4721 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4722 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4724 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4726 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4727 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4728 op1 = force_operand (op1, target);
4732 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4733 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4734 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4738 if (! CONSTANT_P (op0))
4740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4741 VOIDmode, modifier);
4742 /* Don't go to both_summands if modifier
4743 says it's not right to return a PLUS. */
4744 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4748 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4749 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4750 op0 = force_operand (op0, target);
4755 /* No sense saving up arithmetic to be done
4756 if it's all in the wrong mode to form part of an address.
4757 And force_operand won't know whether to sign-extend or zero-extend. */
4759 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4763 preexpand_calls (exp);
4764 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4767 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4768 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4771 /* Make sure any term that's a sum with a constant comes last. */
4772 if (GET_CODE (op0) == PLUS
4773 && CONSTANT_P (XEXP (op0, 1)))
4779 /* If adding to a sum including a constant,
4780 associate it to put the constant outside. */
4781 if (GET_CODE (op1) == PLUS
4782 && CONSTANT_P (XEXP (op1, 1)))
4784 rtx constant_term = const0_rtx;
4786 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4789 /* Ensure that MULT comes first if there is one. */
4790 else if (GET_CODE (op0) == MULT)
4791 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4793 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4795 /* Let's also eliminate constants from op0 if possible. */
4796 op0 = eliminate_constant_term (op0, &constant_term);
4798 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4799 their sum should be a constant. Form it into OP1, since the
4800 result we want will then be OP0 + OP1. */
4802 temp = simplify_binary_operation (PLUS, mode, constant_term,
4807 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4810 /* Put a constant term last and put a multiplication first. */
4811 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4812 temp = op1, op1 = op0, op0 = temp;
4814 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4815 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4818 /* Handle difference of two symbolic constants,
4819 for the sake of an initializer. */
4820 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4821 && really_constant_p (TREE_OPERAND (exp, 0))
4822 && really_constant_p (TREE_OPERAND (exp, 1)))
4824 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4825 VOIDmode, modifier);
4826 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4827 VOIDmode, modifier);
4828 return gen_rtx (MINUS, mode, op0, op1);
4830 /* Convert A - const to A + (-const). */
4831 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4833 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4834 fold (build1 (NEGATE_EXPR, type,
4835 TREE_OPERAND (exp, 1))));
4838 this_optab = sub_optab;
4842 preexpand_calls (exp);
4843 /* If first operand is constant, swap them.
4844 Thus the following special case checks need only
4845 check the second operand. */
4846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4848 register tree t1 = TREE_OPERAND (exp, 0);
4849 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4850 TREE_OPERAND (exp, 1) = t1;
4853 /* Attempt to return something suitable for generating an
4854 indexed address, for machines that support that. */
4856 if (modifier == EXPAND_SUM && mode == Pmode
4857 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4858 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4860 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4862 /* Apply distributive law if OP0 is x+c. */
4863 if (GET_CODE (op0) == PLUS
4864 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4865 return gen_rtx (PLUS, mode,
4866 gen_rtx (MULT, mode, XEXP (op0, 0),
4867 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4868 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4869 * INTVAL (XEXP (op0, 1))));
4871 if (GET_CODE (op0) != REG)
4872 op0 = force_operand (op0, NULL_RTX);
4873 if (GET_CODE (op0) != REG)
4874 op0 = copy_to_mode_reg (mode, op0);
4876 return gen_rtx (MULT, mode, op0,
4877 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
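/* E.g. (a sketch): when OP0 expands to (plus X (const_int 4)) and the
   multiplier is 8, the distributive rewrite above yields
   (plus (mult X 8) (const_int 32)), which folds cleanly into an indexed
   address.  */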
4880 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4883 /* Check for multiplying things that have been extended
4884 from a narrower type. If this machine supports multiplying
4885 in that narrower type with a result in the desired type,
4886 do it that way, and avoid the explicit type-conversion. */
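/* The kind of source this case recognizes (an illustrative sketch, not
   GNU CC code; assumes int is wider than short):  */
#if 0
short a, b;
int prod = (int) a * (int) b;	/* can become one widening HImode->SImode
				   multiply instead of two extensions plus
				   an SImode multiply */
#endif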
4887 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4888 && TREE_CODE (type) == INTEGER_TYPE
4889 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4890 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4891 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4892 && int_fits_type_p (TREE_OPERAND (exp, 1),
4893 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4894 /* Don't use a widening multiply if a shift will do. */
4895 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4896 > HOST_BITS_PER_WIDE_INT)
4897 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4899 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4900 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4902 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4903 /* If both operands are extended, they must either both
4904 be zero-extended or both be sign-extended. */
4905 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4907 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4909 enum machine_mode innermode
4910 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4911 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4912 ? umul_widen_optab : smul_widen_optab);
4913 if (mode == GET_MODE_WIDER_MODE (innermode)
4914 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4916 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4917 NULL_RTX, VOIDmode, 0);
4918 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4919 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4922 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4923 NULL_RTX, VOIDmode, 0);
4927 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4928 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4929 return expand_mult (mode, op0, op1, target, unsignedp);
4931 case TRUNC_DIV_EXPR:
4932 case FLOOR_DIV_EXPR:
4934 case ROUND_DIV_EXPR:
4935 case EXACT_DIV_EXPR:
4936 preexpand_calls (exp);
4937 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4939 /* Possible optimization: compute the dividend with EXPAND_SUM;
4940 then, if the divisor is constant, we can optimize the case
4941 where some terms of the dividend have coefficients divisible by it. */
4942 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4943 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4944 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4947 this_optab = flodiv_optab;
4950 case TRUNC_MOD_EXPR:
4951 case FLOOR_MOD_EXPR:
4953 case ROUND_MOD_EXPR:
4954 preexpand_calls (exp);
4955 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4957 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4958 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4959 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4961 case FIX_ROUND_EXPR:
4962 case FIX_FLOOR_EXPR:
4964 abort (); /* Not used for C. */
4966 case FIX_TRUNC_EXPR:
4967 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4969 target = gen_reg_rtx (mode);
4970 expand_fix (target, op0, unsignedp);
4974 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4976 target = gen_reg_rtx (mode);
4977 /* expand_float can't figure out what to do if FROM has VOIDmode.
4978 So give it the correct mode. With -O, cse will optimize this. */
4979 if (GET_MODE (op0) == VOIDmode)
4980 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4982 expand_float (target, op0,
4983 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4987 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4988 temp = expand_unop (mode, neg_optab, op0, target, 0);
4994 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4996 /* Handle complex values specially. */
4998 enum machine_mode opmode
4999 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5001 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5002 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5003 return expand_complex_abs (opmode, op0, target, unsignedp);
5006 /* Unsigned abs is simply the operand. Testing here means we don't
5007 risk generating incorrect code below. */
5008 if (TREE_UNSIGNED (type))
5011 /* First try to do it with a special abs instruction. */
5012 temp = expand_unop (mode, abs_optab, op0, target, 0);
5016 /* If this machine has expensive jumps, we can do integer absolute
5017 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5018 where W is the width of MODE. */
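/* Spelled out for a 32-bit two's-complement int (illustrative sketch,
   not GNU CC code; the name abs32 is made up, and INT_MIN overflows as
   usual):  */
#if 0
static int
abs32 (int x)
{
  int sign = x >> 31;		/* 0 if x >= 0, -1 (all ones) if x < 0 */
  return (x ^ sign) - sign;	/* complement and add 1 only when negative */
}
#endif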
5020 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5022 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5023 size_int (GET_MODE_BITSIZE (mode) - 1),
5026 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5029 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5036 /* If that does not win, use conditional jump and negate. */
5037 target = original_target;
5038 temp = gen_label_rtx ();
5039 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5040 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5041 || (GET_CODE (target) == REG
5042 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5043 target = gen_reg_rtx (mode);
5044 emit_move_insn (target, op0);
5045 emit_cmp_insn (target,
5046 expand_expr (convert (type, integer_zero_node),
5047 NULL_RTX, VOIDmode, 0),
5048 GE, NULL_RTX, mode, 0, 0);
5050 emit_jump_insn (gen_bge (temp));
5051 op0 = expand_unop (mode, neg_optab, target, target, 0);
5053 emit_move_insn (target, op0);
5060 target = original_target;
5061 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5062 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5063 || (GET_CODE (target) == REG
5064 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5065 target = gen_reg_rtx (mode);
5066 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5067 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5069 /* First try to do it with a special MIN or MAX instruction.
5070 If that does not win, use a conditional jump to select the proper value.
5072 this_optab = (TREE_UNSIGNED (type)
5073 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5074 : (code == MIN_EXPR ? smin_optab : smax_optab));
5076 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5082 emit_move_insn (target, op0);
5083 op0 = gen_label_rtx ();
5084 /* If this mode is an integer too wide to compare properly,
5085 compare word by word. Rely on cse to optimize constant cases. */
5086 if (GET_MODE_CLASS (mode) == MODE_INT
5087 && !can_compare_p (mode))
5089 if (code == MAX_EXPR)
5090 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
5092 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5093 emit_move_insn (target, op1);
5097 if (code == MAX_EXPR)
5098 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5099 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5100 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5102 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5103 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5104 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5105 if (temp == const0_rtx)
5106 emit_move_insn (target, op1);
5107 else if (temp != const_true_rtx)
5109 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5110 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5113 emit_move_insn (target, op1);
5119 /* ??? Can optimize when the operand of this is a bitwise operation,
5120 by using a different bitwise operation. */
5122 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5123 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5129 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5130 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5135 /* ??? Can optimize bitwise operations with one arg constant.
5136 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5137 and (a bitwise1 b) bitwise2 b (etc)
5138 but that is probably not worth while. */
5140 /* BIT_AND_EXPR is for bitwise anding.
5141 TRUTH_AND_EXPR is for anding two boolean values
5142 when we want in all cases to compute both of them.
5143 In general it is fastest to do TRUTH_AND_EXPR by
5144 computing both operands as actual zero-or-1 values
5145 and then bitwise anding. In cases where there cannot
5146 be any side effects, better code would be made by
5147 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5148 but the question is how to recognize those cases. */
5150 case TRUTH_AND_EXPR:
5152 this_optab = and_optab;
5155 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
5158 this_optab = ior_optab;
5161 case TRUTH_XOR_EXPR:
5163 this_optab = xor_optab;
5170 preexpand_calls (exp);
5171 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5174 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5177 /* Could determine the answer when only additive constants differ.
5178 Also, the addition of one can be handled by changing the condition. */
5185 preexpand_calls (exp);
5186 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5189 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5190 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5192 && GET_CODE (original_target) == REG
5193 && (GET_MODE (original_target)
5194 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5196 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5197 if (temp != original_target)
5198 temp = copy_to_reg (temp);
5199 op1 = gen_label_rtx ();
5200 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5201 GET_MODE (temp), unsignedp, 0);
5202 emit_jump_insn (gen_beq (op1));
5203 emit_move_insn (temp, const1_rtx);
5207 /* If no set-flag instruction, must generate a conditional
5208 store into a temporary variable. Drop through
5209 and handle this like && and ||. */
5211 case TRUTH_ANDIF_EXPR:
5212 case TRUTH_ORIF_EXPR:
5214 && (target == 0 || ! safe_from_p (target, exp)
5215 /* Make sure we don't have a hard reg (such as function's return
5216 value) live across basic blocks, if not optimizing. */
5217 || (!optimize && GET_CODE (target) == REG
5218 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5219 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5222 emit_clr_insn (target);
5224 op1 = gen_label_rtx ();
5225 jumpifnot (exp, op1);
5228 emit_0_to_1_insn (target);
5231 return ignore ? const0_rtx : target;
5233 case TRUTH_NOT_EXPR:
5234 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5235 /* The parser is careful to generate TRUTH_NOT_EXPR
5236 only with operands that are always zero or one. */
5237 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5238 target, 1, OPTAB_LIB_WIDEN);
5244 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5246 return expand_expr (TREE_OPERAND (exp, 1),
5247 (ignore ? const0_rtx : target),
5252 /* Note that COND_EXPRs whose type is a structure or union
5253 are required to be constructed to contain assignments of
5254 a temporary variable, so that we can evaluate them here
5255 for side effect only. If type is void, we must do likewise. */
5257 /* If an arm of the branch requires a cleanup,
5258 only that cleanup is performed. */
5261 tree binary_op = 0, unary_op = 0;
5262 tree old_cleanups = cleanups_this_call;
5263 cleanups_this_call = 0;
5265 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5266 convert it to our mode, if necessary. */
5267 if (integer_onep (TREE_OPERAND (exp, 1))
5268 && integer_zerop (TREE_OPERAND (exp, 2))
5269 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5278 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5279 if (GET_MODE (op0) == mode)
5282 target = gen_reg_rtx (mode);
5283 convert_move (target, op0, unsignedp);
5287 /* If we are not to produce a result, we have no target. Otherwise,
5288 if a target was specified, use it; it will not be used as an
5289 intermediate target unless it is safe. If no target, use a temporary. */
5294 else if (original_target
5295 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5296 temp = original_target;
5297 else if (mode == BLKmode)
5299 if (TYPE_SIZE (type) == 0
5300 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5303 temp = assign_stack_temp (BLKmode,
5304 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5305 + BITS_PER_UNIT - 1)
5306 / BITS_PER_UNIT, 0);
5307 MEM_IN_STRUCT_P (temp)
5308 = (TREE_CODE (type) == RECORD_TYPE
5309 || TREE_CODE (type) == UNION_TYPE
5310 || TREE_CODE (type) == QUAL_UNION_TYPE
5311 || TREE_CODE (type) == ARRAY_TYPE);
5314 temp = gen_reg_rtx (mode);
5316 /* Check for X ? A + B : A. If we have this, we can copy
5317 A to the output and conditionally add B. Similarly for unary
5318 operations. Don't do this if X has side-effects because
5319 those side effects might affect A or B and the "?" operation is
5320 a sequence point in ANSI. (We test for side effects later.) */
5322 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5323 && operand_equal_p (TREE_OPERAND (exp, 2),
5324 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5325 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5326 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5327 && operand_equal_p (TREE_OPERAND (exp, 1),
5328 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5329 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5330 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5331 && operand_equal_p (TREE_OPERAND (exp, 2),
5332 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5333 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5334 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5335 && operand_equal_p (TREE_OPERAND (exp, 1),
5336 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5337 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5339 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5340 operation, do this as A + (X != 0). Similarly for other simple
5341 binary operators. */
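/* For example (informally): with the store-flag trick,

       z = (x > 0 ? a + 1 : a);

   can be compiled as  z = a + (x > 0);  with no branch at all.  */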
5342 if (temp && singleton && binary_op
5343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5344 && (TREE_CODE (binary_op) == PLUS_EXPR
5345 || TREE_CODE (binary_op) == MINUS_EXPR
5346 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5347 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5348 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5349 && integer_onep (TREE_OPERAND (binary_op, 1))
5350 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5353 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5354 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5355 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5356 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5359 /* If we had X ? A : A + 1, do this as A + (X == 0).
5361 We have to invert the truth value here and then put it
5362 back later if do_store_flag fails. We cannot simply copy
5363 TREE_OPERAND (exp, 0) to another variable and modify that
5364 because invert_truthvalue can modify the tree pointed to by its argument. */
5366 if (singleton == TREE_OPERAND (exp, 1))
5367 TREE_OPERAND (exp, 0)
5368 = invert_truthvalue (TREE_OPERAND (exp, 0));
5370 result = do_store_flag (TREE_OPERAND (exp, 0),
5371 (safe_from_p (temp, singleton)
5373 mode, BRANCH_COST <= 1);
5377 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5378 return expand_binop (mode, boptab, op1, result, temp,
5379 unsignedp, OPTAB_LIB_WIDEN);
5381 else if (singleton == TREE_OPERAND (exp, 1))
5382 TREE_OPERAND (exp, 0)
5383 = invert_truthvalue (TREE_OPERAND (exp, 0));
5387 op0 = gen_label_rtx ();
5389 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5393 /* If the target conflicts with the other operand of the
5394 binary op, we can't use it. Also, we can't use the target
5395 if it is a hard register, because evaluating the condition
5396 might clobber it. */
5398 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5399 || (GET_CODE (temp) == REG
5400 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5401 temp = gen_reg_rtx (mode);
5402 store_expr (singleton, temp, 0);
5405 expand_expr (singleton,
5406 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5407 if (cleanups_this_call)
5409 sorry ("aggregate value in COND_EXPR");
5410 cleanups_this_call = 0;
5412 if (singleton == TREE_OPERAND (exp, 1))
5413 jumpif (TREE_OPERAND (exp, 0), op0);
5415 jumpifnot (TREE_OPERAND (exp, 0), op0);
5417 if (binary_op && temp == 0)
5418 /* Just touch the other operand. */
5419 expand_expr (TREE_OPERAND (binary_op, 1),
5420 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5422 store_expr (build (TREE_CODE (binary_op), type,
5423 make_tree (type, temp),
5424 TREE_OPERAND (binary_op, 1)),
5427 store_expr (build1 (TREE_CODE (unary_op), type,
5428 make_tree (type, temp)),
5433 /* This is now done in jump.c and is better done there because it
5434 produces shorter register lifetimes. */
5436 /* Check for both possibilities, either constants or variables
5437 in registers (but not the same as the target!). If so, we can
5438 save branches by assigning one, branching, and assigning the other. */
5440 else if (temp && GET_MODE (temp) != BLKmode
5441 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5442 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5443 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5444 && DECL_RTL (TREE_OPERAND (exp, 1))
5445 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5446 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5447 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5448 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5449 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5450 && DECL_RTL (TREE_OPERAND (exp, 2))
5451 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5452 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5454 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5455 temp = gen_reg_rtx (mode);
5456 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5457 jumpifnot (TREE_OPERAND (exp, 0), op0);
5458 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5462 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5463 comparison operator. If we have one of these cases, set the
5464 output to A, branch on A (cse will merge these two references),
5465 then set the output to FOO. */
5467 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5468 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5469 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5470 TREE_OPERAND (exp, 1), 0)
5471 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5472 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5474 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5475 temp = gen_reg_rtx (mode);
5476 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5477 jumpif (TREE_OPERAND (exp, 0), op0);
5478 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5482 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5483 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5484 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5485 TREE_OPERAND (exp, 2), 0)
5486 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5487 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5489 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5490 temp = gen_reg_rtx (mode);
5491 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5492 jumpifnot (TREE_OPERAND (exp, 0), op0);
5493 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5498 op1 = gen_label_rtx ();
5499 jumpifnot (TREE_OPERAND (exp, 0), op0);
5501 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5503 expand_expr (TREE_OPERAND (exp, 1),
5504 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5505 if (cleanups_this_call)
5507 sorry ("aggregate value in COND_EXPR");
5508 cleanups_this_call = 0;
5512 emit_jump_insn (gen_jump (op1));
5516 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5518 expand_expr (TREE_OPERAND (exp, 2),
5519 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5522 if (cleanups_this_call)
5524 sorry ("aggregate value in COND_EXPR");
5525 cleanups_this_call = 0;
5531 cleanups_this_call = old_cleanups;
5537 /* Something needs to be initialized, but we didn't know
5538 where that thing was when building the tree. For example,
5539 it could be the return value of a function, or a parameter
5540 to a function which is laid down on the stack, or a temporary
5541 variable which must be passed by reference.
5543 We guarantee that the expression will either be constructed
5544 or copied into our original target. */
5546 tree slot = TREE_OPERAND (exp, 0);
5549 if (TREE_CODE (slot) != VAR_DECL)
5554 if (DECL_RTL (slot) != 0)
5556 target = DECL_RTL (slot);
5557 /* If we have already expanded the slot, don't do it again. */
5559 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5564 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5565 /* All temp slots at this level must not conflict. */
5566 preserve_temp_slots (target);
5567 DECL_RTL (slot) = target;
5571 /* I bet this needs to be done, and I bet that it needs to
5572 be above, inside the else clause. The reason is
5573 simple: how else is it going to get cleaned up? (mrs)
5575 The reason this probably did not work before, and was
5576 commented out, is that it was re-expanding already
5577 expanded target_exprs (target == 0 and DECL_RTL (slot)
5578 != 0), also cleaning them up many times as well. :-( */
5580 /* Since SLOT is not known to the called function
5581 to belong to its stack frame, we must build an explicit
5582 cleanup. This case occurs when we must build up a reference
5583 to pass the reference as an argument. In this case,
5584 it is very likely that such a reference need not be
5587 if (TREE_OPERAND (exp, 2) == 0)
5588 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5589 if (TREE_OPERAND (exp, 2))
5590 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5591 cleanups_this_call);
5596 /* This case does occur, when expanding a parameter which
5597 needs to be constructed on the stack. The target
5598 is the actual stack address that we want to initialize.
5599 The function we call will perform the cleanup in this case. */
5601 /* If we have already assigned it space, use that space,
5602 not the target that we were passed in, as our target
5603 parameter is only a hint. */
5604 if (DECL_RTL (slot) != 0)
5606 target = DECL_RTL (slot);
5607 /* If we have already expanded the slot, don't do it again. */
5609 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5613 DECL_RTL (slot) = target;
5616 exp1 = TREE_OPERAND (exp, 1);
5617 /* Mark it as expanded. */
5618 TREE_OPERAND (exp, 1) = NULL_TREE;
5620 return expand_expr (exp1, target, tmode, modifier);
5625 tree lhs = TREE_OPERAND (exp, 0);
5626 tree rhs = TREE_OPERAND (exp, 1);
5627 tree noncopied_parts = 0;
5628 tree lhs_type = TREE_TYPE (lhs);
5630 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5631 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5632 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5633 TYPE_NONCOPIED_PARTS (lhs_type));
5634 while (noncopied_parts != 0)
5636 expand_assignment (TREE_VALUE (noncopied_parts),
5637 TREE_PURPOSE (noncopied_parts), 0, 0);
5638 noncopied_parts = TREE_CHAIN (noncopied_parts);
5645 /* If lhs is complex, expand calls in rhs before computing it.
5646 That's so we don't compute a pointer and save it over a call.
5647 If lhs is simple, compute it first so we can give it as a
5648 target if the rhs is just a call. This avoids an extra temp and copy
5649 and that prevents a partial-subsumption which makes bad code.
5650 Actually we could treat component_ref's of vars like vars. */
5652 tree lhs = TREE_OPERAND (exp, 0);
5653 tree rhs = TREE_OPERAND (exp, 1);
5654 tree noncopied_parts = 0;
5655 tree lhs_type = TREE_TYPE (lhs);
5659 if (TREE_CODE (lhs) != VAR_DECL
5660 && TREE_CODE (lhs) != RESULT_DECL
5661 && TREE_CODE (lhs) != PARM_DECL)
5662 preexpand_calls (exp);
5664 /* Check for |= or &= of a bitfield of size one into another bitfield
5665 of size 1. In this case, (unless we need the result of the
5666 assignment) we can do this more efficiently with a
5667 test followed by an assignment, if necessary.
5669 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5670 things change so we do, this code should be enhanced to support it. */
5673 && TREE_CODE (lhs) == COMPONENT_REF
5674 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5675 || TREE_CODE (rhs) == BIT_AND_EXPR)
5676 && TREE_OPERAND (rhs, 0) == lhs
5677 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5678 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5679 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5681 rtx label = gen_label_rtx ();
5683 do_jump (TREE_OPERAND (rhs, 1),
5684 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5685 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5686 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5687 (TREE_CODE (rhs) == BIT_IOR_EXPR
5689 : integer_zero_node)),
5691 do_pending_stack_adjust ();
5696 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5697 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5698 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5699 TYPE_NONCOPIED_PARTS (lhs_type));
5701 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5702 while (noncopied_parts != 0)
5704 expand_assignment (TREE_PURPOSE (noncopied_parts),
5705 TREE_VALUE (noncopied_parts), 0, 0);
5706 noncopied_parts = TREE_CHAIN (noncopied_parts);
5711 case PREINCREMENT_EXPR:
5712 case PREDECREMENT_EXPR:
5713 return expand_increment (exp, 0);
5715 case POSTINCREMENT_EXPR:
5716 case POSTDECREMENT_EXPR:
5717 /* Faster to treat as pre-increment if result is not used. */
5718 return expand_increment (exp, ! ignore);
5721 /* Are we taking the address of a nested function? */
5722 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5723 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5725 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5726 op0 = force_operand (op0, target);
5730 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5731 (modifier == EXPAND_INITIALIZER
5732 ? modifier : EXPAND_CONST_ADDRESS));
5734 /* We would like the object in memory. If it is a constant,
5735 we can have it be statically allocated into memory. For
5736 a non-constant (REG or SUBREG), we need to allocate some
5737 memory and store the value into it. */
5739 if (CONSTANT_P (op0))
5740 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5743 /* These cases happen in Fortran. Is that legitimate?
5744 Should Fortran work in another way?
5745 Do they happen in C? */
5746 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5747 || GET_CODE (op0) == CONCAT)
5749 /* If this object is in a register, it cannot stay there; copy it into a stack temporary in memory. */
5751 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5752 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5754 = assign_stack_temp (inner_mode,
5755 int_size_in_bytes (inner_type), 1);
5757 emit_move_insn (memloc, op0);
5761 if (GET_CODE (op0) != MEM)
5764 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5765 return XEXP (op0, 0);
5766 op0 = force_operand (XEXP (op0, 0), target);
5768 if (flag_force_addr && GET_CODE (op0) != REG)
5769 return force_reg (Pmode, op0);
5772 case ENTRY_VALUE_EXPR:
5775 /* COMPLEX type for Extended Pascal & Fortran */
5778 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5782 /* Get the rtx code of the operands. */
5783 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5784 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5787 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5789 prev = get_last_insn ();
5791 /* Tell flow that the whole of the destination is being set. */
5792 if (GET_CODE (target) == REG)
5793 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5795 /* Move the real (op0) and imaginary (op1) parts to their location. */
5796 emit_move_insn (gen_realpart (mode, target), op0);
5797 emit_move_insn (gen_imagpart (mode, target), op1);
5799 /* Complex construction should appear as a single unit. */
5800 if (GET_CODE (target) != CONCAT)
5801 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5802 each with a separate pseudo as destination.
5803 It's not correct for flow to treat them as a unit. */
5810 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5811 return gen_realpart (mode, op0);
5814 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5815 return gen_imagpart (mode, op0);
5819 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5823 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5826 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5828 prev = get_last_insn ();
5830 /* Tell flow that the whole of the destination is being set. */
5831 if (GET_CODE (target) == REG)
5832 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5834 /* Store the realpart and the negated imagpart to target. */
5835 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5837 imag_t = gen_imagpart (mode, target);
5838 temp = expand_unop (mode, neg_optab,
5839 gen_imagpart (mode, op0), imag_t, 0);
5841 emit_move_insn (imag_t, temp);
5843 /* Conjugate should appear as a single unit */
5844 if (GET_CODE (target) != CONCAT)
5845 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5846 each with a separate pseudo as destination.
5847 It's not correct for flow to treat them as a unit. */
5854 op0 = CONST0_RTX (tmode);
5860 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5863 /* Here to do an ordinary binary operator, generating an instruction
5864 from the optab already placed in `this_optab'. */
5866 preexpand_calls (exp);
5867 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5869 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5870 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5872 temp = expand_binop (mode, this_optab, op0, op1, target,
5873 unsignedp, OPTAB_LIB_WIDEN);
5880 /* Emit bytecode to evaluate the given expression EXP to the stack. */
5882 bc_expand_expr (exp)
5885 enum tree_code code;
5888 struct binary_operator *binoptab;
5889 struct unary_operator *unoptab;
5890 struct increment_operator *incroptab;
5891 struct bc_label *lab, *lab1;
5892 enum bytecode_opcode opcode;
5895 code = TREE_CODE (exp);
5901 if (DECL_RTL (exp) == 0)
5903 error_with_decl (exp, "prior parameter's size depends on `%s'");
5907 bc_load_parmaddr (DECL_RTL (exp));
5908 bc_load_memory (TREE_TYPE (exp), exp);
5914 if (DECL_RTL (exp) == 0)
5918 if (BYTECODE_LABEL (DECL_RTL (exp)))
5919 bc_load_externaddr (DECL_RTL (exp));
5921 bc_load_localaddr (DECL_RTL (exp));
5923 if (TREE_PUBLIC (exp))
5924 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5925 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5927 bc_load_localaddr (DECL_RTL (exp));
5929 bc_load_memory (TREE_TYPE (exp), exp);
5934 #ifdef DEBUG_PRINT_CODE
5935 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5937 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
5939 : TYPE_MODE (TREE_TYPE (exp)))],
5940 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5946 #ifdef DEBUG_PRINT_CODE
5947 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
5949 /* FIX THIS: find a better way to pass real_cst's. -bson */
5950 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
5951 (double) TREE_REAL_CST (exp));
5960 /* We build a call description vector describing the type of
5961 the return value and of the arguments; this call vector,
5962 together with a pointer to a location for the return value
5963 and the base of the argument list, is passed to the low
5964 level machine dependent call subroutine, which is responsible
5965 for putting the arguments wherever real functions expect
5966 them, as well as getting the return value back. */
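/* Schematically (an informal sketch; see the construction below for the
   exact details), the call description vector that ends up in memory is

       { nargs, return-type code, return size,
	 arg type code, arg size, ...  one pair per argument }

   and the interpreter's `call' opcode consumes the function address and
   the address of this vector from the evaluation stack.  */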
5968 tree calldesc = 0, arg;
5972 /* Push the evaluated args on the evaluation stack in reverse
5973 order. Also make an entry for each arg in the calldesc
5974 vector while we're at it. */
5976 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5978 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
5981 bc_expand_expr (TREE_VALUE (arg));
5983 calldesc = tree_cons ((tree) 0,
5984 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
5986 calldesc = tree_cons ((tree) 0,
5987 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
5991 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
5993 /* Allocate a location for the return value and push its
5994 address on the evaluation stack. Also make an entry
5995 at the front of the calldesc for the return value type. */
5997 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
5998 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
5999 bc_load_localaddr (retval);
6001 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6002 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6004 /* Prepend the argument count. */
6005 calldesc = tree_cons ((tree) 0,
6006 build_int_2 (nargs, 0),
6009 /* Push the address of the call description vector on the stack. */
6010 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6011 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6012 build_index_type (build_int_2 (nargs * 2, 0)));
6013 r = output_constant_def (calldesc);
6014 bc_load_externaddr (r);
6016 /* Push the address of the function to be called. */
6017 bc_expand_expr (TREE_OPERAND (exp, 0));
6019 /* Call the function, popping its address and the calldesc vector
6020 address off the evaluation stack in the process. */
6021 bc_emit_instruction (call);
6023 /* Pop the arguments off the stack. */
6024 bc_adjust_stack (nargs);
6026 /* Load the return value onto the stack. */
6027 bc_load_localaddr (retval);
6028 bc_load_memory (type, TREE_OPERAND (exp, 0));
6034 if (!SAVE_EXPR_RTL (exp))
6036 /* First time around: copy to local variable */
6037 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6038 TYPE_ALIGN (TREE_TYPE(exp)));
6039 bc_expand_expr (TREE_OPERAND (exp, 0));
6040 bc_emit_instruction (duplicate);
6042 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6043 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6047 /* Consecutive reference: use saved copy */
6048 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6049 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6054 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6055 how are they handled instead? */
6058 TREE_USED (exp) = 1;
6059 bc_expand_expr (STMT_BODY (exp));
6066 bc_expand_expr (TREE_OPERAND (exp, 0));
6067 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6072 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6077 bc_expand_address (TREE_OPERAND (exp, 0));
6082 bc_expand_expr (TREE_OPERAND (exp, 0));
6083 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6088 bc_expand_expr (bc_canonicalize_array_ref (exp));
6093 bc_expand_component_address (exp);
6095 /* If we have a bitfield, generate a proper load */
6096 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6101 bc_expand_expr (TREE_OPERAND (exp, 0));
6102 bc_emit_instruction (drop);
6103 bc_expand_expr (TREE_OPERAND (exp, 1));
6108 bc_expand_expr (TREE_OPERAND (exp, 0));
6109 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6110 lab = bc_get_bytecode_label ();
6111 bc_emit_bytecode (xjumpifnot);
6112 bc_emit_bytecode_labelref (lab);
6114 #ifdef DEBUG_PRINT_CODE
6115 fputc ('\n', stderr);
6117 bc_expand_expr (TREE_OPERAND (exp, 1));
6118 lab1 = bc_get_bytecode_label ();
6119 bc_emit_bytecode (jump);
6120 bc_emit_bytecode_labelref (lab1);
6122 #ifdef DEBUG_PRINT_CODE
6123 fputc ('\n', stderr);
6126 bc_emit_bytecode_labeldef (lab);
6127 bc_expand_expr (TREE_OPERAND (exp, 2));
6128 bc_emit_bytecode_labeldef (lab1);
6131 case TRUTH_ANDIF_EXPR:
6133 opcode = xjumpifnot;
6136 case TRUTH_ORIF_EXPR:
6143 binoptab = optab_plus_expr;
6148 binoptab = optab_minus_expr;
6153 binoptab = optab_mult_expr;
6156 case TRUNC_DIV_EXPR:
6157 case FLOOR_DIV_EXPR:
6159 case ROUND_DIV_EXPR:
6160 case EXACT_DIV_EXPR:
6162 binoptab = optab_trunc_div_expr;
6165 case TRUNC_MOD_EXPR:
6166 case FLOOR_MOD_EXPR:
6168 case ROUND_MOD_EXPR:
6170 binoptab = optab_trunc_mod_expr;
6173 case FIX_ROUND_EXPR:
6174 case FIX_FLOOR_EXPR:
6176 abort (); /* Not used for C. */
6178 case FIX_TRUNC_EXPR:
6185 abort (); /* FIXME */
6189 binoptab = optab_rdiv_expr;
6194 binoptab = optab_bit_and_expr;
6199 binoptab = optab_bit_ior_expr;
6204 binoptab = optab_bit_xor_expr;
6209 binoptab = optab_lshift_expr;
6214 binoptab = optab_rshift_expr;
6217 case TRUTH_AND_EXPR:
6219 binoptab = optab_truth_and_expr;
6224 binoptab = optab_truth_or_expr;
6229 binoptab = optab_lt_expr;
6234 binoptab = optab_le_expr;
6239 binoptab = optab_ge_expr;
6244 binoptab = optab_gt_expr;
6249 binoptab = optab_eq_expr;
6254 binoptab = optab_ne_expr;
6259 unoptab = optab_negate_expr;
6264 unoptab = optab_bit_not_expr;
6267 case TRUTH_NOT_EXPR:
6269 unoptab = optab_truth_not_expr;
6272 case PREDECREMENT_EXPR:
6274 incroptab = optab_predecrement_expr;
6277 case PREINCREMENT_EXPR:
6279 incroptab = optab_preincrement_expr;
6282 case POSTDECREMENT_EXPR:
6284 incroptab = optab_postdecrement_expr;
6287 case POSTINCREMENT_EXPR:
6289 incroptab = optab_postincrement_expr;
6294 bc_expand_constructor (exp);
6304 tree vars = TREE_OPERAND (exp, 0);
6305 int vars_need_expansion = 0;
6307 /* Need to open a binding contour here because
6308 if there are any cleanups they must be contained here. */
6309 expand_start_bindings (0);
6311 /* Mark the corresponding BLOCK for output. */
6312 if (TREE_OPERAND (exp, 2) != 0)
6313 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6315 /* If VARS have not yet been expanded, expand them now. */
6318 if (DECL_RTL (vars) == 0)
6320 vars_need_expansion = 1;
6321 bc_expand_decl (vars, 0);
6323 bc_expand_decl_init (vars);
6324 vars = TREE_CHAIN (vars);
6327 bc_expand_expr (TREE_OPERAND (exp, 1));
6329 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6339 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6340 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6346 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6352 bc_expand_expr (TREE_OPERAND (exp, 0));
6353 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6354 lab = bc_get_bytecode_label ();
6356 bc_emit_instruction (duplicate);
6357 bc_emit_bytecode (opcode);
6358 bc_emit_bytecode_labelref (lab);
6360 #ifdef DEBUG_PRINT_CODE
6361 fputc ('\n', stderr);
6364 bc_emit_instruction (drop);
6366 bc_expand_expr (TREE_OPERAND (exp, 1));
6367 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6368 bc_emit_bytecode_labeldef (lab);
6374 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6376 /* Push the quantum. */
6377 bc_expand_expr (TREE_OPERAND (exp, 1));
6379 /* Convert it to the lvalue's type. */
6380 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6382 /* Push the address of the lvalue */
6383 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6385 /* Perform actual increment */
6386 bc_expand_increment (incroptab, type);
6390 /* Return the alignment in bits of EXP, a pointer valued expression.
6391 But don't return more than MAX_ALIGN no matter what.
6392 The alignment returned is, by default, the alignment of the thing that
6393 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6395 Otherwise, look at the expression to see if we can do better, i.e., if the
6396 expression is actually pointing at an object whose alignment is tighter. */
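/* A hedged example: for `&buf', where BUF is a decl whose DECL_ALIGN is
   64 bits, the ADDR_EXPR case below reports 64 (capped at MAX_ALIGN);
   for an arbitrary `int *' expression only the alignment of int itself
   can be assumed, e.g. 32 bits on a typical 32-bit target.  */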
6399 get_pointer_alignment (exp, max_align)
6403 unsigned align, inner;
6405 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6408 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6409 align = MIN (align, max_align);
6413 switch (TREE_CODE (exp))
6417 case NON_LVALUE_EXPR:
6418 exp = TREE_OPERAND (exp, 0);
6419 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6421 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6422 inner = MIN (inner, max_align);
6423 align = MAX (align, inner);
6427 /* If sum of pointer + int, restrict our maximum alignment to that
6428 imposed by the integer. If not, we can't do any better than
6430 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6433 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6438 exp = TREE_OPERAND (exp, 0);
6442 /* See what we are pointing at and look at its alignment. */
6443 exp = TREE_OPERAND (exp, 0);
6444 if (TREE_CODE (exp) == FUNCTION_DECL)
6445 align = MAX (align, FUNCTION_BOUNDARY);
6446 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6447 align = MAX (align, DECL_ALIGN (exp));
6448 #ifdef CONSTANT_ALIGNMENT
6449 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6450 align = CONSTANT_ALIGNMENT (exp, align);
6452 return MIN (align, max_align);
6460 /* Return the tree node and offset if a given argument corresponds to
6461 a string constant. */
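/* Illustrative behavior (hedged, since the PLUS_EXPR operands are often
   machine-generated trees): for the address of "hello" the STRING_CST is
   returned with *PTR_OFFSET set to integer_zero_node; for the sum of
   such an address and a constant, the same STRING_CST is returned with
   that constant as the offset; anything else yields 0.  */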
6464 string_constant (arg, ptr_offset)
6470 if (TREE_CODE (arg) == ADDR_EXPR
6471 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6473 *ptr_offset = integer_zero_node;
6474 return TREE_OPERAND (arg, 0);
6476 else if (TREE_CODE (arg) == PLUS_EXPR)
6478 tree arg0 = TREE_OPERAND (arg, 0);
6479 tree arg1 = TREE_OPERAND (arg, 1);
6484 if (TREE_CODE (arg0) == ADDR_EXPR
6485 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6488 return TREE_OPERAND (arg0, 0);
6490 else if (TREE_CODE (arg1) == ADDR_EXPR
6491 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6494 return TREE_OPERAND (arg1, 0);
6501 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6502 way, because it could contain a zero byte in the middle.
6503 TREE_STRING_LENGTH is the size of the character array, not the string.
6505 Unfortunately, string_constant can't access the values of const char
6506 arrays with initializers, so neither can we here. */
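/* Some hedged examples: c_strlen applied to "hello" yields
   size_int (5); applied to "foo\0bar" (with a known zero offset) it
   yields size_int (3), stopping at the embedded null; applied to a
   pointer that is not a recognizable string constant it yields 0,
   meaning the length is unknown.  */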
6516 src = string_constant (src, &offset_node);
6519 max = TREE_STRING_LENGTH (src);
6520 ptr = TREE_STRING_POINTER (src);
6521 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6523 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6524 compute the offset to the following null if we don't know where to
6525 start searching for it. */
6527 for (i = 0; i < max; i++)
6530 /* We don't know the starting offset, but we do know that the string
6531 has no internal zero bytes. We can assume that the offset falls
6532 within the bounds of the string; otherwise, the programmer deserves
6533 what he gets. Subtract the offset from the length of the string,
6535 /* This would perhaps not be valid if we were dealing with named
6536 arrays in addition to literal string constants. */
6537 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6540 /* We have a known offset into the string. Start searching there for
6541 a null character. */
6542 if (offset_node == 0)
6546 /* Did we get a long long offset? If so, punt. */
6547 if (TREE_INT_CST_HIGH (offset_node) != 0)
6549 offset = TREE_INT_CST_LOW (offset_node);
6551 /* If the offset is known to be out of bounds, warn, and call strlen at
6553 if (offset < 0 || offset > max)
6555 warning ("offset outside bounds of constant string");
6558 /* Use strlen to search for the first zero byte. Since any strings
6559 constructed with build_string will have nulls appended, we win even
6560 if we get handed something like (char[4])"abcd".
6562 Since OFFSET is our starting index into the string, no further
6563 calculation is needed. */
6564 return size_int (strlen (ptr + offset));
6567 /* Expand an expression EXP that calls a built-in function,
6568 with result going to TARGET if that's convenient
6569 (and in mode MODE if that's convenient).
6570 SUBTARGET may be used as the target for computing one of EXP's operands.
6571 IGNORE is nonzero if the value is to be ignored. */
6574 expand_builtin (exp, target, subtarget, mode, ignore)
6578 enum machine_mode mode;
6581 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6582 tree arglist = TREE_OPERAND (exp, 1);
6585 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6586 optab builtin_optab;
6588 switch (DECL_FUNCTION_CODE (fndecl))
6593 /* build_function_call changes these into ABS_EXPR. */
6598 case BUILT_IN_FSQRT:
6599 /* If not optimizing, call the library function. */
6604 /* Arg could be wrong type if user redeclared this fcn wrong. */
6605 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6606 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6608 /* Stabilize and compute the argument. */
6609 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6610 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6612 exp = copy_node (exp);
6613 arglist = copy_node (arglist);
6614 TREE_OPERAND (exp, 1) = arglist;
6615 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6617 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6619 /* Make a suitable register to place result in. */
6620 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6625 switch (DECL_FUNCTION_CODE (fndecl))
6628 builtin_optab = sin_optab; break;
6630 builtin_optab = cos_optab; break;
6631 case BUILT_IN_FSQRT:
6632 builtin_optab = sqrt_optab; break;
6637 /* Compute into TARGET.
6638 Set TARGET to wherever the result comes back. */
6639 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6640 builtin_optab, op0, target, 0);
6642 /* If we were unable to expand via the builtin, stop the
6643 sequence (without outputting the insns) and break, causing
6644 a call to the library function. */
6651 /* Check the results by default. But if flag_fast_math is turned on,
6652 then assume sqrt will always be called with valid arguments. */
6654 if (! flag_fast_math)
6656 /* Don't define the builtin FP instructions
6657 if your machine is not IEEE. */
6658 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6661 lab1 = gen_label_rtx ();
6663 /* Test the result; if it is NaN, set errno=EDOM because
6664 the argument was not in the domain. */
6665 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6666 emit_jump_insn (gen_beq (lab1));
6670 #ifdef GEN_ERRNO_RTX
6671 rtx errno_rtx = GEN_ERRNO_RTX;
6674 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6677 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6680 /* We can't set errno=EDOM directly; let the library call do it.
6681 Pop the arguments right away in case the call gets deleted. */
6683 expand_call (exp, target, 0);
6690 /* Output the entire sequence. */
6691 insns = get_insns ();
6697 /* __builtin_apply_args returns a block of memory allocated on
6698 the stack into which are stored the arg pointer, structure
6699 value address, static chain, and all the registers that might
6700 possibly be used in performing a function call. The code is
6701 moved to the start of the function so the incoming values are
6703 case BUILT_IN_APPLY_ARGS:
6704 /* Don't do __builtin_apply_args more than once in a function.
6705 Save the result of the first call and reuse it. */
6706 if (apply_args_value != 0)
6707 return apply_args_value;
6709 /* When this function is called, it means that registers must be
6710 saved on entry to this function. So we migrate the
6711 call to the first insn of this function. */
6716 temp = expand_builtin_apply_args ();
6720 apply_args_value = temp;
6722 /* Put the sequence after the NOTE that starts the function.
6723 If this is inside a SEQUENCE, make the outer-level insn
6724 chain current, so the code is placed at the start of the
6726 push_topmost_sequence ();
6727 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6728 pop_topmost_sequence ();
6732 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6733 FUNCTION with a copy of the parameters described by
6734 ARGUMENTS, and ARGSIZE. It returns a block of memory
6735 allocated on the stack into which are stored all the registers
6736 that might possibly be used for returning the result of a
6737 function. ARGUMENTS is the value returned by
6738 __builtin_apply_args. ARGSIZE is the number of bytes of
6739 arguments that must be copied. ??? How should this value be
6740 computed? We'll also need a safe worst case value for varargs
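      /* A hedged sketch of how the three builtins combine in a
	 forwarding function; the function pointer TARGET_FN and the
	 64-byte argument-size guess are assumptions for the example
	 only:

		void forward ()
		{
		  void *args = __builtin_apply_args ();
		  void *result = __builtin_apply (target_fn, args, 64);
		  __builtin_return (result);
		}
      */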
6742 case BUILT_IN_APPLY:
6744 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6745 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6746 || TREE_CHAIN (arglist) == 0
6747 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6748 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6749 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6757 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6758 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6760 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6763 /* __builtin_return (RESULT) causes the function to return the
6764 value described by RESULT. RESULT is address of the block of
6765 memory returned by __builtin_apply. */
6766 case BUILT_IN_RETURN:
6768 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6769 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6770 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6771 NULL_RTX, VOIDmode, 0));
6774 case BUILT_IN_SAVEREGS:
6775 /* Don't do __builtin_saveregs more than once in a function.
6776 Save the result of the first call and reuse it. */
6777 if (saveregs_value != 0)
6778 return saveregs_value;
6780 /* When this function is called, it means that registers must be
6781 saved on entry to this function. So we migrate the
6782 call to the first insn of this function. */
6785 rtx valreg, saved_valreg;
6787 /* Now really call the function. `expand_call' does not call
6788 expand_builtin, so there is no danger of infinite recursion here. */
6791 #ifdef EXPAND_BUILTIN_SAVEREGS
6792 /* Do whatever the machine needs done in this case. */
6793 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6795 /* The register where the function returns its value
6796 is likely to have something else in it, such as an argument.
6797 So preserve that register around the call. */
6798 if (value_mode != VOIDmode)
6800 valreg = hard_libcall_value (value_mode);
6801 saved_valreg = gen_reg_rtx (value_mode);
6802 emit_move_insn (saved_valreg, valreg);
6805 /* Generate the call, putting the value in a pseudo. */
6806 temp = expand_call (exp, target, ignore);
6808 if (value_mode != VOIDmode)
6809 emit_move_insn (valreg, saved_valreg);
6815 saveregs_value = temp;
6817 /* Put the sequence after the NOTE that starts the function.
6818 If this is inside a SEQUENCE, make the outer-level insn
6819 chain current, so the code is placed at the start of the
6821 push_topmost_sequence ();
6822 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6823 pop_topmost_sequence ();
6827 /* __builtin_args_info (N) returns word N of the arg space info
6828 for the current function. The number and meanings of words
6829 is controlled by the definition of CUMULATIVE_ARGS. */
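      /* For example, `__builtin_args_info (0)' expands to the constant
	 first word of the current function's CUMULATIVE_ARGS record;
	 what that word means is entirely machine-dependent.  */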
6830 case BUILT_IN_ARGS_INFO:
6832 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6834 int *word_ptr = (int *) &current_function_args_info;
6835 tree type, elts, result;
6837 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6838 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6839 __FILE__, __LINE__);
6843 tree arg = TREE_VALUE (arglist);
6844 if (TREE_CODE (arg) != INTEGER_CST)
6845 error ("argument of `__builtin_args_info' must be constant");
6848 int wordnum = TREE_INT_CST_LOW (arg);
6850 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6851 error ("argument of `__builtin_args_info' out of range");
6853 return GEN_INT (word_ptr[wordnum]);
6857 error ("missing argument in `__builtin_args_info'");
6862 for (i = 0; i < nwords; i++)
6863 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6865 type = build_array_type (integer_type_node,
6866 build_index_type (build_int_2 (nwords, 0)));
6867 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6868 TREE_CONSTANT (result) = 1;
6869 TREE_STATIC (result) = 1;
6870 result = build (INDIRECT_REF, build_pointer_type (type), result);
6871 TREE_CONSTANT (result) = 1;
6872 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6876 /* Return the address of the first anonymous stack arg. */
6877 case BUILT_IN_NEXT_ARG:
6879 tree fntype = TREE_TYPE (current_function_decl);
6880 if (!(TYPE_ARG_TYPES (fntype) != 0
6881 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6882 != void_type_node)))
6884 error ("`va_start' used in function with fixed args");
6889 return expand_binop (Pmode, add_optab,
6890 current_function_internal_arg_pointer,
6891 current_function_arg_offset_rtx,
6892 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6894 case BUILT_IN_CLASSIFY_TYPE:
6897 tree type = TREE_TYPE (TREE_VALUE (arglist));
6898 enum tree_code code = TREE_CODE (type);
6899 if (code == VOID_TYPE)
6900 return GEN_INT (void_type_class);
6901 if (code == INTEGER_TYPE)
6902 return GEN_INT (integer_type_class);
6903 if (code == CHAR_TYPE)
6904 return GEN_INT (char_type_class);
6905 if (code == ENUMERAL_TYPE)
6906 return GEN_INT (enumeral_type_class);
6907 if (code == BOOLEAN_TYPE)
6908 return GEN_INT (boolean_type_class);
6909 if (code == POINTER_TYPE)
6910 return GEN_INT (pointer_type_class);
6911 if (code == REFERENCE_TYPE)
6912 return GEN_INT (reference_type_class);
6913 if (code == OFFSET_TYPE)
6914 return GEN_INT (offset_type_class);
6915 if (code == REAL_TYPE)
6916 return GEN_INT (real_type_class);
6917 if (code == COMPLEX_TYPE)
6918 return GEN_INT (complex_type_class);
6919 if (code == FUNCTION_TYPE)
6920 return GEN_INT (function_type_class);
6921 if (code == METHOD_TYPE)
6922 return GEN_INT (method_type_class);
6923 if (code == RECORD_TYPE)
6924 return GEN_INT (record_type_class);
6925 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6926 return GEN_INT (union_type_class);
6927 if (code == ARRAY_TYPE)
6928 return GEN_INT (array_type_class);
6929 if (code == STRING_TYPE)
6930 return GEN_INT (string_type_class);
6931 if (code == SET_TYPE)
6932 return GEN_INT (set_type_class);
6933 if (code == FILE_TYPE)
6934 return GEN_INT (file_type_class);
6935 if (code == LANG_TYPE)
6936 return GEN_INT (lang_type_class);
6938 return GEN_INT (no_type_class);
6940 case BUILT_IN_CONSTANT_P:
6944 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6945 ? const1_rtx : const0_rtx);
6947 case BUILT_IN_FRAME_ADDRESS:
6948 /* The argument must be a nonnegative integer constant.
6949 It counts the number of frames to scan up the stack.
6950 The value is the address of that frame. */
6951 case BUILT_IN_RETURN_ADDRESS:
6952 /* The argument must be a nonnegative integer constant.
6953 It counts the number of frames to scan up the stack.
6954 The value is the return address saved in that frame. */
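      /* Thus `__builtin_return_address (0)' asks for the current
	 function's own return address and `__builtin_frame_address (1)'
	 for the caller's frame address; the loop further down walks the
	 dynamic chain that many times.  */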
6956 /* Warning about missing arg was already issued. */
6958 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6960 error ("invalid arg to `__builtin_return_address'");
6963 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6965 error ("invalid arg to `__builtin_return_address'");
6970 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6971 rtx tem = frame_pointer_rtx;
6974 /* Some machines need special handling before we can access arbitrary
6975 frames. For example, on the sparc, we must first flush all
6976 register windows to the stack. */
6977 #ifdef SETUP_FRAME_ADDRESSES
6978 SETUP_FRAME_ADDRESSES ();
6981 /* On the sparc, the return address is not in the frame, it is
6982 in a register. There is no way to access it off of the current
6983 frame pointer, but it can be accessed off the previous frame
6984 pointer by reading the value from the register window save
6986 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6987 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6991 /* Scan back COUNT frames to the specified frame. */
6992 for (i = 0; i < count; i++)
6994 /* Assume the dynamic chain pointer is in the word that
6995 the frame address points to, unless otherwise specified. */
6996 #ifdef DYNAMIC_CHAIN_ADDRESS
6997 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6999 tem = memory_address (Pmode, tem);
7000 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7003 /* For __builtin_frame_address, return what we've got. */
7004 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7007 /* For __builtin_return_address,
7008 get the return address from that frame. */
7009 #ifdef RETURN_ADDR_RTX
7010 return RETURN_ADDR_RTX (count, tem);
7012 tem = memory_address (Pmode,
7013 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7014 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7018 case BUILT_IN_ALLOCA:
7020 /* Arg could be non-integer if user redeclared this fcn wrong. */
7021 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7023 current_function_calls_alloca = 1;
7024 /* Compute the argument. */
7025 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7027 /* Allocate the desired space. */
7028 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7030 /* Record the new stack level for nonlocal gotos. */
7031 if (nonlocal_goto_handler_slot != 0)
7032 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7036 /* If not optimizing, call the library function. */
7041 /* Arg could be non-integer if user redeclared this fcn wrong. */
7042 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7045 /* Compute the argument. */
7046 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7047 /* Compute ffs, into TARGET if possible.
7048 Set TARGET to wherever the result comes back. */
7049 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7050 ffs_optab, op0, target, 1);
7055 case BUILT_IN_STRLEN:
7056 /* If not optimizing, call the library function. */
7061 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7062 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7066 tree src = TREE_VALUE (arglist);
7067 tree len = c_strlen (src);
7070 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7072 rtx result, src_rtx, char_rtx;
7073 enum machine_mode insn_mode = value_mode, char_mode;
7074 enum insn_code icode;
7076 /* If the length is known, just return it. */
7078 return expand_expr (len, target, mode, 0);
7080 /* If SRC is not a pointer type, don't do this operation inline. */
7084 /* Call a function if we can't compute strlen in the right mode. */
7086 while (insn_mode != VOIDmode)
7088 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7089 if (icode != CODE_FOR_nothing)
7092 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7094 if (insn_mode == VOIDmode)
7097 /* Make a place to write the result of the instruction. */
7100 && GET_CODE (result) == REG
7101 && GET_MODE (result) == insn_mode
7102 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7103 result = gen_reg_rtx (insn_mode);
7105 /* Make sure the operands are acceptable to the predicates. */
7107 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7108 result = gen_reg_rtx (insn_mode);
7110 src_rtx = memory_address (BLKmode,
7111 expand_expr (src, NULL_RTX, Pmode,
7113 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7114 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7116 char_rtx = const0_rtx;
7117 char_mode = insn_operand_mode[(int)icode][2];
7118 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7119 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7121 emit_insn (GEN_FCN (icode) (result,
7122 gen_rtx (MEM, BLKmode, src_rtx),
7123 char_rtx, GEN_INT (align)));
7125 /* Return the value in the proper mode for this function. */
7126 if (GET_MODE (result) == value_mode)
7128 else if (target != 0)
7130 convert_move (target, result, 0);
7134 return convert_to_mode (value_mode, result, 0);
7137 case BUILT_IN_STRCPY:
7138 /* If not optimizing, call the library function. */
7143 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7144 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7145 || TREE_CHAIN (arglist) == 0
7146 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7150 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7155 len = size_binop (PLUS_EXPR, len, integer_one_node);
7157 chainon (arglist, build_tree_list (NULL_TREE, len));
7161 case BUILT_IN_MEMCPY:
7162 /* If not optimizing, call the library function. */
7167 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7168 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7169 || TREE_CHAIN (arglist) == 0
7170 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7171 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7172 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7176 tree dest = TREE_VALUE (arglist);
7177 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7178 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7181 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7183 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7184 rtx dest_rtx, dest_mem, src_mem;
7186 /* If either SRC or DEST is not a pointer type, don't do
7187 this operation in-line. */
7188 if (src_align == 0 || dest_align == 0)
7190 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7191 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7195 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7196 dest_mem = gen_rtx (MEM, BLKmode,
7197 memory_address (BLKmode, dest_rtx));
7198 src_mem = gen_rtx (MEM, BLKmode,
7199 memory_address (BLKmode,
7200 expand_expr (src, NULL_RTX,
7204 /* Copy word part most expediently. */
7205 emit_block_move (dest_mem, src_mem,
7206 expand_expr (len, NULL_RTX, VOIDmode, 0),
7207 MIN (src_align, dest_align));
7211 /* These comparison functions need an instruction that returns an actual
7212 index. An ordinary compare that just sets the condition codes
7214 #ifdef HAVE_cmpstrsi
7215 case BUILT_IN_STRCMP:
7216 /* If not optimizing, call the library function. */
7221 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7222 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7223 || TREE_CHAIN (arglist) == 0
7224 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7226 else if (!HAVE_cmpstrsi)
7229 tree arg1 = TREE_VALUE (arglist);
7230 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7234 len = c_strlen (arg1);
7236 len = size_binop (PLUS_EXPR, integer_one_node, len);
7237 len2 = c_strlen (arg2);
7239 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7241 /* If we don't have a constant length for the first, use the length
7242 of the second, if we know it. We don't require a constant for
7243 this case; some cost analysis could be done if both are available
7244 but neither is constant. For now, assume they're equally cheap.
7246 If both strings have constant lengths, use the smaller. This
7247 could arise if optimization results in strcpy being called with
7248 two fixed strings, or if the code was machine-generated. We should
7249 add some code to the `memcmp' handler below to deal with such
7250 situations, someday. */
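      /* For instance, in `strcmp (s, "ab")' only the second length is
	 known, so 3 (the literal plus its terminating null) is used as
	 the comparison length; were both operands literals, the smaller
	 of the two lengths would be chosen below.  */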
7251 if (!len || TREE_CODE (len) != INTEGER_CST)
7258 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7260 if (tree_int_cst_lt (len2, len))
7264 chainon (arglist, build_tree_list (NULL_TREE, len));
7268 case BUILT_IN_MEMCMP:
7269 /* If not optimizing, call the library function. */
7274 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7276 || TREE_CHAIN (arglist) == 0
7277 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7278 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7279 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7281 else if (!HAVE_cmpstrsi)
7284 tree arg1 = TREE_VALUE (arglist);
7285 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7286 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7290 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7292 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7293 enum machine_mode insn_mode
7294 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7296 /* If we don't have POINTER_TYPE, call the function. */
7297 if (arg1_align == 0 || arg2_align == 0)
7299 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7300 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7304 /* Make a place to write the result of the instruction. */
7307 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7308 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7309 result = gen_reg_rtx (insn_mode);
7311 emit_insn (gen_cmpstrsi (result,
7312 gen_rtx (MEM, BLKmode,
7313 expand_expr (arg1, NULL_RTX, Pmode,
7315 gen_rtx (MEM, BLKmode,
7316 expand_expr (arg2, NULL_RTX, Pmode,
7318 expand_expr (len, NULL_RTX, VOIDmode, 0),
7319 GEN_INT (MIN (arg1_align, arg2_align))));
7321 /* Return the value in the proper mode for this function. */
7322 mode = TYPE_MODE (TREE_TYPE (exp));
7323 if (GET_MODE (result) == mode)
7325 else if (target != 0)
7327 convert_move (target, result, 0);
7331 return convert_to_mode (mode, result, 0);
7334 case BUILT_IN_STRCMP:
7335 case BUILT_IN_MEMCMP:
7339 default: /* just do library call, if unknown builtin */
7340 error ("built-in function `%s' not currently supported",
7341 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7344 /* The switch statement above can drop through to cause the function
7345 to be called normally. */
7347 return expand_call (exp, target, ignore);
7350 /* Built-in functions to perform an untyped call and return. */
7352 /* For each register that may be used for calling a function, this
7353 gives a mode used to copy the register's value. VOIDmode indicates
7354 the register is not used for calling a function. If the machine
7355 has register windows, this gives only the outbound registers.
7356 INCOMING_REGNO gives the corresponding inbound register. */
7357 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7359 /* For each register that may be used for returning values, this gives
7360 a mode used to copy the register's value. VOIDmode indicates the
7361 register is not used for returning values. If the machine has
7362 register windows, this gives only the outbound registers.
7363 INCOMING_REGNO gives the corresponding inbound register. */
7364 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7366 /* For each register that may be used for calling a function, this
7367 gives the offset of that register into the block returned by
7368 __builtin_apply_args. 0 indicates that the register is not
7369 used for calling a function. */
7370 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7372 /* Return the offset of register REGNO into the block returned by
7373 __builtin_apply_args. This is not declared static, since it is
7374 needed in objc-act.c. */
7377 apply_args_register_offset (regno)
7382 /* Arguments are always put in outgoing registers (in the argument
7383 block) if such make sense. */
7384 #ifdef OUTGOING_REGNO
7385 regno = OUTGOING_REGNO(regno);
7387 return apply_args_reg_offset[regno];
7390 /* Return the size required for the block returned by __builtin_apply_args,
7391 and initialize apply_args_mode. */
7396 static int size = -1;
7398 enum machine_mode mode;
7400 /* The values computed by this function never change. */
7403 /* The first value is the incoming arg-pointer. */
7404 size = GET_MODE_SIZE (Pmode);
7406 /* The second value is the structure value address unless this is
7407 passed as an "invisible" first argument. */
7408 if (struct_value_rtx)
7409 size += GET_MODE_SIZE (Pmode);
7411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7412 if (FUNCTION_ARG_REGNO_P (regno))
7414 /* Search for the proper mode for copying this register's
7415 value. I'm not sure this is right, but it works so far. */
7416 enum machine_mode best_mode = VOIDmode;
7418 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7420 mode = GET_MODE_WIDER_MODE (mode))
7421 if (HARD_REGNO_MODE_OK (regno, mode)
7422 && HARD_REGNO_NREGS (regno, mode) == 1)
7425 if (best_mode == VOIDmode)
7426 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7428 mode = GET_MODE_WIDER_MODE (mode))
7429 if (HARD_REGNO_MODE_OK (regno, mode)
7430 && (mov_optab->handlers[(int) mode].insn_code
7431 != CODE_FOR_nothing))
7435 if (mode == VOIDmode)
7438 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7439 if (size % align != 0)
7440 size = CEIL (size, align) * align;
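	  /* For example, a running size of 6 with a 4-byte alignment is
	     rounded up to CEIL (6, 4) * 4 == 8 before this register's
	     slot is assigned.  */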
7441 apply_args_reg_offset[regno] = size;
7442 size += GET_MODE_SIZE (mode);
7443 apply_args_mode[regno] = mode;
7447 apply_args_mode[regno] = VOIDmode;
7448 apply_args_reg_offset[regno] = 0;
7454 /* Return the size required for the block returned by __builtin_apply,
7455 and initialize apply_result_mode. */
7458 apply_result_size ()
7460 static int size = -1;
7462 enum machine_mode mode;
7464 /* The values computed by this function never change. */
7469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7470 if (FUNCTION_VALUE_REGNO_P (regno))
7472 /* Search for the proper mode for copying this register's
7473 value. I'm not sure this is right, but it works so far. */
7474 enum machine_mode best_mode = VOIDmode;
7476 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7478 mode = GET_MODE_WIDER_MODE (mode))
7479 if (HARD_REGNO_MODE_OK (regno, mode))
7482 if (best_mode == VOIDmode)
7483 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7485 mode = GET_MODE_WIDER_MODE (mode))
7486 if (HARD_REGNO_MODE_OK (regno, mode)
7487 && (mov_optab->handlers[(int) mode].insn_code
7488 != CODE_FOR_nothing))
7492 if (mode == VOIDmode)
7495 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7496 if (size % align != 0)
7497 size = CEIL (size, align) * align;
7498 size += GET_MODE_SIZE (mode);
7499 apply_result_mode[regno] = mode;
7502 apply_result_mode[regno] = VOIDmode;
7504 /* Allow targets that use untyped_call and untyped_return to override
7505 the size so that machine-specific information can be stored here. */
7506 #ifdef APPLY_RESULT_SIZE
7507 size = APPLY_RESULT_SIZE;
7513 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7514 /* Create a vector describing the result block RESULT. If SAVEP is true,
7515 the result block is used to save the values; otherwise it is used to
7516 restore the values. */
7519 result_vector (savep, result)
7523 int regno, size, align, nelts;
7524 enum machine_mode mode;
7526 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7529 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7530 if ((mode = apply_result_mode[regno]) != VOIDmode)
7532 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7533 if (size % align != 0)
7534 size = CEIL (size, align) * align;
7535 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7536 mem = change_address (result, mode,
7537 plus_constant (XEXP (result, 0), size));
7538 savevec[nelts++] = (savep
7539 ? gen_rtx (SET, VOIDmode, mem, reg)
7540 : gen_rtx (SET, VOIDmode, reg, mem));
7541 size += GET_MODE_SIZE (mode);
7543 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7545 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7547 /* Save the state required to perform an untyped call with the same
7548 arguments as were passed to the current function. */
7551 expand_builtin_apply_args ()
7554 int size, align, regno;
7555 enum machine_mode mode;
7557 /* Create a block where the arg-pointer, structure value address,
7558 and argument registers can be saved. */
7559 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7561 /* Walk past the arg-pointer and structure value address. */
7562 size = GET_MODE_SIZE (Pmode);
7563 if (struct_value_rtx)
7564 size += GET_MODE_SIZE (Pmode);
7566 /* Save each register used in calling a function to the block. */
7567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7568 if ((mode = apply_args_mode[regno]) != VOIDmode)
7570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7571 if (size % align != 0)
7572 size = CEIL (size, align) * align;
7573 emit_move_insn (change_address (registers, mode,
7574 plus_constant (XEXP (registers, 0),
7576 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7577 size += GET_MODE_SIZE (mode);
7580 /* Save the arg pointer to the block. */
7581 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7582 copy_to_reg (virtual_incoming_args_rtx));
7583 size = GET_MODE_SIZE (Pmode);
7585 /* Save the structure value address unless this is passed as an
7586 "invisible" first argument. */
7587 if (struct_value_incoming_rtx)
7589 emit_move_insn (change_address (registers, Pmode,
7590 plus_constant (XEXP (registers, 0),
7592 copy_to_reg (struct_value_incoming_rtx));
7593 size += GET_MODE_SIZE (Pmode);
7596 /* Return the address of the block. */
7597 return copy_addr_to_reg (XEXP (registers, 0));
7600 /* Perform an untyped call and save the state required to perform an
7601 untyped return of whatever value was returned by the given function. */
7604 expand_builtin_apply (function, arguments, argsize)
7605 rtx function, arguments, argsize;
7607 int size, align, regno;
7608 enum machine_mode mode;
7609 rtx incoming_args, result, reg, dest, call_insn;
7610 rtx old_stack_level = 0;
7613 /* Create a block where the return registers can be saved. */
7614 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7616 /* ??? The argsize value should be adjusted here. */
7618 /* Fetch the arg pointer from the ARGUMENTS block. */
7619 incoming_args = gen_reg_rtx (Pmode);
7620 emit_move_insn (incoming_args,
7621 gen_rtx (MEM, Pmode, arguments));
7622 #ifndef STACK_GROWS_DOWNWARD
7623 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7624 incoming_args, 0, OPTAB_LIB_WIDEN);
7627 /* Perform postincrements before actually calling the function. */
7630 /* Push a new argument block and copy the arguments. */
7631 do_pending_stack_adjust ();
7632 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7634 /* Push a block of memory onto the stack to store the memory arguments.
7635 Save the address in a register, and copy the memory arguments. ??? I
7636 haven't figured out how the calling convention macros affect this,
7637 but it's likely that the source and/or destination addresses in
7638 the block copy will need updating in machine specific ways. */
7639 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7640 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7641 gen_rtx (MEM, BLKmode, incoming_args),
7643 PARM_BOUNDARY / BITS_PER_UNIT);
7645 /* Refer to the argument block. */
7647 arguments = gen_rtx (MEM, BLKmode, arguments);
7649 /* Walk past the arg-pointer and structure value address. */
7650 size = GET_MODE_SIZE (Pmode);
7651 if (struct_value_rtx)
7652 size += GET_MODE_SIZE (Pmode);
7654 /* Restore each of the registers previously saved. Make USE insns
7655 for each of these registers for use in making the call. */
7656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7657 if ((mode = apply_args_mode[regno]) != VOIDmode)
7659 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7660 if (size % align != 0)
7661 size = CEIL (size, align) * align;
7662 reg = gen_rtx (REG, mode, regno);
7663 emit_move_insn (reg,
7664 change_address (arguments, mode,
7665 plus_constant (XEXP (arguments, 0),
7668 push_to_sequence (use_insns);
7669 emit_insn (gen_rtx (USE, VOIDmode, reg));
7670 use_insns = get_insns ();
7672 size += GET_MODE_SIZE (mode);
7675 /* Restore the structure value address unless this is passed as an
7676 "invisible" first argument. */
7677 size = GET_MODE_SIZE (Pmode);
7678 if (struct_value_rtx)
7680 rtx value = gen_reg_rtx (Pmode);
7681 emit_move_insn (value,
7682 change_address (arguments, Pmode,
7683 plus_constant (XEXP (arguments, 0),
7685 emit_move_insn (struct_value_rtx, value);
7686 if (GET_CODE (struct_value_rtx) == REG)
7688 push_to_sequence (use_insns);
7689 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7690 use_insns = get_insns ();
7693 size += GET_MODE_SIZE (Pmode);
7696 /* All arguments and registers used for the call are set up by now! */
7697 function = prepare_call_address (function, NULL_TREE, &use_insns);
7699 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7700 and we don't want to load it into a register as an optimization,
7701 because prepare_call_address already did it if it should be done. */
7702 if (GET_CODE (function) != SYMBOL_REF)
7703 function = memory_address (FUNCTION_MODE, function);
7705 /* Generate the actual call instruction and save the return value. */
7706 #ifdef HAVE_untyped_call
7707 if (HAVE_untyped_call)
7708 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7709 result, result_vector (1, result)));
7712 #ifdef HAVE_call_value
7713 if (HAVE_call_value)
7717 /* Locate the unique return register. It is not possible to
7718 express a call that sets more than one return register using
7719 call_value; use untyped_call for that. In fact, untyped_call
7720 only needs to save the return registers in the given block. */
7721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7722 if ((mode = apply_result_mode[regno]) != VOIDmode)
7725 abort (); /* HAVE_untyped_call required. */
7726 valreg = gen_rtx (REG, mode, regno);
7729 emit_call_insn (gen_call_value (valreg,
7730 gen_rtx (MEM, FUNCTION_MODE, function),
7731 const0_rtx, NULL_RTX, const0_rtx));
7733 emit_move_insn (change_address (result, GET_MODE (valreg),
7741 /* Find the CALL insn we just emitted and write the USE insns before it. */
7742 for (call_insn = get_last_insn ();
7743 call_insn && GET_CODE (call_insn) != CALL_INSN;
7744 call_insn = PREV_INSN (call_insn))
7750 /* Put the USE insns before the CALL. */
7751 emit_insns_before (use_insns, call_insn);
7753 /* Restore the stack. */
7754 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7756 /* Return the address of the result block. */
7757 return copy_addr_to_reg (XEXP (result, 0));
7760 /* Perform an untyped return. */
7763 expand_builtin_return (result)
7766 int size, align, regno;
7767 enum machine_mode mode;
7771 apply_result_size ();
7772 result = gen_rtx (MEM, BLKmode, result);
7774 #ifdef HAVE_untyped_return
7775 if (HAVE_untyped_return)
7777 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7783 /* Restore the return value and note that each value is used. */
7785 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7786 if ((mode = apply_result_mode[regno]) != VOIDmode)
7788 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7789 if (size % align != 0)
7790 size = CEIL (size, align) * align;
7791 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7792 emit_move_insn (reg,
7793 change_address (result, mode,
7794 plus_constant (XEXP (result, 0),
7797 push_to_sequence (use_insns);
7798 emit_insn (gen_rtx (USE, VOIDmode, reg));
7799 use_insns = get_insns ();
7801 size += GET_MODE_SIZE (mode);
7804 /* Put the USE insns before the return. */
7805 emit_insns (use_insns);
7807 /* Return whatever values were restored by jumping directly to the end
7809 expand_null_return ();
7812 /* Expand code for a post- or pre- increment or decrement
7813 and return the RTX for the result.
7814 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
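/* For instance, expanding `x++' (POST == 1) yields rtl for the old
   value of X, whereas `++x' (POST == 0) yields the incremented value;
   in both cases X itself ends up updated.  */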
7817 expand_increment (exp, post)
7821 register rtx op0, op1;
7822 register rtx temp, value;
7823 register tree incremented = TREE_OPERAND (exp, 0);
7824 optab this_optab = add_optab;
7826 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7827 int op0_is_copy = 0;
7828 int single_insn = 0;
7829 /* 1 means we can't store into OP0 directly,
7830 because it is a subreg narrower than a word,
7831 and we don't dare clobber the rest of the word. */
7834 if (output_bytecode)
7836 bc_expand_expr (exp);
7840 /* Stabilize any component ref that might need to be
7841 evaluated more than once below. */
7843 || TREE_CODE (incremented) == BIT_FIELD_REF
7844 || (TREE_CODE (incremented) == COMPONENT_REF
7845 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7846 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7847 incremented = stabilize_reference (incremented);
7848 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7849 ones into save exprs so that they don't accidentally get evaluated
7850 more than once by the code below. */
7851 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7852 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7853 incremented = save_expr (incremented);
7855 /* Compute the operands as RTX.
7856 Note whether OP0 is the actual lvalue or a copy of it:
7857 I believe it is a copy iff it is a register or subreg
7858 and insns were generated in computing it. */
7860 temp = get_last_insn ();
7861 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7863 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7864 in place but instead must do sign- or zero-extension during assignment,
7865 so we copy it into a new register and let the code below use it as
7868 Note that we can safely modify this SUBREG since it is known not to be
7869 shared (it was made by the expand_expr call above). */
7871 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7872 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7873 else if (GET_CODE (op0) == SUBREG
7874 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7877 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7878 && temp != get_last_insn ());
7879 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7881 /* Decide whether incrementing or decrementing. */
7882 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7883 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7884 this_optab = sub_optab;
7886 /* Convert decrement by a constant into a negative increment. */
7887 if (this_optab == sub_optab
7888 && GET_CODE (op1) == CONST_INT)
7890 op1 = GEN_INT (- INTVAL (op1));
7891 this_optab = add_optab;
7894 /* For a preincrement, see if we can do this with a single instruction. */
7897 icode = (int) this_optab->handlers[(int) mode].insn_code;
7898 if (icode != (int) CODE_FOR_nothing
7899 /* Make sure that OP0 is valid for operands 0 and 1
7900 of the insn we want to queue. */
7901 && (*insn_operand_predicate[icode][0]) (op0, mode)
7902 && (*insn_operand_predicate[icode][1]) (op0, mode)
7903 && (*insn_operand_predicate[icode][2]) (op1, mode))
7907 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7908 then we cannot just increment OP0. We must therefore contrive to
7909 increment the original value. Then, for postincrement, we can return
7910 OP0 since it is a copy of the old value. For preincrement, expand here
7911 unless we can do it with a single insn.
7913 Likewise if storing directly into OP0 would clobber high bits
7914 we need to preserve (bad_subreg). */
7915 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
7917 /* This is the easiest way to increment the value wherever it is.
7918 Problems with multiple evaluation of INCREMENTED are prevented
7919 because either (1) it is a component_ref or preincrement,
7920 in which case it was stabilized above, or (2) it is an array_ref
7921 with constant index in an array in a register, which is
7922 safe to reevaluate. */
7923 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7924 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7925 ? MINUS_EXPR : PLUS_EXPR),
7928 TREE_OPERAND (exp, 1));
7929 temp = expand_assignment (incremented, newexp, ! post, 0);
7930 return post ? op0 : temp;
7935 /* We have a true reference to the value in OP0.
7936 If there is an insn to add or subtract in this mode, queue it.
7937 Queueing the increment insn avoids the register shuffling
7938 that often results if we must increment now and first save
7939 the old value for subsequent use. */
7941 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
7942 op0 = stabilize (op0);
7945 icode = (int) this_optab->handlers[(int) mode].insn_code;
7946 if (icode != (int) CODE_FOR_nothing
7947 /* Make sure that OP0 is valid for operands 0 and 1
7948 of the insn we want to queue. */
7949 && (*insn_operand_predicate[icode][0]) (op0, mode)
7950 && (*insn_operand_predicate[icode][1]) (op0, mode))
7952 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
7953 op1 = force_reg (mode, op1);
7955 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
7959 /* Preincrement, or we can't increment with one simple insn. */
7961 /* Save a copy of the value before inc or dec, to return it later. */
7962 temp = value = copy_to_reg (op0);
7964 /* Arrange to return the incremented value. */
7965 /* Copy the rtx because expand_binop will protect from the queue,
7966 and the results of that would be invalid for us to return
7967 if our caller does emit_queue before using our result. */
7968 temp = copy_rtx (value = op0);
7970 /* Increment however we can. */
7971 op1 = expand_binop (mode, this_optab, value, op1, op0,
7972 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
7973 /* Make sure the value is stored into OP0. */
7975 emit_move_insn (op0, op1);
7980 /* Expand all function calls contained within EXP, innermost ones first.
7981 But don't look within expressions that have sequence points.
7982 For each CALL_EXPR, record the rtx for its value
7983 in the CALL_EXPR_RTL field. */
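/* For example, in `a = f (x) + g (y)' the calls to F and G are expanded
   here first and their results recorded in CALL_EXPR_RTL, so that
   expanding the PLUS_EXPR later merely picks up those rtx values rather
   than emitting the calls in the middle of the addition.  */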
7986 preexpand_calls (exp)
7989 register int nops, i;
7990 int type = TREE_CODE_CLASS (TREE_CODE (exp));
7992 if (! do_preexpand_calls)
7995 /* Only expressions and references can contain calls. */
7997 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8000 switch (TREE_CODE (exp))
8003 /* Do nothing if already expanded. */
8004 if (CALL_EXPR_RTL (exp) != 0)
8007 /* Do nothing to built-in functions. */
8008 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8009 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8010 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8011 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8016 case TRUTH_ANDIF_EXPR:
8017 case TRUTH_ORIF_EXPR:
8018 /* If we find one of these, then we can be sure
8019 the adjust will be done for it (since it makes jumps).
8020 Do it now, so that if this is inside an argument
8021 of a function, we don't get the stack adjustment
8022 after some other args have already been pushed. */
8023 do_pending_stack_adjust ();
8028 case WITH_CLEANUP_EXPR:
8032 if (SAVE_EXPR_RTL (exp) != 0)
8036 nops = tree_code_length[(int) TREE_CODE (exp)];
8037 for (i = 0; i < nops; i++)
8038 if (TREE_OPERAND (exp, i) != 0)
8040 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8041 if (type == 'e' || type == '<' || type == '1' || type == '2'
8043 preexpand_calls (TREE_OPERAND (exp, i));
8047 /* At the start of a function, record that we have no previously-pushed
8048 arguments waiting to be popped. */
8051 init_pending_stack_adjust ()
8053 pending_stack_adjust = 0;
8056 /* When exiting from function, if safe, clear out any pending stack adjust
8057 so the adjustment won't get done. */
8060 clear_pending_stack_adjust ()
8062 #ifdef EXIT_IGNORE_STACK
8063 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8064 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8065 && ! flag_inline_functions)
8066 pending_stack_adjust = 0;
8070 /* Pop any previously-pushed arguments that have not been popped yet. */
8073 do_pending_stack_adjust ()
8075 if (inhibit_defer_pop == 0)
8077 if (pending_stack_adjust != 0)
8078 adjust_stack (GEN_INT (pending_stack_adjust));
8079 pending_stack_adjust = 0;
8083 /* Expand all cleanups up to OLD_CLEANUPS.
8084 Needed here, and also for language-dependent calls. */
8087 expand_cleanups_to (old_cleanups)
8090 while (cleanups_this_call != old_cleanups)
8092 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8093 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8097 /* Expand conditional expressions. */
8099 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8100 LABEL is an rtx of code CODE_LABEL, in this function and all the
8104 jumpifnot (exp, label)
8108 do_jump (exp, label, NULL_RTX);
8111 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8118 do_jump (exp, NULL_RTX, label);
8121 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8122 the result is zero, or IF_TRUE_LABEL if the result is one.
8123 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8124 meaning fall through in that case.
8126 do_jump always does any pending stack adjust except when it does not
8127 actually perform a jump. An example where there is no jump
8128 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8130 This function is responsible for optimizing cases such as
8131 &&, || and comparison operators in EXP. */
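/* As an illustration, for `if (a && b)' the TRUTH_ANDIF_EXPR case below
   jumps straight to the false label as soon as A evaluates to zero and
   only then tests B; no boolean value is ever materialized in a
   register.  */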
8134 do_jump (exp, if_false_label, if_true_label)
8136 rtx if_false_label, if_true_label;
8138 register enum tree_code code = TREE_CODE (exp);
8139 /* Some cases need to create a label to jump to
8140 in order to properly fall through.
8141 These cases set DROP_THROUGH_LABEL nonzero. */
8142 rtx drop_through_label = 0;
8156 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8162 /* This is not true with #pragma weak */
8164 /* The address of something can never be zero. */
8166 emit_jump (if_true_label);
8171 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8172 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8173 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8176 /* If we are narrowing the operand, we have to do the compare in the
8178 if ((TYPE_PRECISION (TREE_TYPE (exp))
8179 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8181 case NON_LVALUE_EXPR:
8182 case REFERENCE_EXPR:
8187 /* These cannot change zero->non-zero or vice versa. */
8188 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8192 /* This is never less insns than evaluating the PLUS_EXPR followed by
8193 a test and can be longer if the test is eliminated. */
8195 /* Reduce to minus. */
8196 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8197 TREE_OPERAND (exp, 0),
8198 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8199 TREE_OPERAND (exp, 1))));
8200 /* Process as MINUS. */
8204 /* Non-zero iff operands of minus differ. */
8205 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8206 TREE_OPERAND (exp, 0),
8207 TREE_OPERAND (exp, 1)),
8212 /* If we are AND'ing with a small constant, do this comparison in the
8213 smallest type that fits. If the machine doesn't have comparisons
8214 that small, it will be converted back to the wider comparison.
8215 This helps if we are testing the sign bit of a narrower object.
8216 combine can't do this for us because it can't know whether a
8217 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
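      /* E.g. a test of `flags & 0x80' on an int FLAGS can be narrowed to
	 a QImode comparison where such a compare exists; QImode is
	 assumed here purely for illustration.  */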
8219 if (! SLOW_BYTE_ACCESS
8220 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8221 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8222 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8223 && (type = type_for_size (i + 1, 1)) != 0
8224 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8225 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8226 != CODE_FOR_nothing))
8228 do_jump (convert (type, exp), if_false_label, if_true_label);
8233 case TRUTH_NOT_EXPR:
8234 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8237 case TRUTH_ANDIF_EXPR:
8238 if (if_false_label == 0)
8239 if_false_label = drop_through_label = gen_label_rtx ();
8240 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8241 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8244 case TRUTH_ORIF_EXPR:
8245 if (if_true_label == 0)
8246 if_true_label = drop_through_label = gen_label_rtx ();
8247 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8248 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8253 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8257 do_pending_stack_adjust ();
8258 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8265 int bitsize, bitpos, unsignedp;
8266 enum machine_mode mode;
tree type;
tree offset;
int volatilep = 0;
8271 /* Get description of this reference. We don't actually care
8272 about the underlying object here. */
8273 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8274 &mode, &unsignedp, &volatilep);
8276 type = type_for_size (bitsize, unsignedp);
8277 if (! SLOW_BYTE_ACCESS
8278 && type != 0 && bitsize >= 0
8279 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8280 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8281 != CODE_FOR_nothing))
8283 do_jump (convert (type, exp), if_false_label, if_true_label);
8290 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8291 if (integer_onep (TREE_OPERAND (exp, 1))
8292 && integer_zerop (TREE_OPERAND (exp, 2)))
8293 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8295 else if (integer_zerop (TREE_OPERAND (exp, 1))
8296 && integer_onep (TREE_OPERAND (exp, 2)))
8297 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8301 register rtx label1 = gen_label_rtx ();
8302 drop_through_label = gen_label_rtx ();
8303 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8304 /* Now the THEN-expression. */
8305 do_jump (TREE_OPERAND (exp, 1),
8306 if_false_label ? if_false_label : drop_through_label,
8307 if_true_label ? if_true_label : drop_through_label);
8308 /* In case the do_jump just above never jumps. */
8309 do_pending_stack_adjust ();
8310 emit_label (label1);
8311 /* Now the ELSE-expression. */
8312 do_jump (TREE_OPERAND (exp, 2),
8313 if_false_label ? if_false_label : drop_through_label,
8314 if_true_label ? if_true_label : drop_through_label);
8319 if (integer_zerop (TREE_OPERAND (exp, 1)))
8320 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8321 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8324 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8325 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8326 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8327 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8329 comparison = compare (exp, EQ, EQ);
8333 if (integer_zerop (TREE_OPERAND (exp, 1)))
8334 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8335 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8338 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8339 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8340 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8341 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8343 comparison = compare (exp, NE, NE);
8347 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8349 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8350 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8352 comparison = compare (exp, LT, LTU);
8356 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8358 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8359 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8361 comparison = compare (exp, LE, LEU);
8365 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8367 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8368 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8370 comparison = compare (exp, GT, GTU);
8374 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8376 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8377 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8379 comparison = compare (exp, GE, GEU);
8384 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8386 /* This is not needed any more and causes poor code since it causes
8387 comparisons and tests from non-SI objects to have different code sequences.  */
8389 /* Copy to register to avoid generating bad insns by cse
8390 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8391 if (!cse_not_expected && GET_CODE (temp) == MEM)
8392 temp = copy_to_reg (temp);
8394 do_pending_stack_adjust ();
8395 if (GET_CODE (temp) == CONST_INT)
8396 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8397 else if (GET_CODE (temp) == LABEL_REF)
8398 comparison = const_true_rtx;
8399 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8400 && !can_compare_p (GET_MODE (temp)))
8401 /* Note swapping the labels gives us not-equal. */
8402 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8403 else if (GET_MODE (temp) != VOIDmode)
8404 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8405 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8406 GET_MODE (temp), NULL_RTX, 0);
8411 /* Do any postincrements in the expression that was tested. */
8414 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8415 straight into a conditional jump instruction as the jump condition.
8416 Otherwise, all the work has been done already. */
8418 if (comparison == const_true_rtx)
8421 emit_jump (if_true_label);
8423 else if (comparison == const0_rtx)
8426 emit_jump (if_false_label);
8428 else if (comparison)
8429 do_jump_for_compare (comparison, if_false_label, if_true_label);
8431 if (drop_through_label)
8433 /* If do_jump produces code that might be jumped around,
8434 do any stack adjusts from that code, before the place
8435 where control merges in. */
8436 do_pending_stack_adjust ();
8437 emit_label (drop_through_label);
8441 /* Given a comparison expression EXP for values too wide to be compared
8442 with one insn, test the comparison and jump to the appropriate label.
8443 The code of EXP is ignored; we always test GT if SWAP is 0,
8444 and LT if SWAP is 1. */
8447 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8450 rtx if_false_label, if_true_label;
8452 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8453 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8454 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8455 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8456 rtx drop_through_label = 0;
8457 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8460 if (! if_true_label || ! if_false_label)
8461 drop_through_label = gen_label_rtx ();
8462 if (! if_true_label)
8463 if_true_label = drop_through_label;
8464 if (! if_false_label)
8465 if_false_label = drop_through_label;
8467 /* Compare a word at a time, high order first. */
8468 for (i = 0; i < nwords; i++)
8471 rtx op0_word, op1_word;
8473 if (WORDS_BIG_ENDIAN)
8475 op0_word = operand_subword_force (op0, i, mode);
8476 op1_word = operand_subword_force (op1, i, mode);
8480 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8481 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8484 /* All but high-order word must be compared as unsigned. */
8485 comp = compare_from_rtx (op0_word, op1_word,
8486 (unsignedp || i > 0) ? GTU : GT,
8487 unsignedp, word_mode, NULL_RTX, 0);
8488 if (comp == const_true_rtx)
8489 emit_jump (if_true_label);
8490 else if (comp != const0_rtx)
8491 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8493 /* Consider lower words only if these are equal. */
8494 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8496 if (comp == const_true_rtx)
8497 emit_jump (if_false_label);
8498 else if (comp != const0_rtx)
8499 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8503 emit_jump (if_false_label);
8504 if (drop_through_label)
8505 emit_label (drop_through_label);
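/* Illustrative sketch, not part of the original code: the word-at-a-time
   ordering comparison performed above, written in plain C for values stored
   as arrays of words, most significant word first.  In the rtl version the
   most significant word is compared signed when the overall comparison is
   signed; every other word is compared unsigned.  */
#if 0
static int
multiword_greater_than (const unsigned long *op0, const unsigned long *op1,
                        int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (op0[i] > op1[i])
        return 1;               /* this word decides: greater */
      if (op0[i] != op1[i])
        return 0;               /* this word decides: less */
      /* Words equal so far; go on to the next lower word.  */
    }
  return 0;                     /* all words equal, hence not greater */
}
#endif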
8508 /* Compare OP0 with OP1, word at a time, in mode MODE.
8509 UNSIGNEDP says to do unsigned comparison.
8510 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8513 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8514 enum machine_mode mode;
8517 rtx if_false_label, if_true_label;
8519 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8520 rtx drop_through_label = 0;
8523 if (! if_true_label || ! if_false_label)
8524 drop_through_label = gen_label_rtx ();
8525 if (! if_true_label)
8526 if_true_label = drop_through_label;
8527 if (! if_false_label)
8528 if_false_label = drop_through_label;
8530 /* Compare a word at a time, high order first. */
8531 for (i = 0; i < nwords; i++)
8534 rtx op0_word, op1_word;
8536 if (WORDS_BIG_ENDIAN)
8538 op0_word = operand_subword_force (op0, i, mode);
8539 op1_word = operand_subword_force (op1, i, mode);
8543 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8544 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8547 /* All but high-order word must be compared as unsigned. */
8548 comp = compare_from_rtx (op0_word, op1_word,
8549 (unsignedp || i > 0) ? GTU : GT,
8550 unsignedp, word_mode, NULL_RTX, 0);
8551 if (comp == const_true_rtx)
8552 emit_jump (if_true_label);
8553 else if (comp != const0_rtx)
8554 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8556 /* Consider lower words only if these are equal. */
8557 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8559 if (comp == const_true_rtx)
8560 emit_jump (if_false_label);
8561 else if (comp != const0_rtx)
8562 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8566 emit_jump (if_false_label);
8567 if (drop_through_label)
8568 emit_label (drop_through_label);
8571 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8572 with one insn, test the comparison and jump to the appropriate label. */
8575 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8577 rtx if_false_label, if_true_label;
8579 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8580 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8581 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8582 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8584 rtx drop_through_label = 0;
8586 if (! if_false_label)
8587 drop_through_label = if_false_label = gen_label_rtx ();
8589 for (i = 0; i < nwords; i++)
8591 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8592 operand_subword_force (op1, i, mode),
8593 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8594 word_mode, NULL_RTX, 0);
8595 if (comp == const_true_rtx)
8596 emit_jump (if_false_label);
8597 else if (comp != const0_rtx)
8598 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8602 emit_jump (if_true_label);
8603 if (drop_through_label)
8604 emit_label (drop_through_label);
8607 /* Jump according to whether OP0 is 0.
8608 We assume that OP0 has an integer mode that is too wide
8609 for the available compare insns. */
8612 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8614 rtx if_false_label, if_true_label;
8616 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8618 rtx drop_through_label = 0;
8620 if (! if_false_label)
8621 drop_through_label = if_false_label = gen_label_rtx ();
8623 for (i = 0; i < nwords; i++)
8625 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8627 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8628 if (comp == const_true_rtx)
8629 emit_jump (if_false_label);
8630 else if (comp != const0_rtx)
8631 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8635 emit_jump (if_true_label);
8636 if (drop_through_label)
8637 emit_label (drop_through_label);
8640 /* Given a comparison expression in rtl form, output conditional branches to
8641 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8644 do_jump_for_compare (comparison, if_false_label, if_true_label)
8645 rtx comparison, if_false_label, if_true_label;
8649 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8650 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8655 emit_jump (if_false_label);
8657 else if (if_false_label)
8660 rtx prev = get_last_insn ();
8664 prev = PREV_INSN (prev);
8666 /* Output the branch with the opposite condition. Then try to invert
8667 what is generated. If more than one insn is a branch, or if the
8668 branch is not the last insn written, abort.  If we can't invert
8669 the branch, make a true label, redirect this jump to it,
8670 emit a jump to the false label, and define the true label.  */
8672 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8673 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8677 /* Here we get the insn before what was just emitted.
8678 On some machines, emitting the branch can discard
8679 the previous compare insn and emit a replacement. */
8681 /* If there's only one preceding insn... */
8682 insn = get_insns ();
8684 insn = NEXT_INSN (prev);
8686 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8687 if (GET_CODE (insn) == JUMP_INSN)
8694 if (branch != get_last_insn ())
8697 if (! invert_jump (branch, if_false_label))
8699 if_true_label = gen_label_rtx ();
8700 redirect_jump (branch, if_true_label);
8701 emit_jump (if_false_label);
8702 emit_label (if_true_label);
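/* Illustrative example, not part of the original code: when the branch
   cannot be inverted, the fallback above leaves a sequence of the shape

       b<cond>  Ltrue          ; branch on the original condition
       jmp      IF_FALSE_LABEL
   Ltrue:

   so control reaches IF_FALSE_LABEL exactly when the condition is false.  */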
8707 /* Generate code for a comparison expression EXP
8708 (including code to compute the values to be compared)
8709 and set (CC0) according to the result.
8710 SIGNED_CODE should be the rtx operation for this comparison for
8711 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8713 We force a stack adjustment unless there are currently
8714 things pushed on the stack that aren't yet used. */
8717 compare (exp, signed_code, unsigned_code)
8719 enum rtx_code signed_code, unsigned_code;
8722 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8724 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8725 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8726 register enum machine_mode mode = TYPE_MODE (type);
8727 int unsignedp = TREE_UNSIGNED (type);
8728 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8730 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8732 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8733 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8736 /* Like compare but expects the values to compare as two rtx's.
8737 The decision as to signed or unsigned comparison must be made by the caller.
8739 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
8742 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8743 size of MODE should be used. */
8746 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8747 register rtx op0, op1;
8750 enum machine_mode mode;
8756 /* If one operand is constant, make it the second one. Only do this
8757 if the other operand is not constant as well. */
8759 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8760 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8765 code = swap_condition (code);
8770 op0 = force_not_mem (op0);
8771 op1 = force_not_mem (op1);
8774 do_pending_stack_adjust ();
8776 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8777 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8781 /* There's no need to do this now that combine.c can eliminate lots of
8782 sign extensions. This can be less efficient in certain cases on other
8785 /* If this is a signed equality comparison, we can do it as an
8786 unsigned comparison since zero-extension is cheaper than sign
8787 extension and comparisons with zero are done as unsigned. This is
8788 the case even on machines that can do fast sign extension, since
8789 zero-extension is easier to combine with other operations than
8790 sign-extension is. If we are comparing against a constant, we must
8791 convert it to what it would look like unsigned. */
8792 if ((code == EQ || code == NE) && ! unsignedp
8793 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8795 if (GET_CODE (op1) == CONST_INT
8796 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8797 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
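/* Illustrative example, not part of the original code: comparing a QImode
   value against the constant -1 as an unsigned equality turns the constant
   into 0xff, which is what -1 looks like in that mode.  */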
8802 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8804 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
8807 /* Generate code to calculate EXP using a store-flag instruction
8808 and return an rtx for the result. EXP is either a comparison
8809 or a TRUTH_NOT_EXPR whose operand is a comparison.
8811 If TARGET is nonzero, store the result there if convenient.
8813 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
8816 Return zero if there is no suitable set-flag instruction
8817 available on this machine.
8819 Once expand_expr has been called on the arguments of the comparison,
8820 we are committed to doing the store flag, since it is not safe to
8821 re-evaluate the expression. We emit the store-flag insn by calling
8822 emit_store_flag, but only expand the arguments if we have a reason
8823 to believe that emit_store_flag will be successful. If we think that
8824 it will, but it isn't, we have to simulate the store-flag with a
8825 set/jump/set sequence. */
8828 do_store_flag (exp, target, mode, only_cheap)
8831 enum machine_mode mode;
8835 tree arg0, arg1, type;
8837 enum machine_mode operand_mode;
8841 enum insn_code icode;
8842 rtx subtarget = target;
8843 rtx result, label, pattern, jump_pat;
8845 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8846 result at the end. We can't simply invert the test since it would
8847 have already been inverted if it were valid. This case occurs for
8848 some floating-point comparisons. */
8850 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8851 invert = 1, exp = TREE_OPERAND (exp, 0);
8853 arg0 = TREE_OPERAND (exp, 0);
8854 arg1 = TREE_OPERAND (exp, 1);
8855 type = TREE_TYPE (arg0);
8856 operand_mode = TYPE_MODE (type);
8857 unsignedp = TREE_UNSIGNED (type);
8859 /* We won't bother with BLKmode store-flag operations because it would mean
8860 passing a lot of information to emit_store_flag. */
8861 if (operand_mode == BLKmode)
8867 /* Get the rtx comparison code to use. We know that EXP is a comparison
8868 operation of some type. Some comparisons against 1 and -1 can be
8869 converted to comparisons with zero. Do so here so that the tests
8870 below will be aware that we have a comparison with zero. These
8871 tests will not catch constants in the first operand, but constants
8872 are rarely passed as the first operand. */
8874 switch (TREE_CODE (exp))
8883 if (integer_onep (arg1))
8884 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8886 code = unsignedp ? LTU : LT;
8889 if (! unsignedp && integer_all_onesp (arg1))
8890 arg1 = integer_zero_node, code = LT;
8892 code = unsignedp ? LEU : LE;
8895 if (! unsignedp && integer_all_onesp (arg1))
8896 arg1 = integer_zero_node, code = GE;
8898 code = unsignedp ? GTU : GT;
8901 if (integer_onep (arg1))
8902 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8904 code = unsignedp ? GEU : GE;
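/* Illustrative examples, not part of the original code: the cases above turn
   `x < 1' into `x <= 0' and `x >= 1' into `x > 0'; for signed operands they
   also turn `x <= -1' into `x < 0' and `x > -1' into `x >= 0'.  */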
8910 /* Put a constant second. */
8911 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8913 tem = arg0; arg0 = arg1; arg1 = tem;
8914 code = swap_condition (code);
8917 /* If this is an equality or inequality test of a single bit, we can
8918 do this by shifting the bit being tested to the low-order bit and
8919 masking the result with the constant 1. If the condition was EQ,
8920 we xor it with 1. This does not require an scc insn and is faster
8921 than an scc insn even if we have it. */
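/* Illustrative example, not part of the original code: the test
   `(x & 8) != 0' is expanded as `(x >> 3) & 1'; for `(x & 8) == 0' the
   result is additionally XORed with 1.  No store-flag (scc) insn is needed
   for either form.  */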
8923 if ((code == NE || code == EQ)
8924 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8925 && integer_pow2p (TREE_OPERAND (arg0, 1))
8926 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
8928 tree inner = TREE_OPERAND (arg0, 0);
8929 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
8930 NULL_RTX, VOIDmode, 0)));
8933 /* If INNER is a right shift of a constant and it plus BITNUM does
8934 not overflow, adjust BITNUM and INNER. */
8936 if (TREE_CODE (inner) == RSHIFT_EXPR
8937 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
8938 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
8939 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
8940 < TYPE_PRECISION (type)))
8942 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
8943 inner = TREE_OPERAND (inner, 0);
8946 /* If we are going to be able to omit the AND below, we must do our
8947 operations as unsigned. If we must use the AND, we have a choice.
8948 Normally unsigned is faster, but for some machines signed is. */
8949 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
8950 #ifdef LOAD_EXTEND_OP
8951 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
8957 if (subtarget == 0 || GET_CODE (subtarget) != REG
8958 || GET_MODE (subtarget) != operand_mode
8959 || ! safe_from_p (subtarget, inner))
8962 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
8965 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
8966 size_int (bitnum), subtarget, ops_unsignedp);
8968 if (GET_MODE (op0) != mode)
8969 op0 = convert_to_mode (mode, op0, ops_unsignedp);
8971 if ((code == EQ && ! invert) || (code == NE && invert))
8972 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
8973 ops_unsignedp, OPTAB_LIB_WIDEN);
8975 /* Put the AND last so it can combine with more things. */
8976 if (bitnum != TYPE_PRECISION (type) - 1)
8977 op0 = expand_and (op0, const1_rtx, subtarget);
8982 /* Now see if we are likely to be able to do this. Return if not. */
8983 if (! can_compare_p (operand_mode))
8985 icode = setcc_gen_code[(int) code];
8986 if (icode == CODE_FOR_nothing
8987 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
8989 /* We can only do this if it is one of the special cases that
8990 can be handled without an scc insn. */
8991 if ((code == LT && integer_zerop (arg1))
8992 || (! only_cheap && code == GE && integer_zerop (arg1)))
8994 else if (BRANCH_COST >= 0
8995 && ! only_cheap && (code == NE || code == EQ)
8996 && TREE_CODE (type) != REAL_TYPE
8997 && ((abs_optab->handlers[(int) operand_mode].insn_code
8998 != CODE_FOR_nothing)
8999 || (ffs_optab->handlers[(int) operand_mode].insn_code
9000 != CODE_FOR_nothing)))
9006 preexpand_calls (exp);
9007 if (subtarget == 0 || GET_CODE (subtarget) != REG
9008 || GET_MODE (subtarget) != operand_mode
9009 || ! safe_from_p (subtarget, arg1))
9012 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9013 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9016 target = gen_reg_rtx (mode);
9018 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9019 because, if emit_store_flag does anything, it will succeed and
9020 OP0 and OP1 will not be used subsequently.  */
9022 result = emit_store_flag (target, code,
9023 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9024 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9025 operand_mode, unsignedp, 1);
9030 result = expand_binop (mode, xor_optab, result, const1_rtx,
9031 result, 0, OPTAB_LIB_WIDEN);
9035 /* If this failed, we have to do this with set/compare/jump/set code. */
9036 if (target == 0 || GET_CODE (target) != REG
9037 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9038 target = gen_reg_rtx (GET_MODE (target));
9040 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9041 result = compare_from_rtx (op0, op1, code, unsignedp,
9042 operand_mode, NULL_RTX, 0);
9043 if (GET_CODE (result) == CONST_INT)
9044 return (((result == const0_rtx && ! invert)
9045 || (result != const0_rtx && invert))
9046 ? const0_rtx : const1_rtx);
9048 label = gen_label_rtx ();
9049 if (bcc_gen_fctn[(int) code] == 0)
9052 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9053 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
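/* Illustrative shape of the set/compare/jump/set fallback above, not part
   of the original code (with INVERT clear):

       target = 1
       b<cond>  Lskip          ; branch if the comparison holds
       target = 0
   Lskip:                                                              */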
9059 /* Generate a tablejump instruction (used for switch statements). */
9061 #ifdef HAVE_tablejump
9063 /* INDEX is the value being switched on, with the lowest value
9064 in the table already subtracted.
9065 MODE is its expected mode (needed if INDEX is constant).
9066 RANGE is the length of the jump table.
9067 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9069 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9070 index value is out of range. */
9073 do_tablejump (index, mode, range, table_label, default_label)
9074 rtx index, range, table_label, default_label;
9075 enum machine_mode mode;
9077 register rtx temp, vector;
9079 /* Do an unsigned comparison (in the proper mode) between the index
9080 expression and the value which represents the length of the range.
9081 Since we just finished subtracting the lower bound of the range
9082 from the index expression, this comparison allows us to simultaneously
9083 check that the original index expression value is both greater than
9084 or equal to the minimum value of the range and less than or equal to
9085 the maximum value of the range. */
9087 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
9088 emit_jump_insn (gen_bltu (default_label));
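/* Illustrative sketch, not part of the original code: in plain C the single
   unsigned comparison emitted above subsumes both bound checks, because an
   index below the lower bound wraps around to a large unsigned value once
   the lower bound has been subtracted.  */
#if 0
static int
index_in_range (unsigned long idx, unsigned long low, unsigned long range)
{
  /* Equivalent to (idx >= low && idx <= low + range), assuming
     range == high - low.  */
  return idx - low <= range;
}
#endif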
9090 /* If index is in range, it must fit in Pmode.
9091 Convert to Pmode so we can index with it. */
9093 index = convert_to_mode (Pmode, index, 1);
9095 /* Don't let a MEM slip through, because then INDEX that comes
9096 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9097 and break_out_memory_refs will go to work on it and mess it up. */
9098 #ifdef PIC_CASE_VECTOR_ADDRESS
9099 if (flag_pic && GET_CODE (index) != REG)
9100 index = copy_to_mode_reg (Pmode, index);
9103 /* If flag_force_addr were to affect this address
9104 it could interfere with the tricky assumptions made
9105 about addresses that contain label-refs,
9106 which may be valid only very near the tablejump itself. */
9107 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9108 GET_MODE_SIZE, because this indicates how large insns are. The other
9109 uses should all be Pmode, because they are addresses. This code
9110 could fail if addresses and insns are not the same size. */
9111 index = gen_rtx (PLUS, Pmode,
9112 gen_rtx (MULT, Pmode, index,
9113 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9114 gen_rtx (LABEL_REF, Pmode, table_label));
9115 #ifdef PIC_CASE_VECTOR_ADDRESS
9117 index = PIC_CASE_VECTOR_ADDRESS (index);
9120 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9121 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9122 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9123 RTX_UNCHANGING_P (vector) = 1;
9124 convert_move (temp, vector, 0);
9126 emit_jump_insn (gen_tablejump (temp, table_label));
9128 #ifndef CASE_VECTOR_PC_RELATIVE
9129 /* If we are generating PIC code or if the table is PC-relative, the
9130 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9136 #endif /* HAVE_tablejump */
9139 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9140 to that value is on the top of the stack. The resulting type is TYPE, and
9141 the source declaration is DECL. */
9144 bc_load_memory (type, decl)
9147 enum bytecode_opcode opcode;
9150 /* Bit fields are special. We only know about signed and
9151 unsigned ints, and enums.  The latter are treated the same as ints.  */
9154 if (DECL_BIT_FIELD (decl))
9155 if (TREE_CODE (type) == ENUMERAL_TYPE
9156 || TREE_CODE (type) == INTEGER_TYPE)
9157 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9161 /* See corresponding comment in bc_store_memory(). */
9162 if (TYPE_MODE (type) == BLKmode
9163 || TYPE_MODE (type) == VOIDmode)
9166 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9168 if (opcode == neverneverland)
9171 bc_emit_bytecode (opcode);
9173 #ifdef DEBUG_PRINT_CODE
9174 fputc ('\n', stderr);
9179 /* Store the contents of the second stack slot to the address in the
9180 top stack slot. DECL is the declaration of the destination and is used
9181 to determine whether we're dealing with a bitfield. */
9184 bc_store_memory (type, decl)
9187 enum bytecode_opcode opcode;
9190 if (DECL_BIT_FIELD (decl))
9192 if (TREE_CODE (type) == ENUMERAL_TYPE
9193 || TREE_CODE (type) == INTEGER_TYPE)
9199 if (TYPE_MODE (type) == BLKmode)
9201 /* Copy structure. This expands to a block copy instruction, storeBLK.
9202 In addition to the arguments expected by the other store instructions,
9203 it also expects a type size (SImode) on top of the stack, which is the
9204 structure size in size units (usually bytes).  The first two arguments
9205 are already on the stack, so we just put the size on level 1.  For some
9206 other languages the size may be variable, which is why we don't encode it
9207 as a storeBLK literal, but rather treat it as a full-fledged expression.  */
9209 bc_expand_expr (TYPE_SIZE (type));
9213 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9215 if (opcode == neverneverland)
9218 bc_emit_bytecode (opcode);
9220 #ifdef DEBUG_PRINT_CODE
9221 fputc ('\n', stderr);
9226 /* Allocate local stack space sufficient to hold a value of the given
9227 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9228 integral power of 2. A special case is locals of type VOID, which
9229 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9230 remapped into the corresponding attribute of SI. */
9233 bc_allocate_local (size, alignment)
9234 int size, alignment;
9242 /* Normalize size and alignment */
if (size == 0)
9244 size = UNITS_PER_WORD;
9246 if (alignment < BITS_PER_UNIT)
9247 byte_alignment = 1 << (INT_ALIGN - 1);
9250 byte_alignment = alignment / BITS_PER_UNIT;
9252 if (local_vars_size & (byte_alignment - 1))
9253 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9255 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9256 local_vars_size += size;
9262 /* Allocate variable-sized local array. Variable-sized arrays are
9263 actually pointers to the address in memory where they are stored. */
9266 bc_allocate_variable_array (size)
9270 const int ptralign = (1 << (PTR_ALIGN - 1));
9273 if (local_vars_size & ptralign)
9274 local_vars_size += ptralign - (local_vars_size & ptralign);
9276 /* Note down local space needed: pointer to block; also return its offset as a dummy rtx.  */
9279 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9280 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9285 /* Push the machine address for the given external variable offset. */
9287 bc_load_externaddr (externaddr)
9290 bc_emit_bytecode (constP);
9291 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9292 BYTECODE_BC_LABEL (externaddr)->offset);
9294 #ifdef DEBUG_PRINT_CODE
9295 fputc ('\n', stderr);
9304 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9310 /* Like above, but expects an IDENTIFIER. */
9312 bc_load_externaddr_id (id, offset)
9316 if (!IDENTIFIER_POINTER (id))
9319 bc_emit_bytecode (constP);
9320 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9322 #ifdef DEBUG_PRINT_CODE
9323 fputc ('\n', stderr);
9328 /* Push the machine address for the given local variable offset. */
9330 bc_load_localaddr (localaddr)
9333 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9337 /* Push the machine address for the given parameter offset.
9338 NOTE: offset is in bits. */
9340 bc_load_parmaddr (parmaddr)
9343 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9348 /* Convert a[i] into *(a + i). */
9350 bc_canonicalize_array_ref (exp)
9353 tree type = TREE_TYPE (exp);
9354 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9355 TREE_OPERAND (exp, 0));
9356 tree index = TREE_OPERAND (exp, 1);
9359 /* Convert the integer argument to a type the same size as a pointer
9360 so the multiply won't overflow spuriously. */
9362 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9363 index = convert (type_for_size (POINTER_SIZE, 0), index);
9365 /* The array address isn't volatile even if the array is.
9366 (Of course this isn't terribly relevant since the bytecode
9367 translator treats nearly everything as volatile anyway.) */
9368 TREE_THIS_VOLATILE (array_adr) = 0;
9370 return build1 (INDIRECT_REF, type,
9371 fold (build (PLUS_EXPR,
9372 TYPE_POINTER_TO (type),
9374 fold (build (MULT_EXPR,
9375 TYPE_POINTER_TO (type),
9377 size_in_bytes (type))))));
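/* Illustrative equivalence, not part of the original code: at the C level
   the tree built above corresponds to rewriting

       a[i]

   as

       *(T *) ((char *) a + (size_t) i * sizeof (T))

   where T is the element type.  */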
9381 /* Load the address of the component referenced by the given
9382 COMPONENT_REF expression.
9384 Returns innermost lvalue. */
9387 bc_expand_component_address (exp)
9391 enum machine_mode mode;
9393 HOST_WIDE_INT SIval;
9396 tem = TREE_OPERAND (exp, 1);
9397 mode = DECL_MODE (tem);
9400 /* Compute cumulative bit offset for nested component refs
9401 and array refs, and find the ultimate containing object. */
9403 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9405 if (TREE_CODE (tem) == COMPONENT_REF)
9406 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9408 if (TREE_CODE (tem) == ARRAY_REF
9409 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9410 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9412 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9413 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9414 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9419 bc_expand_expr (tem);
9422 /* For bitfields also push their offset and size */
9423 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9424 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9426 if (SIval = bitpos / BITS_PER_UNIT)
9427 bc_emit_instruction (addconstPSI, SIval);
9429 return (TREE_OPERAND (exp, 1));
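/* Illustrative example, not part of the original code: for a reference like
   `s.a[2].b' the loop above accumulates the bit position of field `b' within
   its record, 2 times the size in bits of an element of `a', and the bit
   position of field `a' within `s', leaving the containing object `s' as the
   expression that gets expanded.  */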
9433 /* Emit code to push two SI constants */
9435 bc_push_offset_and_size (offset, size)
9436 HOST_WIDE_INT offset, size;
9438 bc_emit_instruction (constSI, offset);
9439 bc_emit_instruction (constSI, size);
9443 /* Emit byte code to push the address of the given lvalue expression to
9444 the stack. If it's a bit field, we also push offset and size info.
9446 Returns innermost component, which allows us to determine not only
9447 its type, but also whether it's a bitfield. */
9450 bc_expand_address (exp)
9454 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9458 switch (TREE_CODE (exp))
9462 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9466 return (bc_expand_component_address (exp));
9470 bc_expand_expr (TREE_OPERAND (exp, 0));
9472 /* For variable-sized types: retrieve pointer. Sometimes the
9473 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9474 also make sure we have an operand, just in case... */
9476 if (TREE_OPERAND (exp, 0)
9477 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9479 bc_emit_instruction (loadP);
9481 /* If packed, also return offset and size */
9482 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9484 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9485 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9487 return (TREE_OPERAND (exp, 0));
9491 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9492 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9497 bc_load_parmaddr (DECL_RTL (exp));
9499 /* For variable-sized types: retrieve pointer */
9500 if (TYPE_SIZE (TREE_TYPE (exp))
9501 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9502 bc_emit_instruction (loadP);
9504 /* If packed, also return offset and size */
9505 if (DECL_BIT_FIELD (exp))
9506 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9507 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9513 bc_emit_instruction (returnP);
9519 if (BYTECODE_LABEL (DECL_RTL (exp)))
9520 bc_load_externaddr (DECL_RTL (exp));
9523 if (DECL_EXTERNAL (exp))
9524 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9525 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9527 bc_load_localaddr (DECL_RTL (exp));
9529 /* For variable-sized types: retrieve pointer */
9530 if (TYPE_SIZE (TREE_TYPE (exp))
9531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9532 bc_emit_instruction (loadP);
9534 /* If packed, also return offset and size */
9535 if (DECL_BIT_FIELD (exp))
9536 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9537 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9545 bc_emit_bytecode (constP);
9546 r = output_constant_def (exp);
9547 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9549 #ifdef DEBUG_PRINT_CODE
9550 fputc ('\n', stderr);
9561 /* Most lvalues don't have components. */
9566 /* Emit a type code to be used by the runtime support in handling
9567 parameter passing. The type code consists of the machine mode
9568 plus the minimal alignment shifted left 8 bits. */
9571 bc_runtime_type_code (type)
9576 switch (TREE_CODE (type))
9586 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
9598 return build_int_2 (val, 0);
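/* Illustrative sketch, not part of the original code: the packing of the
   type code computed above, with the machine mode value OR'd together with
   the alignment (in bits) shifted left by 8.  */
#if 0
static int
runtime_type_code_example (int mode_value, int align_in_bits)
{
  return mode_value | (align_in_bits << 8);
}
#endif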
9602 /* Generate constructor label */
9604 bc_gen_constr_label ()
9606 static int label_counter;
9607 static char label[20];
9609 sprintf (label, "*LR%d", label_counter++);
9611 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9615 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9616 expand the constructor data as static data, and push a pointer to it.
9617 The pointer is put in the pointer table and is retrieved by a constP
9618 bytecode instruction. We then loop and store each constructor member in
9619 the corresponding component.  Finally, we return the original pointer on the stack.
9623 bc_expand_constructor (constr)
9627 HOST_WIDE_INT ptroffs;
9631 /* Literal constructors are handled as constants, whereas
9632 non-literals are evaluated and stored element by element
9633 into the data segment. */
9635 /* Allocate space in proper segment and push pointer to space on stack.  */
9638 l = bc_gen_constr_label ();
9640 if (TREE_CONSTANT (constr))
9644 bc_emit_const_labeldef (l);
9645 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
9651 bc_emit_data_labeldef (l);
9652 bc_output_data_constructor (constr);
9656 /* Add reference to pointer table and recall pointer to stack;
9657 this code is common for both types of constructors: literals
9658 and non-literals. */
9660 ptroffs = bc_define_pointer (l);
9661 bc_emit_instruction (constP, ptroffs);
9663 /* This is all that has to be done if it's a literal. */
9664 if (TREE_CONSTANT (constr))
9668 /* At this point, we have the pointer to the structure on top of the stack.
9669 Generate sequences of store_memory calls for the constructor. */
9671 /* constructor type is structure */
9672 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9676 /* If the constructor has fewer fields than the structure,
9677 clear the whole structure first. */
9679 if (list_length (CONSTRUCTOR_ELTS (constr))
9680 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9682 bc_emit_instruction (duplicate);
9683 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9684 bc_emit_instruction (clearBLK);
9687 /* Store each element of the constructor into the corresponding field of TARGET.  */
9690 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9692 register tree field = TREE_PURPOSE (elt);
9693 register enum machine_mode mode;
9698 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9699 mode = DECL_MODE (field);
9700 unsignedp = TREE_UNSIGNED (field);
9702 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9704 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9705 /* The alignment of TARGET is
9706 at least what its type requires. */
9708 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9709 int_size_in_bytes (TREE_TYPE (constr)));
9714 /* Constructor type is array */
9715 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9719 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9720 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9721 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9722 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9724 /* If the constructor has fewer elements than the array,
9725 clear the whole array first.  */
9727 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9729 bc_emit_instruction (duplicate);
9730 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9731 bc_emit_instruction (clearBLK);
9735 /* Store each element of the constructor into the corresponding
9736 element of TARGET, determined by counting the elements. */
9738 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9740 elt = TREE_CHAIN (elt), i++)
9742 register enum machine_mode mode;
9747 mode = TYPE_MODE (elttype);
9748 bitsize = GET_MODE_BITSIZE (mode);
9749 unsignedp = TREE_UNSIGNED (elttype);
9751 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9752 /* * TYPE_SIZE_UNIT (elttype) */ );
9754 bc_store_field (elt, bitsize, bitpos, mode,
9755 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9756 /* The alignment of TARGET is
9757 at least what its type requires. */
9759 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9760 int_size_in_bytes (TREE_TYPE (constr)));
9767 /* Store the value of EXP (an expression tree) into member FIELD of
9768 structure at address on stack, which has type TYPE, mode MODE and
9769 occupies BITSIZE bits, starting BITPOS bits from the beginning of the structure.
9772 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9773 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9776 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9777 value_mode, unsignedp, align, total_size)
9778 int bitsize, bitpos;
9779 enum machine_mode mode;
9780 tree field, exp, type;
9781 enum machine_mode value_mode;
9787 /* Expand expression and copy pointer */
9788 bc_expand_expr (exp);
9789 bc_emit_instruction (over);
9792 /* If the component is a bit field, we cannot use addressing to access
9793 it. Use bit-field techniques to store in it. */
9795 if (DECL_BIT_FIELD (field))
9797 bc_store_bit_field (bitpos, bitsize, unsignedp);
9803 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9805 /* Advance pointer to the desired member */
9807 bc_emit_instruction (addconstPSI, offset);
9810 bc_store_memory (type, field);
9815 /* Store SI/SU in bitfield */
9817 bc_store_bit_field (offset, size, unsignedp)
9818 int offset, size, unsignedp;
9820 /* Push bitfield offset and size */
9821 bc_push_offset_and_size (offset, size);
9824 bc_emit_instruction (sstoreBI);
9828 /* Load SI/SU from bitfield */
9830 bc_load_bit_field (offset, size, unsignedp)
9831 int offset, size, unsignedp;
9833 /* Push bitfield offset and size */
9834 bc_push_offset_and_size (offset, size);
9836 /* Load: sign-extend if signed, else zero-extend */
9837 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
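/* Illustrative sketch, not part of the original code: what the zxloadBI and
   sxloadBI bytecodes conceptually do for a field of SIZE bits that starts
   OFFSET bits into a word W, assuming 0 < SIZE < the number of bits in a
   long and a low-order-first bit numbering.  */
#if 0
static long
load_bit_field_example (unsigned long w, int offset, int size, int unsignedp)
{
  unsigned long field = (w >> offset) & ((1UL << size) - 1);

  if (! unsignedp && (field & (1UL << (size - 1))) != 0)
    field |= ~0UL << size;      /* sign-extend from the field's top bit */

  return (long) field;
}
#endif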
9841 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9842 (adjust stack pointer upwards), negative means add that number of
9843 levels (adjust the stack pointer downwards). Only positive values
9844 normally make sense. */
9847 bc_adjust_stack (nlevels)
9856 bc_emit_instruction (drop);
9859 bc_emit_instruction (drop);
9864 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9865 stack_depth -= nlevels;
9868 #if defined (VALIDATE_STACK_FOR_BC)
9869 VALIDATE_STACK_FOR_BC ();