/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#include "bc-opcode.h"
#include "bc-typecd.h"
#define CEIL(x,y) (((x) + (y) - 1) / (y))
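/* Illustrative note (values are hypothetical): CEIL (9, 4) is 3, i.e.
   nine bytes occupy three 4-byte units; this is how the word counts
   below are computed.  */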
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;
/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
  int explicit_inc_from;

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;
static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
}
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

  /* Instead of saving the postincrement queue, empty it.  */

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     register rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
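/* Illustrative sketch only (not called anywhere; the expansion routines
   below are the real users).  An rtx X that might be, or might contain,
   a QUEUED is protected just before it is used in an insn, and the queue
   is flushed once the whole expression has been expanded:

	rtx op  = protect_from_queue (x, 0);    read access
	rtx dst = protect_from_queue (y, 1);    write access
	emit_move_insn (dst, op);
	emit_queue ();

   X and Y here are hypothetical values; emit_move_insn and emit_queue are
   the routines defined elsewhere in this file.  */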
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */

	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))

      else if (code == PLUS || code == MULT)
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));

/* Perform all the pending incrementations.  */

  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
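/* Example (illustrative only; the registers involved are hypothetical):
   to sign-extend an SImode register SRC into a DImode register DST,

	convert_move (dst, src, 0);

   and passing 1 for UNSIGNEDP would zero-extend instead.  */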
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  libcall = extendsfdf2_libfunc;
	  libcall = extendsfxf2_libfunc;
	  libcall = extendsftf2_libfunc;
	  libcall = truncdfsf2_libfunc;
	  libcall = extenddfxf2_libfunc;
	  libcall = extenddftf2_libfunc;
	  libcall = truncxfsf2_libfunc;
	  libcall = truncxfdf2_libfunc;
	  libcall = trunctfsf2_libfunc;
	  libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
      emit_move_insn (to, value);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;

	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));

	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion */		/* SPEE 900220 */
  if (to_mode == PSImode)
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)
      if (to_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (HAVE_extendpsisi2)
	emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
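/* Example (illustrative only; X is a hypothetical QImode rtx): an SImode
   rtx for the zero-extension of X can be obtained with

	rtx wide = convert_modes (SImode, QImode, x, 1);

   convert_to_mode above is the same operation with OLDMODE taken from X.  */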
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
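  /* For example (illustrative numbers only): copying a 15-byte block with
     ALIGN == 4 on a 32-bit target descends through the integer modes as
     three SImode moves, one HImode move and one QImode move, provided each
     of those modes has a usable mov pattern.  */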
  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
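/* Example (illustrative only; TO, FROM and N are hypothetical): to copy an
   N-byte structure whose alignment is only known to be byte alignment,

	emit_block_move (to, from, GEN_INT (n), 1);

   where TO and FROM are BLKmode MEM rtx's.  */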
emit_block_move (x, y, size, align)

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))

	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
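/* Example (illustrative only; OBJECT is a hypothetical BLKmode MEM): to
   zero a 32-byte object,

	clear_storage (object, 32);

   For a non-BLKmode object the SIZE argument is not consulted and a plain
   move of const0_rtx is emitted instead, as the code below shows.  */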
void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
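/* Example (illustrative only): loading the constant zero into a freshly
   made SImode pseudo register:

	rtx reg = gen_reg_rtx (SImode);
	emit_move_insn (reg, const0_rtx);

   The destination has SImode and the constant, having VOIDmode, is
   compatible with it, as required above.  */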
rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

	  /* If this is a stack, push the highpart first, so it
	     will be in the argument order.

	     In that case, change_address is used only to convert
	     the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));

      insns = get_insns ();

      /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
	 each with a separate pseudo as destination.
	 It's not correct for flow to treat them as a unit.  */
      if (GET_CODE (x) != CONCAT)
	emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);

      return get_last_insn ();

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      insns = get_insns ();

      emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
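/* Example (illustrative only): to reserve 16 bytes of stack plus 4 bytes
   of padding below them,

	rtx addr = push_block (GEN_INT (16), 4, 1);

   ADDR then addresses the start of the 16-byte block proper, past the
   padding.  */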
rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
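/* Worked example (illustrative numbers only): with 32-bit words and a
   PARM_BOUNDARY of 64 bits, PARTIAL == 3 means three words go in registers,
   but the stack space pushed is reduced by only two words (three rounded
   down to the 64-bit boundary); the odd word's slot is still allocated and
   is merely left unstored unless the argument is pushed with real push
   insns, as the comments below explain.  */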
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     enum machine_mode mode;

  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      xinner = change_address (xinner, BLKmode,
			       plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (GET_CODE (size) == CONST_INT)
	    size = GEN_INT (INTVAL (size) - used);
	  else
	    size = expand_binop (GET_MODE (size), sub_optab, size,
				 GEN_INT (used), NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (args_addr == 0)
	    temp = push_block (size, extra, where_pad == downward);
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	    }
	  else
	    {
	      /* Try the most limited insn first, because there's no point
		 including more than one in the machine description unless
		 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	      if (HAVE_movstrqi
		  && GET_CODE (size) == CONST_INT
		  && ((unsigned) INTVAL (size)
		      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
		{
		  rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrhi
	      if (HAVE_movstrhi
		  && GET_CODE (size) == CONST_INT
		  && ((unsigned) INTVAL (size)
		      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
		{
		  rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrsi
		  rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrdi
		  rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
					  xinner, size, GEN_INT (align));

#ifndef ACCUMULATE_OUTGOING_ARGS
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
#endif

	      /* Make inhibit_defer_pop nonzero around the library call
		 to force it to pop the bcopy-arguments right away.  */
#ifdef TARGET_MEM_FUNCTIONS
	      emit_library_call (memcpy_libfunc, 0,
				 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
				 convert_to_mode (TYPE_MODE (sizetype),
						  size, TREE_UNSIGNED (sizetype)),
				 TYPE_MODE (sizetype));
#else
	      emit_library_call (bcopy_libfunc, 0,
				 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
				 convert_to_mode (TYPE_MODE (sizetype),
						  size, TREE_UNSIGNED (sizetype)),
				 TYPE_MODE (sizetype));
#endif
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;

      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
2278 /* Expand an assignment that stores the value of FROM into TO.
2279 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2280 (This may contain a QUEUED rtx;
2281 if the value is constant, this rtx is a constant.)
2282 Otherwise, the returned value is NULL_RTX.
2284 SUGGEST_REG is no longer actually used.
2285 It used to mean, copy the value through a register
2286 and return that register, if that is possible.
2287 We now use WANT_VALUE to decide whether to do this. */
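/* As an illustrative sketch (the tree variable names here are
   hypothetical): for the C statement

	if ((x = f ()) != 0) ...

   the embedded assignment is expanded with a call such as

	expand_assignment (x_tree, call_tree, 1, 0);

   and the rtx returned is then tested, whereas for a plain statement
   `x = f ();' WANT_VALUE is 0 and the NULL_RTX result is ignored.  */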
2290 expand_assignment (to, from, want_value, suggest_reg)
2295 register rtx to_rtx = 0;
2298 /* Don't crash if the lhs of the assignment was erroneous. */
2300 if (TREE_CODE (to) == ERROR_MARK)
2302 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2303 return want_value ? result : NULL_RTX;
2306 if (output_bytecode)
2308 tree dest_innermost;
2310 bc_expand_expr (from);
2311 bc_emit_instruction (duplicate);
2313 dest_innermost = bc_expand_address (to);
2315 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2316 take care of it here. */
2318 bc_store_memory (TREE_TYPE (to), dest_innermost);
2322 /* Assignment of a structure component needs special treatment
2323 if the structure component's rtx is not simply a MEM.
2324 Assignment of an array element at a constant index
2325 has the same problem. */
2327 if (TREE_CODE (to) == COMPONENT_REF
2328 || TREE_CODE (to) == BIT_FIELD_REF
2329 || (TREE_CODE (to) == ARRAY_REF
2330 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2331 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2333 enum machine_mode mode1;
2343 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2344 &mode1, &unsignedp, &volatilep);
2346 /* If we are going to use store_bit_field and extract_bit_field,
2347 make sure to_rtx will be safe for multiple use. */
2349 if (mode1 == VOIDmode && want_value)
2350 tem = stabilize_reference (tem);
2352 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2353 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2356 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2358 if (GET_CODE (to_rtx) != MEM)
2360 to_rtx = change_address (to_rtx, VOIDmode,
2361 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2362 force_reg (Pmode, offset_rtx)));
2363 /* If we have a variable offset, the known alignment
2364 is only that of the innermost structure containing the field.
2365 (Actually, we could sometimes do better by using the
2366 align of an element of the innermost array, but no need.) */
2367 if (TREE_CODE (to) == COMPONENT_REF
2368 || TREE_CODE (to) == BIT_FIELD_REF)
2370 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2374 if (GET_CODE (to_rtx) == MEM)
2375 MEM_VOLATILE_P (to_rtx) = 1;
2376 #if 0 /* This was turned off because, when a field is volatile
2377 in an object which is not volatile, the object may be in a register,
2378 and then we would abort over here. */
2384 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2386 /* Spurious cast makes HPUX compiler happy. */
2387 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2390 /* Required alignment of containing datum. */
2392 int_size_in_bytes (TREE_TYPE (tem)));
2393 preserve_temp_slots (result);
2397 /* If the value is meaningful, convert RESULT to the proper mode.
2398 Otherwise, return nothing. */
2399 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2400 TYPE_MODE (TREE_TYPE (from)),
2402 TREE_UNSIGNED (TREE_TYPE (to)))
2406 /* If the rhs is a function call and its value is not an aggregate,
2407 call the function before we start to compute the lhs.
2408 This is needed for correct code for cases such as
2409 val = setjmp (buf) on machines where reference to val
2410 requires loading up part of an address in a separate insn.
2412 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2413 a promoted variable where the zero- or sign-extension needs to be done.
2414 Handling this in the normal way is safe because no computation is done
2416 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2417 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2422 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2424 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2425 emit_move_insn (to_rtx, value);
2426 preserve_temp_slots (to_rtx);
2429 return want_value ? to_rtx : NULL_RTX;
2432 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2433 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2436 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2438 /* Don't move directly into a return register. */
2439 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2444 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2445 emit_move_insn (to_rtx, temp);
2446 preserve_temp_slots (to_rtx);
2449 return want_value ? to_rtx : NULL_RTX;
2452 /* In case we are returning the contents of an object which overlaps
2453 the place the value is being stored, use a safe function when copying
2454 a value through a pointer into a structure value return block. */
2455 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2456 && current_function_returns_struct
2457 && !current_function_returns_pcc_struct)
2462 size = expr_size (from);
2463 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2465 #ifdef TARGET_MEM_FUNCTIONS
2466 emit_library_call (memcpy_libfunc, 0,
2467 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2468 XEXP (from_rtx, 0), Pmode,
2469 convert_to_mode (TYPE_MODE (sizetype),
2470 size, TREE_UNSIGNED (sizetype)),
2471 TYPE_MODE (sizetype));
2473 emit_library_call (bcopy_libfunc, 0,
2474 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2475 XEXP (to_rtx, 0), Pmode,
2476 convert_to_mode (TYPE_MODE (sizetype),
2477 size, TREE_UNSIGNED (sizetype)),
2478 TYPE_MODE (sizetype));
2481 preserve_temp_slots (to_rtx);
2484 return want_value ? to_rtx : NULL_RTX;
2487 /* Compute FROM and store the value in the rtx we got. */
2490 result = store_expr (from, to_rtx, want_value);
2491 preserve_temp_slots (result);
2494 return want_value ? result : NULL_RTX;
2497 /* Generate code for computing expression EXP,
2498 and storing the value into TARGET.
2499 TARGET may contain a QUEUED rtx.
2501 If WANT_VALUE is nonzero, return a copy of the value
2502 not in TARGET, so that we can be sure to use the proper
2503 value in a containing expression even if TARGET has something
2504 else stored in it. If possible, we copy the value through a pseudo
2505 and return that pseudo. Or, if the value is constant, we try to
2506 return the constant. In some cases, we return a pseudo
2507 copied *from* TARGET.
2509 If the mode is BLKmode then we may return TARGET itself.
2510 It turns out that in BLKmode it doesn't cause a problem,
2511 because C has no operators that could combine two different
2512 assignments into the same BLKmode object with different values
2513 with no sequence point. Will other languages need this to
2516 If WANT_VALUE is 0, we return NULL, to make sure
2517 to catch quickly any cases where the caller uses the value
2518 and fails to set WANT_VALUE. */
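/* A minimal sketch of the intended calling convention (the caller shown
   here is hypothetical): an assignment is typically expanded as

	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
	result = store_expr (from, to_rtx, want_value);

   so that with WANT_VALUE nonzero RESULT remains usable even if TO_RTX
   is later overwritten, and with WANT_VALUE zero only the store itself
   matters and RESULT is NULL.  */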
2521 store_expr (exp, target, want_value)
2523 register rtx target;
2527 int dont_return_target = 0;
2529 if (TREE_CODE (exp) == COMPOUND_EXPR)
2531 /* Perform first part of compound expression, then assign from second
2533 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2535 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2537 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2539 /* For conditional expression, get safe form of the target. Then
2540 test the condition, doing the appropriate assignment on either
2541 side. This avoids the creation of unnecessary temporaries.
2542 For non-BLKmode, it is more efficient not to do this. */
2544 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2547 target = protect_from_queue (target, 1);
2550 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2551 store_expr (TREE_OPERAND (exp, 1), target, 0);
2553 emit_jump_insn (gen_jump (lab2));
2556 store_expr (TREE_OPERAND (exp, 2), target, 0);
2560 return want_value ? target : NULL_RTX;
2562 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2563 && GET_MODE (target) != BLKmode)
2564 /* If target is in memory and caller wants value in a register instead,
2565 arrange that. Pass TARGET as target for expand_expr so that,
2566 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2567 We know expand_expr will not use the target in that case.
2568 Don't do this if TARGET is volatile because we are supposed
2569 to write it and then read it. */
2571 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2572 GET_MODE (target), 0);
2573 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2574 temp = copy_to_reg (temp);
2575 dont_return_target = 1;
2577 else if (queued_subexp_p (target))
2578 /* If target contains a postincrement, let's not risk
2579 using it as the place to generate the rhs. */
2581 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2583 /* Expand EXP into a new pseudo. */
2584 temp = gen_reg_rtx (GET_MODE (target));
2585 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2588 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2590 /* If target is volatile, ANSI requires accessing the value
2591 *from* the target, if it is accessed. So make that happen.
2592 In no case return the target itself. */
2593 if (! MEM_VOLATILE_P (target) && want_value)
2594 dont_return_target = 1;
2596 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2597 /* If this is a scalar in a register that is stored in a wider mode
2598 than the declared mode, compute the result into its declared mode
2599 and then convert to the wider mode. Our value is the computed
2602 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2604 /* If TEMP is a volatile MEM and we want a result value, make
2605 the access now so it gets done only once. */
2606 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp))
2607 temp = copy_to_reg (temp);
2609 /* If TEMP is a VOIDmode constant, use convert_modes to make
2610 sure that we properly convert it. */
2611 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2612 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2613 TYPE_MODE (TREE_TYPE (exp)), temp,
2614 SUBREG_PROMOTED_UNSIGNED_P (target));
2616 convert_move (SUBREG_REG (target), temp,
2617 SUBREG_PROMOTED_UNSIGNED_P (target));
2618 return want_value ? temp : NULL_RTX;
2622 temp = expand_expr (exp, target, GET_MODE (target), 0);
2623 /* Return TARGET if it's a specified hardware register.
2624 If TARGET is a volatile mem ref, either return TARGET
2625 or return a reg copied *from* TARGET; ANSI requires this.
2627 Otherwise, if TEMP is not TARGET, return TEMP
2628 if it is constant (for efficiency),
2629 or if we really want the correct value. */
2630 if (!(target && GET_CODE (target) == REG
2631 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2632 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2634 && (CONSTANT_P (temp) || want_value))
2635 dont_return_target = 1;
2638 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2639 the same as that of TARGET, adjust the constant. This is needed, for
2640 example, in case it is a CONST_DOUBLE and we want only a word-sized
2642 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2643 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2644 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2645 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2647 /* If value was not generated in the target, store it there.
2648 Convert the value to TARGET's type first if necessary. */
2650 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2652 target = protect_from_queue (target, 1);
2653 if (GET_MODE (temp) != GET_MODE (target)
2654 && GET_MODE (temp) != VOIDmode)
2656 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2657 if (dont_return_target)
2659 /* In this case, we will return TEMP,
2660 so make sure it has the proper mode.
2661 But don't forget to store the value into TARGET. */
2662 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2663 emit_move_insn (target, temp);
2666 convert_move (target, temp, unsignedp);
2669 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2671 /* Handle copying a string constant into an array.
2672 The string constant may be shorter than the array.
2673 So copy just the string's actual length, and clear the rest. */
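/* For example, given

	char buf[10] = "ab";

   the string constant occupies 3 bytes (counting the terminating null),
   so 3 bytes are block-copied and the remaining 7 bytes of the array
   are cleared.  */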
2676 /* Get the size of the data type of the string,
2677 which is actually the size of the target. */
2678 size = expr_size (exp);
2679 if (GET_CODE (size) == CONST_INT
2680 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2681 emit_block_move (target, temp, size,
2682 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2685 /* Compute the size of the data to copy from the string. */
2687 = size_binop (MIN_EXPR,
2688 make_tree (sizetype, size),
2690 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2691 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2695 /* Copy that much. */
2696 emit_block_move (target, temp, copy_size_rtx,
2697 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2699 /* Figure out how much is left in TARGET
2700 that we have to clear. */
2701 if (GET_CODE (copy_size_rtx) == CONST_INT)
2703 temp = plus_constant (XEXP (target, 0),
2704 TREE_STRING_LENGTH (exp));
2705 size = plus_constant (size,
2706 - TREE_STRING_LENGTH (exp));
2710 enum machine_mode size_mode = Pmode;
2712 temp = force_reg (Pmode, XEXP (target, 0));
2713 temp = expand_binop (size_mode, add_optab, temp,
2714 copy_size_rtx, NULL_RTX, 0,
2717 size = expand_binop (size_mode, sub_optab, size,
2718 copy_size_rtx, NULL_RTX, 0,
2721 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2722 GET_MODE (size), 0, 0);
2723 label = gen_label_rtx ();
2724 emit_jump_insn (gen_blt (label));
2727 if (size != const0_rtx)
2729 #ifdef TARGET_MEM_FUNCTIONS
2730 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2731 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2733 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2734 temp, Pmode, size, Pmode);
2741 else if (GET_MODE (temp) == BLKmode)
2742 emit_block_move (target, temp, expr_size (exp),
2743 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2745 emit_move_insn (target, temp);
2748 /* If we don't want a value, return NULL_RTX. */
2752 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2753 ??? The latter test doesn't seem to make sense. */
2754 else if (dont_return_target && GET_CODE (temp) != MEM)
2757 /* Return TARGET itself if it is a hard register. */
2758 else if (want_value && GET_MODE (target) != BLKmode
2759 && ! (GET_CODE (target) == REG
2760 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2761 return copy_to_reg (target);
2767 /* Store the value of constructor EXP into the rtx TARGET.
2768 TARGET is either a REG or a MEM. */
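/* For instance (an illustrative example only), for the C declaration

	struct pt { int x, y; } p = { 3, 4 };

   a front end hands us a CONSTRUCTOR whose CONSTRUCTOR_ELTS pair each
   FIELD_DECL with its value; the loop below then stores 3 and 4 into
   the corresponding fields of the rtx for `p' via store_field.  */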
2771 store_constructor (exp, target)
2775 tree type = TREE_TYPE (exp);
2777 /* We know our target cannot conflict, since safe_from_p has been called. */
2779 /* Don't try copying piece by piece into a hard register
2780 since that is vulnerable to being clobbered by EXP.
2781 Instead, construct in a pseudo register and then copy it all. */
2782 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2784 rtx temp = gen_reg_rtx (GET_MODE (target));
2785 store_constructor (exp, temp);
2786 emit_move_insn (target, temp);
2791 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2792 || TREE_CODE (type) == QUAL_UNION_TYPE)
2796 /* Inform later passes that the whole union value is dead. */
2797 if (TREE_CODE (type) == UNION_TYPE
2798 || TREE_CODE (type) == QUAL_UNION_TYPE)
2799 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2801 /* If we are building a static constructor into a register,
2802 set the initial value as zero so we can fold the value into
2804 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2805 emit_move_insn (target, const0_rtx);
2807 /* If the constructor has fewer fields than the structure,
2808 clear the whole structure first. */
2809 else if (list_length (CONSTRUCTOR_ELTS (exp))
2810 != list_length (TYPE_FIELDS (type)))
2811 clear_storage (target, int_size_in_bytes (type));
2813 /* Inform later passes that the old value is dead. */
2814 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2816 /* Store each element of the constructor into
2817 the corresponding field of TARGET. */
2819 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2821 register tree field = TREE_PURPOSE (elt);
2822 register enum machine_mode mode;
2826 tree pos, constant = 0, offset = 0;
2827 rtx to_rtx = target;
2829 /* Just ignore missing fields.
2830 We cleared the whole structure, above,
2831 if any fields are missing. */
2835 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2836 unsignedp = TREE_UNSIGNED (field);
2837 mode = DECL_MODE (field);
2838 if (DECL_BIT_FIELD (field))
2841 pos = DECL_FIELD_BITPOS (field);
2842 if (TREE_CODE (pos) == INTEGER_CST)
2844 else if (TREE_CODE (pos) == PLUS_EXPR
2845 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2846 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
2851 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2857 if (contains_placeholder_p (offset))
2858 offset = build (WITH_RECORD_EXPR, sizetype,
2861 offset = size_binop (FLOOR_DIV_EXPR, offset,
2862 size_int (BITS_PER_UNIT));
2864 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2865 if (GET_CODE (to_rtx) != MEM)
2869 = change_address (to_rtx, VOIDmode,
2870 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2871 force_reg (Pmode, offset_rtx)));
2874 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
2875 /* The alignment of TARGET is
2876 at least what its type requires. */
2878 TYPE_ALIGN (type) / BITS_PER_UNIT,
2879 int_size_in_bytes (type));
2882 else if (TREE_CODE (type) == ARRAY_TYPE)
2886 tree domain = TYPE_DOMAIN (type);
2887 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2888 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2889 tree elttype = TREE_TYPE (type);
2891 /* If the constructor has fewer elements than the array,
2892 clear the whole array first.  Similarly if this is a
2893 static constructor of a non-BLKmode object. */
2895 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2896 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2897 clear_storage (target, int_size_in_bytes (type));
2899 /* Inform later passes that the old value is dead. */
2900 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2902 /* Store each element of the constructor into
2903 the corresponding element of TARGET, determined
2904 by counting the elements. */
2905 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2907 elt = TREE_CHAIN (elt), i++)
2909 register enum machine_mode mode;
2913 tree index = TREE_PURPOSE (elt);
2914 rtx xtarget = target;
2916 mode = TYPE_MODE (elttype);
2917 bitsize = GET_MODE_BITSIZE (mode);
2918 unsignedp = TREE_UNSIGNED (elttype);
2920 if (index != 0 && TREE_CODE (index) != INTEGER_CST)
2922 /* We don't currently allow variable indices in a
2923 C initializer, but let's try here to support them. */
2924 rtx pos_rtx, addr, xtarget;
2927 position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
2928 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
2929 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
2930 xtarget = change_address (target, mode, addr);
2931 store_expr (TREE_VALUE (elt), xtarget, 0);
2936 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
2937 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2939 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2941 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
2942 /* The alignment of TARGET is
2943 at least what its type requires. */
2945 TYPE_ALIGN (type) / BITS_PER_UNIT,
2946 int_size_in_bytes (type));
2955 /* Store the value of EXP (an expression tree)
2956 into a subfield of TARGET which has mode MODE and occupies
2957 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2958 If MODE is VOIDmode, it means that we are storing into a bit-field.
2960 If VALUE_MODE is VOIDmode, return nothing in particular.
2961 UNSIGNEDP is not used in this case.
2963 Otherwise, return an rtx for the value stored. This rtx
2964 has mode VALUE_MODE if that is convenient to do.
2965 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2967 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2968 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
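/* As an illustrative sketch (the values and names here are hypothetical,
   assuming 32-bit ints and a typical structure layout): to store into
   `x.c' where

	struct s { int a; unsigned c : 5; } x;

   the caller ends up making a call of roughly the form

	store_field (x_rtx, 5, 32, VOIDmode, from_tree, value_mode,
		     1, align, total_size);

   that is, BITSIZE = 5, BITPOS = 32 and MODE = VOIDmode because the
   field is a bit-field; VALUE_MODE is VOIDmode unless the stored value
   is wanted back.  */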
2971 store_field (target, bitsize, bitpos, mode, exp, value_mode,
2972 unsignedp, align, total_size)
2974 int bitsize, bitpos;
2975 enum machine_mode mode;
2977 enum machine_mode value_mode;
2982 HOST_WIDE_INT width_mask = 0;
2984 if (bitsize < HOST_BITS_PER_WIDE_INT)
2985 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2987 /* If we are storing into an unaligned field of an aligned union that is
2988 in a register, we may have the mode of TARGET being an integer mode but
2989 MODE == BLKmode. In that case, get an aligned object whose size and
2990 alignment are the same as TARGET and store TARGET into it (we can avoid
2991 the store if the field being stored is the entire width of TARGET). Then
2992 call ourselves recursively to store the field into a BLKmode version of
2993 that object. Finally, load from the object into TARGET. This is not
2994 very efficient in general, but should only be slightly more expensive
2995 than the otherwise-required unaligned accesses. Perhaps this can be
2996 cleaned up later. */
2999 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3001 rtx object = assign_stack_temp (GET_MODE (target),
3002 GET_MODE_SIZE (GET_MODE (target)), 0);
3003 rtx blk_object = copy_rtx (object);
3005 PUT_MODE (blk_object, BLKmode);
3007 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3008 emit_move_insn (object, target);
3010 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3013 /* Even though we aren't returning target, we need to
3014 give it the updated value. */
3015 emit_move_insn (target, object);
3020 /* If the structure is in a register or if the component
3021 is a bit field, we cannot use addressing to access it.
3022 Use bit-field techniques or SUBREG to store in it. */
3024 if (mode == VOIDmode
3025 || (mode != BLKmode && ! direct_store[(int) mode])
3026 || GET_CODE (target) == REG
3027 || GET_CODE (target) == SUBREG
3028 /* If the field isn't aligned enough to store as an ordinary memref,
3029 store it as a bit field. */
3030 || (STRICT_ALIGNMENT
3031 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3032 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3034 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3036 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3038 if (mode != VOIDmode && mode != BLKmode
3039 && mode != TYPE_MODE (TREE_TYPE (exp)))
3040 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3042 /* Store the value in the bitfield. */
3043 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3044 if (value_mode != VOIDmode)
3046 /* The caller wants an rtx for the value. */
3047 /* If possible, avoid refetching from the bitfield itself. */
3049 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3052 enum machine_mode tmode;
3055 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3056 tmode = GET_MODE (temp);
3057 if (tmode == VOIDmode)
3059 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3060 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3061 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3063 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3064 NULL_RTX, value_mode, 0, align,
3071 rtx addr = XEXP (target, 0);
3074 /* If a value is wanted, it must be the lhs;
3075 so make the address stable for multiple use. */
3077 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3078 && ! CONSTANT_ADDRESS_P (addr)
3079 /* A frame-pointer reference is already stable. */
3080 && ! (GET_CODE (addr) == PLUS
3081 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3082 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3083 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3084 addr = copy_to_reg (addr);
3086 /* Now build a reference to just the desired component. */
3088 to_rtx = change_address (target, mode,
3089 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3090 MEM_IN_STRUCT_P (to_rtx) = 1;
3092 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3096 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3097 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3098 ARRAY_REFs and find the ultimate containing object, which we return.
3100 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3101 bit position, and *PUNSIGNEDP to the signedness of the field.
3102 If the position of the field is variable, we store a tree
3103 giving the variable offset (in units) in *POFFSET.
3104 This offset is in addition to the bit position.
3105 If the position is not variable, we store 0 in *POFFSET.
3107 If any of the extraction expressions is volatile,
3108 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3110 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3111 is a mode that can be used to access the field. In that case, *PBITSIZE
3114 If the field describes a variable-sized object, *PMODE is set to
3115 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3116 this case, but the address of the object can be found. */
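/* For example (an illustrative sketch, assuming 32-bit ints and the
   usual structure layout), given

	struct s { int a; unsigned b : 3; unsigned c : 5; } x;

   a call on the expression `x.c' returns the tree for `x' and sets
   *PBITSIZE = 5, *PBITPOS = 35, *POFFSET = 0, *PMODE = VOIDmode (the
   field is a bit-field) and *PUNSIGNEDP = 1.  */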
3119 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3120 punsignedp, pvolatilep)
3125 enum machine_mode *pmode;
3129 tree orig_exp = exp;
3131 enum machine_mode mode = VOIDmode;
3132 tree offset = integer_zero_node;
3134 if (TREE_CODE (exp) == COMPONENT_REF)
3136 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3137 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3138 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3139 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3141 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3143 size_tree = TREE_OPERAND (exp, 1);
3144 *punsignedp = TREE_UNSIGNED (exp);
3148 mode = TYPE_MODE (TREE_TYPE (exp));
3149 *pbitsize = GET_MODE_BITSIZE (mode);
3150 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3155 if (TREE_CODE (size_tree) != INTEGER_CST)
3156 mode = BLKmode, *pbitsize = -1;
3158 *pbitsize = TREE_INT_CST_LOW (size_tree);
3161 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3162 and find the ultimate containing object. */
3168 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3170 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3171 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3172 : TREE_OPERAND (exp, 2));
3174 /* If this field hasn't been filled in yet, don't go
3175 past it. This should only happen when folding expressions
3176 made during type construction. */
3180 if (TREE_CODE (pos) == PLUS_EXPR)
3183 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3185 constant = TREE_OPERAND (pos, 0);
3186 var = TREE_OPERAND (pos, 1);
3188 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3190 constant = TREE_OPERAND (pos, 1);
3191 var = TREE_OPERAND (pos, 0);
3196 *pbitpos += TREE_INT_CST_LOW (constant);
3197 offset = size_binop (PLUS_EXPR, offset,
3198 size_binop (FLOOR_DIV_EXPR, var,
3199 size_int (BITS_PER_UNIT)));
3201 else if (TREE_CODE (pos) == INTEGER_CST)
3202 *pbitpos += TREE_INT_CST_LOW (pos);
3205 /* Assume here that the offset is a multiple of a unit.
3206 If not, there should be an explicitly added constant. */
3207 offset = size_binop (PLUS_EXPR, offset,
3208 size_binop (FLOOR_DIV_EXPR, pos,
3209 size_int (BITS_PER_UNIT)));
3213 else if (TREE_CODE (exp) == ARRAY_REF)
3215 /* This code is based on the code in case ARRAY_REF in expand_expr
3216 below. We assume here that the size of an array element is
3217 always an integral multiple of BITS_PER_UNIT. */
3219 tree index = TREE_OPERAND (exp, 1);
3220 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3222 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3223 tree index_type = TREE_TYPE (index);
3225 if (! integer_zerop (low_bound))
3226 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3228 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3230 index = convert (type_for_size (POINTER_SIZE, 0), index);
3231 index_type = TREE_TYPE (index);
3234 index = fold (build (MULT_EXPR, index_type, index,
3235 TYPE_SIZE (TREE_TYPE (exp))));
3237 if (TREE_CODE (index) == INTEGER_CST
3238 && TREE_INT_CST_HIGH (index) == 0)
3239 *pbitpos += TREE_INT_CST_LOW (index);
3241 offset = size_binop (PLUS_EXPR, offset,
3242 size_binop (FLOOR_DIV_EXPR, index,
3243 size_int (BITS_PER_UNIT)));
3245 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3246 && ! ((TREE_CODE (exp) == NOP_EXPR
3247 || TREE_CODE (exp) == CONVERT_EXPR)
3248 && (TYPE_MODE (TREE_TYPE (exp))
3249 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3252 /* If any reference in the chain is volatile, the effect is volatile. */
3253 if (TREE_THIS_VOLATILE (exp))
3255 exp = TREE_OPERAND (exp, 0);
3258 /* If this was a bit-field, see if there is a mode that allows direct
3259 access in case EXP is in memory. */
3260 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3262 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3263 if (mode == BLKmode)
3267 if (integer_zerop (offset))
3270 if (offset != 0 && contains_placeholder_p (offset))
3271 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
3278 /* Given an rtx VALUE that may contain additions and multiplications,
3279 return an equivalent value that just refers to a register or memory.
3280 This is done by generating instructions to perform the arithmetic
3281 and returning a pseudo-register containing the value.
3283 The returned value may be a REG, SUBREG, MEM or constant. */
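/* For instance (an illustrative sketch), if VALUE is

	(plus:SI (reg:SI 100) (const_int 4))

   an add insn is emitted and a pseudo register holding the sum is
   returned, so the caller can use the result wherever a general operand
   is allowed; a VALUE that is already a REG, MEM or constant comes back
   unchanged.  */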
3286 force_operand (value, target)
3289 register optab binoptab = 0;
3290 /* Use a temporary to force order of execution of calls to
3294 /* Use subtarget as the target for operand 0 of a binary operation. */
3295 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3297 if (GET_CODE (value) == PLUS)
3298 binoptab = add_optab;
3299 else if (GET_CODE (value) == MINUS)
3300 binoptab = sub_optab;
3301 else if (GET_CODE (value) == MULT)
3303 op2 = XEXP (value, 1);
3304 if (!CONSTANT_P (op2)
3305 && !(GET_CODE (op2) == REG && op2 != subtarget))
3307 tmp = force_operand (XEXP (value, 0), subtarget);
3308 return expand_mult (GET_MODE (value), tmp,
3309 force_operand (op2, NULL_RTX),
3315 op2 = XEXP (value, 1);
3316 if (!CONSTANT_P (op2)
3317 && !(GET_CODE (op2) == REG && op2 != subtarget))
3319 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3321 binoptab = add_optab;
3322 op2 = negate_rtx (GET_MODE (value), op2);
3325 /* Check for an addition with OP2 a constant integer and our first
3326 operand a PLUS of a virtual register and something else. In that
3327 case, we want to emit the sum of the virtual register and the
3328 constant first and then add the other value. This allows virtual
3329 register instantiation to simply modify the constant rather than
3330 creating another one around this addition. */
3331 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3332 && GET_CODE (XEXP (value, 0)) == PLUS
3333 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3334 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3335 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3337 rtx temp = expand_binop (GET_MODE (value), binoptab,
3338 XEXP (XEXP (value, 0), 0), op2,
3339 subtarget, 0, OPTAB_LIB_WIDEN);
3340 return expand_binop (GET_MODE (value), binoptab, temp,
3341 force_operand (XEXP (XEXP (value, 0), 1), 0),
3342 target, 0, OPTAB_LIB_WIDEN);
3345 tmp = force_operand (XEXP (value, 0), subtarget);
3346 return expand_binop (GET_MODE (value), binoptab, tmp,
3347 force_operand (op2, NULL_RTX),
3348 target, 0, OPTAB_LIB_WIDEN);
3349 /* We give UNSIGNEDP = 0 to expand_binop
3350 because the only operations we are expanding here are signed ones. */
3355 /* Subroutine of expand_expr:
3356 save the non-copied parts (LIST) of an expr (LHS), and return a list
3357 which can restore these values to their previous values,
3358 should something modify their storage. */
3361 save_noncopied_parts (lhs, list)
3368 for (tail = list; tail; tail = TREE_CHAIN (tail))
3369 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3370 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3373 tree part = TREE_VALUE (tail);
3374 tree part_type = TREE_TYPE (part);
3375 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3376 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3377 int_size_in_bytes (part_type), 0);
3378 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3379 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3380 parts = tree_cons (to_be_saved,
3381 build (RTL_EXPR, part_type, NULL_TREE,
3384 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3389 /* Subroutine of expand_expr:
3390 record the non-copied parts (LIST) of an expr (LHS), and return a list
3391 which specifies the initial values of these parts. */
3394 init_noncopied_parts (lhs, list)
3401 for (tail = list; tail; tail = TREE_CHAIN (tail))
3402 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3403 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3406 tree part = TREE_VALUE (tail);
3407 tree part_type = TREE_TYPE (part);
3408 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3409 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3414 /* Subroutine of expand_expr: return nonzero iff there is no way that
3415 EXP can reference X, which is being modified. */
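/* For example (illustrative only): if X is a pseudo register and EXP is
   the expression `a + b' where neither variable's DECL_RTL is X, the
   result is 1; but if EXP contains a function call and X is a MEM or a
   hard register, we must assume the call can clobber it, so the result
   is 0.  */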
3418 safe_from_p (x, exp)
3428 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3429 find the underlying pseudo. */
3430 if (GET_CODE (x) == SUBREG)
3433 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3437 /* If X is a location in the outgoing argument area, it is always safe. */
3438 if (GET_CODE (x) == MEM
3439 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3440 || (GET_CODE (XEXP (x, 0)) == PLUS
3441 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3444 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3447 exp_rtl = DECL_RTL (exp);
3454 if (TREE_CODE (exp) == TREE_LIST)
3455 return ((TREE_VALUE (exp) == 0
3456 || safe_from_p (x, TREE_VALUE (exp)))
3457 && (TREE_CHAIN (exp) == 0
3458 || safe_from_p (x, TREE_CHAIN (exp))));
3463 return safe_from_p (x, TREE_OPERAND (exp, 0));
3467 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3468 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3472 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3473 the expression. If it is set, we conflict iff we are that rtx or
3474 both are in memory. Otherwise, we check all operands of the
3475 expression recursively. */
3477 switch (TREE_CODE (exp))
3480 return (staticp (TREE_OPERAND (exp, 0))
3481 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3484 if (GET_CODE (x) == MEM)
3489 exp_rtl = CALL_EXPR_RTL (exp);
3492 /* Assume that the call will clobber all hard registers and
3494 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3495 || GET_CODE (x) == MEM)
3502 exp_rtl = RTL_EXPR_RTL (exp);
3504 /* We don't know what this can modify. */
3509 case WITH_CLEANUP_EXPR:
3510 exp_rtl = RTL_EXPR_RTL (exp);
3514 exp_rtl = SAVE_EXPR_RTL (exp);
3518 /* The only operand we look at is operand 1. The rest aren't
3519 part of the expression. */
3520 return safe_from_p (x, TREE_OPERAND (exp, 1));
3522 case METHOD_CALL_EXPR:
3523 /* This takes a rtx argument, but shouldn't appear here. */
3527 /* If we have an rtx, we do not need to scan our operands. */
3531 nops = tree_code_length[(int) TREE_CODE (exp)];
3532 for (i = 0; i < nops; i++)
3533 if (TREE_OPERAND (exp, i) != 0
3534 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3538 /* If we have an rtl, find any enclosed object. Then see if we conflict
3542 if (GET_CODE (exp_rtl) == SUBREG)
3544 exp_rtl = SUBREG_REG (exp_rtl);
3545 if (GET_CODE (exp_rtl) == REG
3546 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3550 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3551 are memory and EXP is not readonly. */
3552 return ! (rtx_equal_p (x, exp_rtl)
3553 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3554 && ! TREE_READONLY (exp)));
3557 /* If we reach here, it is safe. */
3561 /* Subroutine of expand_expr: return nonzero iff EXP is an
3562 expression whose type is statically determinable. */
3568 if (TREE_CODE (exp) == PARM_DECL
3569 || TREE_CODE (exp) == VAR_DECL
3570 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3571 || TREE_CODE (exp) == COMPONENT_REF
3572 || TREE_CODE (exp) == ARRAY_REF)
3577 /* expand_expr: generate code for computing expression EXP.
3578 An rtx for the computed value is returned. The value is never null.
3579 In the case of a void EXP, const0_rtx is returned.
3581 The value may be stored in TARGET if TARGET is nonzero.
3582 TARGET is just a suggestion; callers must assume that
3583 the rtx returned may not be the same as TARGET.
3585 If TARGET is CONST0_RTX, it means that the value will be ignored.
3587 If TMODE is not VOIDmode, it suggests generating the
3588 result in mode TMODE. But this is done only when convenient.
3589 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3590 TMODE is just a suggestion; callers must assume that
3591 the rtx returned may not have mode TMODE.
3593 Note that TARGET may have neither TMODE nor MODE. In that case, it
3594 probably will not be used.
3596 If MODIFIER is EXPAND_SUM then when EXP is an addition
3597 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3598 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3599 products as above, or REG or MEM, or constant.
3600 Ordinarily in such cases we would output mul or add instructions
3601 and then return a pseudo reg containing the sum.
3603 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3604 it also marks a label as absolutely required (it can't be dead).
3605 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3606 This is used for outputting expressions used in initializers.
3608 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3609 with a constant address even if that address is not normally legitimate.
3610 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
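/* As an illustrative sketch of EXPAND_SUM (this mirrors how addresses
   are formed later in this file):

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

   The addition inside EXP1 may come back as a (plus ...) rtx rather
   than being added into a pseudo register, and memory_address then
   decides how to legitimize it.  */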
3613 expand_expr (exp, target, tmode, modifier)
3616 enum machine_mode tmode;
3617 enum expand_modifier modifier;
3619 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
3620 This is static so it will be accessible to our recursive callees. */
3621 static tree placeholder_list = 0;
3622 register rtx op0, op1, temp;
3623 tree type = TREE_TYPE (exp);
3624 int unsignedp = TREE_UNSIGNED (type);
3625 register enum machine_mode mode = TYPE_MODE (type);
3626 register enum tree_code code = TREE_CODE (exp);
3628 /* Use subtarget as the target for operand 0 of a binary operation. */
3629 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3630 rtx original_target = target;
3631 /* Maybe defer this until sure not doing bytecode? */
3632 int ignore = (target == const0_rtx
3633 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3634 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3635 || code == COND_EXPR)
3636 && TREE_CODE (type) == VOID_TYPE));
3640 if (output_bytecode && modifier != EXPAND_INITIALIZER)
3642 bc_expand_expr (exp);
3646 /* Don't use hard regs as subtargets, because the combiner
3647 can only handle pseudo regs. */
3648 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3650 /* Avoid subtargets inside loops,
3651 since they hide some invariant expressions. */
3652 if (preserve_subexpressions_p ())
3655 /* If we are going to ignore this result, we need only do something
3656 if there is a side-effect somewhere in the expression. If there
3657 is, short-circuit the most common cases here. Note that we must
3658 not call expand_expr with anything but const0_rtx in case this
3659 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
3663 if (! TREE_SIDE_EFFECTS (exp))
3666 /* Ensure we reference a volatile object even if value is ignored. */
3667 if (TREE_THIS_VOLATILE (exp)
3668 && TREE_CODE (exp) != FUNCTION_DECL
3669 && mode != VOIDmode && mode != BLKmode)
3671 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3672 if (GET_CODE (temp) == MEM)
3673 temp = copy_to_reg (temp);
3677 if (TREE_CODE_CLASS (code) == '1')
3678 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3679 VOIDmode, modifier);
3680 else if (TREE_CODE_CLASS (code) == '2'
3681 || TREE_CODE_CLASS (code) == '<')
3683 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3684 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3687 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3688 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3689 /* If the second operand has no side effects, just evaluate
3691 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3692 VOIDmode, modifier);
3697 /* If will do cse, generate all results into pseudo registers
3698 since 1) that allows cse to find more things
3699 and 2) otherwise cse could produce an insn the machine
3702 if (! cse_not_expected && mode != BLKmode && target
3703 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3710 tree function = decl_function_context (exp);
3711 /* Handle using a label in a containing function. */
3712 if (function != current_function_decl && function != 0)
3714 struct function *p = find_function_data (function);
3715 /* Allocate in the memory associated with the function
3716 that the label is in. */
3717 push_obstacks (p->function_obstack,
3718 p->function_maybepermanent_obstack);
3720 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3721 label_rtx (exp), p->forced_labels);
3724 else if (modifier == EXPAND_INITIALIZER)
3725 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3726 label_rtx (exp), forced_labels);
3727 temp = gen_rtx (MEM, FUNCTION_MODE,
3728 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3729 if (function != current_function_decl && function != 0)
3730 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3735 if (DECL_RTL (exp) == 0)
3737 error_with_decl (exp, "prior parameter's size depends on `%s'");
3738 return CONST0_RTX (mode);
3741 /* ... fall through ... */
3744 /* If a static var's type was incomplete when the decl was written,
3745 but the type is complete now, lay out the decl now. */
3746 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3747 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
3749 push_obstacks_nochange ();
3750 end_temporary_allocation ();
3751 layout_decl (exp, 0);
3752 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
3756 /* ... fall through ... */
3760 if (DECL_RTL (exp) == 0)
3763 /* Ensure the variable is marked as used even if it doesn't go through
3764 a parser.  If it hasn't been used yet, write out an external
3766 if (! TREE_USED (exp))
3768 assemble_external (exp);
3769 TREE_USED (exp) = 1;
3772 /* Handle variables inherited from containing functions. */
3773 context = decl_function_context (exp);
3775 /* We treat inline_function_decl as an alias for the current function
3776 because that is the inline function whose vars, types, etc.
3777 are being merged into the current function.
3778 See expand_inline_function. */
3780 if (context != 0 && context != current_function_decl
3781 && context != inline_function_decl
3782 /* If var is static, we don't need a static chain to access it. */
3783 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3784 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3788 /* Mark as non-local and addressable. */
3789 DECL_NONLOCAL (exp) = 1;
3790 mark_addressable (exp);
3791 if (GET_CODE (DECL_RTL (exp)) != MEM)
3793 addr = XEXP (DECL_RTL (exp), 0);
3794 if (GET_CODE (addr) == MEM)
3795 addr = gen_rtx (MEM, Pmode,
3796 fix_lexical_addr (XEXP (addr, 0), exp));
3798 addr = fix_lexical_addr (addr, exp);
3799 return change_address (DECL_RTL (exp), mode, addr);
3802 /* This is the case of an array whose size is to be determined
3803 from its initializer, while the initializer is still being parsed.
3806 if (GET_CODE (DECL_RTL (exp)) == MEM
3807 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3808 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3809 XEXP (DECL_RTL (exp), 0));
3811 /* If DECL_RTL is memory, we are in the normal case and either
3812 the address is not valid or it is not a register and -fforce-addr
3813 is specified, get the address into a register. */
3815 if (GET_CODE (DECL_RTL (exp)) == MEM
3816 && modifier != EXPAND_CONST_ADDRESS
3817 && modifier != EXPAND_SUM
3818 && modifier != EXPAND_INITIALIZER
3819 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3821 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
3822 return change_address (DECL_RTL (exp), VOIDmode,
3823 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3825 /* If the mode of DECL_RTL does not match that of the decl, it
3826 must be a promoted value. We return a SUBREG of the wanted mode,
3827 but mark it so that we know that it was already extended. */
3829 if (GET_CODE (DECL_RTL (exp)) == REG
3830 && GET_MODE (DECL_RTL (exp)) != mode)
3832 /* Get the signedness used for this variable. Ensure we get the
3833 same mode we got when the variable was declared. */
3834 if (GET_MODE (DECL_RTL (exp))
3835 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
3838 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3839 SUBREG_PROMOTED_VAR_P (temp) = 1;
3840 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3844 return DECL_RTL (exp);
3847 return immed_double_const (TREE_INT_CST_LOW (exp),
3848 TREE_INT_CST_HIGH (exp),
3852 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3855 /* If optimized, generate immediate CONST_DOUBLE
3856 which will be turned into memory by reload if necessary.
3858 We used to force a register so that loop.c could see it. But
3859 this does not allow gen_* patterns to perform optimizations with
3860 the constants. It also produces two insns in cases like "x = 1.0;".
3861 On most machines, floating-point constants are not permitted in
3862 many insns, so we'd end up copying it to a register in any case.
3864 Now, we do the copying in expand_binop, if appropriate. */
3865 return immed_real_const (exp);
3869 if (! TREE_CST_RTL (exp))
3870 output_constant_def (exp);
3872 /* TREE_CST_RTL probably contains a constant address.
3873 On RISC machines where a constant address isn't valid,
3874 make some insns to get that address into a register. */
3875 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3876 && modifier != EXPAND_CONST_ADDRESS
3877 && modifier != EXPAND_INITIALIZER
3878 && modifier != EXPAND_SUM
3879 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
3881 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
3882 return change_address (TREE_CST_RTL (exp), VOIDmode,
3883 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3884 return TREE_CST_RTL (exp);
3887 context = decl_function_context (exp);
3889 /* We treat inline_function_decl as an alias for the current function
3890 because that is the inline function whose vars, types, etc.
3891 are being merged into the current function.
3892 See expand_inline_function. */
3893 if (context == current_function_decl || context == inline_function_decl)
3896 /* If this is non-local, handle it. */
3899 temp = SAVE_EXPR_RTL (exp);
3900 if (temp && GET_CODE (temp) == REG)
3902 put_var_into_stack (exp);
3903 temp = SAVE_EXPR_RTL (exp);
3905 if (temp == 0 || GET_CODE (temp) != MEM)
3907 return change_address (temp, mode,
3908 fix_lexical_addr (XEXP (temp, 0), exp));
3910 if (SAVE_EXPR_RTL (exp) == 0)
3912 if (mode == BLKmode)
3915 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3916 MEM_IN_STRUCT_P (temp)
3917 = (TREE_CODE (type) == RECORD_TYPE
3918 || TREE_CODE (type) == UNION_TYPE
3919 || TREE_CODE (type) == QUAL_UNION_TYPE
3920 || TREE_CODE (type) == ARRAY_TYPE);
3923 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
3925 SAVE_EXPR_RTL (exp) = temp;
3926 if (!optimize && GET_CODE (temp) == REG)
3927 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3930 /* If the mode of TEMP does not match that of the expression, it
3931 must be a promoted value. We pass store_expr a SUBREG of the
3932 wanted mode but mark it so that we know that it was already
3933 extended. Note that `unsignedp' was modified above in
3936 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3938 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3939 SUBREG_PROMOTED_VAR_P (temp) = 1;
3940 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3943 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3946 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3947 must be a promoted value. We return a SUBREG of the wanted mode,
3948 but mark it so that we know that it was already extended. */
3950 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3951 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3953 temp = gen_rtx (SUBREG, promote_mode (type, mode, &unsignedp, 0),
3954 SAVE_EXPR_RTL (exp), 0);
3955 SUBREG_PROMOTED_VAR_P (temp) = 1;
3956 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3960 return SAVE_EXPR_RTL (exp);
3962 case PLACEHOLDER_EXPR:
3963 /* If there is an object on the head of the placeholder list,
3964 see if some object in its references is of type TYPE. For
3965 further information, see tree.def. */
3966 if (placeholder_list)
3969 tree old_list = placeholder_list;
3971 for (object = TREE_PURPOSE (placeholder_list);
3972 TREE_TYPE (object) != type
3973 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
3974 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
3975 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
3976 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
3977 object = TREE_OPERAND (object, 0))
3980 if (object && TREE_TYPE (object) == type)
3982 /* Expand this object skipping the list entries before
3983 it was found in case it is also a PLACEHOLDER_EXPR.
3984 In that case, we want to translate it using subsequent
3986 placeholder_list = TREE_CHAIN (placeholder_list);
3987 temp = expand_expr (object, original_target, tmode, modifier);
3988 placeholder_list = old_list;
3993 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
3996 case WITH_RECORD_EXPR:
3997 /* Put the object on the placeholder list, expand our first operand,
3998 and pop the list. */
3999 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4001 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4003 placeholder_list = TREE_CHAIN (placeholder_list);
4007 expand_exit_loop_if_false (NULL_PTR,
4008 invert_truthvalue (TREE_OPERAND (exp, 0)));
4013 expand_start_loop (1);
4014 expand_expr_stmt (TREE_OPERAND (exp, 0));
4022 tree vars = TREE_OPERAND (exp, 0);
4023 int vars_need_expansion = 0;
4025 /* Need to open a binding contour here because
4026 if there are any cleanups they must be contained here. */
4027 expand_start_bindings (0);
4029 /* Mark the corresponding BLOCK for output in its proper place. */
4030 if (TREE_OPERAND (exp, 2) != 0
4031 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4032 insert_block (TREE_OPERAND (exp, 2));
4034 /* If VARS have not yet been expanded, expand them now. */
4037 if (DECL_RTL (vars) == 0)
4039 vars_need_expansion = 1;
4042 expand_decl_init (vars);
4043 vars = TREE_CHAIN (vars);
4046 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4048 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4054 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4056 emit_insns (RTL_EXPR_SEQUENCE (exp));
4057 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4058 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4059 free_temps_for_rtl_expr (exp);
4060 return RTL_EXPR_RTL (exp);
4063 /* If we don't need the result, just ensure we evaluate any
4068 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4069 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4073 /* All elts simple constants => refer to a constant in memory. But
4074 if this is a non-BLKmode mode, let it store a field at a time
4075 since that should make a CONST_INT or CONST_DOUBLE when we
4076 fold. Likewise, if we have a target we can use, it is best to
4077 store directly into the target. If we are making an initializer and
4078 all operands are constant, put it in memory as well. */
4079 else if ((TREE_STATIC (exp)
4080 && ((mode == BLKmode
4081 && ! (target != 0 && safe_from_p (target, exp)))
4082 || TREE_ADDRESSABLE (exp)))
4083 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4085 rtx constructor = output_constant_def (exp);
4086 if (modifier != EXPAND_CONST_ADDRESS
4087 && modifier != EXPAND_INITIALIZER
4088 && modifier != EXPAND_SUM
4089 && (! memory_address_p (GET_MODE (constructor),
4090 XEXP (constructor, 0))
4092 && GET_CODE (XEXP (constructor, 0)) != REG)))
4093 constructor = change_address (constructor, VOIDmode,
4094 XEXP (constructor, 0));
4100 if (target == 0 || ! safe_from_p (target, exp))
4102 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4103 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4106 enum tree_code c = TREE_CODE (type);
4108 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4109 if (c == RECORD_TYPE || c == UNION_TYPE
4110 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
4111 MEM_IN_STRUCT_P (target) = 1;
4114 store_constructor (exp, target);
4120 tree exp1 = TREE_OPERAND (exp, 0);
4123 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4124 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4125 This code has the same general effect as simply doing
4126 expand_expr on the save expr, except that the expression PTR
4127 is computed for use as a memory address. This means different
4128 code, suitable for indexing, may be generated. */
4129 if (TREE_CODE (exp1) == SAVE_EXPR
4130 && SAVE_EXPR_RTL (exp1) == 0
4131 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4132 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4133 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4135 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4136 VOIDmode, EXPAND_SUM);
4137 op0 = memory_address (mode, temp);
4138 op0 = copy_all_regs (op0);
4139 SAVE_EXPR_RTL (exp1) = op0;
4143 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4144 op0 = memory_address (mode, op0);
4147 temp = gen_rtx (MEM, mode, op0);
4148 /* If address was computed by addition,
4149 mark this as an element of an aggregate. */
4150 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4151 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4152 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4153 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4154 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4155 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4156 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
4157 || (TREE_CODE (exp1) == ADDR_EXPR
4158 && (exp2 = TREE_OPERAND (exp1, 0))
4159 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4160 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4161 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
4162 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
4163 MEM_IN_STRUCT_P (temp) = 1;
4164 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4165 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4166 a location is accessed through a pointer to const does not mean
4167 that the value there can never change. */
4168 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4174 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4178 tree array = TREE_OPERAND (exp, 0);
4179 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4180 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4181 tree index = TREE_OPERAND (exp, 1);
4182 tree index_type = TREE_TYPE (index);
4185 if (TREE_CODE (low_bound) != INTEGER_CST
4186 && contains_placeholder_p (low_bound))
4187 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4189 /* Optimize the special case of a zero lower bound.
4191 We convert the low_bound to sizetype to avoid some problems
4192 with constant folding. (E.g. suppose the lower bound is 1,
4193 and its mode is QI. Without the conversion, (ARRAY
4194 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4195 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4197 But sizetype isn't quite right either (especially if
4198 the lowbound is negative). FIXME */
4200 if (! integer_zerop (low_bound))
4201 index = fold (build (MINUS_EXPR, index_type, index,
4202 convert (sizetype, low_bound)));
4204 if (TREE_CODE (index) != INTEGER_CST
4205 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4207 /* Nonconstant array index or nonconstant element size.
4208 Generate the tree for *(&array+index) and expand that,
4209 except do it in a language-independent way
4210 and don't complain about non-lvalue arrays.
4211 `mark_addressable' should already have been called
4212 for any array for which this case will be reached. */
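/* Illustrative sketch (not part of the compiler): for a nonconstant index
   the reference is rewritten at the tree level, roughly

       a[i]   ==>   *(elt_type *) ((char *) &a + i * sizeof (a[0]))

   so the address arithmetic can be expanded like any other pointer
   expression.  The C cast spelling above is only an analogy for the
   INDIRECT_REF / PLUS_EXPR / MULT_EXPR tree actually built below.  */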
4214 /* Don't forget the const or volatile flag from the array element. */
4216 tree variant_type = build_type_variant (type,
4217 TREE_READONLY (exp),
4218 TREE_THIS_VOLATILE (exp));
4219 tree array_adr = build1 (ADDR_EXPR,
4220 build_pointer_type (variant_type), array);
4222 tree size = size_in_bytes (type);
4224 /* Convert the integer argument to a type the same size as a
4225 pointer so the multiply won't overflow spuriously. */
4226 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4227 index = convert (type_for_size (POINTER_SIZE, 0), index);
4229 if (TREE_CODE (size) != INTEGER_CST
4230 && contains_placeholder_p (size))
4231 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4233 /* Don't think the address has side effects
4234 just because the array does.
4235 (In some cases the address might have side effects,
4236 and we fail to record that fact here. However, it should not
4237 matter, since expand_expr should not care.) */
4238 TREE_SIDE_EFFECTS (array_adr) = 0;
4240 elt = build1 (INDIRECT_REF, type,
4241 fold (build (PLUS_EXPR,
4242 TYPE_POINTER_TO (variant_type),
4244 fold (build (MULT_EXPR,
4245 TYPE_POINTER_TO (variant_type),
4248 /* Volatility, etc., of the new expression is the same as that of the old expression. */
4250 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4251 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4252 TREE_READONLY (elt) = TREE_READONLY (exp);
4254 return expand_expr (elt, target, tmode, modifier);
4257 /* Fold an expression like: "foo"[2].
4258 This is not done in fold so it won't happen inside &. */
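/* For illustration only: the fold performed just below turns a constant
   index into a string literal directly into the character value, e.g.

       "foo"[2]   ==>   (int) 'o'

   provided the index is a small INTEGER_CST within the string length and
   the result mode is an integer mode.  */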
4260 if (TREE_CODE (array) == STRING_CST
4261 && TREE_CODE (index) == INTEGER_CST
4262 && !TREE_INT_CST_HIGH (index)
4263 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4264 && GET_MODE_CLASS (mode) == MODE_INT)
4265 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4267 /* If this is a constant index into a constant array,
4268 just get the value from the array. Handle both the cases when
4269 we have an explicit constructor and when our operand is a variable
4270 that was declared const. */
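/* Illustrative sketch (not from this file): both forms are handled, an
   explicit constructor and a const-qualified variable with an initializer,
   e.g.

       static const int t[3] = {10, 20, 30};
       ... t[1] ...

   can be expanded as the constant 20 (the DECL_INITIAL case requires -O),
   by walking CONSTRUCTOR_ELTS or DECL_INITIAL to the requested element
   instead of emitting a memory load.  */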
4272 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4274 if (TREE_CODE (index) == INTEGER_CST
4275 && TREE_INT_CST_HIGH (index) == 0)
4277 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4279 i = TREE_INT_CST_LOW (index);
4281 elem = TREE_CHAIN (elem);
4283 return expand_expr (fold (TREE_VALUE (elem)), target,
4288 else if (optimize >= 1
4289 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4290 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4291 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4293 if (TREE_CODE (index) == INTEGER_CST
4294 && TREE_INT_CST_HIGH (index) == 0)
4296 tree init = DECL_INITIAL (array);
4298 i = TREE_INT_CST_LOW (index);
4299 if (TREE_CODE (init) == CONSTRUCTOR)
4301 tree elem = CONSTRUCTOR_ELTS (init);
4304 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4305 elem = TREE_CHAIN (elem);
4307 return expand_expr (fold (TREE_VALUE (elem)), target,
4310 else if (TREE_CODE (init) == STRING_CST
4311 && i < TREE_STRING_LENGTH (init))
4312 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4317 /* Treat array-ref with constant index as a component-ref. */
4321 /* If the operand is a CONSTRUCTOR, we can just extract the
4322 appropriate field if it is present. */
4323 if (code != ARRAY_REF
4324 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4328 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4329 elt = TREE_CHAIN (elt))
4330 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4331 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4335 enum machine_mode mode1;
4340 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4341 &mode1, &unsignedp, &volatilep);
4344 /* If we got back the original object, something is wrong. Perhaps
4345 we are evaluating an expression too early. In any event, don't
4346 infinitely recurse. */
4350 /* In some cases, we will be offsetting OP0's address by a constant.
4351 So get it as a sum, if possible. If we will be using it
4352 directly in an insn, we validate it. */
4353 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4355 /* If this is a constant, put it into a register if it is a
4356 legitimate constant and memory if it isn't. */
4357 if (CONSTANT_P (op0))
4359 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4360 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4361 op0 = force_reg (mode, op0);
4363 op0 = validize_mem (force_const_mem (mode, op0));
4366 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4369 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4371 if (GET_CODE (op0) != MEM)
4373 op0 = change_address (op0, VOIDmode,
4374 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4375 force_reg (Pmode, offset_rtx)));
4376 /* If we have a variable offset, the known alignment
4377 is only that of the innermost structure containing the field.
4378 (Actually, we could sometimes do better by using the
4379 size of an element of the innermost array, but no need.) */
4380 if (TREE_CODE (exp) == COMPONENT_REF
4381 || TREE_CODE (exp) == BIT_FIELD_REF)
4382 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4386 /* Don't forget about volatility even if this is a bitfield. */
4387 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4389 op0 = copy_rtx (op0);
4390 MEM_VOLATILE_P (op0) = 1;
4393 /* In cases where an aligned union has an unaligned object
4394 as a field, we might be extracting a BLKmode value from
4395 an integer-mode (e.g., SImode) object. Handle this case
4396 by doing the extract into an object as wide as the field
4397 (which we know to be the width of a basic mode), then
4398 storing into memory, and changing the mode to BLKmode. */
4399 if (mode1 == VOIDmode
4400 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4401 && modifier != EXPAND_CONST_ADDRESS
4402 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4403 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4404 /* If the field isn't aligned enough to fetch as a memref,
4405 fetch it as a bit field. */
4406 || (STRICT_ALIGNMENT
4407 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4408 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4410 enum machine_mode ext_mode = mode;
4412 if (ext_mode == BLKmode)
4413 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4415 if (ext_mode == BLKmode)
4418 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4419 unsignedp, target, ext_mode, ext_mode,
4421 int_size_in_bytes (TREE_TYPE (tem)));
4422 if (mode == BLKmode)
4424 rtx new = assign_stack_temp (ext_mode,
4425 bitsize / BITS_PER_UNIT, 0);
4427 emit_move_insn (new, op0);
4428 op0 = copy_rtx (new);
4429 PUT_MODE (op0, BLKmode);
4430 MEM_IN_STRUCT_P (op0) = 1;
4436 /* Get a reference to just this component. */
4437 if (modifier == EXPAND_CONST_ADDRESS
4438 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4439 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4440 (bitpos / BITS_PER_UNIT)));
4442 op0 = change_address (op0, mode1,
4443 plus_constant (XEXP (op0, 0),
4444 (bitpos / BITS_PER_UNIT)));
4445 MEM_IN_STRUCT_P (op0) = 1;
4446 MEM_VOLATILE_P (op0) |= volatilep;
4447 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4450 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4451 convert_move (target, op0, unsignedp);
4457 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4458 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4459 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4460 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4461 MEM_IN_STRUCT_P (temp) = 1;
4462 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4463 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4464 a location is accessed through a pointer to const does not mean
4465 that the value there can never change. */
4466 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4471 /* Intended for a reference to a buffer of a file-object in Pascal.
4472 But it's not certain that a special tree code will really be
4473 necessary for these. INDIRECT_REF might work for them. */
4479 /* Pascal set IN expression.
4482 rlo = set_low - (set_low%bits_per_word);
4483 the_word = set [ (index - rlo)/bits_per_word ];
4484 bit_index = index % bits_per_word;
4485 bitmask = 1 << bit_index;
4486 return !!(the_word & bitmask); */
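/* Illustrative C sketch of the bit test described above (hypothetical
   helper, not used by the compiler); `set' is assumed to be the byte
   array holding the set and `set_low' its lower domain bound:

       int in_set (unsigned char *set, int set_low, int index)
       {
         int rlo = set_low - (set_low % BITS_PER_UNIT);
         unsigned char word = set[(index - rlo) / BITS_PER_UNIT];
         return (word >> (index % BITS_PER_UNIT)) & 1;
       }

   The RTL emitted below follows the same steps, working in byte units.  */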
4488 tree set = TREE_OPERAND (exp, 0);
4489 tree index = TREE_OPERAND (exp, 1);
4490 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4491 tree set_type = TREE_TYPE (set);
4492 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4493 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4494 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4495 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4496 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4497 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4498 rtx setaddr = XEXP (setval, 0);
4499 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4501 rtx diff, quo, rem, addr, bit, result;
4503 preexpand_calls (exp);
4505 /* If domain is empty, answer is no. Likewise if index is constant
4506 and out of bounds. */
4507 if ((TREE_CODE (set_high_bound) == INTEGER_CST
4508 && TREE_CODE (set_low_bound) == INTEGER_CST
4509 && tree_int_cst_lt (set_high_bound, set_low_bound)
4510 || (TREE_CODE (index) == INTEGER_CST
4511 && TREE_CODE (set_low_bound) == INTEGER_CST
4512 && tree_int_cst_lt (index, set_low_bound))
4513 || (TREE_CODE (set_high_bound) == INTEGER_CST
4514 && TREE_CODE (index) == INTEGER_CST
4515 && tree_int_cst_lt (set_high_bound, index))))
4519 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4521 /* If we get here, we have to generate the code for both cases
4522 (in range and out of range). */
4524 op0 = gen_label_rtx ();
4525 op1 = gen_label_rtx ();
4527 if (! (GET_CODE (index_val) == CONST_INT
4528 && GET_CODE (lo_r) == CONST_INT))
4530 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4531 GET_MODE (index_val), iunsignedp, 0);
4532 emit_jump_insn (gen_blt (op1));
4535 if (! (GET_CODE (index_val) == CONST_INT
4536 && GET_CODE (hi_r) == CONST_INT))
4538 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4539 GET_MODE (index_val), iunsignedp, 0);
4540 emit_jump_insn (gen_bgt (op1));
4543 /* Calculate the element number of bit zero in the first word of the set. */
4545 if (GET_CODE (lo_r) == CONST_INT)
4546 rlow = GEN_INT (INTVAL (lo_r)
4547 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4549 rlow = expand_binop (index_mode, and_optab, lo_r,
4550 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4551 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4553 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4554 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4556 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4557 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4558 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4559 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4561 addr = memory_address (byte_mode,
4562 expand_binop (index_mode, add_optab, diff,
4563 setaddr, NULL_RTX, iunsignedp,
4566 /* Extract the bit we want to examine */
4567 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4568 gen_rtx (MEM, byte_mode, addr),
4569 make_tree (TREE_TYPE (index), rem),
4571 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4572 GET_MODE (target) == byte_mode ? target : 0,
4573 1, OPTAB_LIB_WIDEN);
4575 if (result != target)
4576 convert_move (target, result, 1);
4578 /* Output the code to handle the out-of-range case. */
4581 emit_move_insn (target, const0_rtx);
4586 case WITH_CLEANUP_EXPR:
4587 if (RTL_EXPR_RTL (exp) == 0)
4590 = expand_expr (TREE_OPERAND (exp, 0),
4591 target ? target : const0_rtx,
4594 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4595 /* That's it for this cleanup. */
4596 TREE_OPERAND (exp, 2) = 0;
4598 return RTL_EXPR_RTL (exp);
4601 /* Check for a built-in function. */
4602 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4603 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4605 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4606 return expand_builtin (exp, target, subtarget, tmode, ignore);
4608 /* If this call was expanded already by preexpand_calls,
4609 just return the result we got. */
4610 if (CALL_EXPR_RTL (exp) != 0)
4611 return CALL_EXPR_RTL (exp);
4613 return expand_call (exp, target, ignore);
4615 case NON_LVALUE_EXPR:
4618 case REFERENCE_EXPR:
4619 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4620 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4622 if (TREE_CODE (type) == UNION_TYPE)
4624 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4627 if (mode == BLKmode)
4629 if (TYPE_SIZE (type) == 0
4630 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4632 target = assign_stack_temp (BLKmode,
4633 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4634 + BITS_PER_UNIT - 1)
4635 / BITS_PER_UNIT, 0);
4638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4641 if (GET_CODE (target) == MEM)
4642 /* Store data into beginning of memory target. */
4643 store_expr (TREE_OPERAND (exp, 0),
4644 change_address (target, TYPE_MODE (valtype), 0), 0);
4646 else if (GET_CODE (target) == REG)
4647 /* Store this field into a union of the proper type. */
4648 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4649 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4651 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4655 /* Return the entire union. */
4659 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4660 if (GET_MODE (op0) == mode)
4663 /* If OP0 is a constant, just convert it into the proper mode. */
4664 if (CONSTANT_P (op0))
4666 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4667 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4669 if (modifier == EXPAND_INITIALIZER)
4670 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4672 if (flag_force_mem && GET_CODE (op0) == MEM)
4673 op0 = copy_to_reg (op0);
4677 convert_to_mode (mode, op0,
4678 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4680 convert_move (target, op0,
4681 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4685 /* We come here from MINUS_EXPR when the second operand is a constant. */
4687 this_optab = add_optab;
4689 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4690 something else, make sure we add the register to the constant and
4691 then to the other thing. This case can occur during strength
4692 reduction and doing it this way will produce better code if the
4693 frame pointer or argument pointer is eliminated.
4695 fold-const.c will ensure that the constant is always in the inner
4696 PLUS_EXPR, so the only case we need to do anything about is if
4697 sp, ap, or fp is our second argument, in which case we must swap
4698 the innermost first argument and our second argument. */
4700 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4701 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4702 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4703 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4704 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4705 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4707 tree t = TREE_OPERAND (exp, 1);
4709 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4710 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4713 /* If the result is to be Pmode and we are adding an integer to
4714 something, we might be forming a constant. So try to use
4715 plus_constant. If it produces a sum and we can't accept it,
4716 use force_operand. This allows P = &ARR[const] to generate
4717 efficient code on machines where a SYMBOL_REF is not a valid memory address.
4720 If this is an EXPAND_SUM call, always return the sum. */
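/* For illustration only: with EXPAND_SUM a reference such as
   `p = &arr[5]' (arr an array of 4-byte ints) can come back from
   plus_constant as the unvalidated sum

       (plus (symbol_ref "arr") (const_int 20))

   which the caller may fold directly into an address.  Only when a plain
   value is required does force_operand turn the sum into a legitimate
   operand.  */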
4721 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4724 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4725 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4726 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4728 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4730 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4731 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4732 op1 = force_operand (op1, target);
4736 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4737 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4738 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4740 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4742 if (! CONSTANT_P (op0))
4744 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4745 VOIDmode, modifier);
4746 /* Don't go to both_summands if modifier
4747 says it's not right to return a PLUS. */
4748 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4752 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4753 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4754 op0 = force_operand (op0, target);
4759 /* No sense saving up arithmetic to be done
4760 if it's all in the wrong mode to form part of an address.
4761 And force_operand won't know whether to sign-extend or zero-extend. */
4763 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4767 preexpand_calls (exp);
4768 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4771 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4772 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4775 /* Make sure any term that's a sum with a constant comes last. */
4776 if (GET_CODE (op0) == PLUS
4777 && CONSTANT_P (XEXP (op0, 1)))
4783 /* If adding to a sum including a constant,
4784 associate it to put the constant outside. */
4785 if (GET_CODE (op1) == PLUS
4786 && CONSTANT_P (XEXP (op1, 1)))
4788 rtx constant_term = const0_rtx;
4790 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4793 /* Ensure that MULT comes first if there is one. */
4794 else if (GET_CODE (op0) == MULT)
4795 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4797 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4799 /* Let's also eliminate constants from op0 if possible. */
4800 op0 = eliminate_constant_term (op0, &constant_term);
4802 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4803 their sum should be a constant. Form it into OP1, since the
4804 result we want will then be OP0 + OP1. */
4806 temp = simplify_binary_operation (PLUS, mode, constant_term,
4811 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4814 /* Put a constant term last and put a multiplication first. */
4815 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4816 temp = op1, op1 = op0, op0 = temp;
4818 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4819 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4822 /* For initializers, we are allowed to return a MINUS of two
4823 symbolic constants. Here we handle all cases when both operands are constant. */
4825 /* Handle difference of two symbolic constants,
4826 for the sake of an initializer. */
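/* Illustrative example (hedged, not from this file) of the kind of operand
   pair meant here: an initializer such as

       static char buf[16];
       static int len = (char *) &buf[10] - (char *) &buf[2];

   has two really_constant_p operands, so the code below can return either
   a folded constant (8 here, via plus_constant of the negated constant)
   or a symbolic MINUS that the assembler resolves, without emitting any
   insns.  */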
4827 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4828 && really_constant_p (TREE_OPERAND (exp, 0))
4829 && really_constant_p (TREE_OPERAND (exp, 1)))
4831 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4832 VOIDmode, modifier);
4833 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4834 VOIDmode, modifier);
4836 /* If one operand is a CONST_INT, put it last. */
4837 if (GET_CODE (op0) == CONST_INT)
4838 temp = op0, op0 = op1, op1 = temp;
4840 /* If the last operand is a CONST_INT, use plus_constant of
4841 the negated constant. Else make the MINUS. */
4842 if (GET_CODE (op1) == CONST_INT)
4843 return plus_constant (op0, - INTVAL (op1));
4845 return gen_rtx (MINUS, mode, op0, op1);
4847 /* Convert A - const to A + (-const). */
4848 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4850 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4851 fold (build1 (NEGATE_EXPR, type,
4852 TREE_OPERAND (exp, 1))));
4855 this_optab = sub_optab;
4859 preexpand_calls (exp);
4860 /* If first operand is constant, swap them.
4861 Thus the following special case checks need only
4862 check the second operand. */
4863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4865 register tree t1 = TREE_OPERAND (exp, 0);
4866 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4867 TREE_OPERAND (exp, 1) = t1;
4870 /* Attempt to return something suitable for generating an
4871 indexed address, for machines that support that. */
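/* Illustrative sketch only: when expanding an address with EXPAND_SUM, a
   multiplication whose first operand already expanded to x+c is
   distributed so the constant part stays foldable, e.g.

       (i + 3) * 4   ==>   (plus (mult i_reg (const_int 4)) (const_int 12))

   which is exactly the shape an indexed addressing mode wants.  */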
4873 if (modifier == EXPAND_SUM && mode == Pmode
4874 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4875 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4877 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4879 /* Apply distributive law if OP0 is x+c. */
4880 if (GET_CODE (op0) == PLUS
4881 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4882 return gen_rtx (PLUS, mode,
4883 gen_rtx (MULT, mode, XEXP (op0, 0),
4884 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4885 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4886 * INTVAL (XEXP (op0, 1))));
4888 if (GET_CODE (op0) != REG)
4889 op0 = force_operand (op0, NULL_RTX);
4890 if (GET_CODE (op0) != REG)
4891 op0 = copy_to_mode_reg (mode, op0);
4893 return gen_rtx (MULT, mode, op0,
4894 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4897 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4900 /* Check for multiplying things that have been extended
4901 from a narrower type. If this machine supports multiplying
4902 in that narrower type with a result in the desired type,
4903 do it that way, and avoid the explicit type-conversion. */
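/* For illustration (not compiler code): the tree shape recognized here
   roughly corresponds to source such as

       short a, b;
       long  r = (long) a * (long) b;

   On a machine whose smul_widen_optab (or umul_widen_optab) handles the
   narrow-to-wide case, the multiply is emitted directly in the narrow
   mode with a wide result, skipping the two explicit extensions.  */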
4904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4905 && TREE_CODE (type) == INTEGER_TYPE
4906 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4907 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4908 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4909 && int_fits_type_p (TREE_OPERAND (exp, 1),
4910 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4911 /* Don't use a widening multiply if a shift will do. */
4912 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4913 > HOST_BITS_PER_WIDE_INT)
4914 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4916 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4917 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4919 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4920 /* If both operands are extended, they must either both
4921 be zero-extended or both be sign-extended. */
4922 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4924 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4926 enum machine_mode innermode
4927 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4928 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4929 ? umul_widen_optab : smul_widen_optab);
4930 if (mode == GET_MODE_WIDER_MODE (innermode)
4931 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4933 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4934 NULL_RTX, VOIDmode, 0);
4935 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4936 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4939 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4940 NULL_RTX, VOIDmode, 0);
4944 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4945 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4946 return expand_mult (mode, op0, op1, target, unsignedp);
4948 case TRUNC_DIV_EXPR:
4949 case FLOOR_DIV_EXPR:
4951 case ROUND_DIV_EXPR:
4952 case EXACT_DIV_EXPR:
4953 preexpand_calls (exp);
4954 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4956 /* Possible optimization: compute the dividend with EXPAND_SUM;
4957 then, if the divisor is constant, we can optimize the case
4958 where some terms of the dividend have coefficients divisible by it. */
4959 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4960 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4961 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4964 this_optab = flodiv_optab;
4967 case TRUNC_MOD_EXPR:
4968 case FLOOR_MOD_EXPR:
4970 case ROUND_MOD_EXPR:
4971 preexpand_calls (exp);
4972 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4974 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4975 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4976 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4978 case FIX_ROUND_EXPR:
4979 case FIX_FLOOR_EXPR:
4981 abort (); /* Not used for C. */
4983 case FIX_TRUNC_EXPR:
4984 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4986 target = gen_reg_rtx (mode);
4987 expand_fix (target, op0, unsignedp);
4991 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4993 target = gen_reg_rtx (mode);
4994 /* expand_float can't figure out what to do if FROM has VOIDmode.
4995 So give it the correct mode. With -O, cse will optimize this. */
4996 if (GET_MODE (op0) == VOIDmode)
4997 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4999 expand_float (target, op0,
5000 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5004 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5005 temp = expand_unop (mode, neg_optab, op0, target, 0);
5011 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5013 /* Handle complex values specially. */
5014 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5015 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5016 return expand_complex_abs (mode, op0, target, unsignedp);
5018 /* Unsigned abs is simply the operand. Testing here means we don't
5019 risk generating incorrect code below. */
5020 if (TREE_UNSIGNED (type))
5023 /* First try to do it with a special abs instruction. */
5024 temp = expand_unop (mode, abs_optab, op0, target, 0);
5028 /* If this machine has expensive jumps, we can do integer absolute
5029 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5030 where W is the width of MODE. */
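/* Worked example of the identity used below (illustrative only): for a
   32-bit int, W = 32 and the arithmetic shift replicates the sign bit, so

       s = x >> 31;        s is 0 for x >= 0, -1 for x < 0
       r = (s ^ x) - s;    r = x when s == 0, r = ~x + 1 = -x when s == -1

   giving the absolute value with no conditional jump (with the usual
   caveat that the most negative value still overflows, just as NEG does).  */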
5032 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5034 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5035 size_int (GET_MODE_BITSIZE (mode) - 1),
5038 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5041 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5048 /* If that does not win, use conditional jump and negate. */
5049 target = original_target;
5050 op1 = gen_label_rtx ();
5051 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5052 || GET_MODE (target) != mode
5053 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5054 || (GET_CODE (target) == REG
5055 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5056 target = gen_reg_rtx (mode);
5058 emit_move_insn (target, op0);
5061 /* If this mode is an integer too wide to compare properly,
5062 compare word by word. Rely on CSE to optimize constant cases. */
5063 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
5064 do_jump_by_parts_greater_rtx (mode, 0, target, const0_rtx,
5068 temp = compare_from_rtx (target, CONST0_RTX (mode), GE, 0, mode,
5070 if (temp == const1_rtx)
5072 else if (temp != const0_rtx)
5074 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5075 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op1));
5081 op0 = expand_unop (mode, neg_optab, target, target, 0);
5083 emit_move_insn (target, op0);
5090 target = original_target;
5091 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5092 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5093 || GET_MODE (target) != mode
5094 || (GET_CODE (target) == REG
5095 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5096 target = gen_reg_rtx (mode);
5097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5098 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5100 /* First try to do it with a special MIN or MAX instruction.
5101 If that does not win, use a conditional jump to select the proper value. */
5103 this_optab = (TREE_UNSIGNED (type)
5104 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5105 : (code == MIN_EXPR ? smin_optab : smax_optab));
5107 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5113 emit_move_insn (target, op0);
5115 op0 = gen_label_rtx ();
5117 /* If this mode is an integer too wide to compare properly,
5118 compare word by word. Rely on cse to optimize constant cases. */
5119 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5121 if (code == MAX_EXPR)
5122 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5123 target, op1, NULL_RTX, op0);
5125 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5126 op1, target, NULL_RTX, op0);
5127 emit_move_insn (target, op1);
5131 if (code == MAX_EXPR)
5132 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5133 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5134 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5136 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5137 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5138 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5139 if (temp == const0_rtx)
5140 emit_move_insn (target, op1);
5141 else if (temp != const_true_rtx)
5143 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5144 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5147 emit_move_insn (target, op1);
5154 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5155 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5161 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5162 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5167 /* ??? Can optimize bitwise operations with one arg constant.
5168 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5169 and (a bitwise1 b) bitwise2 b (etc)
5170 but that is probably not worthwhile. */
5172 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5173 boolean values when we want in all cases to compute both of them. In
5174 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5175 as actual zero-or-1 values and then bitwise anding. In cases where
5176 there cannot be any side effects, better code would be made by
5177 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5178 how to recognize those cases. */
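/* Illustrative contrast (not from this file), for zero-or-one operands:

       a && b   (TRUTH_ANDIF_EXPR)  must skip evaluating b when a == 0;
       a & b    and TRUTH_AND_EXPR  may evaluate both and bitwise-and them.

   The unconditional form handled below is therefore appropriate only
   where evaluating both operands is known to be acceptable.  */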
5180 case TRUTH_AND_EXPR:
5182 this_optab = and_optab;
5187 this_optab = ior_optab;
5190 case TRUTH_XOR_EXPR:
5192 this_optab = xor_optab;
5199 preexpand_calls (exp);
5200 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5202 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5203 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5206 /* Could determine the answer when only additive constants differ. Also,
5207 the addition of one can be handled by changing the condition. */
5214 preexpand_calls (exp);
5215 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5219 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5220 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5222 && GET_CODE (original_target) == REG
5223 && (GET_MODE (original_target)
5224 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5226 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5229 if (temp != original_target)
5230 temp = copy_to_reg (temp);
5232 op1 = gen_label_rtx ();
5233 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5234 GET_MODE (temp), unsignedp, 0);
5235 emit_jump_insn (gen_beq (op1));
5236 emit_move_insn (temp, const1_rtx);
5241 /* If no set-flag instruction, must generate a conditional
5242 store into a temporary variable. Drop through
5243 and handle this like && and ||. */
5245 case TRUTH_ANDIF_EXPR:
5246 case TRUTH_ORIF_EXPR:
5248 && (target == 0 || ! safe_from_p (target, exp)
5249 /* Make sure we don't have a hard reg (such as function's return
5250 value) live across basic blocks, if not optimizing. */
5251 || (!optimize && GET_CODE (target) == REG
5252 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5253 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5256 emit_clr_insn (target);
5258 op1 = gen_label_rtx ();
5259 jumpifnot (exp, op1);
5262 emit_0_to_1_insn (target);
5265 return ignore ? const0_rtx : target;
5267 case TRUTH_NOT_EXPR:
5268 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5269 /* The parser is careful to generate TRUTH_NOT_EXPR
5270 only with operands that are always zero or one. */
5271 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5272 target, 1, OPTAB_LIB_WIDEN);
5278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5280 return expand_expr (TREE_OPERAND (exp, 1),
5281 (ignore ? const0_rtx : target),
5286 /* Note that COND_EXPRs whose type is a structure or union
5287 are required to be constructed to contain assignments of
5288 a temporary variable, so that we can evaluate them here
5289 for side effect only. If type is void, we must do likewise. */
5291 /* If an arm of the branch requires a cleanup,
5292 only that cleanup is performed. */
5295 tree binary_op = 0, unary_op = 0;
5296 tree old_cleanups = cleanups_this_call;
5297 cleanups_this_call = 0;
5299 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5300 convert it to our mode, if necessary. */
5301 if (integer_onep (TREE_OPERAND (exp, 1))
5302 && integer_zerop (TREE_OPERAND (exp, 2))
5303 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5312 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5313 if (GET_MODE (op0) == mode)
5317 target = gen_reg_rtx (mode);
5318 convert_move (target, op0, unsignedp);
5322 /* If we are not to produce a result, we have no target. Otherwise,
5323 if a target was specified use it; it will not be used as an
5324 intermediate target unless it is safe. If no target, use a temporary. */
5329 else if (original_target
5330 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5331 && GET_MODE (original_target) == mode)
5332 temp = original_target;
5333 else if (mode == BLKmode)
5335 if (TYPE_SIZE (type) == 0
5336 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5339 temp = assign_stack_temp (BLKmode,
5340 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5341 + BITS_PER_UNIT - 1)
5342 / BITS_PER_UNIT, 0);
5343 MEM_IN_STRUCT_P (temp)
5344 = (TREE_CODE (type) == RECORD_TYPE
5345 || TREE_CODE (type) == UNION_TYPE
5346 || TREE_CODE (type) == QUAL_UNION_TYPE
5347 || TREE_CODE (type) == ARRAY_TYPE);
5350 temp = gen_reg_rtx (mode);
5352 /* Check for X ? A + B : A. If we have this, we can copy
5353 A to the output and conditionally add B. Similarly for unary
5354 operations. Don't do this if X has side-effects because
5355 those side effects might affect A or B and the "?" operation is
5356 a sequence point in ANSI. (We test for side effects later.) */
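/* Illustrative sketch of the transformation looked for here:

       r = x ? a + b : a;     becomes roughly     r = a; if (x) r += b;

   i.e. copy the shared operand A to the result unconditionally and perform
   the binary (or unary) operation only on the taken branch, provided X has
   no side effects.  */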
5358 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5359 && operand_equal_p (TREE_OPERAND (exp, 2),
5360 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5361 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5362 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5363 && operand_equal_p (TREE_OPERAND (exp, 1),
5364 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5365 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5366 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5367 && operand_equal_p (TREE_OPERAND (exp, 2),
5368 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5369 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5370 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5371 && operand_equal_p (TREE_OPERAND (exp, 1),
5372 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5373 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5375 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5376 operation, do this as A + (X != 0). Similarly for other simple
5377 binary operators. */
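/* Hedged example of the store-flag form used when the condition can be
   computed without a branch:

       r = x ? a + 1 : a;     ==>     r = a + (x != 0);

   where (x != 0) is produced by do_store_flag as a 0/1 value, so no jump
   is emitted at all.  */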
5378 if (temp && singleton && binary_op
5379 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5380 && (TREE_CODE (binary_op) == PLUS_EXPR
5381 || TREE_CODE (binary_op) == MINUS_EXPR
5382 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5383 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5384 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5385 && integer_onep (TREE_OPERAND (binary_op, 1))
5386 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5389 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5390 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5391 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5392 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5395 /* If we had X ? A : A + 1, do this as A + (X == 0).
5397 We have to invert the truth value here and then put it
5398 back later if do_store_flag fails. We cannot simply copy
5399 TREE_OPERAND (exp, 0) to another variable and modify that
5400 because invert_truthvalue can modify the tree pointed to by its argument. */
5402 if (singleton == TREE_OPERAND (exp, 1))
5403 TREE_OPERAND (exp, 0)
5404 = invert_truthvalue (TREE_OPERAND (exp, 0));
5406 result = do_store_flag (TREE_OPERAND (exp, 0),
5407 (safe_from_p (temp, singleton)
5409 mode, BRANCH_COST <= 1);
5413 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5414 return expand_binop (mode, boptab, op1, result, temp,
5415 unsignedp, OPTAB_LIB_WIDEN);
5417 else if (singleton == TREE_OPERAND (exp, 1))
5418 TREE_OPERAND (exp, 0)
5419 = invert_truthvalue (TREE_OPERAND (exp, 0));
5423 op0 = gen_label_rtx ();
5425 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5429 /* If the target conflicts with the other operand of the
5430 binary op, we can't use it. Also, we can't use the target
5431 if it is a hard register, because evaluating the condition
5432 might clobber it. */
5434 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5435 || (GET_CODE (temp) == REG
5436 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5437 temp = gen_reg_rtx (mode);
5438 store_expr (singleton, temp, 0);
5441 expand_expr (singleton,
5442 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5443 if (cleanups_this_call)
5445 sorry ("aggregate value in COND_EXPR");
5446 cleanups_this_call = 0;
5448 if (singleton == TREE_OPERAND (exp, 1))
5449 jumpif (TREE_OPERAND (exp, 0), op0);
5451 jumpifnot (TREE_OPERAND (exp, 0), op0);
5453 if (binary_op && temp == 0)
5454 /* Just touch the other operand. */
5455 expand_expr (TREE_OPERAND (binary_op, 1),
5456 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5458 store_expr (build (TREE_CODE (binary_op), type,
5459 make_tree (type, temp),
5460 TREE_OPERAND (binary_op, 1)),
5463 store_expr (build1 (TREE_CODE (unary_op), type,
5464 make_tree (type, temp)),
5469 /* This is now done in jump.c and is better done there because it
5470 produces shorter register lifetimes. */
5472 /* Check for both possibilities, either constants or variables
5473 in registers (but not the same as the target!). If so, we can
5474 save branches by assigning one, branching, and assigning the other. */
5476 else if (temp && GET_MODE (temp) != BLKmode
5477 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5478 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5479 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5480 && DECL_RTL (TREE_OPERAND (exp, 1))
5481 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5482 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5483 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5484 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5485 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5486 && DECL_RTL (TREE_OPERAND (exp, 2))
5487 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5488 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5490 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5491 temp = gen_reg_rtx (mode);
5492 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5493 jumpifnot (TREE_OPERAND (exp, 0), op0);
5494 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5498 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5499 comparison operator. If we have one of these cases, set the
5500 output to A, branch on A (cse will merge these two references),
5501 then set the output to FOO. */
5503 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5504 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5505 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5506 TREE_OPERAND (exp, 1), 0)
5507 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5508 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5510 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5511 temp = gen_reg_rtx (mode);
5512 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5513 jumpif (TREE_OPERAND (exp, 0), op0);
5514 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5518 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5519 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5520 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5521 TREE_OPERAND (exp, 2), 0)
5522 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5523 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5525 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5526 temp = gen_reg_rtx (mode);
5527 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5528 jumpifnot (TREE_OPERAND (exp, 0), op0);
5529 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5534 op1 = gen_label_rtx ();
5535 jumpifnot (TREE_OPERAND (exp, 0), op0);
5537 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5539 expand_expr (TREE_OPERAND (exp, 1),
5540 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5541 if (cleanups_this_call)
5543 sorry ("aggregate value in COND_EXPR");
5544 cleanups_this_call = 0;
5548 emit_jump_insn (gen_jump (op1));
5552 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5554 expand_expr (TREE_OPERAND (exp, 2),
5555 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5558 if (cleanups_this_call)
5560 sorry ("aggregate value in COND_EXPR");
5561 cleanups_this_call = 0;
5567 cleanups_this_call = old_cleanups;
5573 /* Something needs to be initialized, but we didn't know
5574 where that thing was when building the tree. For example,
5575 it could be the return value of a function, or a parameter
5576 to a function which is laid down on the stack, or a temporary
5577 variable which must be passed by reference.
5579 We guarantee that the expression will either be constructed
5580 or copied into our original target. */
5582 tree slot = TREE_OPERAND (exp, 0);
5585 if (TREE_CODE (slot) != VAR_DECL)
5590 if (DECL_RTL (slot) != 0)
5592 target = DECL_RTL (slot);
5593 /* If we have already expanded the slot, don't do it again. */
5595 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5600 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5601 /* All temp slots at this level must not conflict. */
5602 preserve_temp_slots (target);
5603 DECL_RTL (slot) = target;
5606 /* We set IGNORE when we know that we're already
5607 doing this for a cleanup. */
5610 /* Since SLOT is not known to the called function
5611 to belong to its stack frame, we must build an explicit
5612 cleanup. This case occurs when we must build up a reference
5613 to pass the reference as an argument. In this case,
5614 it is very likely that such a reference need not be built here. */
5617 if (TREE_OPERAND (exp, 2) == 0)
5618 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5619 if (TREE_OPERAND (exp, 2))
5620 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5621 cleanups_this_call);
5626 /* This case does occur, when expanding a parameter which
5627 needs to be constructed on the stack. The target
5628 is the actual stack address that we want to initialize.
5629 The function we call will perform the cleanup in this case. */
5631 /* If we have already assigned it space, use that space,
5632 not the target that we were passed in, as our target
5633 parameter is only a hint. */
5634 if (DECL_RTL (slot) != 0)
5636 target = DECL_RTL (slot);
5637 /* If we have already expanded the slot, don't do it again. */
5639 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5643 DECL_RTL (slot) = target;
5646 exp1 = TREE_OPERAND (exp, 1);
5647 /* Mark it as expanded. */
5648 TREE_OPERAND (exp, 1) = NULL_TREE;
5650 return expand_expr (exp1, target, tmode, modifier);
5655 tree lhs = TREE_OPERAND (exp, 0);
5656 tree rhs = TREE_OPERAND (exp, 1);
5657 tree noncopied_parts = 0;
5658 tree lhs_type = TREE_TYPE (lhs);
5660 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5661 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5662 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5663 TYPE_NONCOPIED_PARTS (lhs_type));
5664 while (noncopied_parts != 0)
5666 expand_assignment (TREE_VALUE (noncopied_parts),
5667 TREE_PURPOSE (noncopied_parts), 0, 0);
5668 noncopied_parts = TREE_CHAIN (noncopied_parts);
5675 /* If lhs is complex, expand calls in rhs before computing it.
5676 That's so we don't compute a pointer and save it over a call.
5677 If lhs is simple, compute it first so we can give it as a
5678 target if the rhs is just a call. This avoids an extra temp and copy
5679 and that prevents a partial-subsumption which makes bad code.
5680 Actually we could treat component_ref's of vars like vars. */
5682 tree lhs = TREE_OPERAND (exp, 0);
5683 tree rhs = TREE_OPERAND (exp, 1);
5684 tree noncopied_parts = 0;
5685 tree lhs_type = TREE_TYPE (lhs);
5689 if (TREE_CODE (lhs) != VAR_DECL
5690 && TREE_CODE (lhs) != RESULT_DECL
5691 && TREE_CODE (lhs) != PARM_DECL)
5692 preexpand_calls (exp);
5694 /* Check for |= or &= of a bitfield of size one into another bitfield
5695 of size 1. In this case, (unless we need the result of the
5696 assignment) we can do this more efficiently with a
5697 test followed by an assignment, if necessary.
5699 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5700 things change so we do, this code should be enhanced to handle it. */
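/* Illustrative example (not from this file) of the shortcut taken below:

       struct s { unsigned a : 1, b : 1; } x, y;
       ...
       x.a |= y.b;     is emitted as     if (y.b) x.a = 1;
       x.a &= y.b;     is emitted as     if (!y.b) x.a = 0;

   when the result of the assignment itself is not needed, avoiding a
   read-modify-write of the destination bitfield.  */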
5703 && TREE_CODE (lhs) == COMPONENT_REF
5704 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5705 || TREE_CODE (rhs) == BIT_AND_EXPR)
5706 && TREE_OPERAND (rhs, 0) == lhs
5707 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5708 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5709 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5711 rtx label = gen_label_rtx ();
5713 do_jump (TREE_OPERAND (rhs, 1),
5714 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5715 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5716 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5717 (TREE_CODE (rhs) == BIT_IOR_EXPR
5719 : integer_zero_node)),
5721 do_pending_stack_adjust ();
5726 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5727 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5728 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5729 TYPE_NONCOPIED_PARTS (lhs_type));
5731 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5732 while (noncopied_parts != 0)
5734 expand_assignment (TREE_PURPOSE (noncopied_parts),
5735 TREE_VALUE (noncopied_parts), 0, 0);
5736 noncopied_parts = TREE_CHAIN (noncopied_parts);
5741 case PREINCREMENT_EXPR:
5742 case PREDECREMENT_EXPR:
5743 return expand_increment (exp, 0);
5745 case POSTINCREMENT_EXPR:
5746 case POSTDECREMENT_EXPR:
5747 /* Faster to treat as pre-increment if result is not used. */
5748 return expand_increment (exp, ! ignore);
5751 /* If nonzero, TEMP will be set to the address of something that might
5752 be a MEM corresponding to a stack slot. */
5755 /* Are we taking the address of a nested function? */
5756 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5757 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5759 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5760 op0 = force_operand (op0, target);
5762 /* If we are taking the address of something erroneous, just use zero. */
5764 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
5768 /* We make sure to pass const0_rtx down if we came in with
5769 ignore set, to avoid doing the cleanups twice for something. */
5770 op0 = expand_expr (TREE_OPERAND (exp, 0),
5771 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5772 (modifier == EXPAND_INITIALIZER
5773 ? modifier : EXPAND_CONST_ADDRESS));
5775 /* We would like the object in memory. If it is a constant,
5776 we can have it be statically allocated into memory. For
5777 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
5778 memory and store the value into it. */
5780 if (CONSTANT_P (op0))
5781 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5783 else if (GET_CODE (op0) == MEM)
5784 temp = XEXP (op0, 0);
5786 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5787 || GET_CODE (op0) == CONCAT)
5789 /* If this object is in a register, it is not in memory, so copy it to a stack temporary. */
5791 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5792 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5794 = assign_stack_temp (inner_mode,
5795 int_size_in_bytes (inner_type), 1);
5797 emit_move_insn (memloc, op0);
5801 if (GET_CODE (op0) != MEM)
5804 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5805 return XEXP (op0, 0);
5807 op0 = force_operand (XEXP (op0, 0), target);
5810 if (flag_force_addr && GET_CODE (op0) != REG)
5811 op0 = force_reg (Pmode, op0);
5813 if (GET_CODE (op0) == REG)
5814 mark_reg_pointer (op0);
5816 /* If we might have had a temp slot, add an equivalent address for it. */
5819 update_temp_slot_address (temp, op0);
5823 case ENTRY_VALUE_EXPR:
5826 /* COMPLEX type for Extended Pascal & Fortran */
5829 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5832 /* Get the rtx code of the operands. */
5833 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5834 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5837 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5841 /* Move the real (op0) and imaginary (op1) parts to their location. */
5842 emit_move_insn (gen_realpart (mode, target), op0);
5843 emit_move_insn (gen_imagpart (mode, target), op1);
5845 insns = get_insns ();
5848 /* Complex construction should appear as a single unit. */
5849 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5850 each with a separate pseudo as destination.
5851 It's not correct for flow to treat them as a unit. */
5852 if (GET_CODE (target) != CONCAT)
5853 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
5861 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5862 return gen_realpart (mode, op0);
5865 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5866 return gen_imagpart (mode, op0);
5873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5876 target = gen_reg_rtx (mode);
5880 /* Store the realpart and the negated imagpart to target. */
5881 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5883 imag_t = gen_imagpart (mode, target);
5884 temp = expand_unop (mode, neg_optab,
5885 gen_imagpart (mode, op0), imag_t, 0);
5887 emit_move_insn (imag_t, temp);
5889 insns = get_insns ();
5892 /* Conjugate should appear as a single unit
5893 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5894 each with a separate pseudo as destination.
5895 It's not correct for flow to treat them as a unit. */
5896 if (GET_CODE (target) != CONCAT)
5897 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
5905 op0 = CONST0_RTX (tmode);
5911 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5914 /* Here to do an ordinary binary operator, generating an instruction
5915 from the optab already placed in `this_optab'. */
5917 preexpand_calls (exp);
5918 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5923 temp = expand_binop (mode, this_optab, op0, op1, target,
5924 unsignedp, OPTAB_LIB_WIDEN);
5931 /* Emit bytecode to evaluate the given expression EXP to the stack. */
5933 bc_expand_expr (exp)
5936 enum tree_code code;
5939 struct binary_operator *binoptab;
5940 struct unary_operator *unoptab;
5941 struct increment_operator *incroptab;
5942 struct bc_label *lab, *lab1;
5943 enum bytecode_opcode opcode;
5946 code = TREE_CODE (exp);
5952 if (DECL_RTL (exp) == 0)
5954 error_with_decl (exp, "prior parameter's size depends on `%s'");
5958 bc_load_parmaddr (DECL_RTL (exp));
5959 bc_load_memory (TREE_TYPE (exp), exp);
5965 if (DECL_RTL (exp) == 0)
5969 if (BYTECODE_LABEL (DECL_RTL (exp)))
5970 bc_load_externaddr (DECL_RTL (exp));
5972 bc_load_localaddr (DECL_RTL (exp));
5974 if (TREE_PUBLIC (exp))
5975 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5976 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5978 bc_load_localaddr (DECL_RTL (exp));
5980 bc_load_memory (TREE_TYPE (exp), exp);
5985 #ifdef DEBUG_PRINT_CODE
5986 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5988 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
5990 : TYPE_MODE (TREE_TYPE (exp)))],
5991 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5997 #ifdef DEBUG_PRINT_CODE
5998 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6000 /* FIX THIS: find a better way to pass real_cst's. -bson */
6001 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6002 (double) TREE_REAL_CST (exp));
6011 /* We build a call description vector describing the type of
6012 the return value and of the arguments; this call vector,
6013 together with a pointer to a location for the return value
6014 and the base of the argument list, is passed to the low
6015 level machine dependent call subroutine, which is responsible
6016 for putting the arguments wherever real functions expect
6017 them, as well as getting the return value back. */
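/* Hedged sketch of the call descriptor built below (layout inferred from
   the consing order; treat the exact shape as an assumption):

       { nargs,
         ret_type_code,  ret_size,
         arg1_type_code, arg1_size,
         ...                        }

   It is emitted as a static integer array; its address, the return-value
   slot, and the pushed arguments are what the low-level call routine
   receives.  */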
6019 tree calldesc = 0, arg;
6023 /* Push the evaluated args on the evaluation stack in reverse
6024 order. Also make an entry for each arg in the calldesc
6025 vector while we're at it. */
6027 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6029 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6032 bc_expand_expr (TREE_VALUE (arg));
6034 calldesc = tree_cons ((tree) 0,
6035 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6037 calldesc = tree_cons ((tree) 0,
6038 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6042 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6044 /* Allocate a location for the return value and push its
6045 address on the evaluation stack. Also make an entry
6046 at the front of the calldesc for the return value type. */
6048 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6049 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6050 bc_load_localaddr (retval);
6052 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6053 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6055 /* Prepend the argument count. */
6056 calldesc = tree_cons ((tree) 0,
6057 build_int_2 (nargs, 0),
6060 /* Push the address of the call description vector on the stack. */
6061 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6062 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6063 build_index_type (build_int_2 (nargs * 2, 0)));
6064 r = output_constant_def (calldesc);
6065 bc_load_externaddr (r);
6067 /* Push the address of the function to be called. */
6068 bc_expand_expr (TREE_OPERAND (exp, 0));
6070 /* Call the function, popping its address and the calldesc vector
6071 address off the evaluation stack in the process. */
6072 bc_emit_instruction (call);
6074 /* Pop the arguments off the stack. */
6075 bc_adjust_stack (nargs);
6077 /* Load the return value onto the stack. */
6078 bc_load_localaddr (retval);
6079 bc_load_memory (type, TREE_OPERAND (exp, 0));
6085 if (!SAVE_EXPR_RTL (exp))
6087 /* First time around: copy to local variable */
6088 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6089 TYPE_ALIGN (TREE_TYPE(exp)));
6090 bc_expand_expr (TREE_OPERAND (exp, 0));
6091 bc_emit_instruction (duplicate);
6093 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6094 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6098 /* Consecutive reference: use saved copy */
6099 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6100 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6105 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6106 how are they handled instead? */
6109 TREE_USED (exp) = 1;
6110 bc_expand_expr (STMT_BODY (exp));
6117 bc_expand_expr (TREE_OPERAND (exp, 0));
6118 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6123 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6128 bc_expand_address (TREE_OPERAND (exp, 0));
6133 bc_expand_expr (TREE_OPERAND (exp, 0));
6134 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6139 bc_expand_expr (bc_canonicalize_array_ref (exp));
6144 bc_expand_component_address (exp);
6146 /* If we have a bitfield, generate a proper load */
6147 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6152 bc_expand_expr (TREE_OPERAND (exp, 0));
6153 bc_emit_instruction (drop);
6154 bc_expand_expr (TREE_OPERAND (exp, 1));
6159 bc_expand_expr (TREE_OPERAND (exp, 0));
6160 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6161 lab = bc_get_bytecode_label ();
6162 bc_emit_bytecode (xjumpifnot);
6163 bc_emit_bytecode_labelref (lab);
6165 #ifdef DEBUG_PRINT_CODE
6166 fputc ('\n', stderr);
6168 bc_expand_expr (TREE_OPERAND (exp, 1));
6169 lab1 = bc_get_bytecode_label ();
6170 bc_emit_bytecode (jump);
6171 bc_emit_bytecode_labelref (lab1);
6173 #ifdef DEBUG_PRINT_CODE
6174 fputc ('\n', stderr);
6177 bc_emit_bytecode_labeldef (lab);
6178 bc_expand_expr (TREE_OPERAND (exp, 2));
6179 bc_emit_bytecode_labeldef (lab1);
6182 case TRUTH_ANDIF_EXPR:
6184 opcode = xjumpifnot;
6187 case TRUTH_ORIF_EXPR:
6194 binoptab = optab_plus_expr;
6199 binoptab = optab_minus_expr;
6204 binoptab = optab_mult_expr;
6207 case TRUNC_DIV_EXPR:
6208 case FLOOR_DIV_EXPR:
6210 case ROUND_DIV_EXPR:
6211 case EXACT_DIV_EXPR:
6213 binoptab = optab_trunc_div_expr;
6216 case TRUNC_MOD_EXPR:
6217 case FLOOR_MOD_EXPR:
6219 case ROUND_MOD_EXPR:
6221 binoptab = optab_trunc_mod_expr;
6224 case FIX_ROUND_EXPR:
6225 case FIX_FLOOR_EXPR:
6227 abort (); /* Not used for C. */
6229 case FIX_TRUNC_EXPR:
6236 abort (); /* FIXME */
6240 binoptab = optab_rdiv_expr;
6245 binoptab = optab_bit_and_expr;
6250 binoptab = optab_bit_ior_expr;
6255 binoptab = optab_bit_xor_expr;
6260 binoptab = optab_lshift_expr;
6265 binoptab = optab_rshift_expr;
6268 case TRUTH_AND_EXPR:
6270 binoptab = optab_truth_and_expr;
6275 binoptab = optab_truth_or_expr;
6280 binoptab = optab_lt_expr;
6285 binoptab = optab_le_expr;
6290 binoptab = optab_ge_expr;
6295 binoptab = optab_gt_expr;
6300 binoptab = optab_eq_expr;
6305 binoptab = optab_ne_expr;
6310 unoptab = optab_negate_expr;
6315 unoptab = optab_bit_not_expr;
6318 case TRUTH_NOT_EXPR:
6320 unoptab = optab_truth_not_expr;
6323 case PREDECREMENT_EXPR:
6325 incroptab = optab_predecrement_expr;
6328 case PREINCREMENT_EXPR:
6330 incroptab = optab_preincrement_expr;
6333 case POSTDECREMENT_EXPR:
6335 incroptab = optab_postdecrement_expr;
6338 case POSTINCREMENT_EXPR:
6340 incroptab = optab_postincrement_expr;
6345 bc_expand_constructor (exp);
6355 tree vars = TREE_OPERAND (exp, 0);
6356 int vars_need_expansion = 0;
6358 /* Need to open a binding contour here because
6359 if there are any cleanups they must be contained here. */
6360 expand_start_bindings (0);
6362 /* Mark the corresponding BLOCK for output. */
6363 if (TREE_OPERAND (exp, 2) != 0)
6364 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6366 /* If VARS have not yet been expanded, expand them now. */
6369 if (DECL_RTL (vars) == 0)
6371 vars_need_expansion = 1;
6372 bc_expand_decl (vars, 0);
6374 bc_expand_decl_init (vars);
6375 vars = TREE_CHAIN (vars);
6378 bc_expand_expr (TREE_OPERAND (exp, 1));
6380 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6390 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6391 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6397 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6403 bc_expand_expr (TREE_OPERAND (exp, 0));
6404 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6405 lab = bc_get_bytecode_label ();
6407 bc_emit_instruction (duplicate);
6408 bc_emit_bytecode (opcode);
6409 bc_emit_bytecode_labelref (lab);
6411 #ifdef DEBUG_PRINT_CODE
6412 fputc ('\n', stderr);
6415 bc_emit_instruction (drop);
6417 bc_expand_expr (TREE_OPERAND (exp, 1));
6418 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6419 bc_emit_bytecode_labeldef (lab);
6425 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6427 /* Push the quantum. */
6428 bc_expand_expr (TREE_OPERAND (exp, 1));
6430 /* Convert it to the lvalue's type. */
6431 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6433 /* Push the address of the lvalue */
6434 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6436 /* Perform actual increment */
6437 bc_expand_increment (incroptab, type);
6441 /* Return the alignment in bits of EXP, a pointer valued expression.
6442 But don't return more than MAX_ALIGN no matter what.
6443 The alignment returned is, by default, the alignment of the thing that
6444 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6446 Otherwise, look at the expression to see if we can do better, i.e., if the
6447 expression is actually pointing at an object whose alignment is tighter. */
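/* As an illustrative example (hypothetical declarations): given
   `double d;', the expression `(char *) &d' has type `char *', whose
   target type alignment is only BITS_PER_UNIT; but by looking through
   the conversion to the ADDR_EXPR below we can report DECL_ALIGN (d),
   capped at MAX_ALIGN, instead.  */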
6450 get_pointer_alignment (exp, max_align)
6454 unsigned align, inner;
6456 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6459 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6460 align = MIN (align, max_align);
6464 switch (TREE_CODE (exp))
6468 case NON_LVALUE_EXPR:
6469 exp = TREE_OPERAND (exp, 0);
6470 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6472 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6473 align = MIN (inner, max_align);
6477 /* If sum of pointer + int, restrict our maximum alignment to that
6478 imposed by the integer. If not, we can't do any better than ALIGN. */
6480 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6483 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6488 exp = TREE_OPERAND (exp, 0);
6492 /* See what we are pointing at and look at its alignment. */
6493 exp = TREE_OPERAND (exp, 0);
6494 if (TREE_CODE (exp) == FUNCTION_DECL)
6495 align = FUNCTION_BOUNDARY;
6496 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6497 align = DECL_ALIGN (exp);
6498 #ifdef CONSTANT_ALIGNMENT
6499 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6500 align = CONSTANT_ALIGNMENT (exp, align);
6502 return MIN (align, max_align);
6510 /* Return the tree node and offset if a given argument corresponds to
6511 a string constant. */
6514 string_constant (arg, ptr_offset)
6520 if (TREE_CODE (arg) == ADDR_EXPR
6521 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6523 *ptr_offset = integer_zero_node;
6524 return TREE_OPERAND (arg, 0);
6526 else if (TREE_CODE (arg) == PLUS_EXPR)
6528 tree arg0 = TREE_OPERAND (arg, 0);
6529 tree arg1 = TREE_OPERAND (arg, 1);
6534 if (TREE_CODE (arg0) == ADDR_EXPR
6535 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6538 return TREE_OPERAND (arg0, 0);
6540 else if (TREE_CODE (arg1) == ADDR_EXPR
6541 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6544 return TREE_OPERAND (arg1, 0);
6551 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6552 way, because the string could contain a zero byte in the middle.
6553 TREE_STRING_LENGTH is the size of the character array, not the string.
6555 Unfortunately, string_constant can't access the values of const char
6556 arrays with initializers, so neither can we do so here. */
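/* For example (illustrative): for the constant "foo\0bar",
   TREE_STRING_LENGTH is 8 (the size of the character array, counting
   both the embedded and the trailing null), whereas the C string length
   that a call like `strlen ("foo\0bar")' must yield is 3.  */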
6566 src = string_constant (src, &offset_node);
6569 max = TREE_STRING_LENGTH (src);
6570 ptr = TREE_STRING_POINTER (src);
6571 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6573 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6574 compute the offset to the following null if we don't know where to
6575 start searching for it. */
6577 for (i = 0; i < max; i++)
6580 /* We don't know the starting offset, but we do know that the string
6581 has no internal zero bytes. We can assume that the offset falls
6582 within the bounds of the string; otherwise, the programmer deserves
6583 what he gets. Subtract the offset from the length of the string, and return that. */
6585 /* This would perhaps not be valid if we were dealing with named
6586 arrays in addition to literal string constants. */
6587 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6590 /* We have a known offset into the string. Start searching there for
6591 a null character. */
6592 if (offset_node == 0)
6596 /* Did we get a long long offset? If so, punt. */
6597 if (TREE_INT_CST_HIGH (offset_node) != 0)
6599 offset = TREE_INT_CST_LOW (offset_node);
6601 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
6603 if (offset < 0 || offset > max)
6605 warning ("offset outside bounds of constant string");
6608 /* Use strlen to search for the first zero byte. Since any strings
6609 constructed with build_string will have nulls appended, we win even
6610 if we get handed something like (char[4])"abcd".
6612 Since OFFSET is our starting index into the string, no further
6613 calculation is needed. */
6614 return size_int (strlen (ptr + offset));
6617 /* Expand an expression EXP that calls a built-in function,
6618 with result going to TARGET if that's convenient
6619 (and in mode MODE if that's convenient).
6620 SUBTARGET may be used as the target for computing one of EXP's operands.
6621 IGNORE is nonzero if the value is to be ignored. */
6623 #define CALLED_AS_BUILT_IN(NODE) \
6624 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
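/* For example, under this macro a call written `__builtin_ffs (x)' is
   still expanded inline below even when not optimizing, whereas one
   written plainly as `ffs (x)' falls through to an ordinary library
   call in that case.  (Illustrative; `x' is hypothetical.)  */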
6627 expand_builtin (exp, target, subtarget, mode, ignore)
6631 enum machine_mode mode;
6634 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6635 tree arglist = TREE_OPERAND (exp, 1);
6638 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6639 optab builtin_optab;
6641 switch (DECL_FUNCTION_CODE (fndecl))
6646 /* build_function_call changes these into ABS_EXPR. */
6651 case BUILT_IN_FSQRT:
6652 /* If not optimizing, call the library function. */
6657 /* Arg could be wrong type if user redeclared this fcn wrong. */
6658 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6661 /* Stabilize and compute the argument. */
6662 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6663 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6665 exp = copy_node (exp);
6666 arglist = copy_node (arglist);
6667 TREE_OPERAND (exp, 1) = arglist;
6668 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6670 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6672 /* Make a suitable register to place result in. */
6673 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6678 switch (DECL_FUNCTION_CODE (fndecl))
6681 builtin_optab = sin_optab; break;
6683 builtin_optab = cos_optab; break;
6684 case BUILT_IN_FSQRT:
6685 builtin_optab = sqrt_optab; break;
6690 /* Compute into TARGET.
6691 Set TARGET to wherever the result comes back. */
6692 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6693 builtin_optab, op0, target, 0);
6695 /* If we were unable to expand via the builtin, stop the
6696 sequence (without outputting the insns) and break, causing
6697 a call to the library function. */
6704 /* Check the results by default. But if flag_fast_math is turned on,
6705 then assume sqrt will always be called with valid arguments. */
6707 if (! flag_fast_math)
6709 /* Don't define the builtin FP instructions
6710 if your machine is not IEEE. */
6711 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6714 lab1 = gen_label_rtx ();
6716 /* Test the result; if it is NaN, set errno=EDOM because
6717 the argument was not in the domain. */
6718 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6719 emit_jump_insn (gen_beq (lab1));
6723 #ifdef GEN_ERRNO_RTX
6724 rtx errno_rtx = GEN_ERRNO_RTX;
6727 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6730 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6733 /* We can't set errno=EDOM directly; let the library call do it.
6734 Pop the arguments right away in case the call gets deleted. */
6736 expand_call (exp, target, 0);
6743 /* Output the entire sequence. */
6744 insns = get_insns ();
6750 /* __builtin_apply_args returns block of memory allocated on
6751 the stack into which is stored the arg pointer, structure
6752 value address, static chain, and all the registers that might
6753 possibly be used in performing a function call. The code is
6754 moved to the start of the function so the incoming values are saved. */
6756 case BUILT_IN_APPLY_ARGS:
6757 /* Don't do __builtin_apply_args more than once in a function.
6758 Save the result of the first call and reuse it. */
6759 if (apply_args_value != 0)
6760 return apply_args_value;
6762 /* When this function is called, it means that registers must be
6763 saved on entry to this function. So we migrate the
6764 call to the first insn of this function. */
6769 temp = expand_builtin_apply_args ();
6773 apply_args_value = temp;
6775 /* Put the sequence after the NOTE that starts the function.
6776 If this is inside a SEQUENCE, make the outer-level insn
6777 chain current, so the code is placed at the start of the function. */
6779 push_topmost_sequence ();
6780 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6781 pop_topmost_sequence ();
6785 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6786 FUNCTION with a copy of the parameters described by
6787 ARGUMENTS, and ARGSIZE. It returns a block of memory
6788 allocated on the stack into which is stored all the registers
6789 that might possibly be used for returning the result of a
6790 function. ARGUMENTS is the value returned by
6791 __builtin_apply_args. ARGSIZE is the number of bytes of
6792 arguments that must be copied. ??? How should this value be
6793 computed? We'll also need a safe worst case value for varargs functions. */
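/* As an illustrative sketch, a forwarding function might use these
   builtins roughly as

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (other_fn, args, 64);
       __builtin_return (result);

   where `other_fn' is hypothetical and 64 stands for the caller's
   chosen upper bound on the bytes of stack arguments (see the ???
   remark above about how that value ought to be computed).  */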
6795 case BUILT_IN_APPLY:
6797 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6798 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6799 || TREE_CHAIN (arglist) == 0
6800 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6801 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6802 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6810 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6811 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6813 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6816 /* __builtin_return (RESULT) causes the function to return the
6817 value described by RESULT. RESULT is the address of the block of
6818 memory returned by __builtin_apply. */
6819 case BUILT_IN_RETURN:
6821 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6822 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6823 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6824 NULL_RTX, VOIDmode, 0));
6827 case BUILT_IN_SAVEREGS:
6828 /* Don't do __builtin_saveregs more than once in a function.
6829 Save the result of the first call and reuse it. */
6830 if (saveregs_value != 0)
6831 return saveregs_value;
6833 /* When this function is called, it means that registers must be
6834 saved on entry to this function. So we migrate the
6835 call to the first insn of this function. */
6839 /* Now really call the function. `expand_call' does not call
6840 expand_builtin, so there is no danger of infinite recursion here. */
6843 #ifdef EXPAND_BUILTIN_SAVEREGS
6844 /* Do whatever the machine needs done in this case. */
6845 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6847 /* The register where the function returns its value
6848 is likely to have something else in it, such as an argument.
6849 So preserve that register around the call. */
6851 if (value_mode != VOIDmode)
6853 rtx valreg = hard_libcall_value (value_mode);
6854 rtx saved_valreg = gen_reg_rtx (value_mode);
6856 emit_move_insn (saved_valreg, valreg);
6857 temp = expand_call (exp, target, ignore);
6858 emit_move_insn (valreg, saved_valreg);
6861 /* Generate the call, putting the value in a pseudo. */
6862 temp = expand_call (exp, target, ignore);
6868 saveregs_value = temp;
6870 /* Put the sequence after the NOTE that starts the function.
6871 If this is inside a SEQUENCE, make the outer-level insn
6872 chain current, so the code is placed at the start of the function. */
6874 push_topmost_sequence ();
6875 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6876 pop_topmost_sequence ();
6880 /* __builtin_args_info (N) returns word N of the arg space info
6881 for the current function. The number and meanings of words
6882 are controlled by the definition of CUMULATIVE_ARGS. */
6883 case BUILT_IN_ARGS_INFO:
6885 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6887 int *word_ptr = (int *) &current_function_args_info;
6888 tree type, elts, result;
6890 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6891 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6892 __FILE__, __LINE__);
6896 tree arg = TREE_VALUE (arglist);
6897 if (TREE_CODE (arg) != INTEGER_CST)
6898 error ("argument of `__builtin_args_info' must be constant");
6901 int wordnum = TREE_INT_CST_LOW (arg);
6903 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6904 error ("argument of `__builtin_args_info' out of range");
6906 return GEN_INT (word_ptr[wordnum]);
6910 error ("missing argument in `__builtin_args_info'");
6915 for (i = 0; i < nwords; i++)
6916 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
6918 type = build_array_type (integer_type_node,
6919 build_index_type (build_int_2 (nwords, 0)));
6920 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6921 TREE_CONSTANT (result) = 1;
6922 TREE_STATIC (result) = 1;
6923 result = build (INDIRECT_REF, build_pointer_type (type), result);
6924 TREE_CONSTANT (result) = 1;
6925 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6929 /* Return the address of the first anonymous stack arg. */
6930 case BUILT_IN_NEXT_ARG:
6932 tree fntype = TREE_TYPE (current_function_decl);
6934 if (TYPE_ARG_TYPES (fntype) == 0
6935 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6937 || current_function_varargs)
6939 error ("`va_start' used in function with fixed args");
6945 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
6946 tree arg = TREE_VALUE (arglist);
6948 /* Strip off all nops for the sake of the comparison. This
6949 is not quite the same as STRIP_NOPS. It does more. */
6950 while (TREE_CODE (arg) == NOP_EXPR
6951 || TREE_CODE (arg) == CONVERT_EXPR
6952 || TREE_CODE (arg) == NON_LVALUE_EXPR)
6953 arg = TREE_OPERAND (arg, 0);
6954 if (arg != last_parm)
6955 warning ("second parameter of `va_start' not last named argument");
6958 /* Evidently an out of date version of <stdarg.h>; can't validate
6959 va_start's second argument, but can still work as intended. */
6960 warning ("`__builtin_next_arg' called without an argument");
6963 return expand_binop (Pmode, add_optab,
6964 current_function_internal_arg_pointer,
6965 current_function_arg_offset_rtx,
6966 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6968 case BUILT_IN_CLASSIFY_TYPE:
6971 tree type = TREE_TYPE (TREE_VALUE (arglist));
6972 enum tree_code code = TREE_CODE (type);
6973 if (code == VOID_TYPE)
6974 return GEN_INT (void_type_class);
6975 if (code == INTEGER_TYPE)
6976 return GEN_INT (integer_type_class);
6977 if (code == CHAR_TYPE)
6978 return GEN_INT (char_type_class);
6979 if (code == ENUMERAL_TYPE)
6980 return GEN_INT (enumeral_type_class);
6981 if (code == BOOLEAN_TYPE)
6982 return GEN_INT (boolean_type_class);
6983 if (code == POINTER_TYPE)
6984 return GEN_INT (pointer_type_class);
6985 if (code == REFERENCE_TYPE)
6986 return GEN_INT (reference_type_class);
6987 if (code == OFFSET_TYPE)
6988 return GEN_INT (offset_type_class);
6989 if (code == REAL_TYPE)
6990 return GEN_INT (real_type_class);
6991 if (code == COMPLEX_TYPE)
6992 return GEN_INT (complex_type_class);
6993 if (code == FUNCTION_TYPE)
6994 return GEN_INT (function_type_class);
6995 if (code == METHOD_TYPE)
6996 return GEN_INT (method_type_class);
6997 if (code == RECORD_TYPE)
6998 return GEN_INT (record_type_class);
6999 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7000 return GEN_INT (union_type_class);
7001 if (code == ARRAY_TYPE)
7003 if (TYPE_STRING_FLAG (type))
7004 return GEN_INT (string_type_class);
7006 return GEN_INT (array_type_class);
7008 if (code == SET_TYPE)
7009 return GEN_INT (set_type_class);
7010 if (code == FILE_TYPE)
7011 return GEN_INT (file_type_class);
7012 if (code == LANG_TYPE)
7013 return GEN_INT (lang_type_class);
7015 return GEN_INT (no_type_class);
7017 case BUILT_IN_CONSTANT_P:
7021 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7022 ? const1_rtx : const0_rtx);
7024 case BUILT_IN_FRAME_ADDRESS:
7025 /* The argument must be a nonnegative integer constant.
7026 It counts the number of frames to scan up the stack.
7027 The value is the address of that frame. */
7028 case BUILT_IN_RETURN_ADDRESS:
7029 /* The argument must be a nonnegative integer constant.
7030 It counts the number of frames to scan up the stack.
7031 The value is the return address saved in that frame. */
7033 /* Warning about missing arg was already issued. */
7035 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7037 error ("invalid arg to `__builtin_return_address'");
7040 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7042 error ("invalid arg to `__builtin_return_address'");
7047 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7048 rtx tem = frame_pointer_rtx;
7051 /* Some machines need special handling before we can access arbitrary
7052 frames. For example, on the sparc, we must first flush all
7053 register windows to the stack. */
7054 #ifdef SETUP_FRAME_ADDRESSES
7055 SETUP_FRAME_ADDRESSES ();
7058 /* On the sparc, the return address is not in the frame, it is
7059 in a register. There is no way to access it off of the current
7060 frame pointer, but it can be accessed off the previous frame
7061 pointer by reading the value from the register window save area. */
7063 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7064 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7068 /* Scan back COUNT frames to the specified frame. */
7069 for (i = 0; i < count; i++)
7071 /* Assume the dynamic chain pointer is in the word that
7072 the frame address points to, unless otherwise specified. */
7073 #ifdef DYNAMIC_CHAIN_ADDRESS
7074 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7076 tem = memory_address (Pmode, tem);
7077 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7080 /* For __builtin_frame_address, return what we've got. */
7081 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7084 /* For __builtin_return_address,
7085 get the return address from that frame. */
7086 #ifdef RETURN_ADDR_RTX
7087 return RETURN_ADDR_RTX (count, tem);
7089 tem = memory_address (Pmode,
7090 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7091 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7095 case BUILT_IN_ALLOCA:
7097 /* Arg could be non-integer if user redeclared this fcn wrong. */
7098 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7100 current_function_calls_alloca = 1;
7101 /* Compute the argument. */
7102 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7104 /* Allocate the desired space. */
7105 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7107 /* Record the new stack level for nonlocal gotos. */
7108 if (nonlocal_goto_handler_slot != 0)
7109 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7113 /* If not optimizing, call the library function. */
7114 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7118 /* Arg could be non-integer if user redeclared this fcn wrong. */
7119 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7122 /* Compute the argument. */
7123 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7124 /* Compute ffs, into TARGET if possible.
7125 Set TARGET to wherever the result comes back. */
7126 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7127 ffs_optab, op0, target, 1);
7132 case BUILT_IN_STRLEN:
7133 /* If not optimizing, call the library function. */
7134 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7138 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7139 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7143 tree src = TREE_VALUE (arglist);
7144 tree len = c_strlen (src);
7147 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7149 rtx result, src_rtx, char_rtx;
7150 enum machine_mode insn_mode = value_mode, char_mode;
7151 enum insn_code icode;
7153 /* If the length is known, just return it. */
7155 return expand_expr (len, target, mode, 0);
7157 /* If SRC is not a pointer type, don't do this operation inline. */
7161 /* Call a function if we can't compute strlen in the right mode. */
7163 while (insn_mode != VOIDmode)
7165 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7166 if (icode != CODE_FOR_nothing)
7169 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7171 if (insn_mode == VOIDmode)
7174 /* Make a place to write the result of the instruction. */
7177 && GET_CODE (result) == REG
7178 && GET_MODE (result) == insn_mode
7179 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7180 result = gen_reg_rtx (insn_mode);
7182 /* Make sure the operands are acceptable to the predicates. */
7184 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7185 result = gen_reg_rtx (insn_mode);
7187 src_rtx = memory_address (BLKmode,
7188 expand_expr (src, NULL_RTX, Pmode,
7190 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7191 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7193 char_rtx = const0_rtx;
7194 char_mode = insn_operand_mode[(int)icode][2];
7195 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7196 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7198 emit_insn (GEN_FCN (icode) (result,
7199 gen_rtx (MEM, BLKmode, src_rtx),
7200 char_rtx, GEN_INT (align)));
7202 /* Return the value in the proper mode for this function. */
7203 if (GET_MODE (result) == value_mode)
7205 else if (target != 0)
7207 convert_move (target, result, 0);
7211 return convert_to_mode (value_mode, result, 0);
7214 case BUILT_IN_STRCPY:
7215 /* If not optimizing, call the library function. */
7216 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7220 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7221 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7222 || TREE_CHAIN (arglist) == 0
7223 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7227 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7232 len = size_binop (PLUS_EXPR, len, integer_one_node);
7234 chainon (arglist, build_tree_list (NULL_TREE, len));
7238 case BUILT_IN_MEMCPY:
7239 /* If not optimizing, call the library function. */
7240 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7244 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7245 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7246 || TREE_CHAIN (arglist) == 0
7247 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7248 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7249 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7253 tree dest = TREE_VALUE (arglist);
7254 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7255 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7258 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7260 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7261 rtx dest_rtx, dest_mem, src_mem;
7263 /* If either SRC or DEST is not a pointer type, don't do
7264 this operation in-line. */
7265 if (src_align == 0 || dest_align == 0)
7267 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7268 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7272 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7273 dest_mem = gen_rtx (MEM, BLKmode,
7274 memory_address (BLKmode, dest_rtx));
7275 src_mem = gen_rtx (MEM, BLKmode,
7276 memory_address (BLKmode,
7277 expand_expr (src, NULL_RTX,
7281 /* Copy word part most expediently. */
7282 emit_block_move (dest_mem, src_mem,
7283 expand_expr (len, NULL_RTX, VOIDmode, 0),
7284 MIN (src_align, dest_align));
7288 /* These comparison functions need an instruction that returns an actual
7289 index. An ordinary compare that just sets the condition codes is not enough. */
7291 #ifdef HAVE_cmpstrsi
7292 case BUILT_IN_STRCMP:
7293 /* If not optimizing, call the library function. */
7294 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7298 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7299 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7300 || TREE_CHAIN (arglist) == 0
7301 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7303 else if (!HAVE_cmpstrsi)
7306 tree arg1 = TREE_VALUE (arglist);
7307 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7311 len = c_strlen (arg1);
7313 len = size_binop (PLUS_EXPR, integer_one_node, len);
7314 len2 = c_strlen (arg2);
7316 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7318 /* If we don't have a constant length for the first, use the length
7319 of the second, if we know it. We don't require a constant for
7320 this case; some cost analysis could be done if both are available
7321 but neither is constant. For now, assume they're equally cheap.
7323 If both strings have constant lengths, use the smaller. This
7324 could arise if optimization results in strcmp being called with
7325 two fixed strings, or if the code was machine-generated. We should
7326 add some code to the `memcmp' handler below to deal with such
7327 situations, someday. */
7328 if (!len || TREE_CODE (len) != INTEGER_CST)
7335 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7337 if (tree_int_cst_lt (len2, len))
7341 chainon (arglist, build_tree_list (NULL_TREE, len));
7345 case BUILT_IN_MEMCMP:
7346 /* If not optimizing, call the library function. */
7347 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7351 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7352 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7353 || TREE_CHAIN (arglist) == 0
7354 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7355 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7356 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7358 else if (!HAVE_cmpstrsi)
7361 tree arg1 = TREE_VALUE (arglist);
7362 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7363 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7367 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7369 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7370 enum machine_mode insn_mode
7371 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7373 /* If we don't have POINTER_TYPE, call the function. */
7374 if (arg1_align == 0 || arg2_align == 0)
7376 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7377 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7381 /* Make a place to write the result of the instruction. */
7384 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7385 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7386 result = gen_reg_rtx (insn_mode);
7388 emit_insn (gen_cmpstrsi (result,
7389 gen_rtx (MEM, BLKmode,
7390 expand_expr (arg1, NULL_RTX, Pmode,
7392 gen_rtx (MEM, BLKmode,
7393 expand_expr (arg2, NULL_RTX, Pmode,
7395 expand_expr (len, NULL_RTX, VOIDmode, 0),
7396 GEN_INT (MIN (arg1_align, arg2_align))));
7398 /* Return the value in the proper mode for this function. */
7399 mode = TYPE_MODE (TREE_TYPE (exp));
7400 if (GET_MODE (result) == mode)
7402 else if (target != 0)
7404 convert_move (target, result, 0);
7408 return convert_to_mode (mode, result, 0);
7411 case BUILT_IN_STRCMP:
7412 case BUILT_IN_MEMCMP:
7416 default: /* just do library call, if unknown builtin */
7417 error ("built-in function `%s' not currently supported",
7418 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7421 /* The switch statement above can drop through to cause the function
7422 to be called normally. */
7424 return expand_call (exp, target, ignore);
7427 /* Built-in functions to perform an untyped call and return. */
7429 /* For each register that may be used for calling a function, this
7430 gives a mode used to copy the register's value. VOIDmode indicates
7431 the register is not used for calling a function. If the machine
7432 has register windows, this gives only the outbound registers.
7433 INCOMING_REGNO gives the corresponding inbound register. */
7434 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7436 /* For each register that may be used for returning values, this gives
7437 a mode used to copy the register's value. VOIDmode indicates the
7438 register is not used for returning values. If the machine has
7439 register windows, this gives only the outbound registers.
7440 INCOMING_REGNO gives the corresponding inbound register. */
7441 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7443 /* For each register that may be used for calling a function, this
7444 gives the offset of that register into the block returned by
7445 __builtin_apply_args. 0 indicates that the register is not
7446 used for calling a function. */
7447 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7449 /* Return the offset of register REGNO into the block returned by
7450 __builtin_apply_args. This is not declared static, since it is
7451 needed in objc-act.c. */
7454 apply_args_register_offset (regno)
7459 /* Arguments are always put in outgoing registers (in the argument
7460 block) if such make sense. */
7461 #ifdef OUTGOING_REGNO
7462 regno = OUTGOING_REGNO(regno);
7464 return apply_args_reg_offset[regno];
7467 /* Return the size required for the block returned by __builtin_apply_args,
7468 and initialize apply_args_mode. */
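/* Schematically, the block whose size is computed here is laid out as

       [ incoming arg pointer                         ]  GET_MODE_SIZE (Pmode)
       [ structure value address, if struct_value_rtx ]  GET_MODE_SIZE (Pmode)
       [ one slot per argument register, each padded  ]
       [ up to its chosen mode's alignment            ]

   which is the same layout that expand_builtin_apply_args stores into
   and expand_builtin_apply reads back.  (Schematic only.)  */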
7473 static int size = -1;
7475 enum machine_mode mode;
7477 /* The values computed by this function never change. */
7480 /* The first value is the incoming arg-pointer. */
7481 size = GET_MODE_SIZE (Pmode);
7483 /* The second value is the structure value address unless this is
7484 passed as an "invisible" first argument. */
7485 if (struct_value_rtx)
7486 size += GET_MODE_SIZE (Pmode);
7488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7489 if (FUNCTION_ARG_REGNO_P (regno))
7491 /* Search for the proper mode for copying this register's
7492 value. I'm not sure this is right, but it works so far. */
7493 enum machine_mode best_mode = VOIDmode;
7495 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7497 mode = GET_MODE_WIDER_MODE (mode))
7498 if (HARD_REGNO_MODE_OK (regno, mode)
7499 && HARD_REGNO_NREGS (regno, mode) == 1)
7502 if (best_mode == VOIDmode)
7503 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7505 mode = GET_MODE_WIDER_MODE (mode))
7506 if (HARD_REGNO_MODE_OK (regno, mode)
7507 && (mov_optab->handlers[(int) mode].insn_code
7508 != CODE_FOR_nothing))
7512 if (mode == VOIDmode)
7515 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7516 if (size % align != 0)
7517 size = CEIL (size, align) * align;
7518 apply_args_reg_offset[regno] = size;
7519 size += GET_MODE_SIZE (mode);
7520 apply_args_mode[regno] = mode;
7524 apply_args_mode[regno] = VOIDmode;
7525 apply_args_reg_offset[regno] = 0;
7531 /* Return the size required for the block returned by __builtin_apply,
7532 and initialize apply_result_mode. */
7535 apply_result_size ()
7537 static int size = -1;
7539 enum machine_mode mode;
7541 /* The values computed by this function never change. */
7546 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7547 if (FUNCTION_VALUE_REGNO_P (regno))
7549 /* Search for the proper mode for copying this register's
7550 value. I'm not sure this is right, but it works so far. */
7551 enum machine_mode best_mode = VOIDmode;
7553 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7555 mode = GET_MODE_WIDER_MODE (mode))
7556 if (HARD_REGNO_MODE_OK (regno, mode))
7559 if (best_mode == VOIDmode)
7560 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7562 mode = GET_MODE_WIDER_MODE (mode))
7563 if (HARD_REGNO_MODE_OK (regno, mode)
7564 && (mov_optab->handlers[(int) mode].insn_code
7565 != CODE_FOR_nothing))
7569 if (mode == VOIDmode)
7572 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7573 if (size % align != 0)
7574 size = CEIL (size, align) * align;
7575 size += GET_MODE_SIZE (mode);
7576 apply_result_mode[regno] = mode;
7579 apply_result_mode[regno] = VOIDmode;
7581 /* Allow targets that use untyped_call and untyped_return to override
7582 the size so that machine-specific information can be stored here. */
7583 #ifdef APPLY_RESULT_SIZE
7584 size = APPLY_RESULT_SIZE;
7590 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7591 /* Create a vector describing the result block RESULT. If SAVEP is true,
7592 the result block is used to save the values; otherwise it is used to
7593 restore the values. */
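/* Illustratively, on a hypothetical machine that returns values in
   hard registers 0 and 16, result_vector (1, result) would build
   roughly

       (parallel [(set (mem ...) (reg 0))
                  (set (mem ...) (reg 16))])

   i.e. one SET per possible return register, saving into the block;
   with SAVEP zero the SETs run the other way, restoring from it.  */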
7596 result_vector (savep, result)
7600 int regno, size, align, nelts;
7601 enum machine_mode mode;
7603 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7606 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7607 if ((mode = apply_result_mode[regno]) != VOIDmode)
7609 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7610 if (size % align != 0)
7611 size = CEIL (size, align) * align;
7612 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7613 mem = change_address (result, mode,
7614 plus_constant (XEXP (result, 0), size));
7615 savevec[nelts++] = (savep
7616 ? gen_rtx (SET, VOIDmode, mem, reg)
7617 : gen_rtx (SET, VOIDmode, reg, mem));
7618 size += GET_MODE_SIZE (mode);
7620 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7622 #endif /* HAVE_untyped_call or HAVE_untyped_return */
7624 /* Save the state required to perform an untyped call with the same
7625 arguments as were passed to the current function. */
7628 expand_builtin_apply_args ()
7631 int size, align, regno;
7632 enum machine_mode mode;
7634 /* Create a block where the arg-pointer, structure value address,
7635 and argument registers can be saved. */
7636 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7638 /* Walk past the arg-pointer and structure value address. */
7639 size = GET_MODE_SIZE (Pmode);
7640 if (struct_value_rtx)
7641 size += GET_MODE_SIZE (Pmode);
7643 /* Save each register used in calling a function to the block. */
7644 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7645 if ((mode = apply_args_mode[regno]) != VOIDmode)
7647 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7648 if (size % align != 0)
7649 size = CEIL (size, align) * align;
7650 emit_move_insn (change_address (registers, mode,
7651 plus_constant (XEXP (registers, 0),
7653 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7654 size += GET_MODE_SIZE (mode);
7657 /* Save the arg pointer to the block. */
7658 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7659 copy_to_reg (virtual_incoming_args_rtx));
7660 size = GET_MODE_SIZE (Pmode);
7662 /* Save the structure value address unless this is passed as an
7663 "invisible" first argument. */
7664 if (struct_value_incoming_rtx)
7666 emit_move_insn (change_address (registers, Pmode,
7667 plus_constant (XEXP (registers, 0),
7669 copy_to_reg (struct_value_incoming_rtx));
7670 size += GET_MODE_SIZE (Pmode);
7673 /* Return the address of the block. */
7674 return copy_addr_to_reg (XEXP (registers, 0));
7677 /* Perform an untyped call and save the state required to perform an
7678 untyped return of whatever value was returned by the given function. */
7681 expand_builtin_apply (function, arguments, argsize)
7682 rtx function, arguments, argsize;
7684 int size, align, regno;
7685 enum machine_mode mode;
7686 rtx incoming_args, result, reg, dest, call_insn;
7687 rtx old_stack_level = 0;
7690 /* Create a block where the return registers can be saved. */
7691 result = assign_stack_local (BLKmode, apply_result_size (), -1);
7693 /* ??? The argsize value should be adjusted here. */
7695 /* Fetch the arg pointer from the ARGUMENTS block. */
7696 incoming_args = gen_reg_rtx (Pmode);
7697 emit_move_insn (incoming_args,
7698 gen_rtx (MEM, Pmode, arguments));
7699 #ifndef STACK_GROWS_DOWNWARD
7700 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7701 incoming_args, 0, OPTAB_LIB_WIDEN);
7704 /* Perform postincrements before actually calling the function. */
7707 /* Push a new argument block and copy the arguments. */
7708 do_pending_stack_adjust ();
7709 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
7711 /* Push a block of memory onto the stack to store the memory arguments.
7712 Save the address in a register, and copy the memory arguments. ??? I
7713 haven't figured out how the calling convention macros affect this,
7714 but it's likely that the source and/or destination addresses in
7715 the block copy will need updating in machine specific ways. */
7716 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7717 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7718 gen_rtx (MEM, BLKmode, incoming_args),
7720 PARM_BOUNDARY / BITS_PER_UNIT);
7722 /* Refer to the argument block. */
7724 arguments = gen_rtx (MEM, BLKmode, arguments);
7726 /* Walk past the arg-pointer and structure value address. */
7727 size = GET_MODE_SIZE (Pmode);
7728 if (struct_value_rtx)
7729 size += GET_MODE_SIZE (Pmode);
7731 /* Restore each of the registers previously saved. Make USE insns
7732 for each of these registers for use in making the call. */
7733 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7734 if ((mode = apply_args_mode[regno]) != VOIDmode)
7736 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7737 if (size % align != 0)
7738 size = CEIL (size, align) * align;
7739 reg = gen_rtx (REG, mode, regno);
7740 emit_move_insn (reg,
7741 change_address (arguments, mode,
7742 plus_constant (XEXP (arguments, 0),
7745 push_to_sequence (use_insns);
7746 emit_insn (gen_rtx (USE, VOIDmode, reg));
7747 use_insns = get_insns ();
7749 size += GET_MODE_SIZE (mode);
7752 /* Restore the structure value address unless this is passed as an
7753 "invisible" first argument. */
7754 size = GET_MODE_SIZE (Pmode);
7755 if (struct_value_rtx)
7757 rtx value = gen_reg_rtx (Pmode);
7758 emit_move_insn (value,
7759 change_address (arguments, Pmode,
7760 plus_constant (XEXP (arguments, 0),
7762 emit_move_insn (struct_value_rtx, value);
7763 if (GET_CODE (struct_value_rtx) == REG)
7765 push_to_sequence (use_insns);
7766 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7767 use_insns = get_insns ();
7770 size += GET_MODE_SIZE (Pmode);
7773 /* All arguments and registers used for the call are set up by now! */
7774 function = prepare_call_address (function, NULL_TREE, &use_insns);
7776 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7777 and we don't want to load it into a register as an optimization,
7778 because prepare_call_address already did it if it should be done. */
7779 if (GET_CODE (function) != SYMBOL_REF)
7780 function = memory_address (FUNCTION_MODE, function);
7782 /* Generate the actual call instruction and save the return value. */
7783 #ifdef HAVE_untyped_call
7784 if (HAVE_untyped_call)
7785 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7786 result, result_vector (1, result)));
7789 #ifdef HAVE_call_value
7790 if (HAVE_call_value)
7794 /* Locate the unique return register. It is not possible to
7795 express a call that sets more than one return register using
7796 call_value; use untyped_call for that. In fact, untyped_call
7797 only needs to save the return registers in the given block. */
7798 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7799 if ((mode = apply_result_mode[regno]) != VOIDmode)
7802 abort (); /* HAVE_untyped_call required. */
7803 valreg = gen_rtx (REG, mode, regno);
7806 emit_call_insn (gen_call_value (valreg,
7807 gen_rtx (MEM, FUNCTION_MODE, function),
7808 const0_rtx, NULL_RTX, const0_rtx));
7810 emit_move_insn (change_address (result, GET_MODE (valreg),
7818 /* Find the CALL insn we just emitted and write the USE insns before it. */
7819 for (call_insn = get_last_insn ();
7820 call_insn && GET_CODE (call_insn) != CALL_INSN;
7821 call_insn = PREV_INSN (call_insn))
7827 /* Put the USE insns before the CALL. */
7828 emit_insns_before (use_insns, call_insn);
7830 /* Restore the stack. */
7831 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
7833 /* Return the address of the result block. */
7834 return copy_addr_to_reg (XEXP (result, 0));
7837 /* Perform an untyped return. */
7840 expand_builtin_return (result)
7843 int size, align, regno;
7844 enum machine_mode mode;
7848 apply_result_size ();
7849 result = gen_rtx (MEM, BLKmode, result);
7851 #ifdef HAVE_untyped_return
7852 if (HAVE_untyped_return)
7854 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7860 /* Restore the return value and note that each value is used. */
7862 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7863 if ((mode = apply_result_mode[regno]) != VOIDmode)
7865 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7866 if (size % align != 0)
7867 size = CEIL (size, align) * align;
7868 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7869 emit_move_insn (reg,
7870 change_address (result, mode,
7871 plus_constant (XEXP (result, 0),
7874 push_to_sequence (use_insns);
7875 emit_insn (gen_rtx (USE, VOIDmode, reg));
7876 use_insns = get_insns ();
7878 size += GET_MODE_SIZE (mode);
7881 /* Put the USE insns before the return. */
7882 emit_insns (use_insns);
7884 /* Return whatever values were restored by jumping directly to the end of the function. */
7886 expand_null_return ();
7889 /* Expand code for a post- or pre- increment or decrement
7890 and return the RTX for the result.
7891 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
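/* For example (illustrative), for `i++' used for its value, POST is 1
   and the value of the expression is the old value of `i'; when the
   target has a suitable add insn, the increment itself is merely queued
   via enqueue_insn below rather than emitted immediately.  For `++i',
   POST is 0 and the incremented value is what the expression yields.  */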
7894 expand_increment (exp, post)
7898 register rtx op0, op1;
7899 register rtx temp, value;
7900 register tree incremented = TREE_OPERAND (exp, 0);
7901 optab this_optab = add_optab;
7903 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7904 int op0_is_copy = 0;
7905 int single_insn = 0;
7906 /* 1 means we can't store into OP0 directly,
7907 because it is a subreg narrower than a word,
7908 and we don't dare clobber the rest of the word. */
7911 if (output_bytecode)
7913 bc_expand_expr (exp);
7917 /* Stabilize any component ref that might need to be
7918 evaluated more than once below. */
7920 || TREE_CODE (incremented) == BIT_FIELD_REF
7921 || (TREE_CODE (incremented) == COMPONENT_REF
7922 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7923 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7924 incremented = stabilize_reference (incremented);
7925 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7926 ones into save exprs so that they don't accidentally get evaluated
7927 more than once by the code below. */
7928 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7929 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7930 incremented = save_expr (incremented);
7932 /* Compute the operands as RTX.
7933 Note whether OP0 is the actual lvalue or a copy of it:
7934 I believe it is a copy iff it is a register or subreg
7935 and insns were generated in computing it. */
7937 temp = get_last_insn ();
7938 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
7940 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7941 in place but instead must do sign- or zero-extension during assignment,
7942 so we copy it into a new register and let the code below use it as a copy.
7945 Note that we can safely modify this SUBREG since it is known not to be
7946 shared (it was made by the expand_expr call above). */
7948 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7949 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
7950 else if (GET_CODE (op0) == SUBREG
7951 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
7953 /* We cannot increment this SUBREG in place. If we are
7954 post-incrementing, get a copy of the old value. Otherwise,
7955 just mark that we cannot increment in place. */
7957 op0 = copy_to_reg (op0);
7962 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7963 && temp != get_last_insn ());
7964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7966 /* Decide whether incrementing or decrementing. */
7967 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7968 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7969 this_optab = sub_optab;
7971 /* Convert decrement by a constant into a negative increment. */
7972 if (this_optab == sub_optab
7973 && GET_CODE (op1) == CONST_INT)
7975 op1 = GEN_INT (- INTVAL (op1));
7976 this_optab = add_optab;
7979 /* For a preincrement, see if we can do this with a single instruction. */
7982 icode = (int) this_optab->handlers[(int) mode].insn_code;
7983 if (icode != (int) CODE_FOR_nothing
7984 /* Make sure that OP0 is valid for operands 0 and 1
7985 of the insn we want to queue. */
7986 && (*insn_operand_predicate[icode][0]) (op0, mode)
7987 && (*insn_operand_predicate[icode][1]) (op0, mode)
7988 && (*insn_operand_predicate[icode][2]) (op1, mode))
7992 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7993 then we cannot just increment OP0. We must therefore contrive to
7994 increment the original value. Then, for postincrement, we can return
7995 OP0 since it is a copy of the old value. For preincrement, expand here
7996 unless we can do it with a single insn.
7998 Likewise if storing directly into OP0 would clobber high bits
7999 we need to preserve (bad_subreg). */
8000 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8002 /* This is the easiest way to increment the value wherever it is.
8003 Problems with multiple evaluation of INCREMENTED are prevented
8004 because either (1) it is a component_ref or preincrement,
8005 in which case it was stabilized above, or (2) it is an array_ref
8006 with constant index in an array in a register, which is
8007 safe to reevaluate. */
8008 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8009 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8010 ? MINUS_EXPR : PLUS_EXPR),
8013 TREE_OPERAND (exp, 1));
8014 temp = expand_assignment (incremented, newexp, ! post, 0);
8015 return post ? op0 : temp;
8020 /* We have a true reference to the value in OP0.
8021 If there is an insn to add or subtract in this mode, queue it.
8022 Queueing the increment insn avoids the register shuffling
8023 that often results if we must increment now and first save
8024 the old value for subsequent use. */
8026 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8027 op0 = stabilize (op0);
8030 icode = (int) this_optab->handlers[(int) mode].insn_code;
8031 if (icode != (int) CODE_FOR_nothing
8032 /* Make sure that OP0 is valid for operands 0 and 1
8033 of the insn we want to queue. */
8034 && (*insn_operand_predicate[icode][0]) (op0, mode)
8035 && (*insn_operand_predicate[icode][1]) (op0, mode))
8037 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8038 op1 = force_reg (mode, op1);
8040 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8044 /* Preincrement, or we can't increment with one simple insn. */
8046 /* Save a copy of the value before inc or dec, to return it later. */
8047 temp = value = copy_to_reg (op0);
8049 /* Arrange to return the incremented value. */
8050 /* Copy the rtx because expand_binop will protect from the queue,
8051 and the results of that would be invalid for us to return
8052 if our caller does emit_queue before using our result. */
8053 temp = copy_rtx (value = op0);
8055 /* Increment however we can. */
8056 op1 = expand_binop (mode, this_optab, value, op1, op0,
8057 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8058 /* Make sure the value is stored into OP0. */
8060 emit_move_insn (op0, op1);
8065 /* Expand all function calls contained within EXP, innermost ones first.
8066 But don't look within expressions that have sequence points.
8067 For each CALL_EXPR, record the rtx for its value
8068 in the CALL_EXPR_RTL field. */
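/* For example (illustrative), when expanding `z = f (x) + g (y)' the
   calls to `f' and `g' are expanded here first, each recording its
   result rtx in CALL_EXPR_RTL, so that expanding the addition later
   merely reuses those saved values.  `f', `g', `x', `y' and `z' are of
   course hypothetical.  */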
8071 preexpand_calls (exp)
8074 register int nops, i;
8075 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8077 if (! do_preexpand_calls)
8080 /* Only expressions and references can contain calls. */
8082 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8085 switch (TREE_CODE (exp))
8088 /* Do nothing if already expanded. */
8089 if (CALL_EXPR_RTL (exp) != 0)
8092 /* Do nothing to built-in functions. */
8093 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8094 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8095 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8096 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8101 case TRUTH_ANDIF_EXPR:
8102 case TRUTH_ORIF_EXPR:
8103 /* If we find one of these, then we can be sure
8104 the adjust will be done for it (since it makes jumps).
8105 Do it now, so that if this is inside an argument
8106 of a function, we don't get the stack adjustment
8107 after some other args have already been pushed. */
8108 do_pending_stack_adjust ();
8113 case WITH_CLEANUP_EXPR:
8117 if (SAVE_EXPR_RTL (exp) != 0)
8121 nops = tree_code_length[(int) TREE_CODE (exp)];
8122 for (i = 0; i < nops; i++)
8123 if (TREE_OPERAND (exp, i) != 0)
8125 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8126 if (type == 'e' || type == '<' || type == '1' || type == '2'
8128 preexpand_calls (TREE_OPERAND (exp, i));
8132 /* At the start of a function, record that we have no previously-pushed
8133 arguments waiting to be popped. */
8136 init_pending_stack_adjust ()
8138 pending_stack_adjust = 0;
8141 /* When exiting from function, if safe, clear out any pending stack adjust
8142 so the adjustment won't get done. */
8145 clear_pending_stack_adjust ()
8147 #ifdef EXIT_IGNORE_STACK
8148 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8149 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8150 && ! flag_inline_functions)
8151 pending_stack_adjust = 0;
8155 /* Pop any previously-pushed arguments that have not been popped yet. */
8158 do_pending_stack_adjust ()
8160 if (inhibit_defer_pop == 0)
8162 if (pending_stack_adjust != 0)
8163 adjust_stack (GEN_INT (pending_stack_adjust));
8164 pending_stack_adjust = 0;
8168 /* Expand all cleanups up to OLD_CLEANUPS.
8169 Needed here, and also for language-dependent calls. */
8172 expand_cleanups_to (old_cleanups)
8175 while (cleanups_this_call != old_cleanups)
8177 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8178 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8182 /* Expand conditional expressions. */
8184 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8185 LABEL is an rtx of code CODE_LABEL, in this function and all the
8189 jumpifnot (exp, label)
8193 do_jump (exp, label, NULL_RTX);
8196 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8203 do_jump (exp, NULL_RTX, label);
8206 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8207 the result is zero, or IF_TRUE_LABEL if the result is one.
8208 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8209 meaning fall through in that case.
8211 do_jump always does any pending stack adjust except when it does not
8212 actually perform a jump. An example where there is no jump
8213 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8215 This function is responsible for optimizing cases such as
8216 &&, || and comparison operators in EXP. */
8219 do_jump (exp, if_false_label, if_true_label)
8221 rtx if_false_label, if_true_label;
8223 register enum tree_code code = TREE_CODE (exp);
8224 /* Some cases need to create a label to jump to
8225 in order to properly fall through.
8226 These cases set DROP_THROUGH_LABEL nonzero. */
8227 rtx drop_through_label = 0;
8241 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8247 /* This is not true with #pragma weak */
8249 /* The address of something can never be zero. */
8251 emit_jump (if_true_label);
8256 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8257 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8258 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8261 /* If we are narrowing the operand, we have to do the compare in the narrower mode.  */
8263 if ((TYPE_PRECISION (TREE_TYPE (exp))
8264 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8266 case NON_LVALUE_EXPR:
8267 case REFERENCE_EXPR:
8272 /* These cannot change zero->non-zero or vice versa. */
8273 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8277 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8278 a test, and can take more insns if the test is eliminated.  */
8280 /* Reduce to minus. */
8281 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8282 TREE_OPERAND (exp, 0),
8283 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8284 TREE_OPERAND (exp, 1))));
8285 /* Process as MINUS. */
8289 /* Non-zero iff operands of minus differ. */
8290 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8291 TREE_OPERAND (exp, 0),
8292 TREE_OPERAND (exp, 1)),
8297 /* If we are AND'ing with a small constant, do this comparison in the
8298 smallest type that fits. If the machine doesn't have comparisons
8299 that small, it will be converted back to the wider comparison.
8300 This helps if we are testing the sign bit of a narrower object.
8301 combine can't do this for us because it can't know whether a
8302 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8304 if (! SLOW_BYTE_ACCESS
8305 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8306 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8307 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8308 && (type = type_for_size (i + 1, 1)) != 0
8309 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8310 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8311 != CODE_FOR_nothing))
8313 do_jump (convert (type, exp), if_false_label, if_true_label);
8318 case TRUTH_NOT_EXPR:
8319 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8322 case TRUTH_ANDIF_EXPR:
8323 if (if_false_label == 0)
8324 if_false_label = drop_through_label = gen_label_rtx ();
8325 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8326 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8329 case TRUTH_ORIF_EXPR:
8330 if (if_true_label == 0)
8331 if_true_label = drop_through_label = gen_label_rtx ();
8332 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8333 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8342 do_pending_stack_adjust ();
8343 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8350 int bitsize, bitpos, unsignedp;
8351 enum machine_mode mode;
8356 /* Get description of this reference. We don't actually care
8357 about the underlying object here. */
8358 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8359 &mode, &unsignedp, &volatilep);
8361 type = type_for_size (bitsize, unsignedp);
8362 if (! SLOW_BYTE_ACCESS
8363 && type != 0 && bitsize >= 0
8364 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8365 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8366 != CODE_FOR_nothing))
8368 do_jump (convert (type, exp), if_false_label, if_true_label);
8375 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8376 if (integer_onep (TREE_OPERAND (exp, 1))
8377 && integer_zerop (TREE_OPERAND (exp, 2)))
8378 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8380 else if (integer_zerop (TREE_OPERAND (exp, 1))
8381 && integer_onep (TREE_OPERAND (exp, 2)))
8382 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8386 register rtx label1 = gen_label_rtx ();
8387 drop_through_label = gen_label_rtx ();
8388 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8389 /* Now the THEN-expression. */
8390 do_jump (TREE_OPERAND (exp, 1),
8391 if_false_label ? if_false_label : drop_through_label,
8392 if_true_label ? if_true_label : drop_through_label);
8393 /* In case the do_jump just above never jumps. */
8394 do_pending_stack_adjust ();
8395 emit_label (label1);
8396 /* Now the ELSE-expression. */
8397 do_jump (TREE_OPERAND (exp, 2),
8398 if_false_label ? if_false_label : drop_through_label,
8399 if_true_label ? if_true_label : drop_through_label);
8404 if (integer_zerop (TREE_OPERAND (exp, 1)))
8405 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8406 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8409 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8410 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8411 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8412 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8414 comparison = compare (exp, EQ, EQ);
8418 if (integer_zerop (TREE_OPERAND (exp, 1)))
8419 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8420 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8423 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8424 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8425 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
8426 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8428 comparison = compare (exp, NE, NE);
8432 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8434 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8435 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8437 comparison = compare (exp, LT, LTU);
8441 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8443 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8444 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8446 comparison = compare (exp, LE, LEU);
8450 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8452 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8453 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8455 comparison = compare (exp, GT, GTU);
8459 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8461 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8462 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8464 comparison = compare (exp, GE, GEU);
8469 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8471 /* This is not needed any more and causes poor code since it causes
8472 comparisons and tests from non-SI objects to have different code
8474 /* Copy to register to avoid generating bad insns by cse
8475 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8476 if (!cse_not_expected && GET_CODE (temp) == MEM)
8477 temp = copy_to_reg (temp);
8479 do_pending_stack_adjust ();
8480 if (GET_CODE (temp) == CONST_INT)
8481 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8482 else if (GET_CODE (temp) == LABEL_REF)
8483 comparison = const_true_rtx;
8484 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8485 && !can_compare_p (GET_MODE (temp)))
8486 /* Note swapping the labels gives us not-equal. */
8487 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8488 else if (GET_MODE (temp) != VOIDmode)
8489 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8490 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8491 GET_MODE (temp), NULL_RTX, 0);
8496 /* Do any postincrements in the expression that was tested. */
8499 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8500 straight into a conditional jump instruction as the jump condition.
8501 Otherwise, all the work has been done already. */
8503 if (comparison == const_true_rtx)
8506 emit_jump (if_true_label);
8508 else if (comparison == const0_rtx)
8511 emit_jump (if_false_label);
8513 else if (comparison)
8514 do_jump_for_compare (comparison, if_false_label, if_true_label);
8516 if (drop_through_label)
8518 /* If do_jump produces code that might be jumped around,
8519 do any stack adjusts from that code, before the place
8520 where control merges in. */
8521 do_pending_stack_adjust ();
8522 emit_label (drop_through_label);
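
/* Illustrative sketch only: what the TRUTH_ANDIF_EXPR case above arranges
   for `if (a && b) ...' -- jump straight to the false label as soon as
   either operand is zero, with no 0/1 temporary ever materialized.  The
   labels and the toy function are hypothetical.  */
#if 0
static int
toy_and_if (int a, int b)
{
  if (!a) goto false_label;	/* do_jump (op0, if_false_label, NULL) */
  if (!b) goto false_label;	/* do_jump (op1, if_false_label, if_true_label) */
  return 1;			/* true path: fall through */
 false_label:
  return 0;
}
#endif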
8526 /* Given a comparison expression EXP for values too wide to be compared
8527 with one insn, test the comparison and jump to the appropriate label.
8528 The code of EXP is ignored; we always test GT if SWAP is 0,
8529 and LT if SWAP is 1. */
8532 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8535 rtx if_false_label, if_true_label;
8537 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8538 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8539 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8540 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8541 rtx drop_through_label = 0;
8542 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8545 if (! if_true_label || ! if_false_label)
8546 drop_through_label = gen_label_rtx ();
8547 if (! if_true_label)
8548 if_true_label = drop_through_label;
8549 if (! if_false_label)
8550 if_false_label = drop_through_label;
8552 /* Compare a word at a time, high order first. */
8553 for (i = 0; i < nwords; i++)
8556 rtx op0_word, op1_word;
8558 if (WORDS_BIG_ENDIAN)
8560 op0_word = operand_subword_force (op0, i, mode);
8561 op1_word = operand_subword_force (op1, i, mode);
8565 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8566 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8569 /* All but high-order word must be compared as unsigned. */
8570 comp = compare_from_rtx (op0_word, op1_word,
8571 (unsignedp || i > 0) ? GTU : GT,
8572 unsignedp, word_mode, NULL_RTX, 0);
8573 if (comp == const_true_rtx)
8574 emit_jump (if_true_label);
8575 else if (comp != const0_rtx)
8576 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8578 /* Consider lower words only if these are equal. */
8579 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8581 if (comp == const_true_rtx)
8582 emit_jump (if_false_label);
8583 else if (comp != const0_rtx)
8584 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8588 emit_jump (if_false_label);
8589 if (drop_through_label)
8590 emit_label (drop_through_label);
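
/* Illustrative sketch only: word-at-a-time signed `greater than' on a
   two-word value, high-order word first, as the loop above does it.  All
   but the high-order word are compared unsigned.  The two-word layout and
   the toy function are assumptions of this sketch.  */
#if 0
static int
toy_gt_2words (long hi0, unsigned long lo0, long hi1, unsigned long lo1)
{
  if (hi0 > hi1)		/* high-order word: signed compare */
    return 1;
  if (hi0 != hi1)		/* lower words matter only if these are equal */
    return 0;
  return lo0 > lo1;		/* low-order word: always unsigned */
}
#endif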
8593 /* Compare OP0 with OP1, word at a time, in mode MODE.
8594 UNSIGNEDP says to do unsigned comparison.
8595 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
8598 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8599 enum machine_mode mode;
8602 rtx if_false_label, if_true_label;
8604 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8605 rtx drop_through_label = 0;
8608 if (! if_true_label || ! if_false_label)
8609 drop_through_label = gen_label_rtx ();
8610 if (! if_true_label)
8611 if_true_label = drop_through_label;
8612 if (! if_false_label)
8613 if_false_label = drop_through_label;
8615 /* Compare a word at a time, high order first. */
8616 for (i = 0; i < nwords; i++)
8619 rtx op0_word, op1_word;
8621 if (WORDS_BIG_ENDIAN)
8623 op0_word = operand_subword_force (op0, i, mode);
8624 op1_word = operand_subword_force (op1, i, mode);
8628 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8629 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8632 /* All but high-order word must be compared as unsigned. */
8633 comp = compare_from_rtx (op0_word, op1_word,
8634 (unsignedp || i > 0) ? GTU : GT,
8635 unsignedp, word_mode, NULL_RTX, 0);
8636 if (comp == const_true_rtx)
8637 emit_jump (if_true_label);
8638 else if (comp != const0_rtx)
8639 do_jump_for_compare (comp, NULL_RTX, if_true_label);
8641 /* Consider lower words only if these are equal. */
8642 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8644 if (comp == const_true_rtx)
8645 emit_jump (if_false_label);
8646 else if (comp != const0_rtx)
8647 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8651 emit_jump (if_false_label);
8652 if (drop_through_label)
8653 emit_label (drop_through_label);
8656 /* Given an EQ_EXPR expression EXP for values too wide to be compared
8657 with one insn, test the comparison and jump to the appropriate label. */
8660 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8662 rtx if_false_label, if_true_label;
8664 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8665 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8666 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8667 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8669 rtx drop_through_label = 0;
8671 if (! if_false_label)
8672 drop_through_label = if_false_label = gen_label_rtx ();
8674 for (i = 0; i < nwords; i++)
8676 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8677 operand_subword_force (op1, i, mode),
8678 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8679 word_mode, NULL_RTX, 0);
8680 if (comp == const_true_rtx)
8681 emit_jump (if_false_label);
8682 else if (comp != const0_rtx)
8683 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8687 emit_jump (if_true_label);
8688 if (drop_through_label)
8689 emit_label (drop_through_label);
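
/* Illustrative sketch only: the shape of the multiword equality test above --
   compare word pairs and take the `false' path on the first mismatch.  The
   word arrays and NWORDS parameter are hypothetical.  */
#if 0
static int
toy_eq_nwords (const unsigned long *a, const unsigned long *b, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (a[i] != b[i])
      return 0;			/* one differing word settles it */
  return 1;			/* all words equal */
}
#endif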
8692 /* Jump according to whether OP0 is 0.
8693 We assume that OP0 has an integer mode that is too wide
8694 for the available compare insns. */
8697 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8699 rtx if_false_label, if_true_label;
8701 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8703 rtx drop_through_label = 0;
8705 if (! if_false_label)
8706 drop_through_label = if_false_label = gen_label_rtx ();
8708 for (i = 0; i < nwords; i++)
8710 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8712 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8713 if (comp == const_true_rtx)
8714 emit_jump (if_false_label);
8715 else if (comp != const0_rtx)
8716 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8720 emit_jump (if_true_label);
8721 if (drop_through_label)
8722 emit_label (drop_through_label);
8725 /* Given a comparison expression in rtl form, output conditional branches to
8726 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
8729 do_jump_for_compare (comparison, if_false_label, if_true_label)
8730 rtx comparison, if_false_label, if_true_label;
8734 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8735 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8740 emit_jump (if_false_label);
8742 else if (if_false_label)
8745 rtx prev = get_last_insn ();
8749 prev = PREV_INSN (prev);
8751 /* Output the branch with the opposite condition. Then try to invert
8752 what is generated. If more than one insn is a branch, or if the
8753 branch is not the last insn written, abort. If we can't invert
8754 the branch, make a true label, redirect this jump to it,
8755 emit a jump to the false label, and define the true label. */
8757 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8758 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8762 /* Here we get the insn before what was just emitted.
8763 On some machines, emitting the branch can discard
8764 the previous compare insn and emit a replacement. */
8766 /* If there's only one preceding insn... */
8767 insn = get_insns ();
8769 insn = NEXT_INSN (prev);
8771 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8772 if (GET_CODE (insn) == JUMP_INSN)
8779 if (branch != get_last_insn ())
8782 JUMP_LABEL (branch) = if_false_label;
8783 if (! invert_jump (branch, if_false_label))
8785 if_true_label = gen_label_rtx ();
8786 redirect_jump (branch, if_true_label);
8787 emit_jump (if_false_label);
8788 emit_label (if_true_label);
8793 /* Generate code for a comparison expression EXP
8794 (including code to compute the values to be compared)
8795 and set (CC0) according to the result.
8796 SIGNED_CODE should be the rtx operation for this comparison for
8797 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8799 We force a stack adjustment unless there are currently
8800 things pushed on the stack that aren't yet used. */
8803 compare (exp, signed_code, unsigned_code)
8805 enum rtx_code signed_code, unsigned_code;
8808 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8810 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8811 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8812 register enum machine_mode mode = TYPE_MODE (type);
8813 int unsignedp = TREE_UNSIGNED (type);
8814 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
8816 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8818 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8819 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8822 /* Like compare but expects the values to compare as two rtx's.
8823 The decision as to signed or unsigned comparison must be made by the caller.
8825 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8828 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8829 size of MODE should be used. */
8832 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8833 register rtx op0, op1;
8836 enum machine_mode mode;
8842 /* If one operand is constant, make it the second one. Only do this
8843 if the other operand is not constant as well. */
8845 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8846 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8851 code = swap_condition (code);
8856 op0 = force_not_mem (op0);
8857 op1 = force_not_mem (op1);
8860 do_pending_stack_adjust ();
8862 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8863 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8867 /* There's no need to do this now that combine.c can eliminate lots of
8868 sign extensions. This can be less efficient in certain cases on other machines.  */
8871 /* If this is a signed equality comparison, we can do it as an
8872 unsigned comparison since zero-extension is cheaper than sign
8873 extension and comparisons with zero are done as unsigned. This is
8874 the case even on machines that can do fast sign extension, since
8875 zero-extension is easier to combine with other operations than
8876 sign-extension is. If we are comparing against a constant, we must
8877 convert it to what it would look like unsigned. */
8878 if ((code == EQ || code == NE) && ! unsignedp
8879 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8881 if (GET_CODE (op1) == CONST_INT
8882 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8883 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8888 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
8890 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
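
/* Illustrative sketch only: the constant masking described above, shown for
   a 16-bit operand held in a wider host integer.  Equality is sign-agnostic
   once the constant is reduced to the operand mode's mask, so the compare
   can be done unsigned.  The toy function and types are assumptions.  */
#if 0
static int
toy_eq_hi (short x, short cst)
{
  unsigned long mask = 0xffffUL;		 /* GET_MODE_MASK (HImode) */
  unsigned long ux = (unsigned long) x & mask;	 /* operand, zero-extended */
  unsigned long uc = (unsigned long) cst & mask; /* constant made to match */

  return ux == uc;	/* same truth value as x == cst, done unsigned */
}
#endif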
8893 /* Generate code to calculate EXP using a store-flag instruction
8894 and return an rtx for the result. EXP is either a comparison
8895 or a TRUTH_NOT_EXPR whose operand is a comparison.
8897 If TARGET is nonzero, store the result there if convenient.
8899 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
8902 Return zero if there is no suitable set-flag instruction
8903 available on this machine.
8905 Once expand_expr has been called on the arguments of the comparison,
8906 we are committed to doing the store flag, since it is not safe to
8907 re-evaluate the expression. We emit the store-flag insn by calling
8908 emit_store_flag, but only expand the arguments if we have a reason
8909 to believe that emit_store_flag will be successful. If we think that
8910 it will, but it isn't, we have to simulate the store-flag with a
8911 set/jump/set sequence. */
8914 do_store_flag (exp, target, mode, only_cheap)
8917 enum machine_mode mode;
8921 tree arg0, arg1, type;
8923 enum machine_mode operand_mode;
8927 enum insn_code icode;
8928 rtx subtarget = target;
8929 rtx result, label, pattern, jump_pat;
8931 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8932 result at the end. We can't simply invert the test since it would
8933 have already been inverted if it were valid. This case occurs for
8934 some floating-point comparisons. */
8936 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8937 invert = 1, exp = TREE_OPERAND (exp, 0);
8939 arg0 = TREE_OPERAND (exp, 0);
8940 arg1 = TREE_OPERAND (exp, 1);
8941 type = TREE_TYPE (arg0);
8942 operand_mode = TYPE_MODE (type);
8943 unsignedp = TREE_UNSIGNED (type);
8945 /* We won't bother with BLKmode store-flag operations because it would mean
8946 passing a lot of information to emit_store_flag. */
8947 if (operand_mode == BLKmode)
8953 /* Get the rtx comparison code to use. We know that EXP is a comparison
8954 operation of some type. Some comparisons against 1 and -1 can be
8955 converted to comparisons with zero. Do so here so that the tests
8956 below will be aware that we have a comparison with zero. These
8957 tests will not catch constants in the first operand, but constants
8958 are rarely passed as the first operand. */
8960 switch (TREE_CODE (exp))
8969 if (integer_onep (arg1))
8970 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8972 code = unsignedp ? LTU : LT;
8975 if (! unsignedp && integer_all_onesp (arg1))
8976 arg1 = integer_zero_node, code = LT;
8978 code = unsignedp ? LEU : LE;
8981 if (! unsignedp && integer_all_onesp (arg1))
8982 arg1 = integer_zero_node, code = GE;
8984 code = unsignedp ? GTU : GT;
8987 if (integer_onep (arg1))
8988 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8990 code = unsignedp ? GEU : GE;
8996 /* Put a constant second. */
8997 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8999 tem = arg0; arg0 = arg1; arg1 = tem;
9000 code = swap_condition (code);
9003 /* If this is an equality or inequality test of a single bit, we can
9004 do this by shifting the bit being tested to the low-order bit and
9005 masking the result with the constant 1. If the condition was EQ,
9006 we xor it with 1. This does not require an scc insn and is faster
9007 than an scc insn even if we have it. */
9009 if ((code == NE || code == EQ)
9010 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9011 && integer_pow2p (TREE_OPERAND (arg0, 1))
9012 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9014 tree inner = TREE_OPERAND (arg0, 0);
9015 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9016 NULL_RTX, VOIDmode, 0)));
9019 /* If INNER is a right shift of a constant and it plus BITNUM does
9020 not overflow, adjust BITNUM and INNER. */
9022 if (TREE_CODE (inner) == RSHIFT_EXPR
9023 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9024 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9025 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9026 < TYPE_PRECISION (type)))
9028 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9029 inner = TREE_OPERAND (inner, 0);
9032 /* If we are going to be able to omit the AND below, we must do our
9033 operations as unsigned. If we must use the AND, we have a choice.
9034 Normally unsigned is faster, but for some machines signed is. */
9035 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9036 #ifdef LOAD_EXTEND_OP
9037 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9043 if (subtarget == 0 || GET_CODE (subtarget) != REG
9044 || GET_MODE (subtarget) != operand_mode
9045 || ! safe_from_p (subtarget, inner))
9048 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9051 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9052 size_int (bitnum), subtarget, ops_unsignedp);
9054 if (GET_MODE (op0) != mode)
9055 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9057 if ((code == EQ && ! invert) || (code == NE && invert))
9058 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9059 ops_unsignedp, OPTAB_LIB_WIDEN);
9061 /* Put the AND last so it can combine with more things. */
9062 if (bitnum != TYPE_PRECISION (type) - 1)
9063 op0 = expand_and (op0, const1_rtx, subtarget);
9068 /* Now see if we are likely to be able to do this. Return if not. */
9069 if (! can_compare_p (operand_mode))
9071 icode = setcc_gen_code[(int) code];
9072 if (icode == CODE_FOR_nothing
9073 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9075 /* We can only do this if it is one of the special cases that
9076 can be handled without an scc insn. */
9077 if ((code == LT && integer_zerop (arg1))
9078 || (! only_cheap && code == GE && integer_zerop (arg1)))
9080 else if (BRANCH_COST >= 0
9081 && ! only_cheap && (code == NE || code == EQ)
9082 && TREE_CODE (type) != REAL_TYPE
9083 && ((abs_optab->handlers[(int) operand_mode].insn_code
9084 != CODE_FOR_nothing)
9085 || (ffs_optab->handlers[(int) operand_mode].insn_code
9086 != CODE_FOR_nothing)))
9092 preexpand_calls (exp);
9093 if (subtarget == 0 || GET_CODE (subtarget) != REG
9094 || GET_MODE (subtarget) != operand_mode
9095 || ! safe_from_p (subtarget, arg1))
9098 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9099 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9102 target = gen_reg_rtx (mode);
9104 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9105 because, if emit_store_flag does anything, it will succeed and
9106 OP0 and OP1 will not be used subsequently. */
9108 result = emit_store_flag (target, code,
9109 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9110 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9111 operand_mode, unsignedp, 1);
9116 result = expand_binop (mode, xor_optab, result, const1_rtx,
9117 result, 0, OPTAB_LIB_WIDEN);
9121 /* If this failed, we have to do this with set/compare/jump/set code. */
9122 if (target == 0 || GET_CODE (target) != REG
9123 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9124 target = gen_reg_rtx (GET_MODE (target));
9126 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9127 result = compare_from_rtx (op0, op1, code, unsignedp,
9128 operand_mode, NULL_RTX, 0);
9129 if (GET_CODE (result) == CONST_INT)
9130 return (((result == const0_rtx && ! invert)
9131 || (result != const0_rtx && invert))
9132 ? const0_rtx : const1_rtx);
9134 label = gen_label_rtx ();
9135 if (bcc_gen_fctn[(int) code] == 0)
9138 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9139 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
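
/* Illustrative sketch only: the single-bit store-flag special case handled
   above, written out for a host `unsigned int'.  BITNUM is the bit being
   tested; the toy functions are hypothetical.  */
#if 0
static unsigned int
toy_single_bit_ne (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) != 0  becomes  (x >> bitnum) & 1  */
  return (x >> bitnum) & 1;
}

static unsigned int
toy_single_bit_eq (unsigned int x, int bitnum)
{
  /* (x & (1 << bitnum)) == 0  becomes  ((x >> bitnum) ^ 1) & 1;
     the XOR with 1 comes before the final AND, as in the code above,
     and either order gives the same single-bit result.  */
  return ((x >> bitnum) ^ 1) & 1;
}
#endif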
9145 /* Generate a tablejump instruction (used for switch statements). */
9147 #ifdef HAVE_tablejump
9149 /* INDEX is the value being switched on, with the lowest value
9150 in the table already subtracted.
9151 MODE is its expected mode (needed if INDEX is constant).
9152 RANGE is the length of the jump table.
9153 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9155 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9156 index value is out of range. */
9159 do_tablejump (index, mode, range, table_label, default_label)
9160 rtx index, range, table_label, default_label;
9161 enum machine_mode mode;
9163 register rtx temp, vector;
9165 /* Do an unsigned comparison (in the proper mode) between the index
9166 expression and the value which represents the length of the range.
9167 Since we just finished subtracting the lower bound of the range
9168 from the index expression, this comparison allows us to simultaneously
9169 check that the original index expression value is both greater than
9170 or equal to the minimum value of the range and less than or equal to
9171 the maximum value of the range. */
9173 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9174 emit_jump_insn (gen_bgtu (default_label));
9176 /* If index is in range, it must fit in Pmode.
9177 Convert to Pmode so we can index with it. */
9179 index = convert_to_mode (Pmode, index, 1);
9181 /* Don't let a MEM slip thru, because then INDEX that comes
9182 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9183 and break_out_memory_refs will go to work on it and mess it up. */
9184 #ifdef PIC_CASE_VECTOR_ADDRESS
9185 if (flag_pic && GET_CODE (index) != REG)
9186 index = copy_to_mode_reg (Pmode, index);
9189 /* If flag_force_addr were to affect this address
9190 it could interfere with the tricky assumptions made
9191 about addresses that contain label-refs,
9192 which may be valid only very near the tablejump itself. */
9193 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9194 GET_MODE_SIZE, because this indicates how large insns are. The other
9195 uses should all be Pmode, because they are addresses. This code
9196 could fail if addresses and insns are not the same size. */
9197 index = gen_rtx (PLUS, Pmode,
9198 gen_rtx (MULT, Pmode, index,
9199 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9200 gen_rtx (LABEL_REF, Pmode, table_label));
9201 #ifdef PIC_CASE_VECTOR_ADDRESS
9203 index = PIC_CASE_VECTOR_ADDRESS (index);
9206 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9207 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9208 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9209 RTX_UNCHANGING_P (vector) = 1;
9210 convert_move (temp, vector, 0);
9212 emit_jump_insn (gen_tablejump (temp, table_label));
9214 #ifndef CASE_VECTOR_PC_RELATIVE
9215 /* If we are generating PIC code or if the table is PC-relative, the
9216 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9222 #endif /* HAVE_tablejump */
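
/* Illustrative sketch only: the overall shape of a tablejump, written with
   GNU C computed gotos.  The single unsigned compare of the rebased index
   against the range catches both out-of-range directions at once, exactly
   as described above.  The toy function and labels are hypothetical.  */
#if 0
static int
toy_switch (int index)
{
  static void *table[] = { &&case0, &&case1, &&case2 };
  unsigned int rebased = (unsigned int) (index - 0);	/* low bound already subtracted */

  if (rebased > 2)		/* GTU compare against RANGE */
    goto deflt;
  goto *table[rebased];		/* load label from table and jump */

 case0: return 10;
 case1: return 11;
 case2: return 12;
 deflt: return -1;
}
#endif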
9225 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9226 to that value is on the top of the stack. The resulting type is TYPE, and
9227 the source declaration is DECL. */
9230 bc_load_memory (type, decl)
9233 enum bytecode_opcode opcode;
9236 /* Bit fields are special. We only know about signed and
9237 unsigned ints, and enums. The latter are treated as integers.  */
9240 if (DECL_BIT_FIELD (decl))
9241 if (TREE_CODE (type) == ENUMERAL_TYPE
9242 || TREE_CODE (type) == INTEGER_TYPE)
9243 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9247 /* See corresponding comment in bc_store_memory(). */
9248 if (TYPE_MODE (type) == BLKmode
9249 || TYPE_MODE (type) == VOIDmode)
9252 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9254 if (opcode == neverneverland)
9257 bc_emit_bytecode (opcode);
9259 #ifdef DEBUG_PRINT_CODE
9260 fputc ('\n', stderr);
9265 /* Store the contents of the second stack slot to the address in the
9266 top stack slot. DECL is the declaration of the destination and is used
9267 to determine whether we're dealing with a bitfield. */
9270 bc_store_memory (type, decl)
9273 enum bytecode_opcode opcode;
9276 if (DECL_BIT_FIELD (decl))
9278 if (TREE_CODE (type) == ENUMERAL_TYPE
9279 || TREE_CODE (type) == INTEGER_TYPE)
9285 if (TYPE_MODE (type) == BLKmode)
9287 /* Copy structure. This expands to a block copy instruction, storeBLK.
9288 In addition to the arguments expected by the other store instructions,
9289 it also expects a type size (SImode) on top of the stack, which is the
9290 structure size in size units (usually bytes). The first two arguments
9291 are already on the stack, so we just put the size on level 1. For some
9292 other languages the size may be variable; this is why we don't encode
9293 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9295 bc_expand_expr (TYPE_SIZE (type));
9299 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
9301 if (opcode == neverneverland)
9304 bc_emit_bytecode (opcode);
9306 #ifdef DEBUG_PRINT_CODE
9307 fputc ('\n', stderr);
9312 /* Allocate local stack space sufficient to hold a value of the given
9313 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9314 integral power of 2. A special case is locals of type VOID, which
9315 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
9316 remapped into the corresponding attribute of SI. */
9319 bc_allocate_local (size, alignment)
9320 int size, alignment;
9328 /* Normalize size and alignment */
9330 size = UNITS_PER_WORD;
9332 if (alignment < BITS_PER_UNIT)
9333 byte_alignment = 1 << (INT_ALIGN - 1);
9336 byte_alignment = alignment / BITS_PER_UNIT;
9338 if (local_vars_size & (byte_alignment - 1))
9339 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
9341 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9342 local_vars_size += size;
9348 /* Allocate variable-sized local array. Variable-sized arrays are
9349 actually pointers to the memory where they are stored. */
9352 bc_allocate_variable_array (size)
9356 const int ptralign = (1 << (PTR_ALIGN - 1));
9359 if (local_vars_size & ptralign)
9360 local_vars_size += ptralign - (local_vars_size & ptralign);
9362 /* Note down local space needed: pointer to block; also return
9365 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9366 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9371 /* Push the machine address for the given external variable offset. */
9373 bc_load_externaddr (externaddr)
9376 bc_emit_bytecode (constP);
9377 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9378 BYTECODE_BC_LABEL (externaddr)->offset);
9380 #ifdef DEBUG_PRINT_CODE
9381 fputc ('\n', stderr);
9390 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9396 /* Like above, but expects an IDENTIFIER. */
9398 bc_load_externaddr_id (id, offset)
9402 if (!IDENTIFIER_POINTER (id))
9405 bc_emit_bytecode (constP);
9406 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
9408 #ifdef DEBUG_PRINT_CODE
9409 fputc ('\n', stderr);
9414 /* Push the machine address for the given local variable offset. */
9416 bc_load_localaddr (localaddr)
9419 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
9423 /* Push the machine address for the given parameter offset.
9424 NOTE: offset is in bits. */
9426 bc_load_parmaddr (parmaddr)
9429 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9434 /* Convert a[i] into *(a + i). */
9436 bc_canonicalize_array_ref (exp)
9439 tree type = TREE_TYPE (exp);
9440 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9441 TREE_OPERAND (exp, 0));
9442 tree index = TREE_OPERAND (exp, 1);
9445 /* Convert the integer argument to a type the same size as a pointer
9446 so the multiply won't overflow spuriously. */
9448 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9449 index = convert (type_for_size (POINTER_SIZE, 0), index);
9451 /* The array address isn't volatile even if the array is.
9452 (Of course this isn't terribly relevant since the bytecode
9453 translator treats nearly everything as volatile anyway.) */
9454 TREE_THIS_VOLATILE (array_adr) = 0;
9456 return build1 (INDIRECT_REF, type,
9457 fold (build (PLUS_EXPR,
9458 TYPE_POINTER_TO (type),
9460 fold (build (MULT_EXPR,
9461 TYPE_POINTER_TO (type),
9463 size_in_bytes (type))))));
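
/* Illustrative sketch only: the rewrite performed above -- a[i] becomes
   *(a + i), with the index scaled by the element size -- spelled out with
   explicit byte arithmetic.  `elt_size' stands in for size_in_bytes (type);
   the toy function is hypothetical.  */
#if 0
static int
toy_array_ref (int *a, long i)
{
  long elt_size = (long) sizeof (int);
  char *base = (char *) a;			/* &a[0] as a byte address */
  int *elt = (int *) (base + i * elt_size);	/* a + i, scaled by the size */

  return *elt;					/* same value as a[i] */
}
#endif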
9467 /* Load the address of the component referenced by the given
9468 COMPONENT_REF expression.
9470 Returns innermost lvalue. */
9473 bc_expand_component_address (exp)
9477 enum machine_mode mode;
9479 HOST_WIDE_INT SIval;
9482 tem = TREE_OPERAND (exp, 1);
9483 mode = DECL_MODE (tem);
9486 /* Compute cumulative bit offset for nested component refs
9487 and array refs, and find the ultimate containing object. */
9489 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
9491 if (TREE_CODE (tem) == COMPONENT_REF)
9492 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9494 if (TREE_CODE (tem) == ARRAY_REF
9495 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9496 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
9498 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9499 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9500 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9505 bc_expand_expr (tem);
9508 /* For bitfields also push their offset and size */
9509 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9510 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9512 if (SIval = bitpos / BITS_PER_UNIT)
9513 bc_emit_instruction (addconstPSI, SIval);
9515 return (TREE_OPERAND (exp, 1));
9519 /* Emit code to push two SI constants */
9521 bc_push_offset_and_size (offset, size)
9522 HOST_WIDE_INT offset, size;
9524 bc_emit_instruction (constSI, offset);
9525 bc_emit_instruction (constSI, size);
9529 /* Emit byte code to push the address of the given lvalue expression to
9530 the stack. If it's a bit field, we also push offset and size info.
9532 Returns innermost component, which allows us to determine not only
9533 its type, but also whether it's a bitfield. */
9536 bc_expand_address (exp)
9540 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9544 switch (TREE_CODE (exp))
9548 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
9552 return (bc_expand_component_address (exp));
9556 bc_expand_expr (TREE_OPERAND (exp, 0));
9558 /* For variable-sized types: retrieve pointer. Sometimes the
9559 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9560 also make sure we have an operand, just in case... */
9562 if (TREE_OPERAND (exp, 0)
9563 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9564 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9565 bc_emit_instruction (loadP);
9567 /* If packed, also return offset and size */
9568 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9570 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9571 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9573 return (TREE_OPERAND (exp, 0));
9577 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9578 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
9583 bc_load_parmaddr (DECL_RTL (exp));
9585 /* For variable-sized types: retrieve pointer */
9586 if (TYPE_SIZE (TREE_TYPE (exp))
9587 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9588 bc_emit_instruction (loadP);
9590 /* If packed, also return offset and size */
9591 if (DECL_BIT_FIELD (exp))
9592 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9593 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9599 bc_emit_instruction (returnP);
9605 if (BYTECODE_LABEL (DECL_RTL (exp)))
9606 bc_load_externaddr (DECL_RTL (exp));
9609 if (DECL_EXTERNAL (exp))
9610 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9611 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
9613 bc_load_localaddr (DECL_RTL (exp));
9615 /* For variable-sized types: retrieve pointer */
9616 if (TYPE_SIZE (TREE_TYPE (exp))
9617 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9618 bc_emit_instruction (loadP);
9620 /* If packed, also return offset and size */
9621 if (DECL_BIT_FIELD (exp))
9622 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9623 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9631 bc_emit_bytecode (constP);
9632 r = output_constant_def (exp);
9633 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
9635 #ifdef DEBUG_PRINT_CODE
9636 fputc ('\n', stderr);
9647 /* Most lvalues don't have components. */
9652 /* Emit a type code to be used by the runtime support in handling
9653 parameter passing. The type code consists of the machine mode
9654 plus the minimal alignment shifted left 8 bits. */
9657 bc_runtime_type_code (type)
9662 switch (TREE_CODE (type))
9672 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
9684 return build_int_2 (val, 0);
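
/* Illustrative sketch only: packing and unpacking the runtime type code
   described above -- the machine mode in the low byte and the alignment
   shifted left 8 bits -- assuming the mode value fits in eight bits.  The
   toy functions are hypothetical.  */
#if 0
static int
toy_pack_type_code (int mode, int align_bits)
{
  return mode | (align_bits << 8);
}

static void
toy_unpack_type_code (int code, int *mode, int *align_bits)
{
  *mode = code & 0xff;
  *align_bits = code >> 8;
}
#endif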
9688 /* Generate constructor label */
9690 bc_gen_constr_label ()
9692 static int label_counter;
9693 static char label[20];
9695 sprintf (label, "*LR%d", label_counter++);
9697 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9701 /* Evaluate constructor CONSTR and return pointer to it on level one. We
9702 expand the constructor data as static data, and push a pointer to it.
9703 The pointer is put in the pointer table and is retrieved by a constP
9704 bytecode instruction. We then loop and store each constructor member in
9705 the corresponding component. Finally, we return the original pointer on
9709 bc_expand_constructor (constr)
9713 HOST_WIDE_INT ptroffs;
9717 /* Literal constructors are handled as constants, whereas
9718 non-literals are evaluated and stored element by element
9719 into the data segment. */
9721 /* Allocate space in proper segment and push pointer to space on stack.
9724 l = bc_gen_constr_label ();
9726 if (TREE_CONSTANT (constr))
9730 bc_emit_const_labeldef (l);
9731 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
9737 bc_emit_data_labeldef (l);
9738 bc_output_data_constructor (constr);
9742 /* Add reference to pointer table and recall pointer to stack;
9743 this code is common for both types of constructors: literals
9744 and non-literals. */
9746 ptroffs = bc_define_pointer (l);
9747 bc_emit_instruction (constP, ptroffs);
9749 /* This is all that has to be done if it's a literal. */
9750 if (TREE_CONSTANT (constr))
9754 /* At this point, we have the pointer to the structure on top of the stack.
9755 Generate sequences of store_memory calls for the constructor. */
9757 /* Constructor type is structure */
9758 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
9762 /* If the constructor has fewer fields than the structure,
9763 clear the whole structure first. */
9765 if (list_length (CONSTRUCTOR_ELTS (constr))
9766 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9768 bc_emit_instruction (duplicate);
9769 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9770 bc_emit_instruction (clearBLK);
9773 /* Store each element of the constructor into the corresponding
9776 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9778 register tree field = TREE_PURPOSE (elt);
9779 register enum machine_mode mode;
9784 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9785 mode = DECL_MODE (field);
9786 unsignedp = TREE_UNSIGNED (field);
9788 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9790 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9791 /* The alignment of TARGET is
9792 at least what its type requires. */
9794 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9795 int_size_in_bytes (TREE_TYPE (constr)));
9800 /* Constructor type is array */
9801 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9805 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9806 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9807 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9808 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9810 /* If the constructor has fewer elements than the array,
9811 clear the whole array first. */
9813 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9815 bc_emit_instruction (duplicate);
9816 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9817 bc_emit_instruction (clearBLK);
9821 /* Store each element of the constructor into the corresponding
9822 element of TARGET, determined by counting the elements. */
9824 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9826 elt = TREE_CHAIN (elt), i++)
9828 register enum machine_mode mode;
9833 mode = TYPE_MODE (elttype);
9834 bitsize = GET_MODE_BITSIZE (mode);
9835 unsignedp = TREE_UNSIGNED (elttype);
9837 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9838 /* * TYPE_SIZE_UNIT (elttype) */ );
9840 bc_store_field (elt, bitsize, bitpos, mode,
9841 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9842 /* The alignment of TARGET is
9843 at least what its type requires. */
9845 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9846 int_size_in_bytes (TREE_TYPE (constr)));
9853 /* Store the value of EXP (an expression tree) into member FIELD of
9854 structure at address on stack, which has type TYPE, mode MODE and
9855 occupies BITSIZE bits, starting BITPOS bits from the beginning of the structure.
9858 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9859 TOTAL_SIZE is its size in bytes, or -1 if variable. */
9862 bc_store_field (field, bitsize, bitpos, mode, exp, type,
9863 value_mode, unsignedp, align, total_size)
9864 int bitsize, bitpos;
9865 enum machine_mode mode;
9866 tree field, exp, type;
9867 enum machine_mode value_mode;
9873 /* Expand expression and copy pointer */
9874 bc_expand_expr (exp);
9875 bc_emit_instruction (over);
9878 /* If the component is a bit field, we cannot use addressing to access
9879 it. Use bit-field techniques to store in it. */
9881 if (DECL_BIT_FIELD (field))
9883 bc_store_bit_field (bitpos, bitsize, unsignedp);
9889 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9891 /* Advance pointer to the desired member */
9893 bc_emit_instruction (addconstPSI, offset);
9896 bc_store_memory (type, field);
9901 /* Store SI/SU in bitfield */
9903 bc_store_bit_field (offset, size, unsignedp)
9904 int offset, size, unsignedp;
9906 /* Push bitfield offset and size */
9907 bc_push_offset_and_size (offset, size);
9910 bc_emit_instruction (sstoreBI);
9914 /* Load SI/SU from bitfield */
9916 bc_load_bit_field (offset, size, unsignedp)
9917 int offset, size, unsignedp;
9919 /* Push bitfield offset and size */
9920 bc_push_offset_and_size (offset, size);
9922 /* Load: sign-extend if signed, else zero-extend */
9923 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
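
/* Illustrative sketch only: what a zero- or sign-extending bit-field load
   amounts to when the field lies within a single word -- shift the field
   down, then mask or sign-extend to SIZE bits.  OFFSET and SIZE are in bits;
   the toy function is hypothetical and is not the interpreter's code.  */
#if 0
static long
toy_load_bit_field (unsigned long word, int offset, int size, int unsignedp)
{
  unsigned long mask = (size >= 8 * (int) sizeof (long))
		       ? ~0UL : ((1UL << size) - 1);
  unsigned long field = (word >> offset) & mask;

  if (unsignedp || size >= 8 * (int) sizeof (long))
    return (long) field;			/* zero-extended */
  if (field & (1UL << (size - 1)))		/* sign bit set: extend it */
    field |= ~mask;
  return (long) field;
}
#endif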
9927 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9928 (adjust the stack pointer upwards); negative means add that number of
9929 levels (adjust the stack pointer downwards). Only positive values
9930 normally make sense. */
9933 bc_adjust_stack (nlevels)
9942 bc_emit_instruction (drop);
9945 bc_emit_instruction (drop);
9950 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9951 stack_depth -= nlevels;
9954 #if defined (VALIDATE_STACK_FOR_BC)
9955 VALIDATE_STACK_FOR_BC ();