/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"

#include "bc-opcode.h"
#include "bc-typecd.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
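/* For example, CEIL (7, 4) == 2 and CEIL (8, 4) == 2: the number of
   4-byte units needed to hold 7 or 8 bytes.  It is used below in
   expressions such as CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD).  */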
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
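/* For example, with STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8; stack adjustments are rounded to multiples of
   this many bytes.  */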
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
  int explicit_inc_from;
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */
bc_init_mode_to_opcode_maps ()

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
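/* A sketch of the shape of a modemap.def entry (hypothetical bytecode
   names, for illustration only):

     DEF_MODEMAP (SImode, SIcode, SUcode, constSI, loadSI, storeSI)

   Each such line expands, via the macro above, to the three map
   assignments for its mode.  */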
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  if (! HARD_REGNO_MODE_OK (regno, mode))
	    continue;

	  reg = gen_rtx (REG, mode, regno);

	  SET_SRC (pat) = mem;
	  SET_DEST (pat) = reg;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_load[(int) mode] = 1;

	  SET_SRC (pat) = mem1;
	  SET_DEST (pat) = reg;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_load[(int) mode] = 1;

	  SET_SRC (pat) = reg;
	  SET_DEST (pat) = mem;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_store[(int) mode] = 1;

	  SET_SRC (pat) = reg;
	  SET_DEST (pat) = mem1;
	  if (recog (pat, insn, &num_clobbers) >= 0)
	    direct_store[(int) mode] = 1;
/* This is run at the start of compiling a function.  */

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

  /* Instead of saving the postincrement queue, empty it.  */

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  apply_args_value = 0;

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

restore_expr_status (p)

  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)

  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
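/* A minimal usage sketch (hypothetical variables, for illustration).
   X may be a QUEUED rtx; the second argument is 0 for read access,
   1 if the value will be modified:

     rtx safe = protect_from_queue (x, 0);
     emit_insn (gen_move_insn (target, safe));

   The protect_from_queue call must come immediately before the rtx is
   placed in an insn; holding SAFE while the queue is flushed would
   reintroduce the hazard described above.  */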
protect_from_queue (x, modify)

  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)

      register rtx y = XEXP (x, 0);
      register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  register rtx temp = gen_reg_rtx (GET_MODE (new));
	  emit_insn_before (gen_move_insn (temp, new),

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)

      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

  register enum rtx_code code = GET_CODE (x);

    return queued_subexp_p (XEXP (x, 0));
    return queued_subexp_p (XEXP (x, 0))
      || queued_subexp_p (XEXP (x, 1));

/* Perform all the pending incrementations.  */

  while (p = pending_chain)

      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
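/* A sketch of a typical call (hypothetical operands, for illustration):
   widen a QImode value into an SImode register with zero-extension:

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   where FROM is a QImode rtx and the 1 requests unsigned (zero)
   extension.  */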
convert_move (to, from, unsignedp)
     register rtx to, from;

  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

#ifdef HAVE_extendqfhf2
  if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfsf2
  if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
    emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfdf2
  if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
    emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendqfxf2
  if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
    emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendqftf2
  if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
    emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);

#ifdef HAVE_extendhftqf2
  if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
    emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);

#ifdef HAVE_extendhfsf2
  if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
    emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfdf2
  if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
    emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendhfxf2
  if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
    emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendhftf2
  if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
    emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);

#ifdef HAVE_extendsfdf2
  if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
    emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
#ifdef HAVE_extendsfxf2
  if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
    emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
#ifdef HAVE_extendsftf2
  if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
    emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
#ifdef HAVE_extenddfxf2
  if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
    emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
#ifdef HAVE_extenddftf2
  if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
    emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
#ifdef HAVE_trunchfqf2
  if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
    emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfqf2
  if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
    emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfqf2
  if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
    emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfqf2
  if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
    emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfqf2
  if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
    emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);

#ifdef HAVE_trunctqfhf2
  if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncsfhf2
  if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfhf2
  if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfhf2
  if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfhf2
  if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
    emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
#ifdef HAVE_truncdfsf2
  if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
    emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfsf2
  if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
    emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfsf2
  if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
    emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
#ifdef HAVE_truncxfdf2
  if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
    emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
#ifdef HAVE_trunctfdf2
  if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
    emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);

	libcall = extendsfdf2_libfunc;
	libcall = extendsfxf2_libfunc;
	libcall = extendsftf2_libfunc;
	libcall = truncdfsf2_libfunc;
	libcall = extenddfxf2_libfunc;
	libcall = extenddftf2_libfunc;
	libcall = truncxfsf2_libfunc;
	libcall = truncxfdf2_libfunc;
	libcall = trunctfsf2_libfunc;
	libcall = trunctfdf2_libfunc;

  if (libcall == (rtx) 0)
    /* This conversion is not implemented yet.  */
    abort ();

  value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
  emit_move_insn (to, value);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)

      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)

	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))

	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */

      fill_value = const0_rtx;

	  && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	  && STORE_FLAG_VALUE == -1)

	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));

	  fill_value
	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)

	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)

      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
#endif /* HAVE_truncsipsi2 */

  if (from_mode == PSImode)

      if (to_mode != SImode)
	  from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_extendpsisi2
      if (HAVE_extendpsisi2)
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
#endif /* HAVE_extendpsisi2 */

  if (to_mode == PDImode)

      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
#endif /* HAVE_truncdipdi2 */

  if (from_mode == PDImode)

      if (to_mode != DImode)
	  from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
#endif /* HAVE_extendpdidi2 */
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))

      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))

      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from, equiv_code);

	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))

		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.  */
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == HImode)
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == DImode && to_mode == QImode)
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == HImode)
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == SImode && to_mode == QImode)
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == HImode && to_mode == QImode)
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == DImode)
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == SImode)
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == HImode)
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  if (from_mode == TImode && to_mode == QImode)
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
      convert_move (to, force_reg (from_mode, from), unsignedp);

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))

      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
  /* Mode combination is not recognized.  */

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;

  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
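/* A sketch of the difference between the two entry points (for
   illustration): a CONST_INT has VOIDmode, so its old mode must be
   supplied explicitly.  The call

     rtx widened = convert_modes (SImode, QImode, GEN_INT (-1), 0);

   sign-extends the low 8 bits, whereas convert_to_mode (SImode, x, 0)
   is equivalent to convert_modes (SImode, VOIDmode, x, 0) and assumes
   X needs no extension.  */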
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
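/* Concretely (for illustration), with HOST_BITS_PER_WIDE_INT == 32 and
   a 64-bit target mode: the CONST_INT -1 interpreted as an unsigned
   32-bit value is 0xffffffff, so the 64-bit constant must have a zero
   high word; gen_lowpart alone would sign-extend and produce all ones
   there.  immed_double_const below builds the (low, high) pair
   explicitly.  */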
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))

      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))

	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);

      return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
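/* For example (hypothetical call, for illustration): copying a 12-byte
   word-aligned block on a 32-bit target,

     move_by_pieces (to, from, 12, 4);

   would typically emit three SImode moves, chosen by the
   widest-mode-first loop below.  */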
move_by_pieces (to, from, len, align)

  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)

#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)

	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;

#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)

	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;

      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)

	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;

#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)

	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;

      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */
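/* A worked example (for illustration): with MOVE_MAX == 8, full
   alignment, and QI/HI/SI/DI move patterns available, L == 7 is
   covered greedily as one SImode, one HImode and one QImode move, so
   move_by_pieces_ninsns (7, 8) == 3.  */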
move_by_pieces_ninsns (l, align)

  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)

  while (max_size > 1)

      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (genfun, mode, data)
     enum machine_mode mode;
     struct move_by_pieces *data;

  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)

      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1 = (data->autinc_from
	       ? gen_rtx (MEM, mode, data->from_addr)
	       : change_address (data->from, mode,
				 plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE).
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
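/* A typical call (hypothetical operands, for illustration): copy N
   bytes of a BLKmode object with known 4-byte alignment,

     emit_block_move (x, y, GEN_INT (n), 4);

   a small constant N falls through to move_by_pieces; otherwise a
   movstr pattern or a memcpy/bcopy library call is used, as below.  */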
emit_block_move (x, y, size, align)

  if (GET_MODE (x) != BLKmode)
  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
  if (GET_CODE (y) != MEM)

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))

	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,

	      rtx last = get_last_insn ();

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      delete_insns_since (last);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (regno, x, nregs, mode)
     enum machine_mode mode;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)

      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

move_block_from_reg (regno, x, nregs, size)

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)

      rtx tem = operand_subword (x, 0, 1, BLKmode);

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)

      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
	delete_insns_since (last);

  for (i = 0; i < nregs; i++)

      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (call_fusage, reg)
     rtx *call_fusage, reg;

  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)

    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (call_fusage, regno, nregs)

  if (regno + nregs > FIRST_PSEUDO_REGISTER)

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
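/* For example (hypothetical operands, for illustration): zeroing a
   16-byte BLKmode aggregate,

     clear_storage (object, 16);

   expands to a memset/bzero library call, while a scalar-mode OBJECT
   is simply set to const0_rtx.  */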
clear_storage (object, size)

  if (GET_MODE (object) == BLKmode)

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
			 GEN_INT (size), ptr_mode);
      emit_library_call (bzero_libfunc, 0,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), ptr_mode);

    emit_move_insn (object, const0_rtx);

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
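/* A typical call (for illustration): load the constant 42 into a new
   SImode pseudo,

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   constants that are not legitimate on the target are first forced
   into the constant pool, as below.  */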
emit_move_insn (x, y)

  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)

  return emit_move_insn_1 (x, y);
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

emit_move_insn_1 (x, y)

  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))

      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */

	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));

      return get_last_insn ();

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))

	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);

	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))

	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);

	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)

	  last_insn = emit_move_insn (xpart, ypart);
2017 /* Push a block of length SIZE (perhaps variable)
2018 and return an rtx to address the beginning of the block.
2019 Note that it is not possible for the value returned to be a QUEUED.
2020 The value may be virtual_outgoing_args_rtx.
2022 EXTRA is the number of bytes of padding to push in addition to SIZE.
2023 BELOW nonzero means this padding comes at low addresses;
2024 otherwise, the padding comes at high addresses. */
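/* For example (hypothetical call, for illustration):

     rtx addr = push_block (GEN_INT (32), 0, 0);

   adjusts the stack by 32 bytes and returns an address for the bottom
   of the new block; with a variable SIZE the adjustment is computed
   into a register first, as below.  */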
push_block (size, extra, below)

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);

      rtx temp = copy_to_mode_reg (Pmode, size);

      temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			   temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);

  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
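/* A sketch of a simple use (hypothetical operands, for illustration):
   push a word-sized argument with no partial-register portion and no
   preallocated argument block:

     emit_push_insn (x, SImode, NULL_TREE, NULL_RTX, align,
		     0, NULL_RTX, 0, NULL_RTX, const0_rtx);

   this mirrors the recursive call made for each stack word in the
   partial-register case below.  */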
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     enum machine_mode mode;

  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)

      /* Copy a block into the stack, entirely or partially.  */

      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      xinner = change_address (xinner, BLKmode,
			       plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
	  && GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))

	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
#endif /* PUSH_ROUNDING */
      /* Otherwise make space on the stack and copy the data
	 to the address of that space.  */

      /* Deduct words put into registers from the size we must copy.  */
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size) - used);
	size = expand_binop (GET_MODE (size), sub_optab, size,
			     GEN_INT (used), NULL_RTX, 0,

      /* Get the address of the stack space.
	 In this case, we do not deal with EXTRA separately.
	 A single stack adjust will do.  */

	temp = push_block (size, extra, where_pad == downward);
      else if (GET_CODE (args_so_far) == CONST_INT)
	temp = memory_address (BLKmode,
			       plus_constant (args_addr,
					      skip + INTVAL (args_so_far)));
	temp = memory_address (BLKmode,
			       plus_constant (gen_rtx (PLUS, Pmode,
						       args_addr, args_so_far),

      /* TEMP is the address of the block.  Copy the data there.  */
      if (GET_CODE (size) == CONST_INT
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)

	  move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			  INTVAL (size), align);

      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))

	  rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrhi
	  && GET_CODE (size) == CONST_INT
	  && ((unsigned) INTVAL (size)
	      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))

	  rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrsi

	  rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));
#ifdef HAVE_movstrdi

	  rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				  xinner, size, GEN_INT (align));

#ifndef ACCUMULATE_OUTGOING_ARGS
      /* If the source is referenced relative to the stack pointer,
	 copy it to another register to stabilize it.  We do not need
	 to do this if we know that we won't be changing sp.  */

      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	temp = copy_to_reg (temp);

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the bcopy-arguments right away.  */

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
2323 else if (partial > 0)
2325 /* Scalar partly in registers. */
2327 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2330 /* # words of start of argument
2331 that we must make space for but need not store. */
2332 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2333 int args_offset = INTVAL (args_so_far);
2336 /* Push padding now if padding above and stack grows down,
2337 or if padding below and stack grows up.
2338 But if space already allocated, this has already been done. */
2339 if (extra && args_addr == 0
2340 && where_pad != none && where_pad != stack_direction)
2341 anti_adjust_stack (GEN_INT (extra));
2343 /* If we make space by pushing it, we might as well push
2344 the real data. Otherwise, we can leave OFFSET nonzero
2345 and leave the space uninitialized. */
2349 /* Now NOT_STACK gets the number of words that we don't need to
2350 allocate on the stack. */
2351 not_stack = partial - offset;
2353 /* If the partial register-part of the arg counts in its stack size,
2354 skip the part of stack space corresponding to the registers.
2355 Otherwise, start copying to the beginning of the stack space,
2356 by setting SKIP to 0. */
2357 #ifndef REG_PARM_STACK_SPACE
2363 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2364 x = validize_mem (force_const_mem (mode, x));
2366 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2367 SUBREGs of such registers are not allowed. */
2368 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2369 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2370 x = copy_to_reg (x);
2372 /* Loop over all the words allocated on the stack for this arg. */
2373 /* We can do it by words, because any scalar bigger than a word
2374 has a size a multiple of a word. */
2375 #ifndef PUSH_ARGS_REVERSED
2376 for (i = not_stack; i < size; i++)
2378 for (i = size - 1; i >= not_stack; i--)
2380 if (i >= not_stack + offset)
2381 emit_push_insn (operand_subword_force (x, i, mode),
2382 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2384 GEN_INT (args_offset + ((i - not_stack + skip)
2385 * UNITS_PER_WORD)));
2391 /* Push padding now if padding above and stack grows down,
2392 or if padding below and stack grows up.
2393 But if space already allocated, this has already been done. */
2394 if (extra && args_addr == 0
2395 && where_pad != none && where_pad != stack_direction)
2396 anti_adjust_stack (GEN_INT (extra));
2398 #ifdef PUSH_ROUNDING
2400 addr = gen_push_operand ();
2403 if (GET_CODE (args_so_far) == CONST_INT)
2405 = memory_address (mode,
2406 plus_constant (args_addr, INTVAL (args_so_far)));
2408 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2411 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2415 /* If part should go in registers, copy that part
2416 into the appropriate registers. Do this now, at the end,
2417 since mem-to-mem copies above may do function calls. */
2418 if (partial > 0 && reg != 0)
2419 move_block_to_reg (REGNO (reg), x, partial, mode);
2421 if (extra && args_addr == 0 && where_pad == stack_direction)
2422 anti_adjust_stack (GEN_INT (extra));
2425 /* Expand an assignment that stores the value of FROM into TO.
2426 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2427 (This may contain a QUEUED rtx;
2428 if the value is constant, this rtx is a constant.)
2429 Otherwise, the returned value is NULL_RTX.
2431 SUGGEST_REG is no longer actually used.
2432 It used to mean, copy the value through a register
2433 and return that register, if that is possible.
2434 We now use WANT_VALUE to decide whether to do this. */
2437 expand_assignment (to, from, want_value, suggest_reg)
2442 register rtx to_rtx = 0;
2445 /* Don't crash if the lhs of the assignment was erroneous. */
2447 if (TREE_CODE (to) == ERROR_MARK)
2449 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2450 return want_value ? result : NULL_RTX;
2453 if (output_bytecode)
2455 tree dest_innermost;
2457 bc_expand_expr (from);
2458 bc_emit_instruction (duplicate);
2460 dest_innermost = bc_expand_address (to);
2462 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2463 take care of it here. */
2465 bc_store_memory (TREE_TYPE (to), dest_innermost);
2469 /* Assignment of a structure component needs special treatment
2470 if the structure component's rtx is not simply a MEM.
2471 Assignment of an array element at a constant index, and assignment of
2472 an array element in an unaligned packed structure field, have the same
2475 if (TREE_CODE (to) == COMPONENT_REF
2476 || TREE_CODE (to) == BIT_FIELD_REF
2477 || (TREE_CODE (to) == ARRAY_REF
2478 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2479 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2480 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2482 enum machine_mode mode1;
2492 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2493 &mode1, &unsignedp, &volatilep);
2495 /* If we are going to use store_bit_field and extract_bit_field,
2496 make sure to_rtx will be safe for multiple use. */
2498 if (mode1 == VOIDmode && want_value)
2499 tem = stabilize_reference (tem);
2501 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2502 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2505 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2507 if (GET_CODE (to_rtx) != MEM)
2509 to_rtx = change_address (to_rtx, VOIDmode,
2510 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2511 force_reg (ptr_mode, offset_rtx)));
2512 /* If we have a variable offset, the known alignment
2513 is only that of the innermost structure containing the field.
2514 (Actually, we could sometimes do better by using the
2515 alignment of an element of the innermost array, but no need.) */
2516 if (TREE_CODE (to) == COMPONENT_REF
2517 || TREE_CODE (to) == BIT_FIELD_REF)
2519 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2523 if (GET_CODE (to_rtx) == MEM)
2525 /* When the offset is zero, to_rtx is the address of the
2526 structure we are storing into, and hence may be shared.
2527 We must make a new MEM before setting the volatile bit. */
2529 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2530 MEM_VOLATILE_P (to_rtx) = 1;
2532 #if 0 /* This was turned off because, when a field is volatile
2533 in an object which is not volatile, the object may be in a register,
2534 and then we would abort over here. */
2540 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2542 /* Spurious cast makes HPUX compiler happy. */
2543 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2546 /* Required alignment of containing datum. */
2548 int_size_in_bytes (TREE_TYPE (tem)));
2549 preserve_temp_slots (result);
2553 /* If the value is meaningful, convert RESULT to the proper mode.
2554 Otherwise, return nothing. */
2555 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2556 TYPE_MODE (TREE_TYPE (from)),
2558 TREE_UNSIGNED (TREE_TYPE (to)))
2562 /* If the rhs is a function call and its value is not an aggregate,
2563 call the function before we start to compute the lhs.
2564 This is needed for correct code for cases such as
2565 val = setjmp (buf) on machines where reference to val
2566 requires loading up part of an address in a separate insn.
2568 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2569 a promoted variable where the zero- or sign-extension needs to be done.
2570 Handling this in the normal way is safe because no computation is done
2572 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2573 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2578 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2580 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2582 if (GET_MODE (to_rtx) == BLKmode)
2584 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2585 emit_block_move (to_rtx, value, expr_size (from), align);
2588 emit_move_insn (to_rtx, value);
2589 preserve_temp_slots (to_rtx);
2592 return want_value ? to_rtx : NULL_RTX;
2595 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2596 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2599 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2601 /* Don't move directly into a return register. */
2602 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2607 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2608 emit_move_insn (to_rtx, temp);
2609 preserve_temp_slots (to_rtx);
2612 return want_value ? to_rtx : NULL_RTX;
2615 /* In case we are returning the contents of an object which overlaps
2616 the place the value is being stored, use a safe function when copying
2617 a value through a pointer into a structure value return block. */
2618 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2619 && current_function_returns_struct
2620 && !current_function_returns_pcc_struct)
2625 size = expr_size (from);
2626 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2628 #ifdef TARGET_MEM_FUNCTIONS
2629 emit_library_call (memcpy_libfunc, 0,
2630 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2631 XEXP (from_rtx, 0), Pmode,
2632 convert_to_mode (TYPE_MODE (sizetype),
2633 size, TREE_UNSIGNED (sizetype)),
2634 TYPE_MODE (sizetype));
2636 emit_library_call (bcopy_libfunc, 0,
2637 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2638 XEXP (to_rtx, 0), Pmode,
2639 convert_to_mode (TYPE_MODE (sizetype),
2640 size, TREE_UNSIGNED (sizetype)),
2641 TYPE_MODE (sizetype));
2644 preserve_temp_slots (to_rtx);
2647 return want_value ? to_rtx : NULL_RTX;
2650 /* Compute FROM and store the value in the rtx we got. */
2653 result = store_expr (from, to_rtx, want_value);
2654 preserve_temp_slots (result);
2657 return want_value ? result : NULL_RTX;
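/* Illustrative sketch, not compiled: a minimal caller of
   expand_assignment for a statement "x = y;" whose value is discarded.
   TO and FROM stand for the front end's trees for x and y; the names
   are hypothetical.  */
#if 0
{
  /* WANT_VALUE is 0, so NULL_RTX comes back.  SUGGEST_REG is no
     longer used (see the comment above expand_assignment) and may
     be passed as 0.  */
  expand_assignment (to, from, 0, 0);
}
#endif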
2660 /* Generate code for computing expression EXP,
2661 and storing the value into TARGET.
2662 TARGET may contain a QUEUED rtx.
2664 If WANT_VALUE is nonzero, return a copy of the value
2665 not in TARGET, so that we can be sure to use the proper
2666 value in a containing expression even if TARGET has something
2667 else stored in it. If possible, we copy the value through a pseudo
2668 and return that pseudo. Or, if the value is constant, we try to
2669 return the constant. In some cases, we return a pseudo
2670 copied *from* TARGET.
2672 If the mode is BLKmode then we may return TARGET itself.
2673 It turns out that in BLKmode it doesn't cause a problem,
2674 because C has no operators that could combine two different
2675 assignments into the same BLKmode object with different values
2676 with no sequence point. Will other languages need this to
2679 If WANT_VALUE is 0, we return NULL, to make sure
2680 to catch quickly any cases where the caller uses the value
2681 and fails to set WANT_VALUE. */
2684 store_expr (exp, target, want_value)
2686 register rtx target;
2690 int dont_return_target = 0;
2692 if (TREE_CODE (exp) == COMPOUND_EXPR)
2694 /* Perform first part of compound expression, then assign from second
2696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2698 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2700 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2702 /* For a conditional expression, get a safe form of the target. Then
2703 test the condition, doing the appropriate assignment on either
2704 side. This avoids the creation of unnecessary temporaries.
2705 For non-BLKmode, it is more efficient not to do this. */
2707 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2710 target = protect_from_queue (target, 1);
2712 do_pending_stack_adjust ();
2714 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2715 store_expr (TREE_OPERAND (exp, 1), target, 0);
2717 emit_jump_insn (gen_jump (lab2));
2720 store_expr (TREE_OPERAND (exp, 2), target, 0);
2724 return want_value ? target : NULL_RTX;
2726 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2727 && GET_MODE (target) != BLKmode)
2728 /* If target is in memory and caller wants value in a register instead,
2729 arrange that. Pass TARGET as target for expand_expr so that,
2730 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2731 We know expand_expr will not use the target in that case.
2732 Don't do this if TARGET is volatile because we are supposed
2733 to write it and then read it. */
2735 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2736 GET_MODE (target), 0);
2737 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2738 temp = copy_to_reg (temp);
2739 dont_return_target = 1;
2741 else if (queued_subexp_p (target))
2742 /* If target contains a postincrement, let's not risk
2743 using it as the place to generate the rhs. */
2745 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2747 /* Expand EXP into a new pseudo. */
2748 temp = gen_reg_rtx (GET_MODE (target));
2749 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2752 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2754 /* If target is volatile, ANSI requires accessing the value
2755 *from* the target, if it is accessed. So make that happen.
2756 In no case return the target itself. */
2757 if (! MEM_VOLATILE_P (target) && want_value)
2758 dont_return_target = 1;
2760 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2761 /* If this is a scalar in a register that is stored in a wider mode
2762 than the declared mode, compute the result into its declared mode
2763 and then convert to the wider mode. Our value is the computed
2766 /* If we don't want a value, we can do the conversion inside EXP,
2767 which will often result in some optimizations. Do the conversion
2768 in two steps: first change the signedness, if needed, then
2772 if (TREE_UNSIGNED (TREE_TYPE (exp))
2773 != SUBREG_PROMOTED_UNSIGNED_P (target))
2776 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2780 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2781 SUBREG_PROMOTED_UNSIGNED_P (target)),
2785 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2787 /* If TEMP is a volatile MEM and we want a result value, make
2788 the access now so it gets done only once. Likewise if
2789 it contains TARGET. */
2790 if (GET_CODE (temp) == MEM && want_value
2791 && (MEM_VOLATILE_P (temp)
2792 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
2793 temp = copy_to_reg (temp);
2795 /* If TEMP is a VOIDmode constant, use convert_modes to make
2796 sure that we properly convert it. */
2797 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2798 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2799 TYPE_MODE (TREE_TYPE (exp)), temp,
2800 SUBREG_PROMOTED_UNSIGNED_P (target));
2802 convert_move (SUBREG_REG (target), temp,
2803 SUBREG_PROMOTED_UNSIGNED_P (target));
2804 return want_value ? temp : NULL_RTX;
2808 temp = expand_expr (exp, target, GET_MODE (target), 0);
2809 /* Return TARGET if it's a specified hardware register.
2810 If TARGET is a volatile mem ref, either return TARGET
2811 or return a reg copied *from* TARGET; ANSI requires this.
2813 Otherwise, if TEMP is not TARGET, return TEMP
2814 if it is constant (for efficiency),
2815 or if we really want the correct value. */
2816 if (!(target && GET_CODE (target) == REG
2817 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2818 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2820 && (CONSTANT_P (temp) || want_value))
2821 dont_return_target = 1;
2824 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2825 the same as that of TARGET, adjust the constant. This is needed, for
2826 example, in case it is a CONST_DOUBLE and we want only a word-sized
2828 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2829 && TREE_CODE (exp) != ERROR_MARK
2830 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2831 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2832 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2834 /* If value was not generated in the target, store it there.
2835 Convert the value to TARGET's type first if necessary. */
2837 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2839 target = protect_from_queue (target, 1);
2840 if (GET_MODE (temp) != GET_MODE (target)
2841 && GET_MODE (temp) != VOIDmode)
2843 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2844 if (dont_return_target)
2846 /* In this case, we will return TEMP,
2847 so make sure it has the proper mode.
2848 But don't forget to store the value into TARGET. */
2849 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2850 emit_move_insn (target, temp);
2853 convert_move (target, temp, unsignedp);
2856 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2858 /* Handle copying a string constant into an array.
2859 The string constant may be shorter than the array.
2860 So copy just the string's actual length, and clear the rest. */
2864 /* Get the size of the data type of the string,
2865 which is actually the size of the target. */
2866 size = expr_size (exp);
2867 if (GET_CODE (size) == CONST_INT
2868 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2869 emit_block_move (target, temp, size,
2870 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2873 /* Compute the size of the data to copy from the string. */
2875 = size_binop (MIN_EXPR,
2876 make_tree (sizetype, size),
2878 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2879 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2883 /* Copy that much. */
2884 emit_block_move (target, temp, copy_size_rtx,
2885 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2887 /* Figure out how much is left in TARGET that we have to clear.
2888 Do all calculations in ptr_mode. */
2890 addr = XEXP (target, 0);
2891 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2893 if (GET_CODE (copy_size_rtx) == CONST_INT)
2895 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2896 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2900 addr = force_reg (ptr_mode, addr);
2901 addr = expand_binop (ptr_mode, add_optab, addr,
2902 copy_size_rtx, NULL_RTX, 0,
2905 size = expand_binop (ptr_mode, sub_optab, size,
2906 copy_size_rtx, NULL_RTX, 0,
2909 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2910 GET_MODE (size), 0, 0);
2911 label = gen_label_rtx ();
2912 emit_jump_insn (gen_blt (label));
2915 if (size != const0_rtx)
2917 #ifdef TARGET_MEM_FUNCTIONS
2918 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2919 Pmode, const0_rtx, Pmode, size, ptr_mode);
2921 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2922 addr, Pmode, size, ptr_mode);
2930 else if (GET_MODE (temp) == BLKmode)
2931 emit_block_move (target, temp, expr_size (exp),
2932 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2934 emit_move_insn (target, temp);
2937 /* If we don't want a value, return NULL_RTX. */
2941 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2942 ??? The latter test doesn't seem to make sense. */
2943 else if (dont_return_target && GET_CODE (temp) != MEM)
2946 /* Otherwise copy TARGET into a pseudo, unless it is BLKmode or a hard register; in those cases TARGET itself is returned. */
2947 else if (want_value && GET_MODE (target) != BLKmode
2948 && ! (GET_CODE (target) == REG
2949 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2950 return copy_to_reg (target);
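/* Illustrative sketch, not compiled: the run-time effect of the
   string-constant case in store_expr above, expressed with the C
   library.  The function name and parameters are hypothetical; the
   point is "copy min (string length, target size) bytes, then clear
   whatever is left", which is what the emitted block move and the
   memset/bzero library call do.  */
#if 0
#include <string.h>

static void
example_store_string (target, target_size, str, str_len)
     char *target;
     int target_size;
     const char *str;
     int str_len;
{
  int copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);	/* the emit_block_move above */
  if (copy < target_size)	/* the "anything left to clear?" test */
    memset (target + copy, 0, target_size - copy);
}
#endif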
2956 /* Store the value of constructor EXP into the rtx TARGET.
2957 TARGET is either a REG or a MEM. */
2960 store_constructor (exp, target)
2964 tree type = TREE_TYPE (exp);
2966 /* We know our target cannot conflict, since safe_from_p has been called. */
2968 /* Don't try copying piece by piece into a hard register
2969 since that is vulnerable to being clobbered by EXP.
2970 Instead, construct in a pseudo register and then copy it all. */
2971 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2973 rtx temp = gen_reg_rtx (GET_MODE (target));
2974 store_constructor (exp, temp);
2975 emit_move_insn (target, temp);
2980 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2981 || TREE_CODE (type) == QUAL_UNION_TYPE)
2985 /* Inform later passes that the whole union value is dead. */
2986 if (TREE_CODE (type) == UNION_TYPE
2987 || TREE_CODE (type) == QUAL_UNION_TYPE)
2988 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2990 /* If we are building a static constructor into a register,
2991 set the initial value as zero so we can fold the value into
2992 a constant. But if more than one register is involved,
2993 this probably loses. */
2994 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
2995 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
2996 emit_move_insn (target, const0_rtx);
2998 /* If the constructor has fewer fields than the structure,
2999 clear the whole structure first. */
3000 else if (list_length (CONSTRUCTOR_ELTS (exp))
3001 != list_length (TYPE_FIELDS (type)))
3002 clear_storage (target, int_size_in_bytes (type));
3004 /* Inform later passes that the old value is dead. */
3005 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3007 /* Store each element of the constructor into
3008 the corresponding field of TARGET. */
3010 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3012 register tree field = TREE_PURPOSE (elt);
3013 register enum machine_mode mode;
3017 tree pos, constant = 0, offset = 0;
3018 rtx to_rtx = target;
3020 /* Just ignore missing fields.
3021 We cleared the whole structure, above,
3022 if any fields are missing. */
3026 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3027 unsignedp = TREE_UNSIGNED (field);
3028 mode = DECL_MODE (field);
3029 if (DECL_BIT_FIELD (field))
3032 pos = DECL_FIELD_BITPOS (field);
3033 if (TREE_CODE (pos) == INTEGER_CST)
3035 else if (TREE_CODE (pos) == PLUS_EXPR
3036 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3037 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3042 bitpos = TREE_INT_CST_LOW (constant);
3048 if (contains_placeholder_p (offset))
3049 offset = build (WITH_RECORD_EXPR, sizetype,
3052 offset = size_binop (FLOOR_DIV_EXPR, offset,
3053 size_int (BITS_PER_UNIT));
3055 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3056 if (GET_CODE (to_rtx) != MEM)
3060 = change_address (to_rtx, VOIDmode,
3061 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3062 force_reg (ptr_mode, offset_rtx)));
3065 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3066 /* The alignment of TARGET is
3067 at least what its type requires. */
3069 TYPE_ALIGN (type) / BITS_PER_UNIT,
3070 int_size_in_bytes (type));
3073 else if (TREE_CODE (type) == ARRAY_TYPE)
3077 tree domain = TYPE_DOMAIN (type);
3078 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3079 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3080 tree elttype = TREE_TYPE (type);
3082 /* If the constructor has fewer fields than the structure,
3083 clear the whole structure first. Similarly if this is a
3084 static constructor of a non-BLKmode object. */
3086 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3087 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3088 clear_storage (target, int_size_in_bytes (type));
3090 /* Inform later passes that the old value is dead. */
3091 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3093 /* Store each element of the constructor into
3094 the corresponding element of TARGET, determined
3095 by counting the elements. */
3096 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3098 elt = TREE_CHAIN (elt), i++)
3100 register enum machine_mode mode;
3104 tree index = TREE_PURPOSE (elt);
3105 rtx xtarget = target;
3107 mode = TYPE_MODE (elttype);
3108 bitsize = GET_MODE_BITSIZE (mode);
3109 unsignedp = TREE_UNSIGNED (elttype);
3111 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3112 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3114 rtx pos_rtx, addr, xtarget;
3118 index = size_int (i);
3120 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3121 size_int (BITS_PER_UNIT));
3122 position = size_binop (MULT_EXPR, index, position);
3123 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3124 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3125 xtarget = change_address (target, mode, addr);
3126 store_expr (TREE_VALUE (elt), xtarget, 0);
3131 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3132 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3134 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3136 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3137 /* The alignment of TARGET is
3138 at least what its type requires. */
3140 TYPE_ALIGN (type) / BITS_PER_UNIT,
3141 int_size_in_bytes (type));
3145 /* Set constructor assignments. */
3146 else if (TREE_CODE (type) == SET_TYPE)
3149 rtx xtarget = XEXP (target, 0);
3150 int set_word_size = TYPE_ALIGN (type);
3151 int nbytes = int_size_in_bytes (type);
3152 tree non_const_elements;
3153 int need_to_clear_first;
3154 tree domain = TYPE_DOMAIN (type);
3155 tree domain_min, domain_max, bitlength;
3157 /* The default implementation strategy is to extract the constant
3158 parts of the constructor, use that to initialize the target,
3159 and then "or" in whatever non-constant ranges we need in addition.
3161 If a large set is all zero or all ones, it is
3162 probably better to set it using memset (if available) or bzero.
3163 Also, if a large set has just a single range, it may be
3164 better to first clear the whole set (using bzero/memset)
3165 and then set the bits we want. */
3167 /* Check for all zeros. */
3168 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3170 clear_storage (target, nbytes);
3177 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3178 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3179 bitlength = size_binop (PLUS_EXPR,
3180 size_binop (MINUS_EXPR, domain_max, domain_min),
3183 /* Check for a range of all ones, or at most a single range.
3184 (This optimization is only a win for big sets.) */
3185 if (GET_MODE (target) == BLKmode && nbytes > 16
3186 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3188 need_to_clear_first = 1;
3189 non_const_elements = CONSTRUCTOR_ELTS (exp);
3193 int nbits = nbytes * BITS_PER_UNIT;
3194 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3195 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3196 char *bit_buffer = (char *) alloca (nbits);
3197 HOST_WIDE_INT word = 0;
3200 int offset = 0; /* In bytes from beginning of set. */
3201 non_const_elements = get_set_constructor_bits (exp,
3205 if (bit_buffer[ibit])
3207 if (BYTES_BIG_ENDIAN)
3208 word |= (1 << (set_word_size - 1 - bit_pos));
3210 word |= 1 << bit_pos;
3213 if (bit_pos >= set_word_size || ibit == nbits)
3215 rtx datum = GEN_INT (word);
3217 /* The assumption here is that it is safe to use XEXP if
3218 the set is multi-word, but not if it's single-word. */
3219 if (GET_CODE (target) == MEM)
3220 to_rtx = change_address (target, mode,
3221 plus_constant (XEXP (target, 0),
3223 else if (offset == 0)
3227 emit_move_insn (to_rtx, datum);
3232 offset += set_word_size / BITS_PER_UNIT;
3235 need_to_clear_first = 0;
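/* Illustrative sketch, not compiled: the word-packing loop above,
   restated as standalone C.  OUT, WORD_BITS and BIG_ENDIAN are
   hypothetical stand-ins for the emitted words, set_word_size and
   BYTES_BIG_ENDIAN; WORD_BITS is assumed not to exceed the width of
   unsigned long.  */
#if 0
static void
example_pack_bits (bit_buffer, nbits, out, word_bits, big_endian)
     char *bit_buffer;
     int nbits;
     unsigned long *out;
     int word_bits, big_endian;
{
  unsigned long word = 0;
  int bit_pos = 0, nwords = 0, i;

  for (i = 0; i < nbits; i++)
    {
      if (bit_buffer[i])
	{
	  if (big_endian)
	    word |= 1UL << (word_bits - 1 - bit_pos);	/* bit 0 at MSB end */
	  else
	    word |= 1UL << bit_pos;			/* bit 0 at LSB end */
	}
      if (++bit_pos == word_bits)
	{
	  out[nwords++] = word;		/* flush a full word */
	  word = 0, bit_pos = 0;
	}
    }
  if (bit_pos != 0)
    out[nwords] = word;			/* flush the final partial word */
}
#endif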
3238 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3240 /* start of range of element or NULL */
3241 tree startbit = TREE_PURPOSE (elt);
3242 /* end of range of element, or element value */
3243 tree endbit = TREE_VALUE (elt);
3244 HOST_WIDE_INT startb, endb;
3245 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3247 bitlength_rtx = expand_expr (bitlength,
3248 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3250 /* handle non-range tuple element like [ expr ] */
3251 if (startbit == NULL_TREE)
3253 startbit = save_expr (endbit);
3256 startbit = convert (sizetype, startbit);
3257 endbit = convert (sizetype, endbit);
3258 if (! integer_zerop (domain_min))
3260 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3261 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3263 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3264 EXPAND_CONST_ADDRESS);
3265 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3266 EXPAND_CONST_ADDRESS);
3270 targetx = assign_stack_temp (GET_MODE (target),
3271 GET_MODE_SIZE (GET_MODE (target)),
3273 emit_move_insn (targetx, target);
3275 else if (GET_CODE (target) == MEM)
3280 #ifdef TARGET_MEM_FUNCTIONS
3281 /* Optimization: If startbit and endbit are
3282 constants divisible by BITS_PER_UNIT,
3283 call memset instead. */
3284 if (TREE_CODE (startbit) == INTEGER_CST
3285 && TREE_CODE (endbit) == INTEGER_CST
3286 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3287 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3290 if (need_to_clear_first
3291 && endb - startb != nbytes * BITS_PER_UNIT)
3292 clear_storage (target, nbytes);
3293 need_to_clear_first = 0;
3294 emit_library_call (memset_libfunc, 0,
3296 plus_constant (XEXP (targetx, 0), startb),
3299 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3305 if (need_to_clear_first)
3307 clear_storage (target, nbytes);
3308 need_to_clear_first = 0;
3310 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3311 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3312 bitlength_rtx, TYPE_MODE (sizetype),
3313 startbit_rtx, TYPE_MODE (sizetype),
3314 endbit_rtx, TYPE_MODE (sizetype));
3317 emit_move_insn (target, targetx);
3325 /* Store the value of EXP (an expression tree)
3326 into a subfield of TARGET which has mode MODE and occupies
3327 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3328 If MODE is VOIDmode, it means that we are storing into a bit-field.
3330 If VALUE_MODE is VOIDmode, return nothing in particular.
3331 UNSIGNEDP is not used in this case.
3333 Otherwise, return an rtx for the value stored. This rtx
3334 has mode VALUE_MODE if that is convenient to do.
3335 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3337 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3338 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3341 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3342 unsignedp, align, total_size)
3344 int bitsize, bitpos;
3345 enum machine_mode mode;
3347 enum machine_mode value_mode;
3352 HOST_WIDE_INT width_mask = 0;
3354 if (bitsize < HOST_BITS_PER_WIDE_INT)
3355 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3357 /* If we are storing into an unaligned field of an aligned union that is
3358 in a register, we may have the mode of TARGET being an integer mode but
3359 MODE == BLKmode. In that case, get an aligned object whose size and
3360 alignment are the same as TARGET and store TARGET into it (we can avoid
3361 the store if the field being stored is the entire width of TARGET). Then
3362 call ourselves recursively to store the field into a BLKmode version of
3363 that object. Finally, load from the object into TARGET. This is not
3364 very efficient in general, but should only be slightly more expensive
3365 than the otherwise-required unaligned accesses. Perhaps this can be
3366 cleaned up later. */
3369 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3371 rtx object = assign_stack_temp (GET_MODE (target),
3372 GET_MODE_SIZE (GET_MODE (target)), 0);
3373 rtx blk_object = copy_rtx (object);
3375 MEM_IN_STRUCT_P (object) = 1;
3376 MEM_IN_STRUCT_P (blk_object) = 1;
3377 PUT_MODE (blk_object, BLKmode);
3379 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3380 emit_move_insn (object, target);
3382 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3385 /* Even though we aren't returning target, we need to
3386 give it the updated value. */
3387 emit_move_insn (target, object);
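/* Illustrative sketch, not compiled: the spill/modify/reload dance
   above at the level of ordinary C, for a byte-sized field.  The
   function and its parameters are hypothetical and <string.h> is
   assumed for memcpy.  (The compiler skips the first copy when the
   field covers the entire width of TARGET.)  */
#if 0
static long
example_store_unaligned_field (reg_value, byte_off, new_byte)
     long reg_value;
     int byte_off;
     int new_byte;
{
  unsigned char slot[sizeof (long)];	    /* assign_stack_temp */

  memcpy (slot, &reg_value, sizeof slot);   /* emit_move_insn (object, target) */
  slot[byte_off] = new_byte;		    /* the recursive store_field */
  memcpy (&reg_value, slot, sizeof slot);   /* emit_move_insn (target, object) */
  return reg_value;
}
#endif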
3392 /* If the structure is in a register or if the component
3393 is a bit field, we cannot use addressing to access it.
3394 Use bit-field techniques or SUBREG to store in it. */
3396 if (mode == VOIDmode
3397 || (mode != BLKmode && ! direct_store[(int) mode])
3398 || GET_CODE (target) == REG
3399 || GET_CODE (target) == SUBREG
3400 /* If the field isn't aligned enough to store as an ordinary memref,
3401 store it as a bit field. */
3402 || (SLOW_UNALIGNED_ACCESS
3403 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3404 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3406 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3408 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3410 if (mode != VOIDmode && mode != BLKmode
3411 && mode != TYPE_MODE (TREE_TYPE (exp)))
3412 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3414 /* Store the value in the bitfield. */
3415 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3416 if (value_mode != VOIDmode)
3418 /* The caller wants an rtx for the value. */
3419 /* If possible, avoid refetching from the bitfield itself. */
3421 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3424 enum machine_mode tmode;
3427 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3428 tmode = GET_MODE (temp);
3429 if (tmode == VOIDmode)
3431 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3432 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3433 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
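#if 0
/* Illustrative sketch, not compiled: at run time the two shifts above
   amount to this classic C idiom for sign-extending a BITSIZE-bit
   field already sitting in the low bits of a word.  A 32-bit int and
   an arithmetic right shift are assumed.  */
static int
example_sign_extend (word, bitsize)
     int word, bitsize;
{
  int count = 32 - bitsize;
  return (word << count) >> count;
}
#endif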
3435 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3436 NULL_RTX, value_mode, 0, align,
3443 rtx addr = XEXP (target, 0);
3446 /* If a value is wanted, it must be the lhs;
3447 so make the address stable for multiple use. */
3449 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3450 && ! CONSTANT_ADDRESS_P (addr)
3451 /* A frame-pointer reference is already stable. */
3452 && ! (GET_CODE (addr) == PLUS
3453 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3454 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3455 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3456 addr = copy_to_reg (addr);
3458 /* Now build a reference to just the desired component. */
3460 to_rtx = change_address (target, mode,
3461 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3462 MEM_IN_STRUCT_P (to_rtx) = 1;
3464 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3468 /* Return true if any object containing the innermost array is an unaligned
3469 packed structure field. */
3472 get_inner_unaligned_p (exp)
3475 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3479 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3481 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3485 else if (TREE_CODE (exp) != ARRAY_REF
3486 && TREE_CODE (exp) != NON_LVALUE_EXPR
3487 && ! ((TREE_CODE (exp) == NOP_EXPR
3488 || TREE_CODE (exp) == CONVERT_EXPR)
3489 && (TYPE_MODE (TREE_TYPE (exp))
3490 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3493 exp = TREE_OPERAND (exp, 0);
3499 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3500 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3501 ARRAY_REFs and find the ultimate containing object, which we return.
3503 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3504 bit position, and *PUNSIGNEDP to the signedness of the field.
3505 If the position of the field is variable, we store a tree
3506 giving the variable offset (in units) in *POFFSET.
3507 This offset is in addition to the bit position.
3508 If the position is not variable, we store 0 in *POFFSET.
3510 If any of the extraction expressions is volatile,
3511 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3513 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3514 is a mode that can be used to access the field. In that case, *PBITSIZE
3517 If the field describes a variable-sized object, *PMODE is set to
3518 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3519 this case, but the address of the object can be found. */
3522 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3523 punsignedp, pvolatilep)
3528 enum machine_mode *pmode;
3532 tree orig_exp = exp;
3534 enum machine_mode mode = VOIDmode;
3535 tree offset = integer_zero_node;
3537 if (TREE_CODE (exp) == COMPONENT_REF)
3539 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3540 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3541 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3542 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3544 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3546 size_tree = TREE_OPERAND (exp, 1);
3547 *punsignedp = TREE_UNSIGNED (exp);
3551 mode = TYPE_MODE (TREE_TYPE (exp));
3552 *pbitsize = GET_MODE_BITSIZE (mode);
3553 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3558 if (TREE_CODE (size_tree) != INTEGER_CST)
3559 mode = BLKmode, *pbitsize = -1;
3561 *pbitsize = TREE_INT_CST_LOW (size_tree);
3564 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3565 and find the ultimate containing object. */
3571 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3573 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3574 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3575 : TREE_OPERAND (exp, 2));
3576 tree constant = integer_zero_node, var = pos;
3578 /* If this field hasn't been filled in yet, don't go
3579 past it. This should only happen when folding expressions
3580 made during type construction. */
3584 /* Assume here that the offset is a multiple of a unit.
3585 If not, there should be an explicitly added constant. */
3586 if (TREE_CODE (pos) == PLUS_EXPR
3587 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3588 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3589 else if (TREE_CODE (pos) == INTEGER_CST)
3590 constant = pos, var = integer_zero_node;
3592 *pbitpos += TREE_INT_CST_LOW (constant);
3595 offset = size_binop (PLUS_EXPR, offset,
3596 size_binop (EXACT_DIV_EXPR, var,
3597 size_int (BITS_PER_UNIT)));
3600 else if (TREE_CODE (exp) == ARRAY_REF)
3602 /* This code is based on the code in case ARRAY_REF in expand_expr
3603 below. We assume here that the size of an array element is
3604 always an integral multiple of BITS_PER_UNIT. */
3606 tree index = TREE_OPERAND (exp, 1);
3607 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3609 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3610 tree index_type = TREE_TYPE (index);
3612 if (! integer_zerop (low_bound))
3613 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3615 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3617 index = convert (type_for_size (POINTER_SIZE, 0), index);
3618 index_type = TREE_TYPE (index);
3621 index = fold (build (MULT_EXPR, index_type, index,
3622 TYPE_SIZE (TREE_TYPE (exp))));
3624 if (TREE_CODE (index) == INTEGER_CST
3625 && TREE_INT_CST_HIGH (index) == 0)
3626 *pbitpos += TREE_INT_CST_LOW (index);
3628 offset = size_binop (PLUS_EXPR, offset,
3629 size_binop (FLOOR_DIV_EXPR, index,
3630 size_int (BITS_PER_UNIT)));
3632 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3633 && ! ((TREE_CODE (exp) == NOP_EXPR
3634 || TREE_CODE (exp) == CONVERT_EXPR)
3635 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3636 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3638 && (TYPE_MODE (TREE_TYPE (exp))
3639 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3642 /* If any reference in the chain is volatile, the effect is volatile. */
3643 if (TREE_THIS_VOLATILE (exp))
3645 exp = TREE_OPERAND (exp, 0);
3648 /* If this was a bit-field, see if there is a mode that allows direct
3649 access in case EXP is in memory. */
3650 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3652 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3653 if (mode == BLKmode)
3657 if (integer_zerop (offset))
3660 if (offset != 0 && contains_placeholder_p (offset))
3661 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
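/* Illustrative sketch, not compiled: a typical caller of
   get_inner_reference, mirroring the call in expand_assignment above.
   Note that VOLATILEP should be cleared by the caller, since the
   function only ever sets it.  */
#if 0
{
  int bitsize, bitpos, unsignedp;
  int volatilep = 0;
  tree offset;
  enum machine_mode mode1;
  tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				  &mode1, &unsignedp, &volatilep);

  /* TEM is the ultimate containing object; the field lives BITPOS
     bits (plus OFFSET units, when OFFSET is nonzero) into it and is
     BITSIZE bits wide.  */
}
#endif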
3668 /* Given an rtx VALUE that may contain additions and multiplications,
3669 return an equivalent value that just refers to a register or memory.
3670 This is done by generating instructions to perform the arithmetic
3671 and returning a pseudo-register containing the value.
3673 The returned value may be a REG, SUBREG, MEM or constant. */
3676 force_operand (value, target)
3679 register optab binoptab = 0;
3680 /* Use a temporary to force order of execution of calls to
3684 /* Use subtarget as the target for operand 0 of a binary operation. */
3685 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3687 if (GET_CODE (value) == PLUS)
3688 binoptab = add_optab;
3689 else if (GET_CODE (value) == MINUS)
3690 binoptab = sub_optab;
3691 else if (GET_CODE (value) == MULT)
3693 op2 = XEXP (value, 1);
3694 if (!CONSTANT_P (op2)
3695 && !(GET_CODE (op2) == REG && op2 != subtarget))
3697 tmp = force_operand (XEXP (value, 0), subtarget);
3698 return expand_mult (GET_MODE (value), tmp,
3699 force_operand (op2, NULL_RTX),
3705 op2 = XEXP (value, 1);
3706 if (!CONSTANT_P (op2)
3707 && !(GET_CODE (op2) == REG && op2 != subtarget))
3709 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3711 binoptab = add_optab;
3712 op2 = negate_rtx (GET_MODE (value), op2);
3715 /* Check for an addition with OP2 a constant integer and our first
3716 operand a PLUS of a virtual register and something else. In that
3717 case, we want to emit the sum of the virtual register and the
3718 constant first and then add the other value. This allows virtual
3719 register instantiation to simply modify the constant rather than
3720 creating another one around this addition. */
3721 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3722 && GET_CODE (XEXP (value, 0)) == PLUS
3723 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3724 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3725 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3727 rtx temp = expand_binop (GET_MODE (value), binoptab,
3728 XEXP (XEXP (value, 0), 0), op2,
3729 subtarget, 0, OPTAB_LIB_WIDEN);
3730 return expand_binop (GET_MODE (value), binoptab, temp,
3731 force_operand (XEXP (XEXP (value, 0), 1), 0),
3732 target, 0, OPTAB_LIB_WIDEN);
3735 tmp = force_operand (XEXP (value, 0), subtarget);
3736 return expand_binop (GET_MODE (value), binoptab, tmp,
3737 force_operand (op2, NULL_RTX),
3738 target, 0, OPTAB_LIB_WIDEN);
3739 /* We give UNSIGNEDP = 0 to expand_binop
3740 because the only operations we are expanding here are signed ones. */
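/* Illustrative sketch, not compiled: a typical use of force_operand.
   VALUE might be an address such as
   (plus (mult (reg) (const_int 4)) (const_int 8)); the arithmetic is
   emitted as insns and the result comes back in a form a recognizer
   will accept.  */
#if 0
{
  rtx tmp = force_operand (value, NULL_RTX);
  /* TMP is now a REG, SUBREG, MEM or constant, per the comment
     above.  */
}
#endif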
3745 /* Subroutine of expand_expr:
3746 save the non-copied parts (LIST) of an expr (LHS), and return a list
3747 which can restore these values to their previous values,
3748 should something modify their storage. */
3751 save_noncopied_parts (lhs, list)
3758 for (tail = list; tail; tail = TREE_CHAIN (tail))
3759 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3760 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3763 tree part = TREE_VALUE (tail);
3764 tree part_type = TREE_TYPE (part);
3765 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3766 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3767 int_size_in_bytes (part_type), 0);
3768 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3769 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3770 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3771 parts = tree_cons (to_be_saved,
3772 build (RTL_EXPR, part_type, NULL_TREE,
3775 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3780 /* Subroutine of expand_expr:
3781 record the non-copied parts (LIST) of an expr (LHS), and return a list
3782 which specifies the initial values of these parts. */
3785 init_noncopied_parts (lhs, list)
3792 for (tail = list; tail; tail = TREE_CHAIN (tail))
3793 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3794 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3797 tree part = TREE_VALUE (tail);
3798 tree part_type = TREE_TYPE (part);
3799 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3800 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3805 /* Subroutine of expand_expr: return nonzero iff there is no way that
3806 EXP can reference X, which is being modified. */
3809 safe_from_p (x, exp)
3817 /* If EXP has varying size, we MUST use a target since we currently
3818 have no way of allocating temporaries of variable size. So we
3819 assume here that something at a higher level has prevented a
3820 clash. This is somewhat bogus, but the best we can do. Only
3821 do this when X is BLKmode. */
3822 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3823 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
3824 && GET_MODE (x) == BLKmode))
3827 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3828 find the underlying pseudo. */
3829 if (GET_CODE (x) == SUBREG)
3832 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3836 /* If X is a location in the outgoing argument area, it is always safe. */
3837 if (GET_CODE (x) == MEM
3838 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3839 || (GET_CODE (XEXP (x, 0)) == PLUS
3840 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3843 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3846 exp_rtl = DECL_RTL (exp);
3853 if (TREE_CODE (exp) == TREE_LIST)
3854 return ((TREE_VALUE (exp) == 0
3855 || safe_from_p (x, TREE_VALUE (exp)))
3856 && (TREE_CHAIN (exp) == 0
3857 || safe_from_p (x, TREE_CHAIN (exp))));
3862 return safe_from_p (x, TREE_OPERAND (exp, 0));
3866 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3867 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3871 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3872 the expression. If it is set, we conflict iff we are that rtx or
3873 both are in memory. Otherwise, we check all operands of the
3874 expression recursively. */
3876 switch (TREE_CODE (exp))
3879 return (staticp (TREE_OPERAND (exp, 0))
3880 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3883 if (GET_CODE (x) == MEM)
3888 exp_rtl = CALL_EXPR_RTL (exp);
3891 /* Assume that the call will clobber all hard registers and
3893 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3894 || GET_CODE (x) == MEM)
3901 exp_rtl = RTL_EXPR_RTL (exp);
3903 /* We don't know what this can modify. */
3908 case WITH_CLEANUP_EXPR:
3909 exp_rtl = RTL_EXPR_RTL (exp);
3912 case CLEANUP_POINT_EXPR:
3913 return safe_from_p (x, TREE_OPERAND (exp, 0));
3916 exp_rtl = SAVE_EXPR_RTL (exp);
3920 /* The only operand we look at is operand 1. The rest aren't
3921 part of the expression. */
3922 return safe_from_p (x, TREE_OPERAND (exp, 1));
3924 case METHOD_CALL_EXPR:
3925 /* This takes an rtx argument, but shouldn't appear here. */
3929 /* If we have an rtx, we do not need to scan our operands. */
3933 nops = tree_code_length[(int) TREE_CODE (exp)];
3934 for (i = 0; i < nops; i++)
3935 if (TREE_OPERAND (exp, i) != 0
3936 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3940 /* If we have an rtl, find any enclosed object. Then see if we conflict
3944 if (GET_CODE (exp_rtl) == SUBREG)
3946 exp_rtl = SUBREG_REG (exp_rtl);
3947 if (GET_CODE (exp_rtl) == REG
3948 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3952 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3953 are memory and EXP is not readonly. */
3954 return ! (rtx_equal_p (x, exp_rtl)
3955 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3956 && ! TREE_READONLY (exp)));
3959 /* If we reach here, it is safe. */
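/* Illustrative sketch, not compiled: how a caller can use safe_from_p
   before constructing a value directly into TARGET (compare the
   "safe_from_p has been called" remark in store_constructor above).  */
#if 0
if (target == 0 || ! safe_from_p (target, exp))
  target = gen_reg_rtx (mode);	/* fall back to a fresh pseudo */
#endif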
3963 /* Subroutine of expand_expr: return nonzero iff EXP is an
3964 expression whose type is statically determinable. */
3970 if (TREE_CODE (exp) == PARM_DECL
3971 || TREE_CODE (exp) == VAR_DECL
3972 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3973 || TREE_CODE (exp) == COMPONENT_REF
3974 || TREE_CODE (exp) == ARRAY_REF)
3979 /* expand_expr: generate code for computing expression EXP.
3980 An rtx for the computed value is returned. The value is never null.
3981 In the case of a void EXP, const0_rtx is returned.
3983 The value may be stored in TARGET if TARGET is nonzero.
3984 TARGET is just a suggestion; callers must assume that
3985 the rtx returned may not be the same as TARGET.
3987 If TARGET is CONST0_RTX, it means that the value will be ignored.
3989 If TMODE is not VOIDmode, it suggests generating the
3990 result in mode TMODE. But this is done only when convenient.
3991 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3992 TMODE is just a suggestion; callers must assume that
3993 the rtx returned may not have mode TMODE.
3995 Note that TARGET may have neither TMODE nor MODE. In that case, it
3996 probably will not be used.
3998 If MODIFIER is EXPAND_SUM then when EXP is an addition
3999 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4000 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4001 products as above, or REG or MEM, or constant.
4002 Ordinarily in such cases we would output mul or add instructions
4003 and then return a pseudo reg containing the sum.
4005 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4006 it also marks a label as absolutely required (it can't be dead).
4007 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4008 This is used for outputting expressions used in initializers.
4010 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4011 with a constant address even if that address is not normally legitimate.
4012 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
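/* Illustrative sketch, not compiled: the most common call pattern in
   this file.  Modifier 0 is EXPAND_NORMAL; since TARGET and TMODE are
   only suggestions, the caller must cope with whatever mode and rtx
   form come back.  */
#if 0
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
}
#endif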
4015 expand_expr (exp, target, tmode, modifier)
4018 enum machine_mode tmode;
4019 enum expand_modifier modifier;
4021 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4022 This is static so it will be accessible to our recursive callees. */
4023 static tree placeholder_list = 0;
4024 register rtx op0, op1, temp;
4025 tree type = TREE_TYPE (exp);
4026 int unsignedp = TREE_UNSIGNED (type);
4027 register enum machine_mode mode = TYPE_MODE (type);
4028 register enum tree_code code = TREE_CODE (exp);
4030 /* Use subtarget as the target for operand 0 of a binary operation. */
4031 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4032 rtx original_target = target;
4033 /* Maybe defer this until sure not doing bytecode? */
4034 int ignore = (target == const0_rtx
4035 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4036 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4037 || code == COND_EXPR)
4038 && TREE_CODE (type) == VOID_TYPE));
4042 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4044 bc_expand_expr (exp);
4048 /* Don't use hard regs as subtargets, because the combiner
4049 can only handle pseudo regs. */
4050 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4052 /* Avoid subtargets inside loops,
4053 since they hide some invariant expressions. */
4054 if (preserve_subexpressions_p ())
4057 /* If we are going to ignore this result, we need only do something
4058 if there is a side-effect somewhere in the expression. If there
4059 is, short-circuit the most common cases here. Note that we must
4060 not call expand_expr with anything but const0_rtx in case this
4061 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4065 if (! TREE_SIDE_EFFECTS (exp))
4068 /* Ensure we reference a volatile object even if value is ignored. */
4069 if (TREE_THIS_VOLATILE (exp)
4070 && TREE_CODE (exp) != FUNCTION_DECL
4071 && mode != VOIDmode && mode != BLKmode)
4073 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4074 if (GET_CODE (temp) == MEM)
4075 temp = copy_to_reg (temp);
4079 if (TREE_CODE_CLASS (code) == '1')
4080 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4081 VOIDmode, modifier);
4082 else if (TREE_CODE_CLASS (code) == '2'
4083 || TREE_CODE_CLASS (code) == '<')
4085 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4086 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4089 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4090 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4091 /* If the second operand has no side effects, just evaluate
4093 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4094 VOIDmode, modifier);
4099 /* If we will do cse, generate all results into pseudo registers
4100 since 1) that allows cse to find more things
4101 and 2) otherwise cse could produce an insn the machine
4104 if (! cse_not_expected && mode != BLKmode && target
4105 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4112 tree function = decl_function_context (exp);
4113 /* Handle using a label in a containing function. */
4114 if (function != current_function_decl && function != 0)
4116 struct function *p = find_function_data (function);
4117 /* Allocate in the memory associated with the function
4118 that the label is in. */
4119 push_obstacks (p->function_obstack,
4120 p->function_maybepermanent_obstack);
4122 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4123 label_rtx (exp), p->forced_labels);
4126 else if (modifier == EXPAND_INITIALIZER)
4127 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4128 label_rtx (exp), forced_labels);
4129 temp = gen_rtx (MEM, FUNCTION_MODE,
4130 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4131 if (function != current_function_decl && function != 0)
4132 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4137 if (DECL_RTL (exp) == 0)
4139 error_with_decl (exp, "prior parameter's size depends on `%s'");
4140 return CONST0_RTX (mode);
4143 /* ... fall through ... */
4146 /* If a static var's type was incomplete when the decl was written,
4147 but the type is complete now, lay out the decl now. */
4148 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4149 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4151 push_obstacks_nochange ();
4152 end_temporary_allocation ();
4153 layout_decl (exp, 0);
4154 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4158 /* ... fall through ... */
4162 if (DECL_RTL (exp) == 0)
4165 /* Ensure the variable is marked as used even if it doesn't go through
4166 a parser. If it hasn't been used yet, write out an external
4168 if (! TREE_USED (exp))
4170 assemble_external (exp);
4171 TREE_USED (exp) = 1;
4174 /* Handle variables inherited from containing functions. */
4175 context = decl_function_context (exp);
4177 /* We treat inline_function_decl as an alias for the current function
4178 because that is the inline function whose vars, types, etc.
4179 are being merged into the current function.
4180 See expand_inline_function. */
4182 if (context != 0 && context != current_function_decl
4183 && context != inline_function_decl
4184 /* If var is static, we don't need a static chain to access it. */
4185 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4186 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4190 /* Mark as non-local and addressable. */
4191 DECL_NONLOCAL (exp) = 1;
4192 mark_addressable (exp);
4193 if (GET_CODE (DECL_RTL (exp)) != MEM)
4195 addr = XEXP (DECL_RTL (exp), 0);
4196 if (GET_CODE (addr) == MEM)
4197 addr = gen_rtx (MEM, Pmode,
4198 fix_lexical_addr (XEXP (addr, 0), exp));
4200 addr = fix_lexical_addr (addr, exp);
4201 return change_address (DECL_RTL (exp), mode, addr);
4204 /* This is the case of an array whose size is to be determined
4205 from its initializer, while the initializer is still being parsed.
4208 if (GET_CODE (DECL_RTL (exp)) == MEM
4209 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4210 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4211 XEXP (DECL_RTL (exp), 0));
4213 /* If DECL_RTL is memory, we are in the normal case. If either
4214 the address is not valid, or it is not a register and -fforce-addr
4215 is specified, get the address into a register. */
4217 if (GET_CODE (DECL_RTL (exp)) == MEM
4218 && modifier != EXPAND_CONST_ADDRESS
4219 && modifier != EXPAND_SUM
4220 && modifier != EXPAND_INITIALIZER
4221 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4223 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4224 return change_address (DECL_RTL (exp), VOIDmode,
4225 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4227 /* If the mode of DECL_RTL does not match that of the decl, it
4228 must be a promoted value. We return a SUBREG of the wanted mode,
4229 but mark it so that we know that it was already extended. */
4231 if (GET_CODE (DECL_RTL (exp)) == REG
4232 && GET_MODE (DECL_RTL (exp)) != mode)
4234 /* Get the signedness used for this variable. Ensure we get the
4235 same mode we got when the variable was declared. */
4236 if (GET_MODE (DECL_RTL (exp))
4237 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4238 abort ();
4240 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4241 SUBREG_PROMOTED_VAR_P (temp) = 1;
4242 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4243 return temp;
4246 return DECL_RTL (exp);
4248 case INTEGER_CST:
4249 return immed_double_const (TREE_INT_CST_LOW (exp),
4250 TREE_INT_CST_HIGH (exp),
4251 mode);
4253 case CONST_DECL:
4254 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4256 case REAL_CST:
4257 /* If optimized, generate immediate CONST_DOUBLE
4258 which will be turned into memory by reload if necessary.
4260 We used to force a register so that loop.c could see it. But
4261 this does not allow gen_* patterns to perform optimizations with
4262 the constants. It also produces two insns in cases like "x = 1.0;".
4263 On most machines, floating-point constants are not permitted in
4264 many insns, so we'd end up copying it to a register in any case.
4266 Now, we do the copying in expand_binop, if appropriate. */
4267 return immed_real_const (exp);
4269 case COMPLEX_CST:
4270 case STRING_CST:
4271 if (! TREE_CST_RTL (exp))
4272 output_constant_def (exp);
4274 /* TREE_CST_RTL probably contains a constant address.
4275 On RISC machines where a constant address isn't valid,
4276 make some insns to get that address into a register. */
4277 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4278 && modifier != EXPAND_CONST_ADDRESS
4279 && modifier != EXPAND_INITIALIZER
4280 && modifier != EXPAND_SUM
4281 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4282 || (flag_force_addr
4283 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4284 return change_address (TREE_CST_RTL (exp), VOIDmode,
4285 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4286 return TREE_CST_RTL (exp);
4288 case SAVE_EXPR:
4289 context = decl_function_context (exp);
4291 /* We treat inline_function_decl as an alias for the current function
4292 because that is the inline function whose vars, types, etc.
4293 are being merged into the current function.
4294 See expand_inline_function. */
4295 if (context == current_function_decl || context == inline_function_decl)
4296 context = 0;
4298 /* If this is non-local, handle it.  */
4299 if (context)
4301 temp = SAVE_EXPR_RTL (exp);
4302 if (temp && GET_CODE (temp) == REG)
4304 put_var_into_stack (exp);
4305 temp = SAVE_EXPR_RTL (exp);
4307 if (temp == 0 || GET_CODE (temp) != MEM)
4308 abort ();
4309 return change_address (temp, mode,
4310 fix_lexical_addr (XEXP (temp, 0), exp));
4312 if (SAVE_EXPR_RTL (exp) == 0)
4314 if (mode == BLKmode)
4316 temp
4317 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4318 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
4320 else if (mode == VOIDmode)
4321 temp = const0_rtx;
4322 else
4323 temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));
4325 SAVE_EXPR_RTL (exp) = temp;
4326 if (!optimize && GET_CODE (temp) == REG)
4327 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4328 save_expr_regs);
4330 /* If the mode of TEMP does not match that of the expression, it
4331 must be a promoted value. We pass store_expr a SUBREG of the
4332 wanted mode but mark it so that we know that it was already
4333 extended.  Note that `unsignedp' was modified above in
4334 this case.  */
4336 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4338 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4339 SUBREG_PROMOTED_VAR_P (temp) = 1;
4340 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4343 if (temp == const0_rtx)
4344 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4345 else
4346 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4349 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4350 must be a promoted value. We return a SUBREG of the wanted mode,
4351 but mark it so that we know that it was already extended. */
4353 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4354 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4356 /* Compute the signedness and make the proper SUBREG. */
4357 promote_mode (type, mode, &unsignedp, 0);
4358 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4359 SUBREG_PROMOTED_VAR_P (temp) = 1;
4360 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4364 return SAVE_EXPR_RTL (exp);
4366 case PLACEHOLDER_EXPR:
4367 /* If there is an object on the head of the placeholder list,
4368 see if some object in its references is of type TYPE.  For
4369 further information, see tree.def.  */
4370 if (placeholder_list)
4372 tree object;
4373 tree old_list = placeholder_list;
4375 for (object = TREE_PURPOSE (placeholder_list);
4376 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4377 != TYPE_MAIN_VARIANT (type))
4378 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
4379 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
4380 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
4381 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
4382 object = TREE_OPERAND (object, 0))
4385 if (object != 0
4386 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
4387 == TYPE_MAIN_VARIANT (type)))
4389 /* Expand this object skipping the list entries before
4390 it was found in case it is also a PLACEHOLDER_EXPR.
4391 In that case, we want to translate it using subsequent
4392 entries.  */
4393 placeholder_list = TREE_CHAIN (placeholder_list);
4394 temp = expand_expr (object, original_target, tmode, modifier);
4395 placeholder_list = old_list;
4396 return temp;
4400 /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
4401 abort ();
4403 case WITH_RECORD_EXPR:
4404 /* Put the object on the placeholder list, expand our first operand,
4405 and pop the list. */
4406 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
4407 placeholder_list);
4408 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
4409 tmode, modifier);
4410 placeholder_list = TREE_CHAIN (placeholder_list);
4411 return target;
4413 case EXIT_EXPR:
4414 expand_exit_loop_if_false (NULL_PTR,
4415 invert_truthvalue (TREE_OPERAND (exp, 0)));
4416 return const0_rtx;
4418 case LOOP_EXPR:
4420 expand_start_loop (1);
4421 expand_expr_stmt (TREE_OPERAND (exp, 0));
4422 expand_end_loop ();
4424 return const0_rtx;
4427 case BIND_EXPR:
4429 tree vars = TREE_OPERAND (exp, 0);
4430 int vars_need_expansion = 0;
4432 /* Need to open a binding contour here because
4433 if there are any cleanups they must be contained here.  */
4434 expand_start_bindings (0);
4436 /* Mark the corresponding BLOCK for output in its proper place. */
4437 if (TREE_OPERAND (exp, 2) != 0
4438 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4439 insert_block (TREE_OPERAND (exp, 2));
4441 /* If VARS have not yet been expanded, expand them now. */
4442 while (vars)
4444 if (DECL_RTL (vars) == 0)
4446 vars_need_expansion = 1;
4447 expand_decl (vars);
4449 expand_decl_init (vars);
4450 vars = TREE_CHAIN (vars);
4453 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4455 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4457 return temp;
4460 case RTL_EXPR:
4461 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4462 abort ();
4463 emit_insns (RTL_EXPR_SEQUENCE (exp));
4464 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4465 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
4466 free_temps_for_rtl_expr (exp);
4467 return RTL_EXPR_RTL (exp);
4469 case CONSTRUCTOR:
4470 /* If we don't need the result, just ensure we evaluate any
4471 subexpressions.  */
4472 if (ignore)
4474 tree elt;
4475 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4476 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4477 return const0_rtx;
4480 /* All elts simple constants => refer to a constant in memory. But
4481 if this is a non-BLKmode mode, let it store a field at a time
4482 since that should make a CONST_INT or CONST_DOUBLE when we
4483 fold. Likewise, if we have a target we can use, it is best to
4484 store directly into the target unless the type is large enough
4485 that memcpy will be used. If we are making an initializer and
4486 all operands are constant, put it in memory as well. */
4487 else if ((TREE_STATIC (exp)
4488 && ((mode == BLKmode
4489 && ! (target != 0 && safe_from_p (target, exp)))
4490 || TREE_ADDRESSABLE (exp)
4491 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4492 && (move_by_pieces_ninsns
4493 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
4494 TYPE_ALIGN (type) / BITS_PER_UNIT)
4495 > MOVE_RATIO))))
4496 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
4498 rtx constructor = output_constant_def (exp);
4499 if (modifier != EXPAND_CONST_ADDRESS
4500 && modifier != EXPAND_INITIALIZER
4501 && modifier != EXPAND_SUM
4502 && (! memory_address_p (GET_MODE (constructor),
4503 XEXP (constructor, 0))
4504 || (flag_force_addr
4505 && GET_CODE (XEXP (constructor, 0)) != REG)))
4506 constructor = change_address (constructor, VOIDmode,
4507 XEXP (constructor, 0));
4508 return constructor;
4511 else
4513 if (target == 0 || ! safe_from_p (target, exp))
4515 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4516 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4517 else
4519 target
4520 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4521 if (AGGREGATE_TYPE_P (type))
4522 MEM_IN_STRUCT_P (target) = 1;
4525 store_constructor (exp, target);
4526 return target;
4529 case INDIRECT_REF:
4531 tree exp1 = TREE_OPERAND (exp, 0);
4532 tree exp2;
4534 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
4535 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4536 This code has the same general effect as simply doing
4537 expand_expr on the save expr, except that the expression PTR
4538 is computed for use as a memory address. This means different
4539 code, suitable for indexing, may be generated. */
4540 if (TREE_CODE (exp1) == SAVE_EXPR
4541 && SAVE_EXPR_RTL (exp1) == 0
4542 && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
4544 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4545 VOIDmode, EXPAND_SUM);
4546 op0 = memory_address (mode, temp);
4547 op0 = copy_all_regs (op0);
4548 SAVE_EXPR_RTL (exp1) = op0;
4550 else
4552 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
4553 op0 = memory_address (mode, op0);
4556 temp = gen_rtx (MEM, mode, op0);
4557 /* If address was computed by addition,
4558 mark this as an element of an aggregate. */
4559 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4560 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4561 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4562 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
4563 || (TREE_CODE (exp1) == ADDR_EXPR
4564 && (exp2 = TREE_OPERAND (exp1, 0))
4565 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
4566 MEM_IN_STRUCT_P (temp) = 1;
4567 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
4569 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
4570 here, because, in C and C++, the fact that a location is accessed
4571 through a pointer to const does not mean that the value there can
4572 never change. Languages where it can never change should
4573 also set TREE_STATIC. */
4574 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) | TREE_STATIC (exp);
4575 return temp;
4578 case ARRAY_REF:
4579 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
4580 abort ();
4583 tree array = TREE_OPERAND (exp, 0);
4584 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
4585 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4586 tree index = TREE_OPERAND (exp, 1);
4587 tree index_type = TREE_TYPE (index);
4588 int i;
4590 if (TREE_CODE (low_bound) != INTEGER_CST
4591 && contains_placeholder_p (low_bound))
4592 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
4594 /* Optimize the special-case of a zero lower bound.
4596 We convert the low_bound to sizetype to avoid some problems
4597 with constant folding. (E.g. suppose the lower bound is 1,
4598 and its mode is QI. Without the conversion, (ARRAY
4599 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4600 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4602 But sizetype isn't quite right either (especially if
4603 the low bound is negative).  FIXME */
4605 if (! integer_zerop (low_bound))
4606 index = fold (build (MINUS_EXPR, index_type, index,
4607 convert (sizetype, low_bound)));
4609 if ((TREE_CODE (index) != INTEGER_CST
4610 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4611 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
4613 /* Nonconstant array index or nonconstant element size, and
4614 not an array in an unaligned (packed) structure field.
4615 Generate the tree for *(&array+index) and expand that,
4616 except do it in a language-independent way
4617 and don't complain about non-lvalue arrays.
4618 `mark_addressable' should already have been called
4619 for any array for which this case will be reached. */
4621 /* Don't forget the const or volatile flag from the array
4622 element.  */
4623 tree variant_type = build_type_variant (type,
4624 TREE_READONLY (exp),
4625 TREE_THIS_VOLATILE (exp));
4626 tree array_adr = build1 (ADDR_EXPR,
4627 build_pointer_type (variant_type), array);
4629 tree size = size_in_bytes (type);
4631 /* Convert the integer argument to a type the same size as a
4632 pointer so the multiply won't overflow spuriously. */
4633 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
4634 index = convert (type_for_size (POINTER_SIZE, 0), index);
4636 if (TREE_CODE (size) != INTEGER_CST
4637 && contains_placeholder_p (size))
4638 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
4640 /* Don't think the address has side effects
4641 just because the array does.
4642 (In some cases the address might have side effects,
4643 and we fail to record that fact here. However, it should not
4644 matter, since expand_expr should not care.) */
4645 TREE_SIDE_EFFECTS (array_adr) = 0;
4647 elt = build1 (INDIRECT_REF, type,
4648 fold (build (PLUS_EXPR,
4649 TYPE_POINTER_TO (variant_type),
4650 array_adr,
4651 fold (build (MULT_EXPR,
4652 TYPE_POINTER_TO (variant_type),
4653 index,
4654 size)))));
4655 /* Volatility, etc., of new expression is same as old
4656 expression.  */
4657 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4658 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4659 TREE_READONLY (elt) = TREE_READONLY (exp);
4661 return expand_expr (elt, target, tmode, modifier);
4664 /* Fold an expression like: "foo"[2].
4665 This is not done in fold so it won't happen inside &.
4666 Don't fold if this is for wide characters since it's too
4667 difficult to do correctly and this is a very rare case. */
4669 if (TREE_CODE (array) == STRING_CST
4670 && TREE_CODE (index) == INTEGER_CST
4671 && !TREE_INT_CST_HIGH (index)
4672 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
4673 && GET_MODE_CLASS (mode) == MODE_INT
4674 && GET_MODE_SIZE (mode) == 1)
4675 return GEN_INT (TREE_STRING_POINTER (array)[i]);
4677 /* If this is a constant index into a constant array,
4678 just get the value from the array. Handle both the cases when
4679 we have an explicit constructor and when our operand is a variable
4680 that was declared const. */
4682 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
4684 if (TREE_CODE (index) == INTEGER_CST
4685 && TREE_INT_CST_HIGH (index) == 0)
4687 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4689 i = TREE_INT_CST_LOW (index);
4690 while (elem && i--)
4691 elem = TREE_CHAIN (elem);
4692 if (elem)
4693 return expand_expr (fold (TREE_VALUE (elem)), target,
4694 tmode, modifier);
4698 else if (optimize >= 1
4699 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
4700 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
4701 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
4703 if (TREE_CODE (index) == INTEGER_CST
4704 && TREE_INT_CST_HIGH (index) == 0)
4706 tree init = DECL_INITIAL (array);
4708 i = TREE_INT_CST_LOW (index);
4709 if (TREE_CODE (init) == CONSTRUCTOR)
4711 tree elem = CONSTRUCTOR_ELTS (init);
4713 while (elem
4714 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
4715 elem = TREE_CHAIN (elem);
4716 if (elem)
4717 return expand_expr (fold (TREE_VALUE (elem)), target,
4718 tmode, modifier);
4720 else if (TREE_CODE (init) == STRING_CST
4721 && i < TREE_STRING_LENGTH (init))
4722 return GEN_INT (TREE_STRING_POINTER (init)[i]);
4727 /* Treat array-ref with constant index as a component-ref.  */
4729 case COMPONENT_REF:
4730 case BIT_FIELD_REF:
4731 /* If the operand is a CONSTRUCTOR, we can just extract the
4732 appropriate field if it is present. Don't do this if we have
4733 already written the data since we want to refer to that copy
4734 and varasm.c assumes that's what we'll do. */
4735 if (code != ARRAY_REF
4736 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4737 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4741 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4742 elt = TREE_CHAIN (elt))
4743 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4744 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4748 enum machine_mode mode1;
4749 int bitsize;
4750 int bitpos;
4751 tree offset;
4752 int volatilep = 0;
4753 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4754 &mode1, &unsignedp, &volatilep);
4755 int alignment;
4757 /* If we got back the original object, something is wrong. Perhaps
4758 we are evaluating an expression too early. In any event, don't
4759 infinitely recurse.  */
4760 if (tem == exp)
4761 abort ();
4763 /* In some cases, we will be offsetting OP0's address by a constant.
4764 So get it as a sum, if possible. If we will be using it
4765 directly in an insn, we validate it. */
4766 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
4768 /* If this is a constant, put it into a register if it is a
4769 legitimate constant and memory if it isn't. */
4770 if (CONSTANT_P (op0))
4772 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4773 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
4774 op0 = force_reg (mode, op0);
4776 op0 = validize_mem (force_const_mem (mode, op0));
4779 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
4780 if (offset != 0)
4782 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4784 if (GET_CODE (op0) != MEM)
4785 abort ();
4786 op0 = change_address (op0, VOIDmode,
4787 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
4788 force_reg (ptr_mode, offset_rtx)));
4789 /* If we have a variable offset, the known alignment
4790 is only that of the innermost structure containing the field.
4791 (Actually, we could sometimes do better by using the
4792 size of an element of the innermost array, but no need.) */
4793 if (TREE_CODE (exp) == COMPONENT_REF
4794 || TREE_CODE (exp) == BIT_FIELD_REF)
4795 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
4796 / BITS_PER_UNIT);
4799 /* Don't forget about volatility even if this is a bitfield. */
4800 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4802 op0 = copy_rtx (op0);
4803 MEM_VOLATILE_P (op0) = 1;
4806 /* In cases where an aligned union has an unaligned object
4807 as a field, we might be extracting a BLKmode value from
4808 an integer-mode (e.g., SImode) object. Handle this case
4809 by doing the extract into an object as wide as the field
4810 (which we know to be the width of a basic mode), then
4811 storing into memory, and changing the mode to BLKmode. */
4812 if (mode1 == VOIDmode
4813 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4814 || (modifier != EXPAND_CONST_ADDRESS
4815 && modifier != EXPAND_SUM
4816 && modifier != EXPAND_INITIALIZER
4817 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
4818 /* If the field isn't aligned enough to fetch as a memref,
4819 fetch it as a bit field. */
4820 || (SLOW_UNALIGNED_ACCESS
4821 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4822 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
4824 enum machine_mode ext_mode = mode;
4826 if (ext_mode == BLKmode)
4827 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4829 if (ext_mode == BLKmode)
4830 abort ();
4832 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4833 unsignedp, target, ext_mode, ext_mode,
4834 alignment,
4835 int_size_in_bytes (TREE_TYPE (tem)));
4836 if (mode == BLKmode)
4838 rtx new = assign_stack_temp (ext_mode,
4839 bitsize / BITS_PER_UNIT, 0);
4841 emit_move_insn (new, op0);
4842 op0 = copy_rtx (new);
4843 PUT_MODE (op0, BLKmode);
4844 MEM_IN_STRUCT_P (op0) = 1;
4846 return op0;
4850 /* Get a reference to just this component. */
4851 if (modifier == EXPAND_CONST_ADDRESS
4852 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4853 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4854 (bitpos / BITS_PER_UNIT)));
4856 op0 = change_address (op0, mode1,
4857 plus_constant (XEXP (op0, 0),
4858 (bitpos / BITS_PER_UNIT)));
4859 MEM_IN_STRUCT_P (op0) = 1;
4860 MEM_VOLATILE_P (op0) |= volatilep;
4861 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4862 return op0;
4863 if (target == 0)
4864 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4865 convert_move (target, op0, unsignedp);
4866 return target;
4869 case OFFSET_REF:
4871 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4872 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4873 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4874 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4875 MEM_IN_STRUCT_P (temp) = 1;
4876 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4877 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4878 a location is accessed through a pointer to const does not mean
4879 that the value there can never change. */
4880 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4881 #endif
4882 return temp;
4885 /* Intended for a reference to a buffer of a file-object in Pascal.
4886 But it's not certain that a special tree code will really be
4887 necessary for these.  INDIRECT_REF might work for them.  */
4888 case BUFFER_REF:
4889 abort ();
4891 case IN_EXPR:
4893 /* Pascal set IN expression.  Algorithm:
4896 rlo = set_low - (set_low%bits_per_word);
4897 the_word = set [ (index - rlo)/bits_per_word ];
4898 bit_index = index % bits_per_word;
4899 bitmask = 1 << bit_index;
4900 return !!(the_word & bitmask); */
4902 tree set = TREE_OPERAND (exp, 0);
4903 tree index = TREE_OPERAND (exp, 1);
4904 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
4905 tree set_type = TREE_TYPE (set);
4906 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4907 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4908 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
4909 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4910 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4911 rtx setval = expand_expr (set, 0, VOIDmode, 0);
4912 rtx setaddr = XEXP (setval, 0);
4913 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4914 rtx rlow;
4915 rtx diff, quo, rem, addr, bit, result;
4917 preexpand_calls (exp);
4919 /* If domain is empty, answer is no. Likewise if index is constant
4920 and out of bounds. */
4921 if (((TREE_CODE (set_high_bound) == INTEGER_CST
4922 && TREE_CODE (set_low_bound) == INTEGER_CST
4923 && tree_int_cst_lt (set_high_bound, set_low_bound))
4924 || (TREE_CODE (index) == INTEGER_CST
4925 && TREE_CODE (set_low_bound) == INTEGER_CST
4926 && tree_int_cst_lt (index, set_low_bound))
4927 || (TREE_CODE (set_high_bound) == INTEGER_CST
4928 && TREE_CODE (index) == INTEGER_CST
4929 && tree_int_cst_lt (set_high_bound, index))))
4930 return const0_rtx;
4932 if (target == 0)
4933 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4935 /* If we get here, we have to generate the code for both cases
4936 (in range and out of range). */
4938 op0 = gen_label_rtx ();
4939 op1 = gen_label_rtx ();
4941 if (! (GET_CODE (index_val) == CONST_INT
4942 && GET_CODE (lo_r) == CONST_INT))
4944 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4945 GET_MODE (index_val), iunsignedp, 0);
4946 emit_jump_insn (gen_blt (op1));
4949 if (! (GET_CODE (index_val) == CONST_INT
4950 && GET_CODE (hi_r) == CONST_INT))
4952 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4953 GET_MODE (index_val), iunsignedp, 0);
4954 emit_jump_insn (gen_bgt (op1));
4957 /* Calculate the element number of bit zero in the first word
4958 of the set, rounding the low bound down to a multiple of
4959 BITS_PER_UNIT.  */
4959 if (GET_CODE (lo_r) == CONST_INT)
4960 rlow = GEN_INT (INTVAL (lo_r)
4961 & ~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1));
4962 else
4963 rlow = expand_binop (index_mode, and_optab, lo_r,
4964 GEN_INT (~ (HOST_WIDE_INT) (BITS_PER_UNIT - 1)),
4965 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4967 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
4968 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
4970 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4971 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4972 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4973 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
4975 addr = memory_address (byte_mode,
4976 expand_binop (index_mode, add_optab, quo,
4977 setaddr, NULL_RTX, iunsignedp,
4978 OPTAB_LIB_WIDEN));
4980 /* Extract the bit we want to examine.  */
4981 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4982 gen_rtx (MEM, byte_mode, addr),
4983 make_tree (TREE_TYPE (index), rem),
4984 NULL_RTX, 1);
4985 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4986 GET_MODE (target) == byte_mode ? target : 0,
4987 1, OPTAB_LIB_WIDEN);
4989 if (result != target)
4990 convert_move (target, result, 1);
4992 /* Output the code to handle the out-of-range case.  */
4993 emit_jump (op0);
4994 emit_label (op1);
4995 emit_move_insn (target, const0_rtx);
4996 emit_label (op0);
4997 return target;
5000 case WITH_CLEANUP_EXPR:
5001 if (RTL_EXPR_RTL (exp) == 0)
5003 RTL_EXPR_RTL (exp)
5004 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5005 cleanups_this_call
5006 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5007 /* That's it for this cleanup. */
5008 TREE_OPERAND (exp, 2) = 0;
5009 (*interim_eh_hook) (NULL_TREE);
5011 return RTL_EXPR_RTL (exp);
5013 case CLEANUP_POINT_EXPR:
5015 extern int temp_slot_level;
5016 tree old_cleanups = cleanups_this_call;
5017 int old_temp_level = target_temp_slot_level;
5019 target_temp_slot_level = temp_slot_level;
5020 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5021 /* If we're going to use this value, load it up now. */
5022 if (! ignore)
5023 op0 = force_not_mem (op0);
5024 expand_cleanups_to (old_cleanups);
5025 preserve_temp_slots (op0);
5026 free_temp_slots ();
5027 pop_temp_slots ();
5028 target_temp_slot_level = old_temp_level;
5030 return op0;
5032 case CALL_EXPR:
5033 /* Check for a built-in function.  */
5034 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5035 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5036 == FUNCTION_DECL)
5037 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5038 return expand_builtin (exp, target, subtarget, tmode, ignore);
5040 /* If this call was expanded already by preexpand_calls,
5041 just return the result we got. */
5042 if (CALL_EXPR_RTL (exp) != 0)
5043 return CALL_EXPR_RTL (exp);
5045 return expand_call (exp, target, ignore);
5047 case NON_LVALUE_EXPR:
5048 case NOP_EXPR:
5049 case CONVERT_EXPR:
5050 case REFERENCE_EXPR:
5051 if (TREE_CODE (type) == UNION_TYPE)
5053 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5054 if (target == 0)
5056 if (mode == BLKmode)
5058 if (TYPE_SIZE (type) == 0
5059 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5060 abort ();
5061 target = assign_stack_temp (BLKmode,
5062 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5063 + BITS_PER_UNIT - 1)
5064 / BITS_PER_UNIT, 0);
5065 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
5067 else
5068 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5071 if (GET_CODE (target) == MEM)
5072 /* Store data into beginning of memory target. */
5073 store_expr (TREE_OPERAND (exp, 0),
5074 change_address (target, TYPE_MODE (valtype), 0), 0);
5076 else if (GET_CODE (target) == REG)
5077 /* Store this field into a union of the proper type. */
5078 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5079 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5080 VOIDmode, 0, 1,
5081 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5083 else
5084 abort ();
5085 /* Return the entire union.  */
5086 return target;
5089 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5091 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5092 modifier);
5094 /* If the signedness of the conversion differs and OP0 is
5095 a promoted SUBREG, clear that indication since we now
5096 have to do the proper extension. */
5097 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5098 && GET_CODE (op0) == SUBREG)
5099 SUBREG_PROMOTED_VAR_P (op0) = 0;
5101 return op0;
5104 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5105 if (GET_MODE (op0) == mode)
5106 return op0;
5108 /* If OP0 is a constant, just convert it into the proper mode. */
5109 if (CONSTANT_P (op0))
5110 return
5111 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5112 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5114 if (modifier == EXPAND_INITIALIZER)
5115 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5117 if (flag_force_mem && GET_CODE (op0) == MEM)
5118 op0 = copy_to_reg (op0);
5120 if (target == 0)
5121 return
5122 convert_to_mode (mode, op0,
5123 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5124 else
5125 convert_move (target, op0,
5126 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5127 return target;
5129 case PLUS_EXPR:
5130 /* We come here from MINUS_EXPR when the second operand is a constant.  */
5131 plus_expr:
5132 this_optab = add_optab;
5134 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5135 something else, make sure we add the register to the constant and
5136 then to the other thing. This case can occur during strength
5137 reduction and doing it this way will produce better code if the
5138 frame pointer or argument pointer is eliminated.
5140 fold-const.c will ensure that the constant is always in the inner
5141 PLUS_EXPR, so the only case we need to do anything about is if
5142 sp, ap, or fp is our second argument, in which case we must swap
5143 the innermost first argument and our second argument. */
5145 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5146 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5147 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5148 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5149 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5150 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5152 tree t = TREE_OPERAND (exp, 1);
5154 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5155 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5158 /* If the result is to be ptr_mode and we are adding an integer to
5159 something, we might be forming a constant. So try to use
5160 plus_constant. If it produces a sum and we can't accept it,
5161 use force_operand. This allows P = &ARR[const] to generate
5162 efficient code on machines where a SYMBOL_REF is not a valid
5163 address.
5165 If this is an EXPAND_SUM call, always return the sum.  */
5166 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5167 || mode == ptr_mode)
5169 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5170 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5171 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5173 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5174 EXPAND_SUM);
5175 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5176 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5177 op1 = force_operand (op1, target);
5178 return op1;
5181 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5182 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5183 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5186 EXPAND_SUM);
5187 if (! CONSTANT_P (op0))
5189 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5190 VOIDmode, modifier);
5191 /* Don't go to both_summands if modifier
5192 says it's not right to return a PLUS. */
5193 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5194 goto binop2;
5195 goto both_summands;
5197 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5198 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5199 op0 = force_operand (op0, target);
5200 return op0;
5204 /* No sense saving up arithmetic to be done
5205 if it's all in the wrong mode to form part of an address.
5206 And force_operand won't know whether to sign-extend or
5207 zero-extend.  */
5208 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5209 || mode != ptr_mode)
5210 goto binop;
5212 preexpand_calls (exp);
5213 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5214 subtarget = 0;
5216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5217 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5219 both_summands:
5220 /* Make sure any term that's a sum with a constant comes last.  */
5221 if (GET_CODE (op0) == PLUS
5222 && CONSTANT_P (XEXP (op0, 1)))
5224 temp = op0;
5225 op0 = op1;
5226 op1 = temp;
5228 /* If adding to a sum including a constant,
5229 associate it to put the constant outside. */
5230 if (GET_CODE (op1) == PLUS
5231 && CONSTANT_P (XEXP (op1, 1)))
5233 rtx constant_term = const0_rtx;
5235 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5236 if (temp != 0)
5237 op0 = temp;
5238 /* Ensure that MULT comes first if there is one.  */
5239 else if (GET_CODE (op0) == MULT)
5240 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5241 else
5242 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5244 /* Let's also eliminate constants from op0 if possible. */
5245 op0 = eliminate_constant_term (op0, &constant_term);
5247 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5248 their sum should be a constant. Form it into OP1, since the
5249 result we want will then be OP0 + OP1. */
5251 temp = simplify_binary_operation (PLUS, mode, constant_term,
5252 XEXP (op1, 1));
5253 if (temp != 0)
5254 op1 = temp;
5255 else
5256 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5259 /* Put a constant term last and put a multiplication first. */
5260 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5261 temp = op1, op1 = op0, op0 = temp;
5263 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5264 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5266 case MINUS_EXPR:
5267 /* For initializers, we are allowed to return a MINUS of two
5268 symbolic constants.  Here we handle all cases when both operands
5269 are constant.  */
5270 /* Handle difference of two symbolic constants,
5271 for the sake of an initializer. */
5272 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5273 && really_constant_p (TREE_OPERAND (exp, 0))
5274 && really_constant_p (TREE_OPERAND (exp, 1)))
5276 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5277 VOIDmode, modifier);
5278 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5279 VOIDmode, modifier);
5281 /* If the last operand is a CONST_INT, use plus_constant of
5282 the negated constant. Else make the MINUS. */
5283 if (GET_CODE (op1) == CONST_INT)
5284 return plus_constant (op0, - INTVAL (op1));
5286 return gen_rtx (MINUS, mode, op0, op1);
5288 /* Convert A - const to A + (-const). */
5289 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5291 tree negated = fold (build1 (NEGATE_EXPR, type,
5292 TREE_OPERAND (exp, 1)));
5294 /* Deal with the case where we can't negate the constant
5295 in TYPE.  */
5296 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5298 tree newtype = signed_type (type);
5299 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5300 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5301 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5303 if (! TREE_OVERFLOW (newneg))
5304 return expand_expr (convert (type,
5305 build (PLUS_EXPR, newtype,
5306 newop0, newneg)),
5307 target, tmode, modifier);
5311 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5312 goto plus_expr;
5315 this_optab = sub_optab;
5316 goto binop;
5318 case MULT_EXPR:
5319 preexpand_calls (exp);
5320 /* If first operand is constant, swap them.
5321 Thus the following special case checks need only
5322 check the second operand. */
5323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5325 register tree t1 = TREE_OPERAND (exp, 0);
5326 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5327 TREE_OPERAND (exp, 1) = t1;
5330 /* Attempt to return something suitable for generating an
5331 indexed address, for machines that support that. */
5333 if (modifier == EXPAND_SUM && mode == ptr_mode
5334 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5335 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5337 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
5339 /* Apply distributive law if OP0 is x+c. */
5340 if (GET_CODE (op0) == PLUS
5341 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
5342 return gen_rtx (PLUS, mode,
5343 gen_rtx (MULT, mode, XEXP (op0, 0),
5344 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
5345 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
5346 * INTVAL (XEXP (op0, 1))));
5348 if (GET_CODE (op0) != REG)
5349 op0 = force_operand (op0, NULL_RTX);
5350 if (GET_CODE (op0) != REG)
5351 op0 = copy_to_mode_reg (mode, op0);
5353 return gen_rtx (MULT, mode, op0,
5354 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
5357 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5358 subtarget = 0;
5360 /* Check for multiplying things that have been extended
5361 from a narrower type. If this machine supports multiplying
5362 in that narrower type with a result in the desired type,
5363 do it that way, and avoid the explicit type-conversion. */
5364 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
5365 && TREE_CODE (type) == INTEGER_TYPE
5366 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5367 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
5368 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5369 && int_fits_type_p (TREE_OPERAND (exp, 1),
5370 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5371 /* Don't use a widening multiply if a shift will do. */
5372 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
5373 > HOST_BITS_PER_WIDE_INT)
5374 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
5375 ||
5376 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
5377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5378 ==
5379 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
5380 /* If both operands are extended, they must either both
5381 be zero-extended or both be sign-extended. */
5382 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
5383 ==
5384 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
5386 enum machine_mode innermode
5387 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
5388 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5389 ? umul_widen_optab : smul_widen_optab);
5390 if (mode == GET_MODE_WIDER_MODE (innermode)
5391 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
5393 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5394 NULL_RTX, VOIDmode, 0);
5395 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5396 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5397 VOIDmode, 0);
5398 else
5399 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
5400 NULL_RTX, VOIDmode, 0);
5401 goto binop2;
5404 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5405 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5406 return expand_mult (mode, op0, op1, target, unsignedp);
5408 case TRUNC_DIV_EXPR:
5409 case FLOOR_DIV_EXPR:
5410 case CEIL_DIV_EXPR:
5411 case ROUND_DIV_EXPR:
5412 case EXACT_DIV_EXPR:
5413 preexpand_calls (exp);
5414 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5415 subtarget = 0;
5416 /* Possible optimization: compute the dividend with EXPAND_SUM
5417 then if the divisor is constant can optimize the case
5418 where some terms of the dividend have coeffs divisible by it. */
5419 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5420 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5421 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
5423 case RDIV_EXPR:
5424 this_optab = flodiv_optab;
5425 goto binop;
5427 case TRUNC_MOD_EXPR:
5428 case FLOOR_MOD_EXPR:
5429 case CEIL_MOD_EXPR:
5430 case ROUND_MOD_EXPR:
5431 preexpand_calls (exp);
5432 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5433 subtarget = 0;
5434 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5435 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5436 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5438 case FIX_ROUND_EXPR:
5439 case FIX_FLOOR_EXPR:
5440 case FIX_CEIL_EXPR:
5441 abort (); /* Not used for C. */
5443 case FIX_TRUNC_EXPR:
5444 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5445 if (target == 0)
5446 target = gen_reg_rtx (mode);
5447 expand_fix (target, op0, unsignedp);
5448 return target;
5450 case FLOAT_EXPR:
5451 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5452 if (target == 0)
5453 target = gen_reg_rtx (mode);
5454 /* expand_float can't figure out what to do if FROM has VOIDmode.
5455 So give it the correct mode. With -O, cse will optimize this. */
5456 if (GET_MODE (op0) == VOIDmode)
5457 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5458 op0);
5459 expand_float (target, op0,
5460 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5461 return target;
5463 case NEGATE_EXPR:
5464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5465 temp = expand_unop (mode, neg_optab, op0, target, 0);
5466 if (temp == 0)
5467 abort ();
5468 return temp;
5470 case ABS_EXPR:
5471 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5473 /* Handle complex values specially. */
5474 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
5475 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
5476 return expand_complex_abs (mode, op0, target, unsignedp);
5478 /* Unsigned abs is simply the operand. Testing here means we don't
5479 risk generating incorrect code below. */
5480 if (TREE_UNSIGNED (type))
5481 return op0;
5483 return expand_abs (mode, op0, target, unsignedp,
5484 safe_from_p (target, TREE_OPERAND (exp, 0)));
5486 case MIN_EXPR:
5487 case MAX_EXPR:
5488 target = original_target;
5489 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5490 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5491 || GET_MODE (target) != mode
5492 || (GET_CODE (target) == REG
5493 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5494 target = gen_reg_rtx (mode);
5495 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5496 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5498 /* First try to do it with a special MIN or MAX instruction.
5499 If that does not win, use a conditional jump to select the proper
5500 value.  */
5501 this_optab = (TREE_UNSIGNED (type)
5502 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5503 : (code == MIN_EXPR ? smin_optab : smax_optab));
5505 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5506 OPTAB_WIDEN);
5507 if (temp != 0)
5508 return temp;
5510 /* At this point, a MEM target is no longer useful; we will get better
5511 code without it.  */
5513 if (GET_CODE (target) == MEM)
5514 target = gen_reg_rtx (mode);
5516 if (target != op0)
5517 emit_move_insn (target, op0);
5519 op0 = gen_label_rtx ();
5521 /* If this mode is an integer too wide to compare properly,
5522 compare word by word. Rely on cse to optimize constant cases. */
5523 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
5525 if (code == MAX_EXPR)
5526 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5527 target, op1, NULL_RTX, op0);
5528 else
5529 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
5530 op1, target, NULL_RTX, op0);
5531 emit_move_insn (target, op1);
5533 else
5535 if (code == MAX_EXPR)
5536 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5537 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5538 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5540 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5541 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5542 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5543 if (temp == const0_rtx)
5544 emit_move_insn (target, op1);
5545 else if (temp != const_true_rtx)
5547 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5548 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5549 else
5550 abort ();
5551 emit_move_insn (target, op1);
5553 emit_label (op0);
5554 return target;
5557 case BIT_NOT_EXPR:
5558 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5559 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5560 if (temp == 0)
5561 abort ();
5562 return temp;
5564 case FFS_EXPR:
5565 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5566 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5567 if (temp == 0)
5568 abort ();
5569 return temp;
5571 /* ??? Can optimize bitwise operations with one arg constant.
5572 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5573 and (a bitwise1 b) bitwise2 b (etc)
5574 but that is probably not worth while. */
5576 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
5577 boolean values when we want in all cases to compute both of them. In
5578 general it is fastest to do TRUTH_AND_EXPR by computing both operands
5579 as actual zero-or-1 values and then bitwise anding. In cases where
5580 there cannot be any side effects, better code would be made by
5581 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
5582 how to recognize those cases. */
5584 case TRUTH_AND_EXPR:
5585 case BIT_AND_EXPR:
5586 this_optab = and_optab;
5587 goto binop;
5589 case TRUTH_OR_EXPR:
5590 case BIT_IOR_EXPR:
5591 this_optab = ior_optab;
5592 goto binop;
5594 case TRUTH_XOR_EXPR:
5595 case BIT_XOR_EXPR:
5596 this_optab = xor_optab;
5597 goto binop;
5599 case LSHIFT_EXPR:
5600 case RSHIFT_EXPR:
5601 case LROTATE_EXPR:
5602 case RROTATE_EXPR:
5603 preexpand_calls (exp);
5604 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5605 subtarget = 0;
5606 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5607 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5608 unsignedp);
5610 /* Could determine the answer when only additive constants differ. Also,
5611 the addition of one can be handled by changing the condition.  */
5612 case LT_EXPR:
5613 case LE_EXPR:
5614 case GT_EXPR:
5615 case GE_EXPR:
5616 case EQ_EXPR:
5617 case NE_EXPR:
5618 preexpand_calls (exp);
5619 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5620 if (temp != 0)
5621 return temp;
5623 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5624 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5625 && original_target
5626 && GET_CODE (original_target) == REG
5627 && (GET_MODE (original_target)
5628 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5630 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
5631 VOIDmode, 0);
5633 if (temp != original_target)
5634 temp = copy_to_reg (temp);
5636 op1 = gen_label_rtx ();
5637 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5638 GET_MODE (temp), unsignedp, 0);
5639 emit_jump_insn (gen_beq (op1));
5640 emit_move_insn (temp, const1_rtx);
5641 emit_label (op1);
5642 return temp;
5645 /* If no set-flag instruction, must generate a conditional
5646 store into a temporary variable. Drop through
5647 and handle this like && and ||. */
5649 case TRUTH_ANDIF_EXPR:
5650 case TRUTH_ORIF_EXPR:
5651 if (! ignore
5652 && (target == 0 || ! safe_from_p (target, exp)
5653 /* Make sure we don't have a hard reg (such as function's return
5654 value) live across basic blocks, if not optimizing. */
5655 || (!optimize && GET_CODE (target) == REG
5656 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5657 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5659 if (target)
5660 emit_clr_insn (target);
5662 op1 = gen_label_rtx ();
5663 jumpifnot (exp, op1);
5665 if (target)
5666 emit_0_to_1_insn (target);
5667 emit_label (op1);
5669 return ignore ? const0_rtx : target;
5671 case TRUTH_NOT_EXPR:
5672 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5673 /* The parser is careful to generate TRUTH_NOT_EXPR
5674 only with operands that are always zero or one. */
5675 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5676 target, 1, OPTAB_LIB_WIDEN);
5677 if (temp == 0)
5678 abort ();
5679 return temp;
5681 case COMPOUND_EXPR:
5682 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5683 emit_queue ();
5684 return expand_expr (TREE_OPERAND (exp, 1),
5685 (ignore ? const0_rtx : target),
5686 VOIDmode, 0);
5688 case COND_EXPR:
5690 rtx flag = NULL_RTX;
5691 tree left_cleanups = NULL_TREE;
5692 tree right_cleanups = NULL_TREE;
5694 /* Used to save a pointer to the place to put the setting of
5695 the flag that indicates if this side of the conditional was
5696 taken. We backpatch the code, if we find out later that we
5697 have any conditional cleanups that need to be performed. */
5698 rtx dest_right_flag = NULL_RTX;
5699 rtx dest_left_flag = NULL_RTX;
5701 /* Note that COND_EXPRs whose type is a structure or union
5702 are required to be constructed to contain assignments of
5703 a temporary variable, so that we can evaluate them here
5704 for side effect only. If type is void, we must do likewise. */
5706 /* If an arm of the branch requires a cleanup,
5707 only that cleanup is performed. */
5709 tree singleton = 0;
5710 tree binary_op = 0, unary_op = 0;
5711 tree old_cleanups = cleanups_this_call;
5713 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5714 convert it to our mode, if necessary. */
5715 if (integer_onep (TREE_OPERAND (exp, 1))
5716 && integer_zerop (TREE_OPERAND (exp, 2))
5717 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5719 if (ignore)
5721 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5722 modifier);
5723 return const0_rtx;
5726 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5727 if (GET_MODE (op0) == mode)
5728 return op0;
5730 if (target == 0)
5731 target = gen_reg_rtx (mode);
5732 convert_move (target, op0, unsignedp);
5733 return target;
5736 /* If we are not to produce a result, we have no target. Otherwise,
5737 if a target was specified use it; it will not be used as an
5738 intermediate target unless it is safe.  If no target, use a
5739 temporary and compile the expression unchanged.  */
5741 if (ignore)
5742 temp = 0;
5743 else if (original_target
5744 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
5745 && GET_MODE (original_target) == mode
5746 && ! (GET_CODE (original_target) == MEM
5747 && MEM_VOLATILE_P (original_target)))
5748 temp = original_target;
5749 else if (mode == BLKmode)
5751 if (TYPE_SIZE (type) == 0
5752 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5753 abort ();
5755 temp = assign_stack_temp (BLKmode,
5756 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5757 + BITS_PER_UNIT - 1)
5758 / BITS_PER_UNIT, 0);
5759 MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
5761 else
5762 temp = gen_reg_rtx (mode);
5764 /* Check for X ? A + B : A. If we have this, we can copy
5765 A to the output and conditionally add B. Similarly for unary
5766 operations. Don't do this if X has side-effects because
5767 those side effects might affect A or B and the "?" operation is
5768 a sequence point in ANSI. (We test for side effects later.) */
5770 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5771 && operand_equal_p (TREE_OPERAND (exp, 2),
5772 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5773 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5774 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5775 && operand_equal_p (TREE_OPERAND (exp, 1),
5776 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5777 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5778 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5779 && operand_equal_p (TREE_OPERAND (exp, 2),
5780 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5781 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5782 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5783 && operand_equal_p (TREE_OPERAND (exp, 1),
5784 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5785 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5787 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5788 operation, do this as A + (X != 0). Similarly for other simple
5789 binary operators. */
5790 if (temp && singleton && binary_op
5791 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5792 && (TREE_CODE (binary_op) == PLUS_EXPR
5793 || TREE_CODE (binary_op) == MINUS_EXPR
5794 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5795 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
5796 && integer_onep (TREE_OPERAND (binary_op, 1))
5797 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5799 rtx result;
5800 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5801 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5802 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5803 : xor_optab);
5805 /* If we had X ? A : A + 1, do this as A + (X == 0).
5807 We have to invert the truth value here and then put it
5808 back later if do_store_flag fails. We cannot simply copy
5809 TREE_OPERAND (exp, 0) to another variable and modify that
5810 because invert_truthvalue can modify the tree pointed to
5811 by its argument.  */
5812 if (singleton == TREE_OPERAND (exp, 1))
5813 TREE_OPERAND (exp, 0)
5814 = invert_truthvalue (TREE_OPERAND (exp, 0));
5816 result = do_store_flag (TREE_OPERAND (exp, 0),
5817 (safe_from_p (temp, singleton)
5818 ? temp : NULL_RTX),
5819 mode, BRANCH_COST <= 1);
5821 if (result)
5823 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5824 return expand_binop (mode, boptab, op1, result, temp,
5825 unsignedp, OPTAB_LIB_WIDEN);
5827 else if (singleton == TREE_OPERAND (exp, 1))
5828 TREE_OPERAND (exp, 0)
5829 = invert_truthvalue (TREE_OPERAND (exp, 0));
5832 do_pending_stack_adjust ();
5833 NO_DEFER_POP;
5834 op0 = gen_label_rtx ();
5836 flag = gen_reg_rtx (word_mode);
5837 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5839 if (temp != 0)
5841 /* If the target conflicts with the other operand of the
5842 binary op, we can't use it. Also, we can't use the target
5843 if it is a hard register, because evaluating the condition
5844 might clobber it. */
5845 if ((binary_op
5846 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5847 || (GET_CODE (temp) == REG
5848 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5849 temp = gen_reg_rtx (mode);
5850 store_expr (singleton, temp, 0);
5852 else
5853 expand_expr (singleton,
5854 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5855 dest_left_flag = get_last_insn ();
5856 if (singleton == TREE_OPERAND (exp, 1))
5857 jumpif (TREE_OPERAND (exp, 0), op0);
5859 jumpifnot (TREE_OPERAND (exp, 0), op0);
5861 /* Allows cleanups up to here. */
5862 old_cleanups = cleanups_this_call;
5863 if (binary_op && temp == 0)
5864 /* Just touch the other operand. */
5865 expand_expr (TREE_OPERAND (binary_op, 1),
5866 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5867 else if (binary_op)
5868 store_expr (build (TREE_CODE (binary_op), type,
5869 make_tree (type, temp),
5870 TREE_OPERAND (binary_op, 1)),
5871 temp, 0);
5872 else
5873 store_expr (build1 (TREE_CODE (unary_op), type,
5874 make_tree (type, temp)),
5875 temp, 0);
5877 dest_right_flag = get_last_insn ();
5880 /* This is now done in jump.c and is better done there because it
5881 produces shorter register lifetimes. */
5883 /* Check for both possibilities either constants or variables
5884 in registers (but not the same as the target!). If so, can
5885 save branches by assigning one, branching, and assigning the
5887 else if (temp && GET_MODE (temp) != BLKmode
5888 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5889 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5890 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5891 && DECL_RTL (TREE_OPERAND (exp, 1))
5892 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5893 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5894 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5895 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5896 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5897 && DECL_RTL (TREE_OPERAND (exp, 2))
5898 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5899 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5901 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5902 temp = gen_reg_rtx (mode);
5903 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5904 dest_left_flag = get_last_insn ();
5905 jumpifnot (TREE_OPERAND (exp, 0), op0);
5907 /* Allows cleanups up to here. */
5908 old_cleanups = cleanups_this_call;
5909 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5910 op1 = op0;
5911 dest_right_flag = get_last_insn ();
5914 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5915 comparison operator. If we have one of these cases, set the
5916 output to A, branch on A (cse will merge these two references),
5917 then set the output to FOO. */
5918 else if (temp
5919 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5920 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5921 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5922 TREE_OPERAND (exp, 1), 0)
5923 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5924 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5926 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5927 temp = gen_reg_rtx (mode);
5928 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5929 dest_left_flag = get_last_insn ();
5930 jumpif (TREE_OPERAND (exp, 0), op0);
5932 /* Allows cleanups up to here. */
5933 old_cleanups = cleanups_this_call;
5934 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5935 op1 = op0;
5936 dest_right_flag = get_last_insn ();
5938 else if (temp
5939 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5940 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5941 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5942 TREE_OPERAND (exp, 2), 0)
5943 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5944 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5946 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5947 temp = gen_reg_rtx (mode);
5948 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5949 dest_left_flag = get_last_insn ();
5950 jumpifnot (TREE_OPERAND (exp, 0), op0);
5952 /* Allows cleanups up to here. */
5953 old_cleanups = cleanups_this_call;
5954 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5955 op1 = op0;
5956 dest_right_flag = get_last_insn ();
5958 else
5960 op1 = gen_label_rtx ();
5961 jumpifnot (TREE_OPERAND (exp, 0), op0);
5963 /* Allows cleanups up to here. */
5964 old_cleanups = cleanups_this_call;
5965 if (temp != 0)
5966 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5967 else
5968 expand_expr (TREE_OPERAND (exp, 1),
5969 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5970 dest_left_flag = get_last_insn ();
5972 /* Handle conditional cleanups, if any. */
5973 left_cleanups = defer_cleanups_to (old_cleanups);
5976 emit_jump_insn (gen_jump (op1));
5977 emit_barrier ();
5978 emit_label (op0);
5979 if (temp != 0)
5980 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5981 else
5982 expand_expr (TREE_OPERAND (exp, 2),
5983 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5984 dest_right_flag = get_last_insn ();
5987 /* Handle conditional cleanups, if any. */
5988 right_cleanups = defer_cleanups_to (old_cleanups);
5990 emit_queue ();
5991 emit_label (op1);
5994 /* Add back in any conditional cleanups.  */
5995 if (left_cleanups || right_cleanups)
5997 tree new_cleanups;
5998 tree cond;
5999 rtx last;
6001 /* Now that we know that a flag is needed, go back and add in the
6002 setting of the flag. */
6004 /* Do the left side flag. */
6005 last = get_last_insn ();
6006 /* Flag left cleanups as needed. */
6007 emit_move_insn (flag, const1_rtx);
6008 /* ??? deprecated, use sequences instead. */
6009 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6011 /* Do the right side flag. */
6012 last = get_last_insn ();
6013 /* Flag right cleanups as needed.  */
6014 emit_move_insn (flag, const0_rtx);
6015 /* ??? deprecated, use sequences instead. */
6016 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6018 /* convert flag, which is an rtx, into a tree. */
6019 cond = make_node (RTL_EXPR);
6020 TREE_TYPE (cond) = integer_type_node;
6021 RTL_EXPR_RTL (cond) = flag;
6022 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6023 cond = save_expr (cond);
6025 if (! left_cleanups)
6026 left_cleanups = integer_zero_node;
6027 if (! right_cleanups)
6028 right_cleanups = integer_zero_node;
6029 new_cleanups = build (COND_EXPR, void_type_node,
6030 truthvalue_conversion (cond),
6031 left_cleanups, right_cleanups);
6032 new_cleanups = fold (new_cleanups);
6034 /* Now add in the conditionalized cleanups. */
6036 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6037 (*interim_eh_hook) (NULL_TREE);
6044 int need_exception_region = 0;
6045 /* Something needs to be initialized, but we didn't know
6046 where that thing was when building the tree. For example,
6047 it could be the return value of a function, or a parameter
6048 to a function which is passed on the stack, or a temporary
6049 variable which must be passed by reference.
6051 We guarantee that the expression will either be constructed
6052 or copied into our original target. */
6054 tree slot = TREE_OPERAND (exp, 0);
6058 if (TREE_CODE (slot) != VAR_DECL)
6062 target = original_target;
6066 if (DECL_RTL (slot) != 0)
6068 target = DECL_RTL (slot);
6069 /* If we have already expanded the slot, don't do it again. */
6071 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6076 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6077 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6078 /* All temp slots at this level must not conflict. */
6079 preserve_temp_slots (target);
6080 DECL_RTL (slot) = target;
6082 /* Since SLOT is not known to the called function
6083 to belong to its stack frame, we must build an explicit
6084 cleanup. This case occurs when we must build up a reference
6085 to pass the reference as an argument. In this case,
6086 it is very likely that such a reference need not be built here. */
6089 if (TREE_OPERAND (exp, 2) == 0)
6090 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6091 if (TREE_OPERAND (exp, 2))
6093 cleanups_this_call = tree_cons (NULL_TREE,
6094 TREE_OPERAND (exp, 2),
6095 cleanups_this_call);
6096 need_exception_region = 1;
6102 /* This case does occur when expanding a parameter which
6103 needs to be constructed on the stack. The target
6104 is the actual stack address that we want to initialize.
6105 The function we call will perform the cleanup in this case. */
6107 /* If we have already assigned it space, use that space,
6108 not the target that we were passed in, as our target
6109 parameter is only a hint. */
6110 if (DECL_RTL (slot) != 0)
6112 target = DECL_RTL (slot);
6113 /* If we have already expanded the slot, don't do it again. */
6115 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6119 DECL_RTL (slot) = target;
6122 exp1 = TREE_OPERAND (exp, 1);
6123 /* Mark it as expanded. */
6124 TREE_OPERAND (exp, 1) = NULL_TREE;
6126 temp = expand_expr (exp1, target, tmode, modifier);
6128 if (need_exception_region)
6129 (*interim_eh_hook) (NULL_TREE);
6136 tree lhs = TREE_OPERAND (exp, 0);
6137 tree rhs = TREE_OPERAND (exp, 1);
6138 tree noncopied_parts = 0;
6139 tree lhs_type = TREE_TYPE (lhs);
6141 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6142 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6143 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6144 TYPE_NONCOPIED_PARTS (lhs_type));
6145 while (noncopied_parts != 0)
6147 expand_assignment (TREE_VALUE (noncopied_parts),
6148 TREE_PURPOSE (noncopied_parts), 0, 0);
6149 noncopied_parts = TREE_CHAIN (noncopied_parts);
6156 /* If lhs is complex, expand calls in rhs before computing it.
6157 That's so we don't compute a pointer and save it over a call.
6158 If lhs is simple, compute it first so we can give it as a
6159 target if the rhs is just a call. This avoids an extra temp and copy
6160 and that prevents a partial-subsumption which makes bad code.
6161 Actually we could treat component_ref's of vars like vars. */
6163 tree lhs = TREE_OPERAND (exp, 0);
6164 tree rhs = TREE_OPERAND (exp, 1);
6165 tree noncopied_parts = 0;
6166 tree lhs_type = TREE_TYPE (lhs);
6170 if (TREE_CODE (lhs) != VAR_DECL
6171 && TREE_CODE (lhs) != RESULT_DECL
6172 && TREE_CODE (lhs) != PARM_DECL)
6173 preexpand_calls (exp);
6175 /* Check for |= or &= of a bitfield of size 1 into another bitfield
6176 of size 1. In this case (unless we need the result of the
6177 assignment) we can do this more efficiently with a
6178 test followed by an assignment, if necessary.
6180 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6181 things change so we do, this code should be enhanced to support it. */
6184 && TREE_CODE (lhs) == COMPONENT_REF
6185 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6186 || TREE_CODE (rhs) == BIT_AND_EXPR)
6187 && TREE_OPERAND (rhs, 0) == lhs
6188 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6189 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6190 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
6192 rtx label = gen_label_rtx ();
6194 do_jump (TREE_OPERAND (rhs, 1),
6195 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6196 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6197 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6198 (TREE_CODE (rhs) == BIT_IOR_EXPR
6200 : integer_zero_node)),
6202 do_pending_stack_adjust ();
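/* For illustration (hypothetical declarations, not taken from any
   source being compiled): given

	struct s { unsigned int a : 1, b : 1; } x;

   the statement `x.a |= x.b;', when its value is not needed, is
   expanded as if it were

	if (x.b)
	  x.a = 1;

   and `x.a &= x.b;' likewise becomes `if (! x.b) x.a = 0;', so only
   one bitfield extraction and at most one store are performed.  */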
6207 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6208 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6209 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6210 TYPE_NONCOPIED_PARTS (lhs_type));
6212 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6213 while (noncopied_parts != 0)
6215 expand_assignment (TREE_PURPOSE (noncopied_parts),
6216 TREE_VALUE (noncopied_parts), 0, 0);
6217 noncopied_parts = TREE_CHAIN (noncopied_parts);
6222 case PREINCREMENT_EXPR:
6223 case PREDECREMENT_EXPR:
6224 return expand_increment (exp, 0);
6226 case POSTINCREMENT_EXPR:
6227 case POSTDECREMENT_EXPR:
6228 /* Faster to treat as pre-increment if result is not used. */
6229 return expand_increment (exp, ! ignore);
6232 /* If nonzero, TEMP will be set to the address of something that might
6233 be a MEM corresponding to a stack slot. */
6236 /* Are we taking the address of a nested function? */
6237 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6238 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6240 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6241 op0 = force_operand (op0, target);
6243 /* If we are taking the address of something erroneous, just
6245 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6249 /* We make sure to pass const0_rtx down if we came in with
6250 ignore set, to avoid doing the cleanups twice for something. */
6251 op0 = expand_expr (TREE_OPERAND (exp, 0),
6252 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6253 (modifier == EXPAND_INITIALIZER
6254 ? modifier : EXPAND_CONST_ADDRESS));
6256 /* If we are going to ignore the result, OP0 will have been set
6257 to const0_rtx, so just return it. Don't get confused and
6258 think we are taking the address of the constant. */
6262 /* We would like the object in memory. If it is a constant,
6263 we can have it be statically allocated into memory. For
6264 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6265 memory and store the value into it. */
6267 if (CONSTANT_P (op0))
6268 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6270 else if (GET_CODE (op0) == MEM)
6272 mark_temp_addr_taken (op0);
6273 temp = XEXP (op0, 0);
6276 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6277 || GET_CODE (op0) == CONCAT)
6279 /* If this object is in a register, copy it into a stack temporary so that we have something in memory whose address we can take. */
6281 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6282 enum machine_mode inner_mode = TYPE_MODE (inner_type);
6284 = assign_stack_temp (inner_mode,
6285 int_size_in_bytes (inner_type), 1);
6286 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6288 mark_temp_addr_taken (memloc);
6289 emit_move_insn (memloc, op0);
6293 if (GET_CODE (op0) != MEM)
6296 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6298 temp = XEXP (op0, 0);
6299 #ifdef POINTERS_EXTEND_UNSIGNED
6300 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6301 && mode == ptr_mode)
6302 temp = convert_memory_address (ptr_mode, temp);
6307 op0 = force_operand (XEXP (op0, 0), target);
6310 if (flag_force_addr && GET_CODE (op0) != REG)
6311 op0 = force_reg (Pmode, op0);
6313 if (GET_CODE (op0) == REG)
6314 mark_reg_pointer (op0);
6316 /* If we might have had a temp slot, add an equivalent address for it. */
6319 update_temp_slot_address (temp, op0);
6321 #ifdef POINTERS_EXTEND_UNSIGNED
6322 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6323 && mode == ptr_mode)
6324 op0 = convert_memory_address (ptr_mode, op0);
6329 case ENTRY_VALUE_EXPR:
6332 /* COMPLEX type for Extended Pascal & Fortran */
6335 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6338 /* Get the rtx code of the operands. */
6339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6340 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
6343 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6347 /* Move the real (op0) and imaginary (op1) parts to their location. */
6348 emit_move_insn (gen_realpart (mode, target), op0);
6349 emit_move_insn (gen_imagpart (mode, target), op1);
6351 insns = get_insns ();
6354 /* Complex construction should appear as a single unit. */
6355 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6356 each with a separate pseudo as destination.
6357 It's not correct for flow to treat them as a unit. */
6358 if (GET_CODE (target) != CONCAT)
6359 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
6367 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6368 return gen_realpart (mode, op0);
6371 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6372 return gen_imagpart (mode, op0);
6376 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6380 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6383 target = gen_reg_rtx (mode);
6387 /* Store the realpart and the negated imagpart to target. */
6388 emit_move_insn (gen_realpart (partmode, target),
6389 gen_realpart (partmode, op0));
6391 imag_t = gen_imagpart (partmode, target);
6392 temp = expand_unop (partmode, neg_optab,
6393 gen_imagpart (partmode, op0), imag_t, 0);
6395 emit_move_insn (imag_t, temp);
6397 insns = get_insns ();
6400 /* Conjugate should appear as a single unit.
6401 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6402 each with a separate pseudo as destination.
6403 It's not correct for flow to treat them as a unit. */
6404 if (GET_CODE (target) != CONCAT)
6405 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
6413 op0 = CONST0_RTX (tmode);
6419 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6422 /* Here to do an ordinary binary operator, generating an instruction
6423 from the optab already placed in `this_optab'. */
6425 preexpand_calls (exp);
6426 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6428 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6429 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6431 temp = expand_binop (mode, this_optab, op0, op1, target,
6432 unsignedp, OPTAB_LIB_WIDEN);
6439 /* Emit bytecode to evaluate the given expression EXP to the stack. */
6441 bc_expand_expr (exp)
6444 enum tree_code code;
6447 struct binary_operator *binoptab;
6448 struct unary_operator *unoptab;
6449 struct increment_operator *incroptab;
6450 struct bc_label *lab, *lab1;
6451 enum bytecode_opcode opcode;
6454 code = TREE_CODE (exp);
6460 if (DECL_RTL (exp) == 0)
6462 error_with_decl (exp, "prior parameter's size depends on `%s'");
6466 bc_load_parmaddr (DECL_RTL (exp));
6467 bc_load_memory (TREE_TYPE (exp), exp);
6473 if (DECL_RTL (exp) == 0)
6477 if (BYTECODE_LABEL (DECL_RTL (exp)))
6478 bc_load_externaddr (DECL_RTL (exp));
6480 bc_load_localaddr (DECL_RTL (exp));
6482 if (TREE_PUBLIC (exp))
6483 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6484 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6486 bc_load_localaddr (DECL_RTL (exp));
6488 bc_load_memory (TREE_TYPE (exp), exp);
6493 #ifdef DEBUG_PRINT_CODE
6494 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
6496 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
6498 : TYPE_MODE (TREE_TYPE (exp)))],
6499 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
6505 #ifdef DEBUG_PRINT_CODE
6506 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
6508 /* FIX THIS: find a better way to pass real_cst's. -bson */
6509 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6510 (double) TREE_REAL_CST (exp));
6519 /* We build a call description vector describing the type of
6520 the return value and of the arguments; this call vector,
6521 together with a pointer to a location for the return value
6522 and the base of the argument list, is passed to the low
6523 level machine dependent call subroutine, which is responsible
6524 for putting the arguments wherever real functions expect
6525 them, as well as getting the return value back. */
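/* A sketch of the resulting layout, inferred from the construction
   code below (the type codes are those of bc_runtime_type_code):

	{ nargs,
	  return-type-code, return-size,
	  arg1-type-code, arg1-size,
	  arg2-type-code, arg2-size, ... }  */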
6527 tree calldesc = 0, arg;
6531 /* Push the evaluated args on the evaluation stack in reverse
6532 order. Also make an entry for each arg in the calldesc
6533 vector while we're at it. */
6535 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6537 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6540 bc_expand_expr (TREE_VALUE (arg));
6542 calldesc = tree_cons ((tree) 0,
6543 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6545 calldesc = tree_cons ((tree) 0,
6546 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6550 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6552 /* Allocate a location for the return value and push its
6553 address on the evaluation stack. Also make an entry
6554 at the front of the calldesc for the return value type. */
6556 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6557 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6558 bc_load_localaddr (retval);
6560 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6561 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6563 /* Prepend the argument count. */
6564 calldesc = tree_cons ((tree) 0,
6565 build_int_2 (nargs, 0),
6568 /* Push the address of the call description vector on the stack. */
6569 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6570 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6571 build_index_type (build_int_2 (nargs * 2, 0)));
6572 r = output_constant_def (calldesc);
6573 bc_load_externaddr (r);
6575 /* Push the address of the function to be called. */
6576 bc_expand_expr (TREE_OPERAND (exp, 0));
6578 /* Call the function, popping its address and the calldesc vector
6579 address off the evaluation stack in the process. */
6580 bc_emit_instruction (call);
6582 /* Pop the arguments off the stack. */
6583 bc_adjust_stack (nargs);
6585 /* Load the return value onto the stack. */
6586 bc_load_localaddr (retval);
6587 bc_load_memory (type, TREE_OPERAND (exp, 0));
6593 if (!SAVE_EXPR_RTL (exp))
6595 /* First time around: copy to local variable */
6596 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6597 TYPE_ALIGN (TREE_TYPE(exp)));
6598 bc_expand_expr (TREE_OPERAND (exp, 0));
6599 bc_emit_instruction (duplicate);
6601 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6602 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6606 /* Consecutive reference: use saved copy */
6607 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6608 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6613 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6614 how are they handled instead? */
6617 TREE_USED (exp) = 1;
6618 bc_expand_expr (STMT_BODY (exp));
6625 bc_expand_expr (TREE_OPERAND (exp, 0));
6626 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6631 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6636 bc_expand_address (TREE_OPERAND (exp, 0));
6641 bc_expand_expr (TREE_OPERAND (exp, 0));
6642 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6647 bc_expand_expr (bc_canonicalize_array_ref (exp));
6652 bc_expand_component_address (exp);
6654 /* If we have a bitfield, generate a proper load */
6655 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6660 bc_expand_expr (TREE_OPERAND (exp, 0));
6661 bc_emit_instruction (drop);
6662 bc_expand_expr (TREE_OPERAND (exp, 1));
6667 bc_expand_expr (TREE_OPERAND (exp, 0));
6668 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6669 lab = bc_get_bytecode_label ();
6670 bc_emit_bytecode (xjumpifnot);
6671 bc_emit_bytecode_labelref (lab);
6673 #ifdef DEBUG_PRINT_CODE
6674 fputc ('\n', stderr);
6676 bc_expand_expr (TREE_OPERAND (exp, 1));
6677 lab1 = bc_get_bytecode_label ();
6678 bc_emit_bytecode (jump);
6679 bc_emit_bytecode_labelref (lab1);
6681 #ifdef DEBUG_PRINT_CODE
6682 fputc ('\n', stderr);
6685 bc_emit_bytecode_labeldef (lab);
6686 bc_expand_expr (TREE_OPERAND (exp, 2));
6687 bc_emit_bytecode_labeldef (lab1);
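/* The shape of the bytecode emitted above for `a ? b : c', with LAB
   and LAB1 standing for `lab' and `lab1':

	 <a> <truth conversion>
	 xjumpifnot LAB
	 <b>
	 jump LAB1
	LAB: <c>
	LAB1:  */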
6690 case TRUTH_ANDIF_EXPR:
6692 opcode = xjumpifnot;
6695 case TRUTH_ORIF_EXPR:
6702 binoptab = optab_plus_expr;
6707 binoptab = optab_minus_expr;
6712 binoptab = optab_mult_expr;
6715 case TRUNC_DIV_EXPR:
6716 case FLOOR_DIV_EXPR:
6718 case ROUND_DIV_EXPR:
6719 case EXACT_DIV_EXPR:
6721 binoptab = optab_trunc_div_expr;
6724 case TRUNC_MOD_EXPR:
6725 case FLOOR_MOD_EXPR:
6727 case ROUND_MOD_EXPR:
6729 binoptab = optab_trunc_mod_expr;
6732 case FIX_ROUND_EXPR:
6733 case FIX_FLOOR_EXPR:
6735 abort (); /* Not used for C. */
6737 case FIX_TRUNC_EXPR:
6744 abort (); /* FIXME */
6748 binoptab = optab_rdiv_expr;
6753 binoptab = optab_bit_and_expr;
6758 binoptab = optab_bit_ior_expr;
6763 binoptab = optab_bit_xor_expr;
6768 binoptab = optab_lshift_expr;
6773 binoptab = optab_rshift_expr;
6776 case TRUTH_AND_EXPR:
6778 binoptab = optab_truth_and_expr;
6783 binoptab = optab_truth_or_expr;
6788 binoptab = optab_lt_expr;
6793 binoptab = optab_le_expr;
6798 binoptab = optab_ge_expr;
6803 binoptab = optab_gt_expr;
6808 binoptab = optab_eq_expr;
6813 binoptab = optab_ne_expr;
6818 unoptab = optab_negate_expr;
6823 unoptab = optab_bit_not_expr;
6826 case TRUTH_NOT_EXPR:
6828 unoptab = optab_truth_not_expr;
6831 case PREDECREMENT_EXPR:
6833 incroptab = optab_predecrement_expr;
6836 case PREINCREMENT_EXPR:
6838 incroptab = optab_preincrement_expr;
6841 case POSTDECREMENT_EXPR:
6843 incroptab = optab_postdecrement_expr;
6846 case POSTINCREMENT_EXPR:
6848 incroptab = optab_postincrement_expr;
6853 bc_expand_constructor (exp);
6863 tree vars = TREE_OPERAND (exp, 0);
6864 int vars_need_expansion = 0;
6866 /* Need to open a binding contour here because
6867 if there are any cleanups they must be contained here. */
6868 expand_start_bindings (0);
6870 /* Mark the corresponding BLOCK for output. */
6871 if (TREE_OPERAND (exp, 2) != 0)
6872 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6874 /* If VARS have not yet been expanded, expand them now. */
6877 if (DECL_RTL (vars) == 0)
6879 vars_need_expansion = 1;
6882 expand_decl_init (vars);
6883 vars = TREE_CHAIN (vars);
6886 bc_expand_expr (TREE_OPERAND (exp, 1));
6888 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6898 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6899 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6905 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6911 bc_expand_expr (TREE_OPERAND (exp, 0));
6912 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6913 lab = bc_get_bytecode_label ();
6915 bc_emit_instruction (duplicate);
6916 bc_emit_bytecode (opcode);
6917 bc_emit_bytecode_labelref (lab);
6919 #ifdef DEBUG_PRINT_CODE
6920 fputc ('\n', stderr);
6923 bc_emit_instruction (drop);
6925 bc_expand_expr (TREE_OPERAND (exp, 1));
6926 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6927 bc_emit_bytecode_labeldef (lab);
6933 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6935 /* Push the quantum. */
6936 bc_expand_expr (TREE_OPERAND (exp, 1));
6938 /* Convert it to the lvalue's type. */
6939 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6941 /* Push the address of the lvalue */
6942 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6944 /* Perform actual increment */
6945 bc_expand_increment (incroptab, type);
6949 /* Return the alignment in bits of EXP, a pointer valued expression.
6950 But don't return more than MAX_ALIGN no matter what.
6951 The alignment returned is, by default, the alignment of the thing that
6952 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6954 Otherwise, look at the expression to see if we can do better, i.e., if the
6955 expression is actually pointing at an object whose alignment is tighter. */
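/* For example (hypothetical code, assuming 8-bit units and 64-bit
   aligned doubles): for the expression `(char *) &d + 2', where `d'
   is a double, the PLUS_EXPR case below restricts the maximum
   alignment to 16 bits, so even though `d' itself is 64-bit aligned
   the value returned is 16: the pointer is only known to point 2
   bytes past a double boundary.  */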
6958 get_pointer_alignment (exp, max_align)
6962 unsigned align, inner;
6964 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6967 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6968 align = MIN (align, max_align);
6972 switch (TREE_CODE (exp))
6976 case NON_LVALUE_EXPR:
6977 exp = TREE_OPERAND (exp, 0);
6978 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6980 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6981 align = MIN (inner, max_align);
6985 /* If sum of pointer + int, restrict our maximum alignment to that
6986 imposed by the integer. If not, we can't do any better than ALIGN. */
6988 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6991 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6996 exp = TREE_OPERAND (exp, 0);
7000 /* See what we are pointing at and look at its alignment. */
7001 exp = TREE_OPERAND (exp, 0);
7002 if (TREE_CODE (exp) == FUNCTION_DECL)
7003 align = FUNCTION_BOUNDARY;
7004 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7005 align = DECL_ALIGN (exp);
7006 #ifdef CONSTANT_ALIGNMENT
7007 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7008 align = CONSTANT_ALIGNMENT (exp, align);
7010 return MIN (align, max_align);
7018 /* Return the tree node and offset if a given argument corresponds to
7019 a string constant. */
7022 string_constant (arg, ptr_offset)
7028 if (TREE_CODE (arg) == ADDR_EXPR
7029 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7031 *ptr_offset = integer_zero_node;
7032 return TREE_OPERAND (arg, 0);
7034 else if (TREE_CODE (arg) == PLUS_EXPR)
7036 tree arg0 = TREE_OPERAND (arg, 0);
7037 tree arg1 = TREE_OPERAND (arg, 1);
7042 if (TREE_CODE (arg0) == ADDR_EXPR
7043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7046 return TREE_OPERAND (arg0, 0);
7048 else if (TREE_CODE (arg1) == ADDR_EXPR
7049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7052 return TREE_OPERAND (arg1, 0);
7059 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7060 way, because it could contain a zero byte in the middle.
7061 TREE_STRING_LENGTH is the size of the character array, not the string.
7063 Unfortunately, string_constant can't access the values of const char
7064 arrays with initializers, so neither can we here. */
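/* For example, for the (hypothetical) argument `"foo\0bar" + 2',
   TREE_STRING_LENGTH is 8 (the whole array, counting both nulls),
   whereas the length computed here is strlen ("o"), i.e. 1.  */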
7074 src = string_constant (src, &offset_node);
7077 max = TREE_STRING_LENGTH (src);
7078 ptr = TREE_STRING_POINTER (src);
7079 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7081 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7082 compute the offset to the following null if we don't know where to
7083 start searching for it. */
7085 for (i = 0; i < max; i++)
7088 /* We don't know the starting offset, but we do know that the string
7089 has no internal zero bytes. We can assume that the offset falls
7090 within the bounds of the string; otherwise, the programmer deserves
7091 what he gets. Subtract the offset from the length of the string, and return that. */
7093 /* This would perhaps not be valid if we were dealing with named
7094 arrays in addition to literal string constants. */
7095 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7098 /* We have a known offset into the string. Start searching there for
7099 a null character. */
7100 if (offset_node == 0)
7104 /* Did we get a long long offset? If so, punt. */
7105 if (TREE_INT_CST_HIGH (offset_node) != 0)
7107 offset = TREE_INT_CST_LOW (offset_node);
7109 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7111 if (offset < 0 || offset > max)
7113 warning ("offset outside bounds of constant string");
7116 /* Use strlen to search for the first zero byte. Since any strings
7117 constructed with build_string will have nulls appended, we win even
7118 if we get handed something like (char[4])"abcd".
7120 Since OFFSET is our starting index into the string, no further
7121 calculation is needed. */
7122 return size_int (strlen (ptr + offset));
7125 /* Expand an expression EXP that calls a built-in function,
7126 with result going to TARGET if that's convenient
7127 (and in mode MODE if that's convenient).
7128 SUBTARGET may be used as the target for computing one of EXP's operands.
7129 IGNORE is nonzero if the value is to be ignored. */
7131 #define CALLED_AS_BUILT_IN(NODE) \
7132 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
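/* E.g., this matches a call written `__builtin_memcpy (...)' but not
   one written `memcpy (...)', so the expanders below can tell whether
   the builtin was requested by name even when not optimizing.  */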
7135 expand_builtin (exp, target, subtarget, mode, ignore)
7139 enum machine_mode mode;
7142 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7143 tree arglist = TREE_OPERAND (exp, 1);
7146 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7147 optab builtin_optab;
7149 switch (DECL_FUNCTION_CODE (fndecl))
7154 /* build_function_call changes these into ABS_EXPR. */
7159 /* Treat these like sqrt, but only if the user asks for them. */
7160 if (! flag_fast_math)
7162 case BUILT_IN_FSQRT:
7163 /* If not optimizing, call the library function. */
7168 /* Arg could be wrong type if user redeclared this fcn wrong. */
7169 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7172 /* Stabilize and compute the argument. */
7173 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7174 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7176 exp = copy_node (exp);
7177 arglist = copy_node (arglist);
7178 TREE_OPERAND (exp, 1) = arglist;
7179 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7181 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7183 /* Make a suitable register to place result in. */
7184 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7189 switch (DECL_FUNCTION_CODE (fndecl))
7192 builtin_optab = sin_optab; break;
7194 builtin_optab = cos_optab; break;
7195 case BUILT_IN_FSQRT:
7196 builtin_optab = sqrt_optab; break;
7201 /* Compute into TARGET.
7202 Set TARGET to wherever the result comes back. */
7203 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7204 builtin_optab, op0, target, 0);
7206 /* If we were unable to expand via the builtin, stop the
7207 sequence (without outputting the insns) and break, causing
7208 a call to the library function. */
7215 /* Check the results by default. But if flag_fast_math is turned on,
7216 then assume sqrt will always be called with valid arguments. */
7218 if (! flag_fast_math)
7220 /* Don't define the builtin FP instructions
7221 if your machine is not IEEE. */
7222 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7225 lab1 = gen_label_rtx ();
7227 /* Test the result; if it is NaN, set errno=EDOM because
7228 the argument was not in the domain. */
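/* A NaN is the only value that compares unequal to itself, so the
   beq below is taken, skipping the errno code, exactly when the
   result is an ordinary number.  */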
7229 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7230 emit_jump_insn (gen_beq (lab1));
7234 #ifdef GEN_ERRNO_RTX
7235 rtx errno_rtx = GEN_ERRNO_RTX;
7238 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7241 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7244 /* We can't set errno=EDOM directly; let the library call do it.
7245 Pop the arguments right away in case the call gets deleted. */
7247 expand_call (exp, target, 0);
7254 /* Output the entire sequence. */
7255 insns = get_insns ();
7261 /* __builtin_apply_args returns a block of memory allocated on
7262 the stack into which are stored the arg pointer, structure
7263 value address, static chain, and all the registers that might
7264 possibly be used in performing a function call. The code is
7265 moved to the start of the function so the incoming values are saved. */
7267 case BUILT_IN_APPLY_ARGS:
7268 /* Don't do __builtin_apply_args more than once in a function.
7269 Save the result of the first call and reuse it. */
7270 if (apply_args_value != 0)
7271 return apply_args_value;
7273 /* When this function is called, it means that registers must be
7274 saved on entry to this function. So we migrate the
7275 call to the first insn of this function. */
7280 temp = expand_builtin_apply_args ();
7284 apply_args_value = temp;
7286 /* Put the sequence after the NOTE that starts the function.
7287 If this is inside a SEQUENCE, make the outer-level insn
7288 chain current, so the code is placed at the start of the function. */
7290 push_topmost_sequence ();
7291 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7292 pop_topmost_sequence ();
7296 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7297 FUNCTION with a copy of the parameters described by
7298 ARGUMENTS, and ARGSIZE. It returns a block of memory
7299 allocated on the stack into which are stored all the registers
7300 that might possibly be used for returning the result of a
7301 function. ARGUMENTS is the value returned by
7302 __builtin_apply_args. ARGSIZE is the number of bytes of
7303 arguments that must be copied. ??? How should this value be
7304 computed? We'll also need a safe worst case value for varargs functions. */
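/* A sketch of the intended source-level use (hypothetical wrapper,
   with a guessed worst-case argument size of 64 bytes):

	void *wrapper (void (*fn) ())
	{
	  void *args = __builtin_apply_args ();
	  void *result = __builtin_apply (fn, args, 64);
	  __builtin_return (result);
	}  */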
7306 case BUILT_IN_APPLY:
7308 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7309 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7310 || TREE_CHAIN (arglist) == 0
7311 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7312 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7313 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7321 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7322 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
7324 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7327 /* __builtin_return (RESULT) causes the function to return the
7328 value described by RESULT. RESULT is the address of the block of
7329 memory returned by __builtin_apply. */
7330 case BUILT_IN_RETURN:
7332 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7333 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7334 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7335 NULL_RTX, VOIDmode, 0));
7338 case BUILT_IN_SAVEREGS:
7339 /* Don't do __builtin_saveregs more than once in a function.
7340 Save the result of the first call and reuse it. */
7341 if (saveregs_value != 0)
7342 return saveregs_value;
7344 /* When this function is called, it means that registers must be
7345 saved on entry to this function. So we migrate the
7346 call to the first insn of this function. */
7350 /* Now really call the function. `expand_call' does not call
7351 expand_builtin, so there is no danger of infinite recursion here. */
7354 #ifdef EXPAND_BUILTIN_SAVEREGS
7355 /* Do whatever the machine needs done in this case. */
7356 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7358 /* The register where the function returns its value
7359 is likely to have something else in it, such as an argument.
7360 So preserve that register around the call. */
7362 if (value_mode != VOIDmode)
7364 rtx valreg = hard_libcall_value (value_mode);
7365 rtx saved_valreg = gen_reg_rtx (value_mode);
7367 emit_move_insn (saved_valreg, valreg);
7368 temp = expand_call (exp, target, ignore);
7369 emit_move_insn (valreg, saved_valreg);
7372 /* Generate the call, putting the value in a pseudo. */
7373 temp = expand_call (exp, target, ignore);
7379 saveregs_value = temp;
7381 /* Put the sequence after the NOTE that starts the function.
7382 If this is inside a SEQUENCE, make the outer-level insn
7383 chain current, so the code is placed at the start of the function. */
7385 push_topmost_sequence ();
7386 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7387 pop_topmost_sequence ();
7391 /* __builtin_args_info (N) returns word N of the arg space info
7392 for the current function. The number and meanings of the words
7393 are controlled by the definition of CUMULATIVE_ARGS. */
7394 case BUILT_IN_ARGS_INFO:
7396 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7398 int *word_ptr = (int *) &current_function_args_info;
7399 tree type, elts, result;
7401 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7402 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7403 __FILE__, __LINE__);
7407 tree arg = TREE_VALUE (arglist);
7408 if (TREE_CODE (arg) != INTEGER_CST)
7409 error ("argument of `__builtin_args_info' must be constant");
7412 int wordnum = TREE_INT_CST_LOW (arg);
7414 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7415 error ("argument of `__builtin_args_info' out of range");
7417 return GEN_INT (word_ptr[wordnum]);
7421 error ("missing argument in `__builtin_args_info'");
7426 for (i = 0; i < nwords; i++)
7427 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7429 type = build_array_type (integer_type_node,
7430 build_index_type (build_int_2 (nwords, 0)));
7431 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7432 TREE_CONSTANT (result) = 1;
7433 TREE_STATIC (result) = 1;
7434 result = build (INDIRECT_REF, build_pointer_type (type), result);
7435 TREE_CONSTANT (result) = 1;
7436 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7440 /* Return the address of the first anonymous stack arg. */
7441 case BUILT_IN_NEXT_ARG:
7443 tree fntype = TREE_TYPE (current_function_decl);
7445 if ((TYPE_ARG_TYPES (fntype) == 0
7446 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
7448 && ! current_function_varargs)
7450 error ("`va_start' used in function with fixed args");
7456 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7457 tree arg = TREE_VALUE (arglist);
7459 /* Strip off all nops for the sake of the comparison. This
7460 is not quite the same as STRIP_NOPS. It does more. */
7461 while (TREE_CODE (arg) == NOP_EXPR
7462 || TREE_CODE (arg) == CONVERT_EXPR
7463 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7464 arg = TREE_OPERAND (arg, 0);
7465 if (arg != last_parm)
7466 warning ("second parameter of `va_start' not last named argument");
7468 else if (! current_function_varargs)
7469 /* Evidently an out of date version of <stdarg.h>; can't validate
7470 va_start's second argument, but can still work as intended. */
7471 warning ("`__builtin_next_arg' called without an argument");
7474 return expand_binop (Pmode, add_optab,
7475 current_function_internal_arg_pointer,
7476 current_function_arg_offset_rtx,
7477 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7479 case BUILT_IN_CLASSIFY_TYPE:
7482 tree type = TREE_TYPE (TREE_VALUE (arglist));
7483 enum tree_code code = TREE_CODE (type);
7484 if (code == VOID_TYPE)
7485 return GEN_INT (void_type_class);
7486 if (code == INTEGER_TYPE)
7487 return GEN_INT (integer_type_class);
7488 if (code == CHAR_TYPE)
7489 return GEN_INT (char_type_class);
7490 if (code == ENUMERAL_TYPE)
7491 return GEN_INT (enumeral_type_class);
7492 if (code == BOOLEAN_TYPE)
7493 return GEN_INT (boolean_type_class);
7494 if (code == POINTER_TYPE)
7495 return GEN_INT (pointer_type_class);
7496 if (code == REFERENCE_TYPE)
7497 return GEN_INT (reference_type_class);
7498 if (code == OFFSET_TYPE)
7499 return GEN_INT (offset_type_class);
7500 if (code == REAL_TYPE)
7501 return GEN_INT (real_type_class);
7502 if (code == COMPLEX_TYPE)
7503 return GEN_INT (complex_type_class);
7504 if (code == FUNCTION_TYPE)
7505 return GEN_INT (function_type_class);
7506 if (code == METHOD_TYPE)
7507 return GEN_INT (method_type_class);
7508 if (code == RECORD_TYPE)
7509 return GEN_INT (record_type_class);
7510 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7511 return GEN_INT (union_type_class);
7512 if (code == ARRAY_TYPE)
7514 if (TYPE_STRING_FLAG (type))
7515 return GEN_INT (string_type_class);
7517 return GEN_INT (array_type_class);
7519 if (code == SET_TYPE)
7520 return GEN_INT (set_type_class);
7521 if (code == FILE_TYPE)
7522 return GEN_INT (file_type_class);
7523 if (code == LANG_TYPE)
7524 return GEN_INT (lang_type_class);
7526 return GEN_INT (no_type_class);
7528 case BUILT_IN_CONSTANT_P:
7533 tree arg = TREE_VALUE (arglist);
7536 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7537 || (TREE_CODE (arg) == ADDR_EXPR
7538 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7539 ? const1_rtx : const0_rtx);
7542 case BUILT_IN_FRAME_ADDRESS:
7543 /* The argument must be a nonnegative integer constant.
7544 It counts the number of frames to scan up the stack.
7545 The value is the address of that frame. */
7546 case BUILT_IN_RETURN_ADDRESS:
7547 /* The argument must be a nonnegative integer constant.
7548 It counts the number of frames to scan up the stack.
7549 The value is the return address saved in that frame. */
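/* E.g., `__builtin_return_address (0)' yields the current function's
   own return address, and `__builtin_frame_address (1)' the frame
   address of its caller, provided the frame chain can be followed on
   the target.  */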
7551 /* Warning about missing arg was already issued. */
7553 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7555 error ("invalid arg to `__builtin_return_address'");
7558 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
7560 error ("invalid arg to `__builtin_return_address'");
7565 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7566 rtx tem = frame_pointer_rtx;
7569 /* Some machines need special handling before we can access arbitrary
7570 frames. For example, on the sparc, we must first flush all
7571 register windows to the stack. */
7572 #ifdef SETUP_FRAME_ADDRESSES
7573 SETUP_FRAME_ADDRESSES ();
7576 /* On the sparc, the return address is not in the frame; it is
7577 in a register. There is no way to access it off of the current
7578 frame pointer, but it can be accessed off the previous frame
7579 pointer by reading the value from the register window save area. */
7581 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7586 /* Scan back COUNT frames to the specified frame. */
7587 for (i = 0; i < count; i++)
7589 /* Assume the dynamic chain pointer is in the word that
7590 the frame address points to, unless otherwise specified. */
7591 #ifdef DYNAMIC_CHAIN_ADDRESS
7592 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7594 tem = memory_address (Pmode, tem);
7595 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7598 /* For __builtin_frame_address, return what we've got. */
7599 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7602 /* For __builtin_return_address,
7603 get the return address from that frame. */
7604 #ifdef RETURN_ADDR_RTX
7605 return RETURN_ADDR_RTX (count, tem);
7607 tem = memory_address (Pmode,
7608 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7609 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7613 case BUILT_IN_ALLOCA:
7615 /* Arg could be non-integer if user redeclared this fcn wrong. */
7616 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7619 /* Compute the argument. */
7620 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7622 /* Allocate the desired space. */
7623 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7626 /* If not optimizing, call the library function. */
7627 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7631 /* Arg could be non-integer if user redeclared this fcn wrong. */
7632 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7635 /* Compute the argument. */
7636 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7637 /* Compute ffs, into TARGET if possible.
7638 Set TARGET to wherever the result comes back. */
7639 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7640 ffs_optab, op0, target, 1);
7645 case BUILT_IN_STRLEN:
7646 /* If not optimizing, call the library function. */
7647 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7651 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7652 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7656 tree src = TREE_VALUE (arglist);
7657 tree len = c_strlen (src);
7660 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7662 rtx result, src_rtx, char_rtx;
7663 enum machine_mode insn_mode = value_mode, char_mode;
7664 enum insn_code icode;
7666 /* If the length is known, just return it. */
7668 return expand_expr (len, target, mode, 0);
7670 /* If SRC is not a pointer type, don't do this operation inline. */
7674 /* Call a function if we can't compute strlen in the right mode. */
7676 while (insn_mode != VOIDmode)
7678 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7679 if (icode != CODE_FOR_nothing)
7682 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7684 if (insn_mode == VOIDmode)
7687 /* Make a place to write the result of the instruction. */
7690 && GET_CODE (result) == REG
7691 && GET_MODE (result) == insn_mode
7692 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7693 result = gen_reg_rtx (insn_mode);
7695 /* Make sure the operands are acceptable to the predicates. */
7697 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7698 result = gen_reg_rtx (insn_mode);
7700 src_rtx = memory_address (BLKmode,
7701 expand_expr (src, NULL_RTX, ptr_mode,
7703 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7704 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7706 char_rtx = const0_rtx;
7707 char_mode = insn_operand_mode[(int)icode][2];
7708 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7709 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7711 emit_insn (GEN_FCN (icode) (result,
7712 gen_rtx (MEM, BLKmode, src_rtx),
7713 char_rtx, GEN_INT (align)));
7715 /* Return the value in the proper mode for this function. */
7716 if (GET_MODE (result) == value_mode)
7718 else if (target != 0)
7720 convert_move (target, result, 0);
7724 return convert_to_mode (value_mode, result, 0);
7727 case BUILT_IN_STRCPY:
7728 /* If not optimizing, call the library function. */
7729 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7733 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7734 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7735 || TREE_CHAIN (arglist) == 0
7736 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7740 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
7745 len = size_binop (PLUS_EXPR, len, integer_one_node);
7747 chainon (arglist, build_tree_list (NULL_TREE, len));
7751 case BUILT_IN_MEMCPY:
7752 /* If not optimizing, call the library function. */
7753 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7757 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7758 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7759 || TREE_CHAIN (arglist) == 0
7760 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7761 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7762 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7766 tree dest = TREE_VALUE (arglist);
7767 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7768 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7772 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7774 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7775 rtx dest_rtx, dest_mem, src_mem;
7777 /* If either SRC or DEST is not a pointer type, don't do
7778 this operation in-line. */
7779 if (src_align == 0 || dest_align == 0)
7781 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7782 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7786 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7787 dest_mem = gen_rtx (MEM, BLKmode,
7788 memory_address (BLKmode, dest_rtx));
7789 /* There could be a void* cast on top of the object. */
7790 while (TREE_CODE (dest) == NOP_EXPR)
7791 dest = TREE_OPERAND (dest, 0);
7792 type = TREE_TYPE (TREE_TYPE (dest));
7793 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7794 src_mem = gen_rtx (MEM, BLKmode,
7795 memory_address (BLKmode,
7796 expand_expr (src, NULL_RTX,
7799 /* There could be a void* cast on top of the object. */
7800 while (TREE_CODE (src) == NOP_EXPR)
7801 src = TREE_OPERAND (src, 0);
7802 type = TREE_TYPE (TREE_TYPE (src));
7803 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7805 /* Copy word part most expediently. */
7806 emit_block_move (dest_mem, src_mem,
7807 expand_expr (len, NULL_RTX, VOIDmode, 0),
7808 MIN (src_align, dest_align));
7809 return force_operand (dest_rtx, NULL_RTX);
7812 /* These comparison functions need an instruction that returns an actual
7813 index. An ordinary compare that just sets the condition codes is not enough. */
7815 #ifdef HAVE_cmpstrsi
7816 case BUILT_IN_STRCMP:
7817 /* If not optimizing, call the library function. */
7818 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7822 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7823 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7824 || TREE_CHAIN (arglist) == 0
7825 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7827 else if (!HAVE_cmpstrsi)
7830 tree arg1 = TREE_VALUE (arglist);
7831 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7835 len = c_strlen (arg1);
7837 len = size_binop (PLUS_EXPR, integer_one_node, len);
7838 len2 = c_strlen (arg2);
7840 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7842 /* If we don't have a constant length for the first, use the length
7843 of the second, if we know it. We don't require a constant for
7844 this case; some cost analysis could be done if both are available
7845 but neither is constant. For now, assume they're equally cheap.
7847 If both strings have constant lengths, use the smaller. This
7848 could arise if optimization results in strcmp being called with
7849 two fixed strings, or if the code was machine-generated. We should
7850 add some code to the `memcmp' handler below to deal with such
7851 situations, someday. */
7852 if (!len || TREE_CODE (len) != INTEGER_CST)
7859 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7861 if (tree_int_cst_lt (len2, len))
7865 chainon (arglist, build_tree_list (NULL_TREE, len));
7869 case BUILT_IN_MEMCMP:
7870 /* If not optimizing, call the library function. */
7871 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
7875 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7876 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7877 || TREE_CHAIN (arglist) == 0
7878 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7879 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7880 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7882 else if (!HAVE_cmpstrsi)
7885 tree arg1 = TREE_VALUE (arglist);
7886 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7887 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7891 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7893 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7894 enum machine_mode insn_mode
7895 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7897 /* If we don't have POINTER_TYPE, call the function. */
7898 if (arg1_align == 0 || arg2_align == 0)
7900 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7901 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7905 /* Make a place to write the result of the instruction. */
7908 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7909 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7910 result = gen_reg_rtx (insn_mode);
7912 emit_insn (gen_cmpstrsi (result,
7913 gen_rtx (MEM, BLKmode,
7914 expand_expr (arg1, NULL_RTX,
7917 gen_rtx (MEM, BLKmode,
7918 expand_expr (arg2, NULL_RTX,
7921 expand_expr (len, NULL_RTX, VOIDmode, 0),
7922 GEN_INT (MIN (arg1_align, arg2_align))));
7924 /* Return the value in the proper mode for this function. */
7925 mode = TYPE_MODE (TREE_TYPE (exp));
7926 if (GET_MODE (result) == mode)
7928 else if (target != 0)
7930 convert_move (target, result, 0);
7934 return convert_to_mode (mode, result, 0);
7937 case BUILT_IN_STRCMP:
7938 case BUILT_IN_MEMCMP:
7942 default: /* just do library call, if unknown builtin */
7943 error ("built-in function `%s' not currently supported",
7944 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7947 /* The switch statement above can drop through to cause the function
7948 to be called normally. */
7950 return expand_call (exp, target, ignore);
7953 /* Built-in functions to perform an untyped call and return. */
7955 /* For each register that may be used for calling a function, this
7956 gives a mode used to copy the register's value. VOIDmode indicates
7957 the register is not used for calling a function. If the machine
7958 has register windows, this gives only the outbound registers.
7959 INCOMING_REGNO gives the corresponding inbound register. */
7960 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7962 /* For each register that may be used for returning values, this gives
7963 a mode used to copy the register's value. VOIDmode indicates the
7964 register is not used for returning values. If the machine has
7965 register windows, this gives only the outbound registers.
7966 INCOMING_REGNO gives the corresponding inbound register. */
7967 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7969 /* For each register that may be used for calling a function, this
7970 gives the offset of that register into the block returned by
7971 __builtin_apply_args. 0 indicates that the register is not
7972 used for calling a function. */
7973 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7975 /* Return the offset of register REGNO into the block returned by
7976 __builtin_apply_args. This is not declared static, since it is
7977 needed in objc-act.c. */
7980 apply_args_register_offset (regno)
7985 /* Arguments are always put in outgoing registers (in the argument
7986 block) if such registers make sense. */
7987 #ifdef OUTGOING_REGNO
7988 regno = OUTGOING_REGNO (regno);
7990 return apply_args_reg_offset[regno];
7993 /* Return the size required for the block returned by __builtin_apply_args,
7994 and initialize apply_args_mode. */
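/* The block laid out below is, in order: the incoming arg pointer,
   the structure value address (if the machine passes one), and then
   each argument register, each field aligned to its mode.  */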
7999 static int size = -1;
8001 enum machine_mode mode;
8003 /* The values computed by this function never change. */
8006 /* The first value is the incoming arg-pointer. */
8007 size = GET_MODE_SIZE (Pmode);
8009 /* The second value is the structure value address unless this is
8010 passed as an "invisible" first argument. */
8011 if (struct_value_rtx)
8012 size += GET_MODE_SIZE (Pmode);
8014 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8015 if (FUNCTION_ARG_REGNO_P (regno))
8017 /* Search for the proper mode for copying this register's
8018 value. I'm not sure this is right, but it works so far. */
8019 enum machine_mode best_mode = VOIDmode;
8021 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8023 mode = GET_MODE_WIDER_MODE (mode))
8024 if (HARD_REGNO_MODE_OK (regno, mode)
8025 && HARD_REGNO_NREGS (regno, mode) == 1)
8028 if (best_mode == VOIDmode)
8029 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8031 mode = GET_MODE_WIDER_MODE (mode))
8032 if (HARD_REGNO_MODE_OK (regno, mode)
8033 && (mov_optab->handlers[(int) mode].insn_code
8034 != CODE_FOR_nothing))
8038 if (mode == VOIDmode)
8041 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8042 if (size % align != 0)
8043 size = CEIL (size, align) * align;
8044 apply_args_reg_offset[regno] = size;
8045 size += GET_MODE_SIZE (mode);
8046 apply_args_mode[regno] = mode;
8050 apply_args_mode[regno] = VOIDmode;
8051 apply_args_reg_offset[regno] = 0;
8057 /* Return the size required for the block returned by __builtin_apply,
8058 and initialize apply_result_mode. */
8061 apply_result_size ()
8063 static int size = -1;
8065 enum machine_mode mode;
8067 /* The values computed by this function never change. */
8072 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8073 if (FUNCTION_VALUE_REGNO_P (regno))
8075 /* Search for the proper mode for copying this register's
8076 value. I'm not sure this is right, but it works so far. */
8077 enum machine_mode best_mode = VOIDmode;
8079 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
8081 mode = GET_MODE_WIDER_MODE (mode))
8082 if (HARD_REGNO_MODE_OK (regno, mode))
8085 if (best_mode == VOIDmode)
8086 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
8088 mode = GET_MODE_WIDER_MODE (mode))
8089 if (HARD_REGNO_MODE_OK (regno, mode)
8090 && (mov_optab->handlers[(int) mode].insn_code
8091 != CODE_FOR_nothing))
8095 if (mode == VOIDmode)
8098 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8099 if (size % align != 0)
8100 size = CEIL (size, align) * align;
8101 size += GET_MODE_SIZE (mode);
8102 apply_result_mode[regno] = mode;
8105 apply_result_mode[regno] = VOIDmode;
8107 /* Allow targets that use untyped_call and untyped_return to override
8108 the size so that machine-specific information can be stored here. */
8109 #ifdef APPLY_RESULT_SIZE
8110 size = APPLY_RESULT_SIZE;
8116 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8117 /* Create a vector describing the result block RESULT. If SAVEP is true,
8118 the result block is used to save the values; otherwise it is used to
8119 restore the values. */
8122 result_vector (savep, result)
8126 int regno, size, align, nelts;
8127 enum machine_mode mode;
8129 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8132 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8133 if ((mode = apply_result_mode[regno]) != VOIDmode)
8135 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8136 if (size % align != 0)
8137 size = CEIL (size, align) * align;
8138 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8139 mem = change_address (result, mode,
8140 plus_constant (XEXP (result, 0), size));
8141 savevec[nelts++] = (savep
8142 ? gen_rtx (SET, VOIDmode, mem, reg)
8143 : gen_rtx (SET, VOIDmode, reg, mem));
8144 size += GET_MODE_SIZE (mode);
8146 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
8148 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8150 /* Save the state required to perform an untyped call with the same
8151 arguments as were passed to the current function. */
8154 expand_builtin_apply_args ()
8157 int size, align, regno;
8158 enum machine_mode mode;
8160 /* Create a block where the arg-pointer, structure value address,
8161 and argument registers can be saved. */
8162 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8164 /* Walk past the arg-pointer and structure value address. */
8165 size = GET_MODE_SIZE (Pmode);
8166 if (struct_value_rtx)
8167 size += GET_MODE_SIZE (Pmode);
8169 /* Save each register used in calling a function to the block. */
8170 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8171 if ((mode = apply_args_mode[regno]) != VOIDmode)
8175 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8176 if (size % align != 0)
8177 size = CEIL (size, align) * align;
8179 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8182 /* For reg-stack.c's stack register housekeeping.
8183 Compare with a similar piece of code in function.c. */
8185 emit_insn (gen_rtx (USE, mode, tem));
8188 emit_move_insn (change_address (registers, mode,
8189 plus_constant (XEXP (registers, 0),
8192 size += GET_MODE_SIZE (mode);
8195 /* Save the arg pointer to the block. */
8196 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8197 copy_to_reg (virtual_incoming_args_rtx));
8198 size = GET_MODE_SIZE (Pmode);
8200 /* Save the structure value address unless this is passed as an
8201 "invisible" first argument. */
8202 if (struct_value_incoming_rtx)
8204 emit_move_insn (change_address (registers, Pmode,
8205 plus_constant (XEXP (registers, 0),
8207 copy_to_reg (struct_value_incoming_rtx));
8208 size += GET_MODE_SIZE (Pmode);
8211 /* Return the address of the block. */
8212 return copy_addr_to_reg (XEXP (registers, 0));
8215 /* Perform an untyped call and save the state required to perform an
8216 untyped return of whatever value was returned by the given function. */
8219 expand_builtin_apply (function, arguments, argsize)
8220 rtx function, arguments, argsize;
8222 int size, align, regno;
8223 enum machine_mode mode;
8224 rtx incoming_args, result, reg, dest, call_insn;
8225 rtx old_stack_level = 0;
8226 rtx call_fusage = 0;
8228 /* Create a block where the return registers can be saved. */
8229 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8231 /* ??? The argsize value should be adjusted here. */
8233 /* Fetch the arg pointer from the ARGUMENTS block. */
8234 incoming_args = gen_reg_rtx (Pmode);
8235 emit_move_insn (incoming_args,
8236 gen_rtx (MEM, Pmode, arguments));
8237 #ifndef STACK_GROWS_DOWNWARD
8238 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8239 incoming_args, 0, OPTAB_LIB_WIDEN);
8242 /* Perform postincrements before actually calling the function. */
8245 /* Push a new argument block and copy the arguments. */
8246 do_pending_stack_adjust ();
8247 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8249 /* Push a block of memory onto the stack to store the memory arguments.
8250 Save the address in a register, and copy the memory arguments. ??? I
8251 haven't figured out how the calling convention macros affect this,
8252 but it's likely that the source and/or destination addresses in
8253 the block copy will need updating in machine specific ways. */
8254 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8255 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8256 gen_rtx (MEM, BLKmode, incoming_args),
8258 PARM_BOUNDARY / BITS_PER_UNIT);
8260 /* Refer to the argument block. */
8262 arguments = gen_rtx (MEM, BLKmode, arguments);
8264 /* Walk past the arg-pointer and structure value address. */
8265 size = GET_MODE_SIZE (Pmode);
8266 if (struct_value_rtx)
8267 size += GET_MODE_SIZE (Pmode);
8269 /* Restore each of the registers previously saved. Make USE insns
8270 for each of these registers for use in making the call. */
8271 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8272 if ((mode = apply_args_mode[regno]) != VOIDmode)
8274 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8275 if (size % align != 0)
8276 size = CEIL (size, align) * align;
8277 reg = gen_rtx (REG, mode, regno);
8278 emit_move_insn (reg,
8279 change_address (arguments, mode,
8280 plus_constant (XEXP (arguments, 0),
8283 use_reg (&call_fusage, reg);
8284 size += GET_MODE_SIZE (mode);
8287 /* Restore the structure value address unless this is passed as an
8288 "invisible" first argument. */
8289 size = GET_MODE_SIZE (Pmode);
8290 if (struct_value_rtx)
8292 rtx value = gen_reg_rtx (Pmode);
8293 emit_move_insn (value,
8294 change_address (arguments, Pmode,
8295 plus_constant (XEXP (arguments, 0),
8297 emit_move_insn (struct_value_rtx, value);
8298 if (GET_CODE (struct_value_rtx) == REG)
8299 use_reg (&call_fusage, struct_value_rtx);
8300 size += GET_MODE_SIZE (Pmode);
8303 /* All arguments and registers used for the call are set up by now! */
8304 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8306 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8307 and we don't want to load it into a register as an optimization,
8308 because prepare_call_address already did it if it should be done. */
8309 if (GET_CODE (function) != SYMBOL_REF)
8310 function = memory_address (FUNCTION_MODE, function);
8312 /* Generate the actual call instruction and save the return value. */
8313 #ifdef HAVE_untyped_call
8314 if (HAVE_untyped_call)
8315 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8316 result, result_vector (1, result)));
8319 #ifdef HAVE_call_value
8320 if (HAVE_call_value)
8324 /* Locate the unique return register. It is not possible to
8325 express a call that sets more than one return register using
8326 call_value; use untyped_call for that. In fact, untyped_call
8327 only needs to save the return registers in the given block. */
8328 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8329 if ((mode = apply_result_mode[regno]) != VOIDmode)
8332 abort (); /* HAVE_untyped_call required. */
8333 valreg = gen_rtx (REG, mode, regno);
8336 emit_call_insn (gen_call_value (valreg,
8337 gen_rtx (MEM, FUNCTION_MODE, function),
8338 const0_rtx, NULL_RTX, const0_rtx));
8340 emit_move_insn (change_address (result, GET_MODE (valreg),
8348 /* Find the CALL insn we just emitted. */
8349 for (call_insn = get_last_insn ();
8350 call_insn && GET_CODE (call_insn) != CALL_INSN;
8351 call_insn = PREV_INSN (call_insn))
8357 /* Put the register usage information on the CALL. If there is already
8358 some usage information, put ours at the end. */
8359 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8363 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8364 link = XEXP (link, 1))
8367 XEXP (link, 1) = call_fusage;
8370 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8372 /* Restore the stack. */
8373 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8375 /* Return the address of the result block. */
8376 return copy_addr_to_reg (XEXP (result, 0));
8379 /* Perform an untyped return. */
8382 expand_builtin_return (result)
8385 int size, align, regno;
8386 enum machine_mode mode;
8388 rtx call_fusage = 0;
8390 apply_result_size ();
8391 result = gen_rtx (MEM, BLKmode, result);
8393 #ifdef HAVE_untyped_return
8394 if (HAVE_untyped_return)
8396 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8402 /* Restore the return value and note that each value is used. */
8404 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8405 if ((mode = apply_result_mode[regno]) != VOIDmode)
8407 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8408 if (size % align != 0)
8409 size = CEIL (size, align) * align;
8410 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8411 emit_move_insn (reg,
8412 change_address (result, mode,
8413 plus_constant (XEXP (result, 0),
8416 push_to_sequence (call_fusage);
8417 emit_insn (gen_rtx (USE, VOIDmode, reg));
8418 call_fusage = get_insns ();
8420 size += GET_MODE_SIZE (mode);
8423 /* Put the USE insns before the return. */
8424 emit_insns (call_fusage);
8426 /* Return whatever values were restored by jumping directly to the end of the function. */
8428 expand_null_return ();
8431 /* Expand code for a post- or pre- increment or decrement
8432 and return the RTX for the result.
8433 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
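/* An illustrative source-level sketch of the POST flag (not compiled
   here):

	int x = 3, a, b;
	a = x++;	(POST == 1: A gets the old value, 3)
	b = ++x;	(POST == 0: B gets the new value, 5)

   For a postincrement the rtx returned holds the value from before the
   increment; for a preincrement, the value after it.  */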
8436 expand_increment (exp, post)
8440 register rtx op0, op1;
8441 register rtx temp, value;
8442 register tree incremented = TREE_OPERAND (exp, 0);
8443 optab this_optab = add_optab;
8445 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8446 int op0_is_copy = 0;
8447 int single_insn = 0;
8448 /* 1 means we can't store into OP0 directly,
8449 because it is a subreg narrower than a word,
8450 and we don't dare clobber the rest of the word. */
8453 if (output_bytecode)
8455 bc_expand_expr (exp);
8459 /* Stabilize any component ref that might need to be
8460 evaluated more than once below. */
8462 || TREE_CODE (incremented) == BIT_FIELD_REF
8463 || (TREE_CODE (incremented) == COMPONENT_REF
8464 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8465 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8466 incremented = stabilize_reference (incremented);
8467 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8468 ones into save exprs so that they don't accidentally get evaluated
8469 more than once by the code below. */
8470 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8471 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8472 incremented = save_expr (incremented);
8474 /* Compute the operands as RTX.
8475 Note whether OP0 is the actual lvalue or a copy of it:
8476 I believe it is a copy iff it is a register or subreg
8477 and insns were generated in computing it. */
8479 temp = get_last_insn ();
8480 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8482 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8483 in place but instead must do sign- or zero-extension during assignment,
8484 so we copy it into a new register and let the code below use it as a copy.
8487 Note that we can safely modify this SUBREG since it is known not to be
8488 shared (it was made by the expand_expr call above). */
8490 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8493 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8497 else if (GET_CODE (op0) == SUBREG
8498 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8500 /* We cannot increment this SUBREG in place. If we are
8501 post-incrementing, get a copy of the old value. Otherwise,
8502 just mark that we cannot increment in place. */
8504 op0 = copy_to_reg (op0);
8509 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8510 && temp != get_last_insn ());
8511 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8513 /* Decide whether incrementing or decrementing. */
8514 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8515 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8516 this_optab = sub_optab;
8518 /* Convert decrement by a constant into a negative increment. */
8519 if (this_optab == sub_optab
8520 && GET_CODE (op1) == CONST_INT)
8522 op1 = GEN_INT (- INTVAL (op1));
8523 this_optab = add_optab;
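      /* e.g. (illustrative): `x -= 5' reaches this point as SUB_OPTAB with
	 a constant 5 and leaves as the equivalent of `x += -5', so the code
	 below only ever sees an addition when the operand is constant.  */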
8526 /* For a preincrement, see if we can do this with a single instruction. */
8529 icode = (int) this_optab->handlers[(int) mode].insn_code;
8530 if (icode != (int) CODE_FOR_nothing
8531 /* Make sure that OP0 is valid for operands 0 and 1
8532 of the insn we want to queue. */
8533 && (*insn_operand_predicate[icode][0]) (op0, mode)
8534 && (*insn_operand_predicate[icode][1]) (op0, mode)
8535 && (*insn_operand_predicate[icode][2]) (op1, mode))
8539 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8540 then we cannot just increment OP0. We must therefore contrive to
8541 increment the original value. Then, for postincrement, we can return
8542 OP0 since it is a copy of the old value. For preincrement, expand here
8543 unless we can do it with a single insn.
8545 Likewise if storing directly into OP0 would clobber high bits
8546 we need to preserve (bad_subreg). */
8547 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8549 /* This is the easiest way to increment the value wherever it is.
8550 Problems with multiple evaluation of INCREMENTED are prevented
8551 because either (1) it is a component_ref or preincrement,
8552 in which case it was stabilized above, or (2) it is an array_ref
8553 with constant index in an array in a register, which is
8554 safe to reevaluate. */
8555 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8556 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8557 ? MINUS_EXPR : PLUS_EXPR),
8560 TREE_OPERAND (exp, 1));
8562 while (TREE_CODE (incremented) == NOP_EXPR
8563 || TREE_CODE (incremented) == CONVERT_EXPR)
8565 newexp = convert (TREE_TYPE (incremented), newexp);
8566 incremented = TREE_OPERAND (incremented, 0);
8569 temp = expand_assignment (incremented, newexp, ! post, 0);
8570 return post ? op0 : temp;
8575 /* We have a true reference to the value in OP0.
8576 If there is an insn to add or subtract in this mode, queue it.
8577 Queueing the increment insn avoids the register shuffling
8578 that often results if we must increment now and first save
8579 the old value for subsequent use. */
8581 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8582 op0 = stabilize (op0);
8585 icode = (int) this_optab->handlers[(int) mode].insn_code;
8586 if (icode != (int) CODE_FOR_nothing
8587 /* Make sure that OP0 is valid for operands 0 and 1
8588 of the insn we want to queue. */
8589 && (*insn_operand_predicate[icode][0]) (op0, mode)
8590 && (*insn_operand_predicate[icode][1]) (op0, mode))
8592 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8593 op1 = force_reg (mode, op1);
8595 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8599 /* Preincrement, or we can't increment with one simple insn. */
8601 /* Save a copy of the value before inc or dec, to return it later. */
8602 temp = value = copy_to_reg (op0);
8604 /* Arrange to return the incremented value. */
8605 /* Copy the rtx because expand_binop will protect from the queue,
8606 and the results of that would be invalid for us to return
8607 if our caller does emit_queue before using our result. */
8608 temp = copy_rtx (value = op0);
8610 /* Increment however we can. */
8611 op1 = expand_binop (mode, this_optab, value, op1, op0,
8612 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8613 /* Make sure the value is stored into OP0. */
8615 emit_move_insn (op0, op1);
8620 /* Expand all function calls contained within EXP, innermost ones first.
8621 But don't look within expressions that have sequence points.
8622 For each CALL_EXPR, record the rtx for its value
8623 in the CALL_EXPR_RTL field. */
8626 preexpand_calls (exp)
8629 register int nops, i;
8630 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8632 if (! do_preexpand_calls)
8635 /* Only expressions and references can contain calls. */
8637 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8640 switch (TREE_CODE (exp))
8643 /* Do nothing if already expanded. */
8644 if (CALL_EXPR_RTL (exp) != 0)
8647 /* Do nothing to built-in functions. */
8648 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8649 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8650 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8651 /* Do nothing if the call returns a variable-sized object. */
8652 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8653 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8658 case TRUTH_ANDIF_EXPR:
8659 case TRUTH_ORIF_EXPR:
8660 /* If we find one of these, then we can be sure
8661 the adjust will be done for it (since it makes jumps).
8662 Do it now, so that if this is inside an argument
8663 of a function, we don't get the stack adjustment
8664 after some other args have already been pushed. */
8665 do_pending_stack_adjust ();
8670 case WITH_CLEANUP_EXPR:
8671 case CLEANUP_POINT_EXPR:
8675 if (SAVE_EXPR_RTL (exp) != 0)
8679 nops = tree_code_length[(int) TREE_CODE (exp)];
8680 for (i = 0; i < nops; i++)
8681 if (TREE_OPERAND (exp, i) != 0)
8683 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8684 if (type == 'e' || type == '<' || type == '1' || type == '2'
8686 preexpand_calls (TREE_OPERAND (exp, i));
8690 /* At the start of a function, record that we have no previously-pushed
8691 arguments waiting to be popped. */
8694 init_pending_stack_adjust ()
8696 pending_stack_adjust = 0;
8699 /* When exiting from function, if safe, clear out any pending stack adjust
8700 so the adjustment won't get done. */
8703 clear_pending_stack_adjust ()
8705 #ifdef EXIT_IGNORE_STACK
8706 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8707 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8708 && ! flag_inline_functions)
8709 pending_stack_adjust = 0;
8713 /* Pop any previously-pushed arguments that have not been popped yet. */
8716 do_pending_stack_adjust ()
8718 if (inhibit_defer_pop == 0)
8720 if (pending_stack_adjust != 0)
8721 adjust_stack (GEN_INT (pending_stack_adjust));
8722 pending_stack_adjust = 0;
8726 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8727 Returns the cleanups to be performed. */
8730 defer_cleanups_to (old_cleanups)
8733 tree new_cleanups = NULL_TREE;
8734 tree cleanups = cleanups_this_call;
8735 tree last = NULL_TREE;
8737 while (cleanups_this_call != old_cleanups)
8739 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8740 last = cleanups_this_call;
8741 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8746 /* Remove the list from the chain of cleanups. */
8747 TREE_CHAIN (last) = NULL_TREE;
8749 /* Reverse them so that we can build them in the right order. */
8750 cleanups = nreverse (cleanups);
8755 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8756 TREE_VALUE (cleanups), new_cleanups);
8758 new_cleanups = TREE_VALUE (cleanups);
8760 cleanups = TREE_CHAIN (cleanups);
8764 return new_cleanups;
8767 /* Expand all cleanups up to OLD_CLEANUPS.
8768 Needed here, and also for language-dependent calls. */
8771 expand_cleanups_to (old_cleanups)
8774 while (cleanups_this_call != old_cleanups)
8776 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8777 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8778 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8782 /* Expand conditional expressions. */
8784 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8785 LABEL is an rtx of code CODE_LABEL, in this function. */
8789 jumpifnot (exp, label)
8793 do_jump (exp, label, NULL_RTX);
8796 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8803 do_jump (exp, NULL_RTX, label);
8806 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8807 the result is zero, or IF_TRUE_LABEL if the result is one.
8808 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8809 meaning fall through in that case.
8811 do_jump always does any pending stack adjust except when it does not
8812 actually perform a jump. An example where there is no jump
8813 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8815 This function is responsible for optimizing cases such as
8816 &&, || and comparison operators in EXP. */
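/* For instance (an illustrative source-level sketch): `if (a && b) f ();'
   is expanded as the equivalent of

	if (! a) goto l_false;
	if (! b) goto l_false;
	f ();
      l_false: ;

   so no boolean value for `a && b' is ever materialized in a
   register.  */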
8819 do_jump (exp, if_false_label, if_true_label)
8821 rtx if_false_label, if_true_label;
8823 register enum tree_code code = TREE_CODE (exp);
8824 /* Some cases need to create a label to jump to
8825 in order to properly fall through.
8826 These cases set DROP_THROUGH_LABEL nonzero. */
8827 rtx drop_through_label = 0;
8832 enum machine_mode mode;
8842 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8848 /* This is not true with #pragma weak */
8850 /* The address of something can never be zero. */
8852 emit_jump (if_true_label);
8857 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8858 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8859 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8862 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
8864 if ((TYPE_PRECISION (TREE_TYPE (exp))
8865 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8867 case NON_LVALUE_EXPR:
8868 case REFERENCE_EXPR:
8873 /* These cannot change zero->non-zero or vice versa. */
8874 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8878 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8879 a test, and can be longer if the test is eliminated. */
8881 /* Reduce to minus. */
8882 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8883 TREE_OPERAND (exp, 0),
8884 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8885 TREE_OPERAND (exp, 1))));
8886 /* Process as MINUS. */
8890 /* Non-zero iff operands of minus differ. */
8891 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8892 TREE_OPERAND (exp, 0),
8893 TREE_OPERAND (exp, 1)),
8898 /* If we are AND'ing with a small constant, do this comparison in the
8899 smallest type that fits. If the machine doesn't have comparisons
8900 that small, it will be converted back to the wider comparison.
8901 This helps if we are testing the sign bit of a narrower object.
8902 combine can't do this for us because it can't know whether a
8903 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
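	 /* e.g. (illustrative): for an int X, the test `if (x & 0x80)' has
	    floor_log2 (0x80) == 7, so on a machine without SLOW_BYTE_ACCESS
	    it can be done as a QImode (one-byte) comparison, since only the
	    low byte of X matters.  */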
8905 if (! SLOW_BYTE_ACCESS
8906 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8907 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8908 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8909 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8910 && (type = type_for_mode (mode, 1)) != 0
8911 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8912 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8913 != CODE_FOR_nothing))
8915 do_jump (convert (type, exp), if_false_label, if_true_label);
8920 case TRUTH_NOT_EXPR:
8921 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8924 case TRUTH_ANDIF_EXPR:
8927 tree cleanups, old_cleanups;
8929 if (if_false_label == 0)
8930 if_false_label = drop_through_label = gen_label_rtx ();
8932 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8933 seq1 = get_insns ();
8936 old_cleanups = cleanups_this_call;
8938 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8939 seq2 = get_insns ();
8942 cleanups = defer_cleanups_to (old_cleanups);
8945 rtx flag = gen_reg_rtx (word_mode);
8949 /* Flag cleanups as not needed. */
8950 emit_move_insn (flag, const0_rtx);
8953 /* Flag cleanups as needed. */
8954 emit_move_insn (flag, const1_rtx);
8957 /* Convert FLAG, which is an rtx, into a tree. */
8958 cond = make_node (RTL_EXPR);
8959 TREE_TYPE (cond) = integer_type_node;
8960 RTL_EXPR_RTL (cond) = flag;
8961 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8962 cond = save_expr (cond);
8964 new_cleanups = build (COND_EXPR, void_type_node,
8965 truthvalue_conversion (cond),
8966 cleanups, integer_zero_node);
8967 new_cleanups = fold (new_cleanups);
8969 /* Now add in the conditionalized cleanups. */
8971 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8972 (*interim_eh_hook) (NULL_TREE);
8982 case TRUTH_ORIF_EXPR:
8985 tree cleanups, old_cleanups;
8987 if (if_true_label == 0)
8988 if_true_label = drop_through_label = gen_label_rtx ();
8990 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8991 seq1 = get_insns ();
8994 old_cleanups = cleanups_this_call;
8996 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8997 seq2 = get_insns ();
9000 cleanups = defer_cleanups_to (old_cleanups);
9003 rtx flag = gen_reg_rtx (word_mode);
9007 /* Flag cleanups as not needed. */
9008 emit_move_insn (flag, const0_rtx);
9011 /* Flag cleanups as needed. */
9012 emit_move_insn (flag, const1_rtx);
9015 /* Convert FLAG, which is an rtx, into a tree. */
9016 cond = make_node (RTL_EXPR);
9017 TREE_TYPE (cond) = integer_type_node;
9018 RTL_EXPR_RTL (cond) = flag;
9019 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9020 cond = save_expr (cond);
9022 new_cleanups = build (COND_EXPR, void_type_node,
9023 truthvalue_conversion (cond),
9024 cleanups, integer_zero_node);
9025 new_cleanups = fold (new_cleanups);
9027 /* Now add in the conditionalized cleanups. */
9029 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9030 (*interim_eh_hook) (NULL_TREE);
9042 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9046 do_pending_stack_adjust ();
9047 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9054 int bitsize, bitpos, unsignedp;
9055 enum machine_mode mode;
9060 /* Get description of this reference. We don't actually care
9061 about the underlying object here. */
9062 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9063 &mode, &unsignedp, &volatilep);
9065 type = type_for_size (bitsize, unsignedp);
9066 if (! SLOW_BYTE_ACCESS
9067 && type != 0 && bitsize >= 0
9068 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9069 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9070 != CODE_FOR_nothing))
9072 do_jump (convert (type, exp), if_false_label, if_true_label);
9079 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9080 if (integer_onep (TREE_OPERAND (exp, 1))
9081 && integer_zerop (TREE_OPERAND (exp, 2)))
9082 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9084 else if (integer_zerop (TREE_OPERAND (exp, 1))
9085 && integer_onep (TREE_OPERAND (exp, 2)))
9086 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9090 register rtx label1 = gen_label_rtx ();
9091 drop_through_label = gen_label_rtx ();
9092 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9093 /* Now the THEN-expression. */
9094 do_jump (TREE_OPERAND (exp, 1),
9095 if_false_label ? if_false_label : drop_through_label,
9096 if_true_label ? if_true_label : drop_through_label);
9097 /* In case the do_jump just above never jumps. */
9098 do_pending_stack_adjust ();
9099 emit_label (label1);
9100 /* Now the ELSE-expression. */
9101 do_jump (TREE_OPERAND (exp, 2),
9102 if_false_label ? if_false_label : drop_through_label,
9103 if_true_label ? if_true_label : drop_through_label);
9109 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9111 if (integer_zerop (TREE_OPERAND (exp, 1)))
9112 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9113 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9114 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9117 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9118 fold (build (EQ_EXPR, TREE_TYPE (exp),
9119 fold (build1 (REALPART_EXPR,
9120 TREE_TYPE (inner_type),
9121 TREE_OPERAND (exp, 0))),
9122 fold (build1 (REALPART_EXPR,
9123 TREE_TYPE (inner_type),
9124 TREE_OPERAND (exp, 1))))),
9125 fold (build (EQ_EXPR, TREE_TYPE (exp),
9126 fold (build1 (IMAGPART_EXPR,
9127 TREE_TYPE (inner_type),
9128 TREE_OPERAND (exp, 0))),
9129 fold (build1 (IMAGPART_EXPR,
9130 TREE_TYPE (inner_type),
9131 TREE_OPERAND (exp, 1))))))),
9132 if_false_label, if_true_label);
9133 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9134 && !can_compare_p (TYPE_MODE (inner_type)))
9135 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9137 comparison = compare (exp, EQ, EQ);
9143 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9145 if (integer_zerop (TREE_OPERAND (exp, 1)))
9146 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9147 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9148 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9151 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9152 fold (build (NE_EXPR, TREE_TYPE (exp),
9153 fold (build1 (REALPART_EXPR,
9154 TREE_TYPE (inner_type),
9155 TREE_OPERAND (exp, 0))),
9156 fold (build1 (REALPART_EXPR,
9157 TREE_TYPE (inner_type),
9158 TREE_OPERAND (exp, 1))))),
9159 fold (build (NE_EXPR, TREE_TYPE (exp),
9160 fold (build1 (IMAGPART_EXPR,
9161 TREE_TYPE (inner_type),
9162 TREE_OPERAND (exp, 0))),
9163 fold (build1 (IMAGPART_EXPR,
9164 TREE_TYPE (inner_type),
9165 TREE_OPERAND (exp, 1))))))),
9166 if_false_label, if_true_label);
9167 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9168 && !can_compare_p (TYPE_MODE (inner_type)))
9169 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9171 comparison = compare (exp, NE, NE);
9176 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9178 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9179 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9181 comparison = compare (exp, LT, LTU);
9185 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9187 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9188 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9190 comparison = compare (exp, LE, LEU);
9194 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9196 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9197 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9199 comparison = compare (exp, GT, GTU);
9203 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9205 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9206 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9208 comparison = compare (exp, GE, GEU);
9213 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9215 /* This is not needed any more and causes poor code since it causes
9216 comparisons and tests from non-SI objects to have different code sequences. */
9218 /* Copy to register to avoid generating bad insns by cse
9219 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9220 if (!cse_not_expected && GET_CODE (temp) == MEM)
9221 temp = copy_to_reg (temp);
9223 do_pending_stack_adjust ();
9224 if (GET_CODE (temp) == CONST_INT)
9225 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9226 else if (GET_CODE (temp) == LABEL_REF)
9227 comparison = const_true_rtx;
9228 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9229 && !can_compare_p (GET_MODE (temp)))
9230 /* Note swapping the labels gives us not-equal. */
9231 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9232 else if (GET_MODE (temp) != VOIDmode)
9233 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9234 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9235 GET_MODE (temp), NULL_RTX, 0);
9240 /* Do any postincrements in the expression that was tested. */
9243 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9244 straight into a conditional jump instruction as the jump condition.
9245 Otherwise, all the work has been done already. */
9247 if (comparison == const_true_rtx)
9250 emit_jump (if_true_label);
9252 else if (comparison == const0_rtx)
9255 emit_jump (if_false_label);
9257 else if (comparison)
9258 do_jump_for_compare (comparison, if_false_label, if_true_label);
9260 if (drop_through_label)
9262 /* If do_jump produces code that might be jumped around,
9263 do any stack adjusts from that code, before the place
9264 where control merges in. */
9265 do_pending_stack_adjust ();
9266 emit_label (drop_through_label);
9270 /* Given a comparison expression EXP for values too wide to be compared
9271 with one insn, test the comparison and jump to the appropriate label.
9272 The code of EXP is ignored; we always test GT if SWAP is 0,
9273 and LT if SWAP is 1. */
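/* Roughly (an illustrative sketch for a two-word value, high-order
   word first):

	if (op0.high >  op1.high) goto if_true;
	if (op0.high != op1.high) goto if_false;
	if (op0.low  >  op1.low)  goto if_true;	(unsigned compare)
	goto if_false;

   where `.high' and `.low' stand for the subwords fetched with
   operand_subword_force below.  */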
9276 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9279 rtx if_false_label, if_true_label;
9281 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9282 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9283 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9284 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9285 rtx drop_through_label = 0;
9286 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9289 if (! if_true_label || ! if_false_label)
9290 drop_through_label = gen_label_rtx ();
9291 if (! if_true_label)
9292 if_true_label = drop_through_label;
9293 if (! if_false_label)
9294 if_false_label = drop_through_label;
9296 /* Compare a word at a time, high order first. */
9297 for (i = 0; i < nwords; i++)
9300 rtx op0_word, op1_word;
9302 if (WORDS_BIG_ENDIAN)
9304 op0_word = operand_subword_force (op0, i, mode);
9305 op1_word = operand_subword_force (op1, i, mode);
9309 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9310 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9313 /* All but high-order word must be compared as unsigned. */
9314 comp = compare_from_rtx (op0_word, op1_word,
9315 (unsignedp || i > 0) ? GTU : GT,
9316 unsignedp, word_mode, NULL_RTX, 0);
9317 if (comp == const_true_rtx)
9318 emit_jump (if_true_label);
9319 else if (comp != const0_rtx)
9320 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9322 /* Consider lower words only if these are equal. */
9323 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9325 if (comp == const_true_rtx)
9326 emit_jump (if_false_label);
9327 else if (comp != const0_rtx)
9328 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9332 emit_jump (if_false_label);
9333 if (drop_through_label)
9334 emit_label (drop_through_label);
9337 /* Compare OP0 with OP1, word at a time, in mode MODE.
9338 UNSIGNEDP says to do unsigned comparison.
9339 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9342 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9343 enum machine_mode mode;
9346 rtx if_false_label, if_true_label;
9348 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9349 rtx drop_through_label = 0;
9352 if (! if_true_label || ! if_false_label)
9353 drop_through_label = gen_label_rtx ();
9354 if (! if_true_label)
9355 if_true_label = drop_through_label;
9356 if (! if_false_label)
9357 if_false_label = drop_through_label;
9359 /* Compare a word at a time, high order first. */
9360 for (i = 0; i < nwords; i++)
9363 rtx op0_word, op1_word;
9365 if (WORDS_BIG_ENDIAN)
9367 op0_word = operand_subword_force (op0, i, mode);
9368 op1_word = operand_subword_force (op1, i, mode);
9372 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9373 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9376 /* All but high-order word must be compared as unsigned. */
9377 comp = compare_from_rtx (op0_word, op1_word,
9378 (unsignedp || i > 0) ? GTU : GT,
9379 unsignedp, word_mode, NULL_RTX, 0);
9380 if (comp == const_true_rtx)
9381 emit_jump (if_true_label);
9382 else if (comp != const0_rtx)
9383 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9385 /* Consider lower words only if these are equal. */
9386 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9388 if (comp == const_true_rtx)
9389 emit_jump (if_false_label);
9390 else if (comp != const0_rtx)
9391 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9395 emit_jump (if_false_label);
9396 if (drop_through_label)
9397 emit_label (drop_through_label);
9400 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9401 with one insn, test the comparison and jump to the appropriate label. */
9404 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9406 rtx if_false_label, if_true_label;
9408 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9409 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9410 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9411 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9413 rtx drop_through_label = 0;
9415 if (! if_false_label)
9416 drop_through_label = if_false_label = gen_label_rtx ();
9418 for (i = 0; i < nwords; i++)
9420 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9421 operand_subword_force (op1, i, mode),
9422 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9423 word_mode, NULL_RTX, 0);
9424 if (comp == const_true_rtx)
9425 emit_jump (if_false_label);
9426 else if (comp != const0_rtx)
9427 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9431 emit_jump (if_true_label);
9432 if (drop_through_label)
9433 emit_label (drop_through_label);
9436 /* Jump according to whether OP0 is 0.
9437 We assume that OP0 has an integer mode that is too wide
9438 for the available compare insns. */
9441 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9443 rtx if_false_label, if_true_label;
9445 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9447 rtx drop_through_label = 0;
9449 if (! if_false_label)
9450 drop_through_label = if_false_label = gen_label_rtx ();
9452 for (i = 0; i < nwords; i++)
9454 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9456 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9457 if (comp == const_true_rtx)
9458 emit_jump (if_false_label);
9459 else if (comp != const0_rtx)
9460 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9464 emit_jump (if_true_label);
9465 if (drop_through_label)
9466 emit_label (drop_through_label);
9469 /* Given a comparison expression in rtl form, output conditional branches to
9470 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9473 do_jump_for_compare (comparison, if_false_label, if_true_label)
9474 rtx comparison, if_false_label, if_true_label;
9478 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9479 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9484 emit_jump (if_false_label);
9486 else if (if_false_label)
9489 rtx prev = get_last_insn ();
9492 /* Output the branch with the opposite condition. Then try to invert
9493 what is generated. If more than one insn is a branch, or if the
9494 branch is not the last insn written, abort. If we can't invert
9495 the branch, make a true label, redirect this jump to that,
9496 emit a jump to the false label and define the true label. */
9498 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9499 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9503 /* Here we get the first insn that was just emitted. It used to be the
9504 case that, on some machines, emitting the branch would discard
9505 the previous compare insn and emit a replacement. This isn't
9506 done anymore, but abort if we see that PREV is deleted. */
9509 insn = get_insns ();
9510 else if (INSN_DELETED_P (prev))
9513 insn = NEXT_INSN (prev);
9515 for (; insn; insn = NEXT_INSN (insn))
9516 if (GET_CODE (insn) == JUMP_INSN)
9523 if (branch != get_last_insn ())
9526 JUMP_LABEL (branch) = if_false_label;
9527 if (! invert_jump (branch, if_false_label))
9529 if_true_label = gen_label_rtx ();
9530 redirect_jump (branch, if_true_label);
9531 emit_jump (if_false_label);
9532 emit_label (if_true_label);
9537 /* Generate code for a comparison expression EXP
9538 (including code to compute the values to be compared)
9539 and set (CC0) according to the result.
9540 SIGNED_CODE should be the rtx operation for this comparison for
9541 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9543 We force a stack adjustment unless there are currently
9544 things pushed on the stack that aren't yet used. */
9547 compare (exp, signed_code, unsigned_code)
9549 enum rtx_code signed_code, unsigned_code;
9552 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9554 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9555 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9556 register enum machine_mode mode = TYPE_MODE (type);
9557 int unsignedp = TREE_UNSIGNED (type);
9558 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9560 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9562 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9563 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9566 /* Like compare but expects the values to compare as two rtx's.
9567 The decision as to signed or unsigned comparison must be made by the caller.
9569 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9572 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9573 size of MODE should be used. */
9576 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9577 register rtx op0, op1;
9580 enum machine_mode mode;
9586 /* If one operand is constant, make it the second one. Only do this
9587 if the other operand is not constant as well. */
9589 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9590 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9595 code = swap_condition (code);
9600 op0 = force_not_mem (op0);
9601 op1 = force_not_mem (op1);
9604 do_pending_stack_adjust ();
9606 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9607 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9611 /* There's no need to do this now that combine.c can eliminate lots of
9612 sign extensions. This can be less efficient in certain cases on other machines. */
9615 /* If this is a signed equality comparison, we can do it as an
9616 unsigned comparison since zero-extension is cheaper than sign
9617 extension and comparisons with zero are done as unsigned. This is
9618 the case even on machines that can do fast sign extension, since
9619 zero-extension is easier to combine with other operations than
9620 sign-extension is. If we are comparing against a constant, we must
9621 convert it to what it would look like unsigned. */
9622 if ((code == EQ || code == NE) && ! unsignedp
9623 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9625 if (GET_CODE (op1) == CONST_INT
9626 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9627 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9632 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9634 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
9637 /* Generate code to calculate EXP using a store-flag instruction
9638 and return an rtx for the result. EXP is either a comparison
9639 or a TRUTH_NOT_EXPR whose operand is a comparison.
9641 If TARGET is nonzero, store the result there if convenient.
9643 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
9646 Return zero if there is no suitable set-flag instruction
9647 available on this machine.
9649 Once expand_expr has been called on the arguments of the comparison,
9650 we are committed to doing the store flag, since it is not safe to
9651 re-evaluate the expression. We emit the store-flag insn by calling
9652 emit_store_flag, but only expand the arguments if we have a reason
9653 to believe that emit_store_flag will be successful. If we think that
9654 it will, but it isn't, we have to simulate the store-flag with a
9655 set/jump/set sequence. */
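/* The simulated sequence mentioned above is equivalent to this
   source-level sketch (illustrative, for `r = (a < b);' without
   inversion):

	r = 1;
	if (a < b) goto l_done;
	r = 0;
      l_done: ;

   i.e. the result is stored optimistically and corrected on the
   fall-through path.  */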
9658 do_store_flag (exp, target, mode, only_cheap)
9661 enum machine_mode mode;
9665 tree arg0, arg1, type;
9667 enum machine_mode operand_mode;
9671 enum insn_code icode;
9672 rtx subtarget = target;
9673 rtx result, label, pattern, jump_pat;
9675 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9676 result at the end. We can't simply invert the test since it would
9677 have already been inverted if it were valid. This case occurs for
9678 some floating-point comparisons. */
9680 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9681 invert = 1, exp = TREE_OPERAND (exp, 0);
9683 arg0 = TREE_OPERAND (exp, 0);
9684 arg1 = TREE_OPERAND (exp, 1);
9685 type = TREE_TYPE (arg0);
9686 operand_mode = TYPE_MODE (type);
9687 unsignedp = TREE_UNSIGNED (type);
9689 /* We won't bother with BLKmode store-flag operations because it would mean
9690 passing a lot of information to emit_store_flag. */
9691 if (operand_mode == BLKmode)
9697 /* Get the rtx comparison code to use. We know that EXP is a comparison
9698 operation of some type. Some comparisons against 1 and -1 can be
9699 converted to comparisons with zero. Do so here so that the tests
9700 below will be aware that we have a comparison with zero. These
9701 tests will not catch constants in the first operand, but constants
9702 are rarely passed as the first operand. */
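  /* e.g. (illustrative): `x < 1' is rewritten below as `x <= 0', and for
     signed X `x > -1' as `x >= 0', so the zero-comparison special cases
     later in this function apply to those forms as well.  */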
9704 switch (TREE_CODE (exp))
9713 if (integer_onep (arg1))
9714 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9716 code = unsignedp ? LTU : LT;
9719 if (! unsignedp && integer_all_onesp (arg1))
9720 arg1 = integer_zero_node, code = LT;
9722 code = unsignedp ? LEU : LE;
9725 if (! unsignedp && integer_all_onesp (arg1))
9726 arg1 = integer_zero_node, code = GE;
9728 code = unsignedp ? GTU : GT;
9731 if (integer_onep (arg1))
9732 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9734 code = unsignedp ? GEU : GE;
9740 /* Put a constant second. */
9741 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9743 tem = arg0; arg0 = arg1; arg1 = tem;
9744 code = swap_condition (code);
9747 /* If this is an equality or inequality test of a single bit, we can
9748 do this by shifting the bit being tested to the low-order bit and
9749 masking the result with the constant 1. If the condition was EQ,
9750 we xor it with 1. This does not require an scc insn and is faster
9751 than an scc insn even if we have it. */
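  /* e.g. (illustrative): `(x & 0x8) != 0' is computed as
     `(x >> 3) & 1', and `(x & 0x8) == 0' as the equivalent of
     `((x >> 3) & 1) ^ 1', with no scc and no branch needed.  */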
9753 if ((code == NE || code == EQ)
9754 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9755 && integer_pow2p (TREE_OPERAND (arg0, 1))
9756 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9758 tree inner = TREE_OPERAND (arg0, 0);
9759 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9760 NULL_RTX, VOIDmode, 0)));
9763 /* If INNER is a right shift of a constant and it plus BITNUM does
9764 not overflow, adjust BITNUM and INNER. */
9766 if (TREE_CODE (inner) == RSHIFT_EXPR
9767 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9768 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9769 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9770 < TYPE_PRECISION (type)))
9772 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9773 inner = TREE_OPERAND (inner, 0);
9776 /* If we are going to be able to omit the AND below, we must do our
9777 operations as unsigned. If we must use the AND, we have a choice.
9778 Normally unsigned is faster, but for some machines signed is. */
9779 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9780 #ifdef LOAD_EXTEND_OP
9781 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9787 if (subtarget == 0 || GET_CODE (subtarget) != REG
9788 || GET_MODE (subtarget) != operand_mode
9789 || ! safe_from_p (subtarget, inner))
9792 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9795 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9796 size_int (bitnum), subtarget, ops_unsignedp);
9798 if (GET_MODE (op0) != mode)
9799 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9801 if ((code == EQ && ! invert) || (code == NE && invert))
9802 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9803 ops_unsignedp, OPTAB_LIB_WIDEN);
9805 /* Put the AND last so it can combine with more things. */
9806 if (bitnum != TYPE_PRECISION (type) - 1)
9807 op0 = expand_and (op0, const1_rtx, subtarget);
9812 /* Now see if we are likely to be able to do this. Return if not. */
9813 if (! can_compare_p (operand_mode))
9815 icode = setcc_gen_code[(int) code];
9816 if (icode == CODE_FOR_nothing
9817 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9819 /* We can only do this if it is one of the special cases that
9820 can be handled without an scc insn. */
9821 if ((code == LT && integer_zerop (arg1))
9822 || (! only_cheap && code == GE && integer_zerop (arg1)))
9824 else if (BRANCH_COST >= 0
9825 && ! only_cheap && (code == NE || code == EQ)
9826 && TREE_CODE (type) != REAL_TYPE
9827 && ((abs_optab->handlers[(int) operand_mode].insn_code
9828 != CODE_FOR_nothing)
9829 || (ffs_optab->handlers[(int) operand_mode].insn_code
9830 != CODE_FOR_nothing)))
9836 preexpand_calls (exp);
9837 if (subtarget == 0 || GET_CODE (subtarget) != REG
9838 || GET_MODE (subtarget) != operand_mode
9839 || ! safe_from_p (subtarget, arg1))
9842 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9843 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9846 target = gen_reg_rtx (mode);
9848 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9849 because, if emit_store_flag does anything, it will succeed and
9850 OP0 and OP1 will not be used subsequently. */
9852 result = emit_store_flag (target, code,
9853 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9854 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9855 operand_mode, unsignedp, 1);
9860 result = expand_binop (mode, xor_optab, result, const1_rtx,
9861 result, 0, OPTAB_LIB_WIDEN);
9865 /* If this failed, we have to do this with set/compare/jump/set code. */
9866 if (target == 0 || GET_CODE (target) != REG
9867 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9868 target = gen_reg_rtx (GET_MODE (target));
9870 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9871 result = compare_from_rtx (op0, op1, code, unsignedp,
9872 operand_mode, NULL_RTX, 0);
9873 if (GET_CODE (result) == CONST_INT)
9874 return (((result == const0_rtx && ! invert)
9875 || (result != const0_rtx && invert))
9876 ? const0_rtx : const1_rtx);
9878 label = gen_label_rtx ();
9879 if (bcc_gen_fctn[(int) code] == 0)
9882 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9883 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9889 /* Generate a tablejump instruction (used for switch statements). */
9891 #ifdef HAVE_tablejump
9893 /* INDEX is the value being switched on, with the lowest value
9894 in the table already subtracted.
9895 MODE is its expected mode (needed if INDEX is constant).
9896 RANGE is the length of the jump table.
9897 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9899 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9900 index value is out of range. */
9903 do_tablejump (index, mode, range, table_label, default_label)
9904 rtx index, range, table_label, default_label;
9905 enum machine_mode mode;
9907 register rtx temp, vector;
9909 /* Do an unsigned comparison (in the proper mode) between the index
9910 expression and the value which represents the length of the range.
9911 Since we just finished subtracting the lower bound of the range
9912 from the index expression, this comparison allows us to simultaneously
9913 check that the original index expression value is both greater than
9914 or equal to the minimum value of the range and less than or equal to
9915 the maximum value of the range. */
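  /* Concretely (illustrative): for `switch (i)' with case values 4
     through 9, INDEX arrives here as i - 4 and RANGE as 5, so the single
     unsigned test

	if ((unsigned) (i - 4) > 5) goto default_label;

     rejects both i < 4 and i > 9 at once.  */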
9917 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9918 emit_jump_insn (gen_bgtu (default_label));
9920 /* If index is in range, it must fit in Pmode.
9921 Convert to Pmode so we can index with it. */
9923 index = convert_to_mode (Pmode, index, 1);
9925 /* Don't let a MEM slip through, because then INDEX that comes
9926 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9927 and break_out_memory_refs will go to work on it and mess it up. */
9928 #ifdef PIC_CASE_VECTOR_ADDRESS
9929 if (flag_pic && GET_CODE (index) != REG)
9930 index = copy_to_mode_reg (Pmode, index);
9933 /* If flag_force_addr were to affect this address
9934 it could interfere with the tricky assumptions made
9935 about addresses that contain label-refs,
9936 which may be valid only very near the tablejump itself. */
9937 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9938 GET_MODE_SIZE, because this indicates how large insns are. The other
9939 uses should all be Pmode, because they are addresses. This code
9940 could fail if addresses and insns are not the same size. */
9941 index = gen_rtx (PLUS, Pmode,
9942 gen_rtx (MULT, Pmode, index,
9943 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9944 gen_rtx (LABEL_REF, Pmode, table_label));
9945 #ifdef PIC_CASE_VECTOR_ADDRESS
9947 index = PIC_CASE_VECTOR_ADDRESS (index);
9950 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9951 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9952 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9953 RTX_UNCHANGING_P (vector) = 1;
9954 convert_move (temp, vector, 0);
9956 emit_jump_insn (gen_tablejump (temp, table_label));
9958 #ifndef CASE_VECTOR_PC_RELATIVE
9959 /* If we are generating PIC code or if the table is PC-relative, the
9960 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9966 #endif /* HAVE_tablejump */
9969 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9970 to that value is on the top of the stack. The resulting type is TYPE, and
9971 the source declaration is DECL. */
9974 bc_load_memory (type, decl)
9977 enum bytecode_opcode opcode;
9980 /* Bit fields are special. We only know about signed and
9981 unsigned ints, and enums. The latter are treated as ints. */
9984 if (DECL_BIT_FIELD (decl))
9985 if (TREE_CODE (type) == ENUMERAL_TYPE
9986 || TREE_CODE (type) == INTEGER_TYPE)
9987 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9991 /* See corresponding comment in bc_store_memory(). */
9992 if (TYPE_MODE (type) == BLKmode
9993 || TYPE_MODE (type) == VOIDmode)
9996 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9998 if (opcode == neverneverland)
10001 bc_emit_bytecode (opcode);
10003 #ifdef DEBUG_PRINT_CODE
10004 fputc ('\n', stderr);
10009 /* Store the contents of the second stack slot to the address in the
10010 top stack slot. DECL is the declaration of the destination and is used
10011 to determine whether we're dealing with a bitfield. */
10014 bc_store_memory (type, decl)
10017 enum bytecode_opcode opcode;
10020 if (DECL_BIT_FIELD (decl))
10022 if (TREE_CODE (type) == ENUMERAL_TYPE
10023 || TREE_CODE (type) == INTEGER_TYPE)
10029 if (TYPE_MODE (type) == BLKmode)
10031 /* Copy structure. This expands to a block copy instruction, storeBLK.
10032 In addition to the arguments expected by the other store instructions,
10033 it also expects a type size (SImode) on top of the stack, which is the
10034 structure size in size units (usually bytes). The first two arguments
10035 are already on the stack; so we just put the size on level 1. For some
10036 other languages, the size may be variable; this is why we don't encode
10037 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10039 bc_expand_expr (TYPE_SIZE (type));
10043 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10045 if (opcode == neverneverland)
10048 bc_emit_bytecode (opcode);
10050 #ifdef DEBUG_PRINT_CODE
10051 fputc ('\n', stderr);
10056 /* Allocate local stack space sufficient to hold a value of the given
10057 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10058 integral power of 2. A special case is locals of type VOID, which
10059 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
10060 remapped into the corresponding attribute of SI. */
10063 bc_allocate_local (size, alignment)
10064 int size, alignment;
10067 int byte_alignment;
10072 /* Normalize size and alignment */
10074 size = UNITS_PER_WORD;
10076 if (alignment < BITS_PER_UNIT)
10077 byte_alignment = 1 << (INT_ALIGN - 1);
10080 byte_alignment = alignment / BITS_PER_UNIT;
10082 if (local_vars_size & (byte_alignment - 1))
10083 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10085 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10086 local_vars_size += size;
10092 /* Allocate variable-sized local array. Variable-sized arrays are
10093 actually represented as pointers to the block of memory where they are stored. */
10096 bc_allocate_variable_array (size)
10100 const int ptralign = (1 << (PTR_ALIGN - 1));
10102 /* Align pointer */
10103 if (local_vars_size & ptralign)
10104 local_vars_size += ptralign - (local_vars_size & ptralign);
10106 /* Note down local space needed: pointer to block; also return an rtx for it. */
10109 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10110 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10115 /* Push the machine address for the given external variable offset. */
10117 bc_load_externaddr (externaddr)
10120 bc_emit_bytecode (constP);
10121 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10122 BYTECODE_BC_LABEL (externaddr)->offset);
10124 #ifdef DEBUG_PRINT_CODE
10125 fputc ('\n', stderr);
10134 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10140 /* Like above, but expects an IDENTIFIER. */
10142 bc_load_externaddr_id (id, offset)
10146 if (!IDENTIFIER_POINTER (id))
10149 bc_emit_bytecode (constP);
10150 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10152 #ifdef DEBUG_PRINT_CODE
10153 fputc ('\n', stderr);
10158 /* Push the machine address for the given local variable offset. */
10160 bc_load_localaddr (localaddr)
10163 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10167 /* Push the machine address for the given parameter offset.
10168 NOTE: offset is in bits. */
10170 bc_load_parmaddr (parmaddr)
10173 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10178 /* Convert a[i] into *(a + i). */
10180 bc_canonicalize_array_ref (exp)
10183 tree type = TREE_TYPE (exp);
10184 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10185 TREE_OPERAND (exp, 0));
10186 tree index = TREE_OPERAND (exp, 1);
10189 /* Convert the integer argument to a type the same size as a pointer
10190 so the multiply won't overflow spuriously. */
10192 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10193 index = convert (type_for_size (POINTER_SIZE, 0), index);
10195 /* The array address isn't volatile even if the array is.
10196 (Of course this isn't terribly relevant since the bytecode
10197 translator treats nearly everything as volatile anyway.) */
10198 TREE_THIS_VOLATILE (array_adr) = 0;
10200 return build1 (INDIRECT_REF, type,
10201 fold (build (PLUS_EXPR,
10202 TYPE_POINTER_TO (type),
10204 fold (build (MULT_EXPR,
10205 TYPE_POINTER_TO (type),
10207 size_in_bytes (type))))));
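/* e.g. (illustrative): with `int a[10]', the reference `a[i]' is
   rewritten as the tree equivalent of

	*(int *) ((char *) a + i * sizeof (int))

   after the index has been widened to the width of a pointer.  */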
10211 /* Load the address of the component referenced by the given
10212 COMPONENT_REF expression.
10214 Returns innermost lvalue. */
10217 bc_expand_component_address (exp)
10221 enum machine_mode mode;
10223 HOST_WIDE_INT SIval;
10226 tem = TREE_OPERAND (exp, 1);
10227 mode = DECL_MODE (tem);
10230 /* Compute cumulative bit offset for nested component refs
10231 and array refs, and find the ultimate containing object. */
10233 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10235 if (TREE_CODE (tem) == COMPONENT_REF)
10236 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10238 if (TREE_CODE (tem) == ARRAY_REF
10239 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10240 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10242 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10243 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10244 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10249 bc_expand_expr (tem);
10252 /* For bitfields also push their offset and size */
10253 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10254 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10256 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
10257 bc_emit_instruction (addconstPSI, SIval);
10259 return (TREE_OPERAND (exp, 1));
10263 /* Emit code to push two SI constants */
10265 bc_push_offset_and_size (offset, size)
10266 HOST_WIDE_INT offset, size;
10268 bc_emit_instruction (constSI, offset);
10269 bc_emit_instruction (constSI, size);
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return exp;

  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return bc_expand_address (bc_canonicalize_array_ref (exp));

    case COMPONENT_REF:

      return bc_expand_component_address (exp);

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
	 also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return TREE_OPERAND (exp, 0);

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
	bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
	bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
			       BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      else
	bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer.  */
      if (TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
	bc_emit_instruction (loadP);

      /* If packed, also return offset and size.  */
      if (DECL_BIT_FIELD (exp))
	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
	rtx r;

	bc_emit_bytecode (constP);
	r = output_constant_def (exp);
	bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
	fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
    }

  /* Most lvalues don't have components.  */
  return exp;
}

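/* Illustration (not compiled): for a local `int x', expanding the
   address of `x' emits a single localP instruction; `a[i]' is first
   rewritten by bc_canonicalize_array_ref and re-expanded as an
   INDIRECT_REF; and for a bit-field member the caller also finds the
   bit offset and size on the stack, telling it to use the
   sxloadBI/zxloadBI/sstoreBI family instead of plain addressing.  */
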
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}

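/* Illustration (mode numbering is machine-generated, so the exact value
   is target-dependent): for a 32-bit `int' aligned to 32 bits the code
   is (int) SImode | 32 << 8; the runtime support can then recover the
   mode as (code & 0xff) and the alignment as (code >> 8).  */
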
/* Generate a constructor label.  */

static char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (char *) obstack_copy0 (&permanent_obstack, label, strlen (label));
}

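/* Successive calls yield "*LR0", "*LR1", ...  The static buffer is
   copied onto the permanent obstack before returning, so each label
   string stays valid after later calls overwrite the buffer.  */
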
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;

  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }

  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;

  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
	{
	  bc_emit_instruction (duplicate);
	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	  bc_emit_instruction (clearBLK);
	}

      /* Store each element of the constructor into the corresponding
	 field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize, bitpos, unsignedp;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  mode = DECL_MODE (field);
	  unsignedp = TREE_UNSIGNED (field);

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt),
			  TREE_TYPE (TREE_VALUE (elt)),
			  /* The alignment of TARGET is
			     at least what its type requires.  */
			  VOIDmode, unsignedp,
			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			  int_size_in_bytes (TREE_TYPE (constr)));
	}
    }
  else

    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
	register tree elt;
	register int i;
	tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
	int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
	int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
	tree elttype = TREE_TYPE (TREE_TYPE (constr));

	/* If the constructor has fewer elements than the array,
	   clear the whole array first.  */

	if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
	  {
	    bc_emit_instruction (duplicate);
	    bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
	    bc_emit_instruction (clearBLK);
	  }

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */

	for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
	     elt;
	     elt = TREE_CHAIN (elt), i++)
	  {
	    register enum machine_mode mode;
	    int bitsize, bitpos, unsignedp;

	    mode = TYPE_MODE (elttype);
	    bitsize = GET_MODE_BITSIZE (mode);
	    unsignedp = TREE_UNSIGNED (elttype);

	    bitpos = i * TREE_INT_CST_LOW (TYPE_SIZE (elttype));

	    bc_store_field (elt, bitsize, bitpos, mode,
			    TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
			    /* The alignment of TARGET is
			       at least what its type requires.  */
			    VOIDmode, unsignedp,
			    TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (constr)));
	  }
      }
}

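/* Illustration (not compiled): for a non-constant initializer such as

       struct { int a, b, c; } x = { f (), 2 };

   the emitted sequence is roughly

       constP <pointer-table offset of x's data>
       duplicate                 copy pointer for clearBLK
       constSI sizeof (x)
       clearBLK                  fewer elements than fields: clear first

   followed, for each element, by evaluation of the value, `over' to
   copy the structure pointer, and a store into the member.  */
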
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
		value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer.  */
  bc_expand_expr (exp);
  bc_emit_instruction (over);

  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else

    /* Not a bit field.  */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member.  */
      if (offset)
	bc_emit_instruction (addconstPSI, offset);

      /* Store after getting value from the stack.  */
      bc_store_memory (type, field);
    }
}

/* Store SI/SU in bitfield.  */

static void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Store.  */
  bc_emit_instruction (sstoreBI);
}

/* Load SI/SU from bitfield.  */

static void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size.  */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend.  */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}

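/* Illustration (not compiled): given `struct { int f : 3; } s;', a
   store to `s.f' leaves <address of s> and the value on the stack,
   then emits constSI 0; constSI 3; sstoreBI.  A load of `s.f' ends in
   sxloadBI, since `f' is signed; with `unsigned int f : 3' it would
   end in zxloadBI instead.  */
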
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust the stack pointer upward), negative means add that number of
   levels (adjust the stack pointer downward).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to drop the second level as well.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
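
/* Illustration: bc_adjust_stack (1) emits one `drop'; bc_adjust_stack (2)
   emits two, via the fall-through above; bc_adjust_stack (5) emits a
   single `adjstackSI 5'; and bc_adjust_stack (0) emits nothing.  */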