/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "typeclass.h"
#include "bc-opcode.h"
#include "bc-typecd.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
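
/* For example, CEIL (9, 4) is 3: nine bytes occupy three four-byte
   units.  Used below to count the words occupied by a value.  */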
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;
/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */
int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;
/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};
/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack.  (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.)  */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
rtx bc_expand_increment PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code PROTO((tree));
rtx bc_allocate_local PROTO((int, int));
void bc_store_memory PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address PROTO((tree));
void bc_expand_constructor PROTO((tree));
void bc_adjust_stack PROTO((int));
tree bc_canonicalize_array_ref PROTO((tree));
void bc_load_memory PROTO((tree, tree));
void bc_load_externaddr PROTO((rtx));
void bc_load_externaddr_id PROTO((tree, int));
void bc_load_localaddr PROTO((rtx));
void bc_load_parmaddr PROTO((rtx));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to PROTO((tree));
extern void (*interim_eh_hook) PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];
/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes.  */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
/* Initialize maps used to convert modes to const, load, and store
   bytecodes.  */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;

  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
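
/* An illustrative sketch (not part of the compiler) of how this queue
   is meant to be used when expanding something like V++; the helper
   name is hypothetical and the code is not compiled.  */
#if 0
static void
sketch_post_increment (var)
     rtx var;				/* register rtx holding V */
{
  /* Queue "var = var + 1"; the QUEUED rtx stands for V's old value.  */
  rtx queued = enqueue_insn (var,
			     gen_rtx (SET, VOIDmode, var,
				      gen_rtx (PLUS, GET_MODE (var),
					       var, const1_rtx)));
  /* Launder the value through protect_from_queue before putting it
     into an instruction.  */
  rtx value = protect_from_queue (queued, 0);

  emit_move_insn (gen_reg_rtx (GET_MODE (var)), value);

  /* Flush the queue; the queued increment is emitted here.  */
  emit_queue ();
}
#endif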
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhftqf2
      if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
      /* No special hardware instruction; use a library call.  */
      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
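
/* A minimal usage sketch (illustrative, not compiled): widen a QImode
   value into an SImode register with zero-extension.  */
#if 0
  {
    rtx byte = gen_reg_rtx (QImode);
    rtx word = gen_reg_rtx (SImode);

    convert_move (word, byte, 1);	/* unsignedp != 0: zero-extend  */
  }
#endif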
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
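
/* Sketch of the constant case above (illustrative, not compiled):
   converting (const_int -1) from QImode to HImode with UNSIGNEDP
   nonzero goes through the masking code and yields (const_int 255),
   without emitting any instructions.  */
#if 0
  rtx x = convert_modes (HImode, QImode, GEN_INT (-1), 1);
#endif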
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
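
/* Worked example of the loop above, assuming MOVE_MAX is 4 and the
   operands are sufficiently aligned: a 7-byte copy is emitted as one
   SImode move, one HImode move and one QImode move (4 + 2 + 1 bytes),
   each pass choosing the widest integer mode narrower than MAX_SIZE.  */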
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
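
/* A minimal usage sketch (illustrative, not compiled): copy 32 bytes
   between two word-aligned BLKmode MEMs; DEST and SRC are
   hypothetical.  */
#if 0
  emit_block_move (dest, src, GEN_INT (32), UNITS_PER_WORD);
#endif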
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
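
/* Worked example of the BYTES_BIG_ENDIAN case above: on a machine with
   4-byte words, a 3-byte value is first shifted left by
   (UNITS_PER_WORD - size) * BITS_PER_UNIT = 8 bits, so that its bytes
   land at the low addresses of the word once stored.  */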
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx (EXPR_LIST, VOIDmode,
	       gen_rtx (USE, VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
}
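
/* A minimal usage sketch (illustrative, not compiled): record that a
   call reads two consecutive hard registers; FIRST_ARG_REGNO is a
   hypothetical target register number.  */
#if 0
  {
    rtx call_fusage = 0;

    use_regs (&call_fusage, FIRST_ARG_REGNO, 2);
    /* CALL_FUSAGE now heads a list of (USE reg) EXPR_LISTs.  */
  }
#endif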
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, ptr_mode,
			 GEN_INT (size), ptr_mode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), ptr_mode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
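
/* Sketch (illustrative, not compiled): zeroing a 16-byte BLKmode MEM
   goes through the library call above, while a scalar register is
   simply moved from const0_rtx; BLK and REG are hypothetical.  */
#if 0
  clear_storage (blk, 16);	/* memset/bzero library call  */
  clear_storage (reg, 0);	/* emit_move_insn (reg, const0_rtx)  */
#endif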
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx (MEM, submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_realpart (submode, x), gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}

      return last_insn;
    }
  else
    abort ();
}
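
/* For instance, on a machine with DFmode moves but no DCmode move
   pattern, the complex case above expands a DCmode copy as two DFmode
   moves, one for the real part and one for the imaginary part.  */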
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
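
/* A minimal usage sketch (illustrative, not compiled): push one word
   by storing through the stack-push address; X is a hypothetical
   word_mode rtx.  */
#if 0
  emit_move_insn (gen_rtx (MEM, word_mode, gen_push_operand ()), x);
#endif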
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	  goto ret;
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }

	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#endif
	  OK_DEFER_POP;
	}
    }
2323 else if (partial > 0)
2325 /* Scalar partly in registers. */
2327 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2330 /* # words of start of argument
2331 that we must make space for but need not store. */
2332 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2333 int args_offset = INTVAL (args_so_far);
2336 /* Push padding now if padding above and stack grows down,
2337 or if padding below and stack grows up.
2338 But if space already allocated, this has already been done. */
2339 if (extra && args_addr == 0
2340 && where_pad != none && where_pad != stack_direction)
2341 anti_adjust_stack (GEN_INT (extra));
2343 /* If we make space by pushing it, we might as well push
2344 the real data. Otherwise, we can leave OFFSET nonzero
2345 and leave the space uninitialized. */
2349 /* Now NOT_STACK gets the number of words that we don't need to
2350 allocate on the stack. */
2351 not_stack = partial - offset;
2353 /* If the partial register-part of the arg counts in its stack size,
2354 skip the part of stack space corresponding to the registers.
2355 Otherwise, start copying to the beginning of the stack space,
2356 by setting SKIP to 0. */
2357 #ifndef REG_PARM_STACK_SPACE
2363 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2364 x = validize_mem (force_const_mem (mode, x));
2366 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2367 SUBREGs of such registers are not allowed. */
2368 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2369 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2370 x = copy_to_reg (x);
2372 /* Loop over all the words allocated on the stack for this arg. */
2373 /* We can do it by words, because any scalar bigger than a word
2374 has a size that is a multiple of a word. */
2375 #ifndef PUSH_ARGS_REVERSED
2376 for (i = not_stack; i < size; i++)
2378 for (i = size - 1; i >= not_stack; i--)
2380 if (i >= not_stack + offset)
2381 emit_push_insn (operand_subword_force (x, i, mode),
2382 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2384 GEN_INT (args_offset + ((i - not_stack + skip)
2385 * UNITS_PER_WORD)));
2391 /* Push padding now if padding above and stack grows down,
2392 or if padding below and stack grows up.
2393 But if space already allocated, this has already been done. */
2394 if (extra && args_addr == 0
2395 && where_pad != none && where_pad != stack_direction)
2396 anti_adjust_stack (GEN_INT (extra));
2398 #ifdef PUSH_ROUNDING
2400 addr = gen_push_operand ();
2403 if (GET_CODE (args_so_far) == CONST_INT)
2405 = memory_address (mode,
2406 plus_constant (args_addr, INTVAL (args_so_far)));
2408 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2411 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2415 /* If part should go in registers, copy that part
2416 into the appropriate registers. Do this now, at the end,
2417 since mem-to-mem copies above may do function calls. */
2418 if (partial > 0 && reg != 0)
2419 move_block_to_reg (REGNO (reg), x, partial, mode);
2421 if (extra && args_addr == 0 && where_pad == stack_direction)
2422 anti_adjust_stack (GEN_INT (extra));
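
/* The OFFSET/NOT_STACK bookkeeping in the "scalar partly in registers"
   case above is easy to misread.  What follows is a minimal standalone
   model of just that arithmetic, fenced with #if 0 so it never enters
   the build; the word and boundary widths are invented example values,
   not any real target's parameters.  */
#if 0
#include <stdio.h>

#define EX_BITS_PER_WORD 32   /* hypothetical word width, in bits */
#define EX_PARM_BOUNDARY 64   /* hypothetical argument boundary, in bits */

/* Given PARTIAL words already passed in registers, compute how many
   leading stack words need space but no store, as NOT_STACK is
   computed above from PARTIAL and OFFSET.  */
static int
example_not_stack (int partial)
{
  int offset = partial % (EX_PARM_BOUNDARY / EX_BITS_PER_WORD);
  return partial - offset;
}

int
main (void)
{
  int partial;
  for (partial = 0; partial <= 4; partial++)
    printf ("partial=%d -> not_stack=%d\n",
            partial, example_not_stack (partial));
  return 0;
}
#endif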
2425 /* Expand an assignment that stores the value of FROM into TO.
2426 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2427 (This may contain a QUEUED rtx;
2428 if the value is constant, this rtx is a constant.)
2429 Otherwise, the returned value is NULL_RTX.
2431 SUGGEST_REG is no longer actually used.
2432 It used to mean, copy the value through a register
2433 and return that register, if that is possible.
2434 We now use WANT_VALUE to decide whether to do this. */
2437 expand_assignment (to, from, want_value, suggest_reg)
2442 register rtx to_rtx = 0;
2445 /* Don't crash if the lhs of the assignment was erroneous. */
2447 if (TREE_CODE (to) == ERROR_MARK)
2449 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2450 return want_value ? result : NULL_RTX;
2453 if (output_bytecode)
2455 tree dest_innermost;
2457 bc_expand_expr (from);
2458 bc_emit_instruction (duplicate);
2460 dest_innermost = bc_expand_address (to);
2462 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2463 take care of it here. */
2465 bc_store_memory (TREE_TYPE (to), dest_innermost);
2469 /* Assignment of a structure component needs special treatment
2470 if the structure component's rtx is not simply a MEM.
2471 Assignment of an array element at a constant index, and assignment of
2472 an array element in an unaligned packed structure field, has the same
2475 if (TREE_CODE (to) == COMPONENT_REF
2476 || TREE_CODE (to) == BIT_FIELD_REF
2477 || (TREE_CODE (to) == ARRAY_REF
2478 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2479 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2480 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2482 enum machine_mode mode1;
2492 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2493 &mode1, &unsignedp, &volatilep);
2495 /* If we are going to use store_bit_field and extract_bit_field,
2496 make sure to_rtx will be safe for multiple use. */
2498 if (mode1 == VOIDmode && want_value)
2499 tem = stabilize_reference (tem);
2501 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2502 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2505 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2507 if (GET_CODE (to_rtx) != MEM)
2509 to_rtx = change_address (to_rtx, VOIDmode,
2510 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2511 force_reg (ptr_mode, offset_rtx)));
2512 /* If we have a variable offset, the known alignment
2513 is only that of the innermost structure containing the field.
2514 (Actually, we could sometimes do better by using the
2515 align of an element of the innermost array, but no need.) */
2516 if (TREE_CODE (to) == COMPONENT_REF
2517 || TREE_CODE (to) == BIT_FIELD_REF)
2519 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
2523 if (GET_CODE (to_rtx) == MEM)
2525 /* When the offset is zero, to_rtx is the address of the
2526 structure we are storing into, and hence may be shared.
2527 We must make a new MEM before setting the volatile bit. */
2529 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2530 MEM_VOLATILE_P (to_rtx) = 1;
2532 #if 0 /* This was turned off because, when a field is volatile
2533 in an object which is not volatile, the object may be in a register,
2534 and then we would abort over here. */
2540 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2542 /* Spurious cast makes HPUX compiler happy. */
2543 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2546 /* Required alignment of containing datum. */
2548 int_size_in_bytes (TREE_TYPE (tem)));
2549 preserve_temp_slots (result);
2553 /* If the value is meaningful, convert RESULT to the proper mode.
2554 Otherwise, return nothing. */
2555 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2556 TYPE_MODE (TREE_TYPE (from)),
2558 TREE_UNSIGNED (TREE_TYPE (to)))
2562 /* If the rhs is a function call and its value is not an aggregate,
2563 call the function before we start to compute the lhs.
2564 This is needed for correct code for cases such as
2565 val = setjmp (buf) on machines where reference to val
2566 requires loading up part of an address in a separate insn.
2568 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2569 a promoted variable where the zero- or sign-extension needs to be done.
2570 Handling this in the normal way is safe because no computation is done
2572 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2573 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2578 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2580 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2582 if (GET_MODE (to_rtx) == BLKmode)
2584 int align = MIN (TYPE_ALIGN (TREE_TYPE (from)), BITS_PER_WORD);
2585 emit_block_move (to_rtx, value, expr_size (from), align);
2588 emit_move_insn (to_rtx, value);
2589 preserve_temp_slots (to_rtx);
2592 return want_value ? to_rtx : NULL_RTX;
2595 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2596 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2599 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2601 /* Don't move directly into a return register. */
2602 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2607 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2608 emit_move_insn (to_rtx, temp);
2609 preserve_temp_slots (to_rtx);
2612 return want_value ? to_rtx : NULL_RTX;
2615 /* In case we are returning the contents of an object which overlaps
2616 the place the value is being stored, use a safe function when copying
2617 a value through a pointer into a structure value return block. */
2618 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2619 && current_function_returns_struct
2620 && !current_function_returns_pcc_struct)
2625 size = expr_size (from);
2626 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2628 #ifdef TARGET_MEM_FUNCTIONS
2629 emit_library_call (memcpy_libfunc, 0,
2630 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2631 XEXP (from_rtx, 0), Pmode,
2632 convert_to_mode (TYPE_MODE (sizetype),
2633 size, TREE_UNSIGNED (sizetype)),
2634 TYPE_MODE (sizetype));
2636 emit_library_call (bcopy_libfunc, 0,
2637 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2638 XEXP (to_rtx, 0), Pmode,
2639 convert_to_mode (TYPE_MODE (sizetype),
2640 size, TREE_UNSIGNED (sizetype)),
2641 TYPE_MODE (sizetype));
2644 preserve_temp_slots (to_rtx);
2647 return want_value ? to_rtx : NULL_RTX;
2650 /* Compute FROM and store the value in the rtx we got. */
2653 result = store_expr (from, to_rtx, want_value);
2654 preserve_temp_slots (result);
2657 return want_value ? result : NULL_RTX;
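
/* The CALL_EXPR special case above exists for source patterns like the
   one below: the call must be complete before any piece of the lhs
   address is formed, or a longjmp back to the setjmp could find a
   half-built address.  This fragment only illustrates the hazard
   (hence #if 0); it is not compiler code.  */
#if 0
#include <setjmp.h>

static jmp_buf buf;
static volatile int val;

static void
example (void)
{
  /* On machines where referencing VAL needs part of an address loaded
     in a separate insn, that load must not happen before the call.  */
  val = setjmp (buf);
}
#endif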
2660 /* Generate code for computing expression EXP,
2661 and storing the value into TARGET.
2662 TARGET may contain a QUEUED rtx.
2664 If WANT_VALUE is nonzero, return a copy of the value
2665 not in TARGET, so that we can be sure to use the proper
2666 value in a containing expression even if TARGET has something
2667 else stored in it. If possible, we copy the value through a pseudo
2668 and return that pseudo. Or, if the value is constant, we try to
2669 return the constant. In some cases, we return a pseudo
2670 copied *from* TARGET.
2672 If the mode is BLKmode then we may return TARGET itself.
2673 It turns out that in BLKmode it doesn't cause a problem,
2674 because C has no operators that could combine two different
2675 assignments into the same BLKmode object with different values
2676 with no sequence point. Will other languages need this to
2677 be handled?
2679 If WANT_VALUE is 0, we return NULL, to make sure
2680 to catch quickly any cases where the caller uses the value
2681 and fails to set WANT_VALUE. */
2684 store_expr (exp, target, want_value)
2686 register rtx target;
2690 int dont_return_target = 0;
2692 if (TREE_CODE (exp) == COMPOUND_EXPR)
2694 /* Perform first part of compound expression, then assign from second
2696 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2698 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
2700 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2702 /* For conditional expression, get safe form of the target. Then
2703 test the condition, doing the appropriate assignment on either
2704 side. This avoids the creation of unnecessary temporaries.
2705 For non-BLKmode, it is more efficient not to do this. */
2707 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2710 target = protect_from_queue (target, 1);
2712 do_pending_stack_adjust ();
2714 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2715 store_expr (TREE_OPERAND (exp, 1), target, 0);
2717 emit_jump_insn (gen_jump (lab2));
2720 store_expr (TREE_OPERAND (exp, 2), target, 0);
2724 return want_value ? target : NULL_RTX;
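
/* In source terms, the BLKmode COND_EXPR handling above turns the
   store into an if/else around two plain stores, so no temporary
   aggregate is ever built.  A standalone model follows, with an
   invented aggregate type, fenced with #if 0.  */
#if 0
struct blk { int a[4]; };

static void
store_cond (struct blk *target, int cond,
            const struct blk *e1, const struct blk *e2)
{
  if (cond)            /* jumpifnot (operand 0, lab1) */
    *target = *e1;     /* store_expr (operand 1, target, 0) */
  else                 /* lab1: */
    *target = *e2;     /* store_expr (operand 2, target, 0) */
}
#endif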
2726 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2727 && GET_MODE (target) != BLKmode)
2728 /* If target is in memory and caller wants value in a register instead,
2729 arrange that. Pass TARGET as target for expand_expr so that,
2730 if EXP is another assignment, WANT_VALUE will be nonzero for it.
2731 We know expand_expr will not use the target in that case.
2732 Don't do this if TARGET is volatile because we are supposed
2733 to write it and then read it. */
2735 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2736 GET_MODE (target), 0);
2737 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2738 temp = copy_to_reg (temp);
2739 dont_return_target = 1;
2741 else if (queued_subexp_p (target))
2742 /* If target contains a postincrement, let's not risk
2743 using it as the place to generate the rhs. */
2745 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2747 /* Expand EXP into a new pseudo. */
2748 temp = gen_reg_rtx (GET_MODE (target));
2749 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2752 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2754 /* If target is volatile, ANSI requires accessing the value
2755 *from* the target, if it is accessed. So make that happen.
2756 In no case return the target itself. */
2757 if (! MEM_VOLATILE_P (target) && want_value)
2758 dont_return_target = 1;
2760 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2761 /* If this is a scalar in a register that is stored in a wider mode
2762 than the declared mode, compute the result into its declared mode
2763 and then convert to the wider mode. Our value is the computed
2764 expression. */
2766 /* If we don't want a value, we can do the conversion inside EXP,
2767 which will often result in some optimizations. Do the conversion
2768 in two steps: first change the signedness, if needed, then
2772 if (TREE_UNSIGNED (TREE_TYPE (exp))
2773 != SUBREG_PROMOTED_UNSIGNED_P (target))
2776 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
2780 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
2781 SUBREG_PROMOTED_UNSIGNED_P (target)),
2785 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2787 /* If TEMP is a volatile MEM and we want a result value, make
2788 the access now so it gets done only once. */
2789 if (GET_CODE (temp) == MEM && MEM_VOLATILE_P (temp) && want_value)
2790 temp = copy_to_reg (temp);
2792 /* If TEMP is a VOIDmode constant, use convert_modes to make
2793 sure that we properly convert it. */
2794 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2795 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2796 TYPE_MODE (TREE_TYPE (exp)), temp,
2797 SUBREG_PROMOTED_UNSIGNED_P (target));
2799 convert_move (SUBREG_REG (target), temp,
2800 SUBREG_PROMOTED_UNSIGNED_P (target));
2801 return want_value ? temp : NULL_RTX;
2805 temp = expand_expr (exp, target, GET_MODE (target), 0);
2806 /* Return TARGET if it's a specified hardware register.
2807 If TARGET is a volatile mem ref, either return TARGET
2808 or return a reg copied *from* TARGET; ANSI requires this.
2810 Otherwise, if TEMP is not TARGET, return TEMP
2811 if it is constant (for efficiency),
2812 or if we really want the correct value. */
2813 if (!(target && GET_CODE (target) == REG
2814 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2815 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2817 && (CONSTANT_P (temp) || want_value))
2818 dont_return_target = 1;
2821 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
2822 the same as that of TARGET, adjust the constant. This is needed, for
2823 example, in case it is a CONST_DOUBLE and we want only a word-sized
2824 value. */
2825 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
2826 && TREE_CODE (exp) != ERROR_MARK
2827 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2828 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
2829 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
2831 /* If value was not generated in the target, store it there.
2832 Convert the value to TARGET's type first if necessary. */
2834 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2836 target = protect_from_queue (target, 1);
2837 if (GET_MODE (temp) != GET_MODE (target)
2838 && GET_MODE (temp) != VOIDmode)
2840 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2841 if (dont_return_target)
2843 /* In this case, we will return TEMP,
2844 so make sure it has the proper mode.
2845 But don't forget to store the value into TARGET. */
2846 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2847 emit_move_insn (target, temp);
2850 convert_move (target, temp, unsignedp);
2853 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2855 /* Handle copying a string constant into an array.
2856 The string constant may be shorter than the array.
2857 So copy just the string's actual length, and clear the rest. */
2861 /* Get the size of the data type of the string,
2862 which is actually the size of the target. */
2863 size = expr_size (exp);
2864 if (GET_CODE (size) == CONST_INT
2865 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2866 emit_block_move (target, temp, size,
2867 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2870 /* Compute the size of the data to copy from the string. */
2872 = size_binop (MIN_EXPR,
2873 make_tree (sizetype, size),
2875 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2876 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2880 /* Copy that much. */
2881 emit_block_move (target, temp, copy_size_rtx,
2882 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2884 /* Figure out how much is left in TARGET that we have to clear.
2885 Do all calculations in ptr_mode. */
2887 addr = XEXP (target, 0);
2888 addr = convert_modes (ptr_mode, Pmode, addr, 1);
2890 if (GET_CODE (copy_size_rtx) == CONST_INT)
2892 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
2893 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
2897 addr = force_reg (ptr_mode, addr);
2898 addr = expand_binop (ptr_mode, add_optab, addr,
2899 copy_size_rtx, NULL_RTX, 0,
2902 size = expand_binop (ptr_mode, sub_optab, size,
2903 copy_size_rtx, NULL_RTX, 0,
2906 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2907 GET_MODE (size), 0, 0);
2908 label = gen_label_rtx ();
2909 emit_jump_insn (gen_blt (label));
2912 if (size != const0_rtx)
2914 #ifdef TARGET_MEM_FUNCTIONS
2915 emit_library_call (memset_libfunc, 0, VOIDmode, 3, addr,
2916 Pmode, const0_rtx, Pmode, size, ptr_mode);
2918 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2919 addr, Pmode, size, ptr_mode);
2927 else if (GET_MODE (temp) == BLKmode)
2928 emit_block_move (target, temp, expr_size (exp),
2929 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2931 emit_move_insn (target, temp);
2934 /* If we don't want a value, return NULL_RTX. */
2938 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
2939 ??? The latter test doesn't seem to make sense. */
2940 else if (dont_return_target && GET_CODE (temp) != MEM)
2943 /* Return TARGET itself if it is a hard register. */
2944 else if (want_value && GET_MODE (target) != BLKmode
2945 && ! (GET_CODE (target) == REG
2946 && REGNO (target) < FIRST_PSEUDO_REGISTER))
2947 return copy_to_reg (target);
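
/* The STRING_CST arm above has, at the rtl level, the same effect as
   this standalone sketch: copy the string's actual bytes, then clear
   whatever is left of the target.  The sizes are arbitrary
   illustrative values; the sketch is fenced with #if 0.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char target[8];
  static const char src[2] = { 'h', 'i' };   /* shorter than TARGET */
  size_t copy_size = sizeof src < sizeof target ? sizeof src : sizeof target;

  memcpy (target, src, copy_size);           /* copy that much */
  if (copy_size < sizeof target)             /* figure out what's left */
    memset (target + copy_size, 0, sizeof target - copy_size);

  printf ("%s\n", target);
  return 0;
}
#endif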
2953 /* Store the value of constructor EXP into the rtx TARGET.
2954 TARGET is either a REG or a MEM. */
2957 store_constructor (exp, target)
2961 tree type = TREE_TYPE (exp);
2963 /* We know our target cannot conflict, since safe_from_p has been called. */
2965 /* Don't try copying piece by piece into a hard register
2966 since that is vulnerable to being clobbered by EXP.
2967 Instead, construct in a pseudo register and then copy it all. */
2968 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2970 rtx temp = gen_reg_rtx (GET_MODE (target));
2971 store_constructor (exp, temp);
2972 emit_move_insn (target, temp);
2977 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2978 || TREE_CODE (type) == QUAL_UNION_TYPE)
2982 /* Inform later passes that the whole union value is dead. */
2983 if (TREE_CODE (type) == UNION_TYPE
2984 || TREE_CODE (type) == QUAL_UNION_TYPE)
2985 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2987 /* If we are building a static constructor into a register,
2988 set the initial value as zero so we can fold the value into
2989 a constant. But if more than one register is involved,
2990 this probably loses. */
2991 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
2992 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
2993 emit_move_insn (target, const0_rtx);
2995 /* If the constructor has fewer fields than the structure,
2996 clear the whole structure first. */
2997 else if (list_length (CONSTRUCTOR_ELTS (exp))
2998 != list_length (TYPE_FIELDS (type)))
2999 clear_storage (target, int_size_in_bytes (type));
3001 /* Inform later passes that the old value is dead. */
3002 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3004 /* Store each element of the constructor into
3005 the corresponding field of TARGET. */
3007 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3009 register tree field = TREE_PURPOSE (elt);
3010 register enum machine_mode mode;
3014 tree pos, constant = 0, offset = 0;
3015 rtx to_rtx = target;
3017 /* Just ignore missing fields.
3018 We cleared the whole structure, above,
3019 if any fields are missing. */
3023 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3024 unsignedp = TREE_UNSIGNED (field);
3025 mode = DECL_MODE (field);
3026 if (DECL_BIT_FIELD (field))
3029 pos = DECL_FIELD_BITPOS (field);
3030 if (TREE_CODE (pos) == INTEGER_CST)
3032 else if (TREE_CODE (pos) == PLUS_EXPR
3033 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3034 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3039 bitpos = TREE_INT_CST_LOW (constant);
3045 if (contains_placeholder_p (offset))
3046 offset = build (WITH_RECORD_EXPR, sizetype,
3049 offset = size_binop (FLOOR_DIV_EXPR, offset,
3050 size_int (BITS_PER_UNIT));
3052 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3053 if (GET_CODE (to_rtx) != MEM)
3057 = change_address (to_rtx, VOIDmode,
3058 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3059 force_reg (ptr_mode, offset_rtx)));
3062 store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
3063 /* The alignment of TARGET is
3064 at least what its type requires. */
3066 TYPE_ALIGN (type) / BITS_PER_UNIT,
3067 int_size_in_bytes (type));
3070 else if (TREE_CODE (type) == ARRAY_TYPE)
3074 tree domain = TYPE_DOMAIN (type);
3075 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3076 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3077 tree elttype = TREE_TYPE (type);
3079 /* If the constructor has fewer fields than the structure,
3080 clear the whole structure first. Similarly if this is a
3081 static constructor of a non-BLKmode object. */
3083 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3084 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3085 clear_storage (target, int_size_in_bytes (type));
3087 /* Inform later passes that the old value is dead. */
3088 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3090 /* Store each element of the constructor into
3091 the corresponding element of TARGET, determined
3092 by counting the elements. */
3093 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3095 elt = TREE_CHAIN (elt), i++)
3097 register enum machine_mode mode;
3101 tree index = TREE_PURPOSE (elt);
3102 rtx xtarget = target;
3104 mode = TYPE_MODE (elttype);
3105 bitsize = GET_MODE_BITSIZE (mode);
3106 unsignedp = TREE_UNSIGNED (elttype);
3108 if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3109 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3111 rtx pos_rtx, addr, xtarget;
3115 index = size_int (i);
3117 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3118 size_int (BITS_PER_UNIT));
3119 position = size_binop (MULT_EXPR, index, position);
3120 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3121 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3122 xtarget = change_address (target, mode, addr);
3123 store_expr (TREE_VALUE (elt), xtarget, 0);
3128 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3129 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3131 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3133 store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
3134 /* The alignment of TARGET is
3135 at least what its type requires. */
3137 TYPE_ALIGN (type) / BITS_PER_UNIT,
3138 int_size_in_bytes (type));
3142 /* set constructor assignments */
3143 else if (TREE_CODE (type) == SET_TYPE)
3146 rtx xtarget = XEXP (target, 0);
3147 int set_word_size = TYPE_ALIGN (type);
3148 int nbytes = int_size_in_bytes (type);
3149 tree non_const_elements;
3150 int need_to_clear_first;
3151 tree domain = TYPE_DOMAIN (type);
3152 tree domain_min, domain_max, bitlength;
3154 /* The default implementation strategy is to extract the constant
3155 parts of the constructor, use that to initialize the target,
3156 and then "or" in whatever non-constant ranges we need in addition.
3158 If a large set is all zero or all ones, it is
3159 probably better to set it using memset (if available) or bzero.
3160 Also, if a large set has just a single range, it may also be
3161 better to first clear the whole set (using
3162 bzero/memset), and then set the bits we want. */
3164 /* Check for all zeros. */
3165 if (CONSTRUCTOR_ELTS (exp) == NULL_TREE)
3167 clear_storage (target, nbytes);
3174 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3175 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3176 bitlength = size_binop (PLUS_EXPR,
3177 size_binop (MINUS_EXPR, domain_max, domain_min),
3180 /* Check for range all ones, or at most a single range.
3181 (This optimization is only a win for big sets.) */
3182 if (GET_MODE (target) == BLKmode && nbytes > 16
3183 && TREE_CHAIN (CONSTRUCTOR_ELTS (exp)) == NULL_TREE)
3185 need_to_clear_first = 1;
3186 non_const_elements = CONSTRUCTOR_ELTS (exp);
3190 int nbits = nbytes * BITS_PER_UNIT;
3191 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3192 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3193 char *bit_buffer = (char *) alloca (nbits);
3194 HOST_WIDE_INT word = 0;
3197 int offset = 0; /* In bytes from beginning of set. */
3198 non_const_elements = get_set_constructor_bits (exp,
3202 if (bit_buffer[ibit])
3204 if (BYTES_BIG_ENDIAN)
3205 word |= (1 << (set_word_size - 1 - bit_pos));
3207 word |= 1 << bit_pos;
3210 if (bit_pos >= set_word_size || ibit == nbits)
3212 rtx datum = GEN_INT (word);
3214 /* The assumption here is that it is safe to use XEXP if
3215 the set is multi-word, but not if it's single-word. */
3216 if (GET_CODE (target) == MEM)
3217 to_rtx = change_address (target, mode,
3218 plus_constant (XEXP (target, 0),
3220 else if (offset == 0)
3224 emit_move_insn (to_rtx, datum);
3229 offset += set_word_size / BITS_PER_UNIT;
3232 need_to_clear_first = 0;
3235 for (elt = non_const_elements; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3237 /* start of range of element or NULL */
3238 tree startbit = TREE_PURPOSE (elt);
3239 /* end of range of element, or element value */
3240 tree endbit = TREE_VALUE (elt);
3241 HOST_WIDE_INT startb, endb;
3242 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3244 bitlength_rtx = expand_expr (bitlength,
3245 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3247 /* handle non-range tuple element like [ expr ] */
3248 if (startbit == NULL_TREE)
3250 startbit = save_expr (endbit);
3253 startbit = convert (sizetype, startbit);
3254 endbit = convert (sizetype, endbit);
3255 if (! integer_zerop (domain_min))
3257 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3258 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3260 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3261 EXPAND_CONST_ADDRESS);
3262 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3263 EXPAND_CONST_ADDRESS);
3267 targetx = assign_stack_temp (GET_MODE (target),
3268 GET_MODE_SIZE (GET_MODE (target)),
3270 emit_move_insn (targetx, target);
3272 else if (GET_CODE (target) == MEM)
3277 #ifdef TARGET_MEM_FUNCTIONS
3278 /* Optimization: If startbit and endbit are
3279 constants divisible by BITS_PER_UNIT,
3280 call memset instead. */
3281 if (TREE_CODE (startbit) == INTEGER_CST
3282 && TREE_CODE (endbit) == INTEGER_CST
3283 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3284 && (endb = TREE_INT_CST_LOW (endbit)) % BITS_PER_UNIT == 0)
3287 if (need_to_clear_first
3288 && endb - startb != nbytes * BITS_PER_UNIT)
3289 clear_storage (target, nbytes);
3290 need_to_clear_first = 0;
3291 emit_library_call (memset_libfunc, 0,
3293 plus_constant (XEXP (targetx, 0), startb / BITS_PER_UNIT),
3296 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3302 if (need_to_clear_first)
3304 clear_storage (target, nbytes);
3305 need_to_clear_first = 0;
3307 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3308 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3309 bitlength_rtx, TYPE_MODE (sizetype),
3310 startbit_rtx, TYPE_MODE (sizetype),
3311 endbit_rtx, TYPE_MODE (sizetype));
3314 emit_move_insn (target, targetx);
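
/* The word-building loop in the SET_TYPE case above packs one bit per
   set member, choosing the bit position by target endianness.  A
   standalone model of just that packing follows, fenced with #if 0;
   the 32-bit word size and the sample bit vector are illustrative
   assumptions, not target parameters.  */
#if 0
#include <stdio.h>

#define EX_SET_WORD_SIZE 32

static unsigned long
pack_bits (const char *bit_buffer, int nbits, int big_endian)
{
  unsigned long word = 0;
  int bit_pos;

  for (bit_pos = 0; bit_pos < nbits; bit_pos++)
    if (bit_buffer[bit_pos])
      {
        if (big_endian)
          word |= 1UL << (EX_SET_WORD_SIZE - 1 - bit_pos);
        else
          word |= 1UL << bit_pos;
      }
  return word;
}

int
main (void)
{
  char bits[4] = { 1, 0, 1, 0 };
  printf ("little endian: %#lx\n", pack_bits (bits, 4, 0));
  printf ("big endian:    %#lx\n", pack_bits (bits, 4, 1));
  return 0;
}
#endif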
3322 /* Store the value of EXP (an expression tree)
3323 into a subfield of TARGET which has mode MODE and occupies
3324 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3325 If MODE is VOIDmode, it means that we are storing into a bit-field.
3327 If VALUE_MODE is VOIDmode, return nothing in particular.
3328 UNSIGNEDP is not used in this case.
3330 Otherwise, return an rtx for the value stored. This rtx
3331 has mode VALUE_MODE if that is convenient to do.
3332 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3334 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3335 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3338 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3339 unsignedp, align, total_size)
3341 int bitsize, bitpos;
3342 enum machine_mode mode;
3344 enum machine_mode value_mode;
3349 HOST_WIDE_INT width_mask = 0;
3351 if (bitsize < HOST_BITS_PER_WIDE_INT)
3352 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3354 /* If we are storing into an unaligned field of an aligned union that is
3355 in a register, we may have the mode of TARGET being an integer mode but
3356 MODE == BLKmode. In that case, get an aligned object whose size and
3357 alignment are the same as TARGET and store TARGET into it (we can avoid
3358 the store if the field being stored is the entire width of TARGET). Then
3359 call ourselves recursively to store the field into a BLKmode version of
3360 that object. Finally, load from the object into TARGET. This is not
3361 very efficient in general, but should only be slightly more expensive
3362 than the otherwise-required unaligned accesses. Perhaps this can be
3363 cleaned up later. */
3366 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3368 rtx object = assign_stack_temp (GET_MODE (target),
3369 GET_MODE_SIZE (GET_MODE (target)), 0);
3370 rtx blk_object = copy_rtx (object);
3372 MEM_IN_STRUCT_P (object) = 1;
3373 MEM_IN_STRUCT_P (blk_object) = 1;
3374 PUT_MODE (blk_object, BLKmode);
3376 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3377 emit_move_insn (object, target);
3379 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3382 /* Even though we aren't returning target, we need to
3383 give it the updated value. */
3384 emit_move_insn (target, object);
3389 /* If the structure is in a register or if the component
3390 is a bit field, we cannot use addressing to access it.
3391 Use bit-field techniques or SUBREG to store in it. */
3393 if (mode == VOIDmode
3394 || (mode != BLKmode && ! direct_store[(int) mode])
3395 || GET_CODE (target) == REG
3396 || GET_CODE (target) == SUBREG
3397 /* If the field isn't aligned enough to store as an ordinary memref,
3398 store it as a bit field. */
3399 || (SLOW_UNALIGNED_ACCESS
3400 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
3401 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
3403 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3405 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
3406 MODE. */
3407 if (mode != VOIDmode && mode != BLKmode
3408 && mode != TYPE_MODE (TREE_TYPE (exp)))
3409 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
3411 /* Store the value in the bitfield. */
3412 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3413 if (value_mode != VOIDmode)
3415 /* The caller wants an rtx for the value. */
3416 /* If possible, avoid refetching from the bitfield itself. */
3418 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
3421 enum machine_mode tmode;
3424 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3425 tmode = GET_MODE (temp);
3426 if (tmode == VOIDmode)
3428 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3429 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3430 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3432 return extract_bit_field (target, bitsize, bitpos, unsignedp,
3433 NULL_RTX, value_mode, 0, align,
3440 rtx addr = XEXP (target, 0);
3443 /* If a value is wanted, it must be the lhs;
3444 so make the address stable for multiple use. */
3446 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3447 && ! CONSTANT_ADDRESS_P (addr)
3448 /* A frame-pointer reference is already stable. */
3449 && ! (GET_CODE (addr) == PLUS
3450 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3451 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3452 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3453 addr = copy_to_reg (addr);
3455 /* Now build a reference to just the desired component. */
3457 to_rtx = change_address (target, mode,
3458 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3459 MEM_IN_STRUCT_P (to_rtx) = 1;
3461 return store_expr (exp, to_rtx, value_mode != VOIDmode);
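
/* When VALUE_MODE asks for the stored value back, the code above
   narrows TEMP either with a mask or with a left/right shift pair.
   A standalone model of the shift pair on a plain int follows,
   fenced with #if 0; it assumes two's complement and an arithmetic
   right shift, which is an assumption of the sketch, not a claim
   about every target.  */
#if 0
#include <stdio.h>

/* Keep only the low BITSIZE bits of X, sign-extended, the way the
   LSHIFT_EXPR/RSHIFT_EXPR pair above does for signed fields.  */
static int
narrow_signed (int x, int bitsize)
{
  int count = (int) (sizeof (int) * 8) - bitsize;
  return (int) ((unsigned int) x << count) >> count;
}

int
main (void)
{
  printf ("%d\n", narrow_signed (7, 3));   /* -1: 0b111 as a 3-bit field */
  printf ("%d\n", narrow_signed (3, 3));   /*  3 */
  return 0;
}
#endif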
3465 /* Return true if any object containing the innermost array is an unaligned
3466 packed structure field. */
3469 get_inner_unaligned_p (exp)
3472 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
3476 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3478 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
3482 else if (TREE_CODE (exp) != ARRAY_REF
3483 && TREE_CODE (exp) != NON_LVALUE_EXPR
3484 && ! ((TREE_CODE (exp) == NOP_EXPR
3485 || TREE_CODE (exp) == CONVERT_EXPR)
3486 && (TYPE_MODE (TREE_TYPE (exp))
3487 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3490 exp = TREE_OPERAND (exp, 0);
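
/* A concrete example of the layout the function above detects, using
   GCC's packed attribute; the struct is invented for illustration
   and fenced with #if 0.  */
#if 0
struct packed_example
{
  char c;
  int arr[4] __attribute__ ((packed));
};
/* ARR begins at byte offset 1, so its elements lack int alignment;
   on a SLOW_UNALIGNED_ACCESS target, references into it have to go
   through the bit-field path, which is why the check exists.  */
#endif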
3496 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3497 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3498 ARRAY_REFs and find the ultimate containing object, which we return.
3500 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3501 bit position, and *PUNSIGNEDP to the signedness of the field.
3502 If the position of the field is variable, we store a tree
3503 giving the variable offset (in units) in *POFFSET.
3504 This offset is in addition to the bit position.
3505 If the position is not variable, we store 0 in *POFFSET.
3507 If any of the extraction expressions is volatile,
3508 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3510 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3511 is a mode that can be used to access the field. In that case, *PBITSIZE
3514 If the field describes a variable-sized object, *PMODE is set to
3515 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3516 this case, but the address of the object can be found. */
3519 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3520 punsignedp, pvolatilep)
3525 enum machine_mode *pmode;
3529 tree orig_exp = exp;
3531 enum machine_mode mode = VOIDmode;
3532 tree offset = integer_zero_node;
3534 if (TREE_CODE (exp) == COMPONENT_REF)
3536 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3537 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3538 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3539 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3541 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3543 size_tree = TREE_OPERAND (exp, 1);
3544 *punsignedp = TREE_UNSIGNED (exp);
3548 mode = TYPE_MODE (TREE_TYPE (exp));
3549 *pbitsize = GET_MODE_BITSIZE (mode);
3550 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3555 if (TREE_CODE (size_tree) != INTEGER_CST)
3556 mode = BLKmode, *pbitsize = -1;
3558 *pbitsize = TREE_INT_CST_LOW (size_tree);
3561 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3562 and find the ultimate containing object. */
3568 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
3570 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3571 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3572 : TREE_OPERAND (exp, 2));
3573 tree constant = integer_zero_node, var = pos;
3575 /* If this field hasn't been filled in yet, don't go
3576 past it. This should only happen when folding expressions
3577 made during type construction. */
3581 /* Assume here that the offset is a multiple of a unit.
3582 If not, there should be an explicitly added constant. */
3583 if (TREE_CODE (pos) == PLUS_EXPR
3584 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3585 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
3586 else if (TREE_CODE (pos) == INTEGER_CST)
3587 constant = pos, var = integer_zero_node;
3589 *pbitpos += TREE_INT_CST_LOW (constant);
3592 offset = size_binop (PLUS_EXPR, offset,
3593 size_binop (EXACT_DIV_EXPR, var,
3594 size_int (BITS_PER_UNIT)));
3597 else if (TREE_CODE (exp) == ARRAY_REF)
3599 /* This code is based on the code in case ARRAY_REF in expand_expr
3600 below. We assume here that the size of an array element is
3601 always an integral multiple of BITS_PER_UNIT. */
3603 tree index = TREE_OPERAND (exp, 1);
3604 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
3606 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3607 tree index_type = TREE_TYPE (index);
3609 if (! integer_zerop (low_bound))
3610 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3612 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3614 index = convert (type_for_size (POINTER_SIZE, 0), index);
3615 index_type = TREE_TYPE (index);
3618 index = fold (build (MULT_EXPR, index_type, index,
3619 TYPE_SIZE (TREE_TYPE (exp))));
3621 if (TREE_CODE (index) == INTEGER_CST
3622 && TREE_INT_CST_HIGH (index) == 0)
3623 *pbitpos += TREE_INT_CST_LOW (index);
3625 offset = size_binop (PLUS_EXPR, offset,
3626 size_binop (FLOOR_DIV_EXPR, index,
3627 size_int (BITS_PER_UNIT)));
3629 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3630 && ! ((TREE_CODE (exp) == NOP_EXPR
3631 || TREE_CODE (exp) == CONVERT_EXPR)
3632 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3633 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
3635 && (TYPE_MODE (TREE_TYPE (exp))
3636 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3639 /* If any reference in the chain is volatile, the effect is volatile. */
3640 if (TREE_THIS_VOLATILE (exp))
3642 exp = TREE_OPERAND (exp, 0);
3645 /* If this was a bit-field, see if there is a mode that allows direct
3646 access in case EXP is in memory. */
3647 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
3649 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3650 if (mode == BLKmode)
3654 if (integer_zerop (offset))
3657 if (offset != 0 && contains_placeholder_p (offset))
3658 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
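
/* What get_inner_reference computes, restated on a concrete layout:
   for a reference like S.A[I].F it yields a constant bit position
   plus a variable byte offset that scales with I.  The structs below
   are an invented example, fenced with #if 0.  */
#if 0
#include <stddef.h>
#include <stdio.h>

struct inner { int pad; int f; };
struct outer { char c; struct inner a[4]; };

int
main (void)
{
  /* Constant part: offset of A[] plus offset of F within INNER,
     times 8 bits per byte on this hypothetical host.  */
  unsigned long const_bitpos
    = (unsigned long) (offsetof (struct outer, a)
                       + offsetof (struct inner, f)) * 8;
  printf ("bitpos = %lu bits, offset = i * %lu bytes\n",
          const_bitpos, (unsigned long) sizeof (struct inner));
  return 0;
}
#endif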
3665 /* Given an rtx VALUE that may contain additions and multiplications,
3666 return an equivalent value that just refers to a register or memory.
3667 This is done by generating instructions to perform the arithmetic
3668 and returning a pseudo-register containing the value.
3670 The returned value may be a REG, SUBREG, MEM or constant. */
3673 force_operand (value, target)
3676 register optab binoptab = 0;
3677 /* Use a temporary to force order of execution of calls to
3681 /* Use subtarget as the target for operand 0 of a binary operation. */
3682 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3684 if (GET_CODE (value) == PLUS)
3685 binoptab = add_optab;
3686 else if (GET_CODE (value) == MINUS)
3687 binoptab = sub_optab;
3688 else if (GET_CODE (value) == MULT)
3690 op2 = XEXP (value, 1);
3691 if (!CONSTANT_P (op2)
3692 && !(GET_CODE (op2) == REG && op2 != subtarget))
3694 tmp = force_operand (XEXP (value, 0), subtarget);
3695 return expand_mult (GET_MODE (value), tmp,
3696 force_operand (op2, NULL_RTX),
3702 op2 = XEXP (value, 1);
3703 if (!CONSTANT_P (op2)
3704 && !(GET_CODE (op2) == REG && op2 != subtarget))
3706 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3708 binoptab = add_optab;
3709 op2 = negate_rtx (GET_MODE (value), op2);
3712 /* Check for an addition with OP2 a constant integer and our first
3713 operand a PLUS of a virtual register and something else. In that
3714 case, we want to emit the sum of the virtual register and the
3715 constant first and then add the other value. This allows virtual
3716 register instantiation to simply modify the constant rather than
3717 creating another one around this addition. */
3718 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3719 && GET_CODE (XEXP (value, 0)) == PLUS
3720 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3721 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3722 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3724 rtx temp = expand_binop (GET_MODE (value), binoptab,
3725 XEXP (XEXP (value, 0), 0), op2,
3726 subtarget, 0, OPTAB_LIB_WIDEN);
3727 return expand_binop (GET_MODE (value), binoptab, temp,
3728 force_operand (XEXP (XEXP (value, 0), 1), 0),
3729 target, 0, OPTAB_LIB_WIDEN);
3732 tmp = force_operand (XEXP (value, 0), subtarget);
3733 return expand_binop (GET_MODE (value), binoptab, tmp,
3734 force_operand (op2, NULL_RTX),
3735 target, 0, OPTAB_LIB_WIDEN);
3736 /* We give UNSIGNEDP = 0 to expand_binop
3737 because the only operations we are expanding here are signed ones. */
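
/* The virtual-register special case above is a reassociation:
   (PLUS (PLUS vreg other) const) is computed as
   (PLUS (PLUS vreg const) other), so instantiation can fold the
   constant into the register's offset.  The same identity on plain
   integers, with arbitrary values, fenced with #if 0: */
#if 0
#include <stdio.h>

int
main (void)
{
  long vreg = 0x1000, other = 33, c = 8;
  long direct = (vreg + other) + c;
  long reassoc = (vreg + c) + other;   /* constant folded toward VREG */
  printf ("%ld == %ld\n", direct, reassoc);
  return 0;
}
#endif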
3742 /* Subroutine of expand_expr:
3743 save the non-copied parts (LIST) of an expr (LHS), and return a list
3744 which can restore these values to their previous values,
3745 should something modify their storage. */
3748 save_noncopied_parts (lhs, list)
3755 for (tail = list; tail; tail = TREE_CHAIN (tail))
3756 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3757 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3760 tree part = TREE_VALUE (tail);
3761 tree part_type = TREE_TYPE (part);
3762 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3763 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3764 int_size_in_bytes (part_type), 0);
3765 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (part_type);
3766 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3767 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3768 parts = tree_cons (to_be_saved,
3769 build (RTL_EXPR, part_type, NULL_TREE,
3772 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
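
/* The effect modeled above, restated in ordinary C terms: before an
   assignment that must not clobber certain members, save those
   members to temporaries so they can be restored afterwards.  The
   types are invented for illustration, fenced with #if 0.  */
#if 0
struct t { int copied; int noncopied; };

static void
assign_preserving (struct t *lhs, const struct t *rhs)
{
  int saved = lhs->noncopied;   /* save_noncopied_parts */
  *lhs = *rhs;                  /* the assignment proper */
  lhs->noncopied = saved;       /* restore from the saved list */
}
#endif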
3777 /* Subroutine of expand_expr:
3778 record the non-copied parts (LIST) of an expr (LHS), and return a list
3779 which specifies the initial values of these parts. */
3782 init_noncopied_parts (lhs, list)
3789 for (tail = list; tail; tail = TREE_CHAIN (tail))
3790 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3791 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3794 tree part = TREE_VALUE (tail);
3795 tree part_type = TREE_TYPE (part);
3796 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3797 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3802 /* Subroutine of expand_expr: return nonzero iff there is no way that
3803 EXP can reference X, which is being modified. */
3806 safe_from_p (x, exp)
3814 /* If EXP has varying size, we MUST use a target since we currently
3815 have no way of allocating temporaries of variable size. So we
3816 assume here that something at a higher level has prevented a
3817 clash. This is somewhat bogus, but the best we can do. */
3818 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
3819 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST))
3822 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3823 find the underlying pseudo. */
3824 if (GET_CODE (x) == SUBREG)
3827 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3831 /* If X is a location in the outgoing argument area, it is always safe. */
3832 if (GET_CODE (x) == MEM
3833 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3834 || (GET_CODE (XEXP (x, 0)) == PLUS
3835 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3838 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3841 exp_rtl = DECL_RTL (exp);
3848 if (TREE_CODE (exp) == TREE_LIST)
3849 return ((TREE_VALUE (exp) == 0
3850 || safe_from_p (x, TREE_VALUE (exp)))
3851 && (TREE_CHAIN (exp) == 0
3852 || safe_from_p (x, TREE_CHAIN (exp))));
3857 return safe_from_p (x, TREE_OPERAND (exp, 0));
3861 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3862 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3866 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3867 the expression. If it is set, we conflict iff we are that rtx or
3868 both are in memory. Otherwise, we check all operands of the
3869 expression recursively. */
3871 switch (TREE_CODE (exp))
3874 return (staticp (TREE_OPERAND (exp, 0))
3875 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3878 if (GET_CODE (x) == MEM)
3883 exp_rtl = CALL_EXPR_RTL (exp);
3886 /* Assume that the call will clobber all hard registers and
3887 all of memory. */
3888 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3889 || GET_CODE (x) == MEM)
3896 exp_rtl = RTL_EXPR_RTL (exp);
3898 /* We don't know what this can modify. */
3903 case WITH_CLEANUP_EXPR:
3904 exp_rtl = RTL_EXPR_RTL (exp);
3907 case CLEANUP_POINT_EXPR:
3908 return safe_from_p (x, TREE_OPERAND (exp, 0));
3911 exp_rtl = SAVE_EXPR_RTL (exp);
3915 /* The only operand we look at is operand 1. The rest aren't
3916 part of the expression. */
3917 return safe_from_p (x, TREE_OPERAND (exp, 1));
3919 case METHOD_CALL_EXPR:
3920 /* This takes a rtx argument, but shouldn't appear here. */
3924 /* If we have an rtx, we do not need to scan our operands. */
3928 nops = tree_code_length[(int) TREE_CODE (exp)];
3929 for (i = 0; i < nops; i++)
3930 if (TREE_OPERAND (exp, i) != 0
3931 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3935 /* If we have an rtl, find any enclosed object. Then see if we conflict
3936 with it. */
3939 if (GET_CODE (exp_rtl) == SUBREG)
3941 exp_rtl = SUBREG_REG (exp_rtl);
3942 if (GET_CODE (exp_rtl) == REG
3943 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3947 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3948 are memory and EXP is not readonly. */
3949 return ! (rtx_equal_p (x, exp_rtl)
3950 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3951 && ! TREE_READONLY (exp)));
3954 /* If we reach here, it is safe. */
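
/* What safe_from_p protects against, restated at the source level:
   expanding the rhs directly into X while the rhs still reads X
   would let a partial store corrupt a later read.  A made-up
   fragment showing the shape of the conflict, fenced with #if 0: */
#if 0
#include <string.h>

struct s { int a[8]; };

static void
example (struct s *x, const struct s *y)
{
  struct s tmp;
  /* The result must be built in TMP, not directly in *X, because
     *X is still a source operand of the computation.  */
  memcpy (&tmp, y, sizeof tmp);
  tmp.a[0] += x->a[0];
  memcpy (x, &tmp, sizeof tmp);
}
#endif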
3958 /* Subroutine of expand_expr: return nonzero iff EXP is an
3959 expression whose type is statically determinable. */
3965 if (TREE_CODE (exp) == PARM_DECL
3966 || TREE_CODE (exp) == VAR_DECL
3967 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3968 || TREE_CODE (exp) == COMPONENT_REF
3969 || TREE_CODE (exp) == ARRAY_REF)
3974 /* expand_expr: generate code for computing expression EXP.
3975 An rtx for the computed value is returned. The value is never null.
3976 In the case of a void EXP, const0_rtx is returned.
3978 The value may be stored in TARGET if TARGET is nonzero.
3979 TARGET is just a suggestion; callers must assume that
3980 the rtx returned may not be the same as TARGET.
3982 If TARGET is CONST0_RTX, it means that the value will be ignored.
3984 If TMODE is not VOIDmode, it suggests generating the
3985 result in mode TMODE. But this is done only when convenient.
3986 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3987 TMODE is just a suggestion; callers must assume that
3988 the rtx returned may not have mode TMODE.
3990 Note that TARGET may have neither TMODE nor MODE. In that case, it
3991 probably will not be used.
3993 If MODIFIER is EXPAND_SUM then when EXP is an addition
3994 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3995 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3996 products as above, or REG or MEM, or constant.
3997 Ordinarily in such cases we would output mul or add instructions
3998 and then return a pseudo reg containing the sum.
4000 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4001 it also marks a label as absolutely required (it can't be dead).
4002 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4003 This is used for outputting expressions used in initializers.
4005 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4006 with a constant address even if that address is not normally legitimate.
4007 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4010 expand_expr (exp, target, tmode, modifier)
4013 enum machine_mode tmode;
4014 enum expand_modifier modifier;
4016 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4017 This is static so it will be accessible to our recursive callees. */
4018 static tree placeholder_list = 0;
4019 register rtx op0, op1, temp;
4020 tree type = TREE_TYPE (exp);
4021 int unsignedp = TREE_UNSIGNED (type);
4022 register enum machine_mode mode = TYPE_MODE (type);
4023 register enum tree_code code = TREE_CODE (exp);
4025 /* Use subtarget as the target for operand 0 of a binary operation. */
4026 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4027 rtx original_target = target;
4028 /* Maybe defer this until sure not doing bytecode? */
4029 int ignore = (target == const0_rtx
4030 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4031 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4032 || code == COND_EXPR)
4033 && TREE_CODE (type) == VOID_TYPE));
4037 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4039 bc_expand_expr (exp);
4043 /* Don't use hard regs as subtargets, because the combiner
4044 can only handle pseudo regs. */
4045 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4047 /* Avoid subtargets inside loops,
4048 since they hide some invariant expressions. */
4049 if (preserve_subexpressions_p ())
4052 /* If we are going to ignore this result, we need only do something
4053 if there is a side-effect somewhere in the expression. If there
4054 is, short-circuit the most common cases here. Note that we must
4055 not call expand_expr with anything but const0_rtx in case this
4056 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4060 if (! TREE_SIDE_EFFECTS (exp))
4063 /* Ensure we reference a volatile object even if value is ignored. */
4064 if (TREE_THIS_VOLATILE (exp)
4065 && TREE_CODE (exp) != FUNCTION_DECL
4066 && mode != VOIDmode && mode != BLKmode)
4068 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4069 if (GET_CODE (temp) == MEM)
4070 temp = copy_to_reg (temp);
4074 if (TREE_CODE_CLASS (code) == '1')
4075 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4076 VOIDmode, modifier);
4077 else if (TREE_CODE_CLASS (code) == '2'
4078 || TREE_CODE_CLASS (code) == '<')
4080 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4081 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4084 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4085 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4086 /* If the second operand has no side effects, just evaluate
4087 the first. */
4088 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4089 VOIDmode, modifier);
4094 /* If we will do cse, generate all results into pseudo registers
4095 since 1) that allows cse to find more things
4096 and 2) otherwise cse could produce an insn the machine
4097 cannot support. */
4099 if (! cse_not_expected && mode != BLKmode && target
4100 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4107 tree function = decl_function_context (exp);
4108 /* Handle using a label in a containing function. */
4109 if (function != current_function_decl && function != 0)
4111 struct function *p = find_function_data (function);
4112 /* Allocate in the memory associated with the function
4113 that the label is in. */
4114 push_obstacks (p->function_obstack,
4115 p->function_maybepermanent_obstack);
4117 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4118 label_rtx (exp), p->forced_labels);
4121 else if (modifier == EXPAND_INITIALIZER)
4122 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4123 label_rtx (exp), forced_labels);
4124 temp = gen_rtx (MEM, FUNCTION_MODE,
4125 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4126 if (function != current_function_decl && function != 0)
4127 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4132 if (DECL_RTL (exp) == 0)
4134 error_with_decl (exp, "prior parameter's size depends on `%s'");
4135 return CONST0_RTX (mode);
4138 /* ... fall through ... */
4141 /* If a static var's type was incomplete when the decl was written,
4142 but the type is complete now, lay out the decl now. */
4143 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4144 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4146 push_obstacks_nochange ();
4147 end_temporary_allocation ();
4148 layout_decl (exp, 0);
4149 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4153 /* ... fall through ... */
4157 if (DECL_RTL (exp) == 0)
4160 /* Ensure the variable is marked as used even if it doesn't go through
4161 a parser. If it hasn't been used yet, write out an external
4162 definition. */
4163 if (! TREE_USED (exp))
4165 assemble_external (exp);
4166 TREE_USED (exp) = 1;
4169 /* Handle variables inherited from containing functions. */
4170 context = decl_function_context (exp);
4172 /* We treat inline_function_decl as an alias for the current function
4173 because that is the inline function whose vars, types, etc.
4174 are being merged into the current function.
4175 See expand_inline_function. */
4177 if (context != 0 && context != current_function_decl
4178 && context != inline_function_decl
4179 /* If var is static, we don't need a static chain to access it. */
4180 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4181 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4185 /* Mark as non-local and addressable. */
4186 DECL_NONLOCAL (exp) = 1;
4187 mark_addressable (exp);
4188 if (GET_CODE (DECL_RTL (exp)) != MEM)
4190 addr = XEXP (DECL_RTL (exp), 0);
4191 if (GET_CODE (addr) == MEM)
4192 addr = gen_rtx (MEM, Pmode,
4193 fix_lexical_addr (XEXP (addr, 0), exp));
4195 addr = fix_lexical_addr (addr, exp);
4196 return change_address (DECL_RTL (exp), mode, addr);
4199 /* This is the case of an array whose size is to be determined
4200 from its initializer, while the initializer is still being parsed.
4203 if (GET_CODE (DECL_RTL (exp)) == MEM
4204 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4205 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4206 XEXP (DECL_RTL (exp), 0));
4208 /* If DECL_RTL is memory, we are in the normal case and either
4209 the address is not valid or it is not a register and -fforce-addr
4210 is specified, get the address into a register. */
4212 if (GET_CODE (DECL_RTL (exp)) == MEM
4213 && modifier != EXPAND_CONST_ADDRESS
4214 && modifier != EXPAND_SUM
4215 && modifier != EXPAND_INITIALIZER
4216 && (! memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
4218 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4219 return change_address (DECL_RTL (exp), VOIDmode,
4220 copy_rtx (XEXP (DECL_RTL (exp), 0)));
      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);
    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);
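      /* A SAVE_EXPR is expanded only once: the first expansion records its
	 value in SAVE_EXPR_RTL, and every later reference reuses that rtx.  */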
    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == BLKmode)
	    {
	      temp
		= assign_stack_temp (mode, int_size_in_bytes (type), 0);
	      MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
	    }
	  else if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = gen_reg_rtx (promote_mode (type, mode, &unsignedp, 0));

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}
      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
	 see if some object in its references is of type TYPE.  For
	 further information, see tree.def.  */
      if (placeholder_list)
	{
	  tree object;
	  tree old_list = placeholder_list;

	  for (object = TREE_PURPOSE (placeholder_list);
	       (TYPE_MAIN_VARIANT (TREE_TYPE (object))
		!= TYPE_MAIN_VARIANT (type))
	       && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
		   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
	       object = TREE_OPERAND (object, 0))
	    ;

	  if (object != 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
		  == TYPE_MAIN_VARIANT (type)))
	    {
	      /* Expand this object skipping the list entries before
		 it was found in case it is also a PLACEHOLDER_EXPR.
		 In that case, we want to translate it using subsequent
		 entries.  */
	      placeholder_list = TREE_CHAIN (placeholder_list);
	      temp = expand_expr (object, original_target, tmode, modifier);
	      placeholder_list = old_list;
	      return temp;
	    }
	}

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();
    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;
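      /* An EXIT_EXPR leaves the innermost loop when its operand is true;
	 a LOOP_EXPR is an infinite loop around its body, terminated only
	 by an embedded EXIT_EXPR or equivalent jump.  */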
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (move_by_pieces_ninsns
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT)
			    > MOVE_RATIO))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  if (target == 0 || ! safe_from_p (target, exp))
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		{
		  target
		    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
		  if (AGGREGATE_TYPE_P (type))
		    MEM_IN_STRUCT_P (target) = 1;
		}
	    }
	  store_constructor (exp, target);
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;

	/* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
	   for  *PTR += ANYTHING  where PTR is put inside the SAVE_EXPR.
	   This code has the same general effect as simply doing
	   expand_expr on the save expr, except that the expression PTR
	   is computed for use as a memory address.  This means different
	   code, suitable for indexing, may be generated.  */
	if (TREE_CODE (exp1) == SAVE_EXPR
	    && SAVE_EXPR_RTL (exp1) == 0
	    && TYPE_MODE (TREE_TYPE (exp1)) == ptr_mode)
	  {
	    temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
				VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, temp);
	    op0 = copy_all_regs (op0);
	    SAVE_EXPR_RTL (exp1) = op0;
	  }
	else
	  {
	    op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	    op0 = memory_address (mode, op0);
	  }

	temp = gen_rtx (MEM, mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	    || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_IN_STRUCT_P (temp) = 1;
	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }
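      /* For ARRAY_REF, a constant index into a constant array is folded
	 below to the element's value; the general case is rewritten as
	 *(&array + index * size) and expanded recursively.  */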
    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	int i;
	if (TREE_CODE (low_bound) != INTEGER_CST
	    && contains_placeholder_p (low_bound))
	  low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the lowbound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));
	if ((TREE_CODE (index) != INTEGER_CST
	     || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	    && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
	  {
	    /* Nonconstant array index or nonconstant element size, and
	       not an array in an unaligned (packed) structure field.
	       Generate the tree for *(&array+index) and expand that,
	       except do it in a language-independent way
	       and don't complain about non-lvalue arrays.
	       `mark_addressable' should already have been called
	       for any array for which this case will be reached.  */

	    /* Don't forget the const or volatile flag from the array
	       element.  */
	    tree variant_type = build_type_variant (type,
						    TREE_READONLY (exp),
						    TREE_THIS_VOLATILE (exp));
	    tree array_adr = build1 (ADDR_EXPR,
				     build_pointer_type (variant_type), array);
	    tree elt;
	    tree size = size_in_bytes (type);

	    /* Convert the integer argument to a type the same size as a
	       pointer so the multiply won't overflow spuriously.  */
	    if (TYPE_PRECISION (index_type) != POINTER_SIZE)
	      index = convert (type_for_size (POINTER_SIZE, 0), index);

	    if (TREE_CODE (size) != INTEGER_CST
		&& contains_placeholder_p (size))
	      size = build (WITH_RECORD_EXPR, sizetype, size, exp);

	    /* Don't think the address has side effects
	       just because the array does.
	       (In some cases the address might have side effects,
	       and we fail to record that fact here.  However, it should not
	       matter, since expand_expr should not care.)  */
	    TREE_SIDE_EFFECTS (array_adr) = 0;

	    elt = build1 (INDIRECT_REF, type,
			  fold (build (PLUS_EXPR,
				       TYPE_POINTER_TO (variant_type),
				       array_adr,
				       fold (build (MULT_EXPR,
						    TYPE_POINTER_TO (variant_type),
						    index, size)))));

	    /* Volatility, etc., of new expression is same as old
	       expression.  */
	    TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
	    TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
	    TREE_READONLY (elt) = TREE_READONLY (exp);

	    return expand_expr (elt, target, tmode, modifier);
	  }
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);
	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && i < TREE_STRING_LENGTH (init))
		  return GEN_INT (TREE_STRING_POINTER (init)[i]);
	      }
	  }
      }
      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
	      return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
	}
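      /* The general case: use get_inner_reference to split the reference
	 into the innermost containing object plus a bit position and size
	 within it, then fetch the field from that object.  */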
      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	int alignment;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* In some cases, we will be offsetting OP0's address by a constant.
	   So get it as a sum, if possible.  If we will be using it
	   directly in an insn, we validate it.  */
	op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
	alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();
	    op0 = change_address (op0, VOIDmode,
				  gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
					   force_reg (ptr_mode, offset_rtx)));
	    /* If we have a variable offset, the known alignment
	       is only that of the innermost structure containing the field.
	       (Actually, we could sometimes do better by using the
	       size of an element of the innermost array, but no need.)  */
	    if (TREE_CODE (exp) == COMPONENT_REF
		|| TREE_CODE (exp) == BIT_FIELD_REF)
	      alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			   / BITS_PER_UNIT);
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_SUM
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1])
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      abort ();

	    op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));
	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_IN_STRUCT_P (op0) = 1;
	      }

	    return op0;
	  }
	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
						    (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));
	MEM_IN_STRUCT_P (op0) = 1;
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
	  return op0;
	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	convert_move (target, op0, unsignedp);
	return target;
      }
    case OFFSET_REF:
      {
	tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
	tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
	op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
	temp = gen_rtx (MEM, mode, memory_address (mode, op0));
	MEM_IN_STRUCT_P (temp) = 1;
	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
	 a location is accessed through a pointer to const does not mean
	 that the value there can never change.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
	return temp;
      }
      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [(index - rlo) / bits_per_word];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
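	/* Note that the code below actually indexes the set a byte at a
	   time, so BITS_PER_UNIT plays the role of bits_per_word in the
	   algorithm sketched above.  */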
	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if ((TREE_CODE (set_high_bound) == INTEGER_CST
	     && TREE_CODE (set_low_bound) == INTEGER_CST
	     && tree_int_cst_lt (set_high_bound, set_low_bound)
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_blt (op1));
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
			   GET_MODE (index_val), iunsignedp, 0);
	    emit_jump_insn (gen_bgt (op1));
	  }
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx (MEM, byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);
	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	  cleanups_this_call
	    = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	  (*interim_eh_hook) (NULL_TREE);
	}
      return RTL_EXPR_RTL (exp);
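      /* A CLEANUP_POINT_EXPR expands its operand at a fresh temporary-slot
	 level and then runs any cleanups queued during that expansion.  */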
    case CLEANUP_POINT_EXPR:
      {
	extern int temp_slot_level;
	tree old_cleanups = cleanups_this_call;
	int old_temp_level = target_temp_slot_level;
	push_temp_slots ();
	target_temp_slot_level = temp_slot_level;
	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	expand_cleanups_to (old_cleanups);
	preserve_temp_slots (op0);
	free_temp_slots ();
	pop_temp_slots ();
	target_temp_slot_level = old_temp_level;
      }
      return op0;
    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);
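      /* The conversions below do not change the value.  Expand the operand
	 and, if the modes differ, convert it; a conversion to a union type
	 stores the operand into one field of the union.  */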
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (target == 0)
	    {
	      if (mode == BLKmode)
		{
		  if (TYPE_SIZE (type) == 0
		      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
		    abort ();
		  target = assign_stack_temp (BLKmode,
					      (TREE_INT_CST_LOW (TYPE_SIZE (type))
					       + BITS_PER_UNIT - 1)
					      / BITS_PER_UNIT, 0);
		  MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
		}
	      else
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (flag_force_mem && GET_CODE (op0) == MEM)
	op0 = copy_to_reg (op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx (MINUS, mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
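      /* For MULT_EXPR: when asked for a sum (EXPAND_SUM) with a constant
	 multiplier, return an indexed-address form; use a widening multiply
	 when both operands were extended from a narrower type; otherwise
	 fall through to expand_mult.  */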
    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}
      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx (PLUS, mode,
			    gen_rtx (MULT, mode, XEXP (op0, 0),
				     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
			    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
				     * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx (MULT, mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode)
	      && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
	    {
	      op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				 NULL_RTX, VOIDmode, 0);
	      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				   VOIDmode, 0);
	      else
		op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				   NULL_RTX, VOIDmode, 0);
	      goto binop2;
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
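      /* All integer division and modulus variants funnel into
	 expand_divmod; its first argument selects the remainder (1)
	 rather than the quotient (0).  */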
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0)));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
			 GET_MODE (temp), unsignedp, 0);
	  emit_jump_insn (gen_beq (op1));
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);
    case COND_EXPR:
      {
	rtx flag = NULL_RTX;
	tree left_cleanups = NULL_TREE;
	tree right_cleanups = NULL_TREE;

	/* Used to save a pointer to the place to put the setting of
	   the flag that indicates if this side of the conditional was
	   taken.  We backpatch the code, if we find out later that we
	   have any conditional cleanups that need to be performed.  */
	rtx dest_right_flag = NULL_RTX;
	rtx dest_left_flag = NULL_RTX;

	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;
	tree old_cleanups = cleanups_this_call;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }
	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
		 && GET_MODE (original_target) == mode
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (mode == BLKmode)
	  {
	    if (TYPE_SIZE (type) == 0
		|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	      abort ();

	    temp = assign_stack_temp (BLKmode,
				      (TREE_INT_CST_LOW (TYPE_SIZE (type))
				       + BITS_PER_UNIT - 1)
				      / BITS_PER_UNIT, 0);
	    MEM_IN_STRUCT_P (temp) = AGGREGATE_TYPE_P (type);
	  }
	else
	  temp = gen_reg_rtx (mode);
	/* Check for X ? A + B : A.  If we have this, we can copy
	   A to the output and conditionally add B.  Similarly for unary
	   operations.  Don't do this if X has side-effects because
	   those side effects might affect A or B and the "?" operation is
	   a sequence point in ANSI.  (We test for side effects later.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
	/* If we had X ? A + 1 : A and we can do the test of X as a store-flag
	   operation, do this as A + (X != 0).  Similarly for other simple
	   binary operators.  */
	if (temp && singleton && binary_op
	    && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && integer_onep (TREE_OPERAND (binary_op, 1))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	flag = gen_reg_rtx (word_mode);
	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    dest_left_flag = get_last_insn ();
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    /* Allows cleanups up to here.  */
	    old_cleanups = cleanups_this_call;
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	    dest_right_flag = get_last_insn ();
	  }
#if 0
	/* This is now done in jump.c and is better done there because it
	   produces shorter register lifetimes.  */

	/* Check for both possibilities either constants or variables
	   in registers (but not the same as the target!).  If so, can
	   save branches by assigning one, branching, and assigning the
	   other.  */
	else if (temp && GET_MODE (temp) != BLKmode
		 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
		     || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
			  || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
			 && DECL_RTL (TREE_OPERAND (exp, 1))
			 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
			 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
		 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
		     || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
			  || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
			 && DECL_RTL (TREE_OPERAND (exp, 2))
			 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
			 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    dest_left_flag = get_last_insn ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    /* Allows cleanups up to here.  */
	    old_cleanups = cleanups_this_call;
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	    dest_right_flag = get_last_insn ();
	  }
#endif
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    dest_left_flag = get_last_insn ();
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    /* Allows cleanups up to here.  */
	    old_cleanups = cleanups_this_call;
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	    dest_right_flag = get_last_insn ();
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    dest_left_flag = get_last_insn ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    /* Allows cleanups up to here.  */
	    old_cleanups = cleanups_this_call;
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	    dest_right_flag = get_last_insn ();
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    /* Allows cleanups up to here.  */
	    old_cleanups = cleanups_this_call;
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    dest_left_flag = get_last_insn ();

	    /* Handle conditional cleanups, if any.  */
	    left_cleanups = defer_cleanups_to (old_cleanups);

	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    dest_right_flag = get_last_insn ();
	  }

	/* Handle conditional cleanups, if any.  */
	right_cleanups = defer_cleanups_to (old_cleanups);

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;
	/* Add back in, any conditional cleanups.  */
	if (left_cleanups || right_cleanups)
	  {
	    tree new_cleanups;
	    tree cond;
	    rtx last;

	    /* Now that we know that a flag is needed, go back and add in the
	       setting of the flag.  */

	    /* Do the left side flag.  */
	    last = get_last_insn ();
	    /* Flag left cleanups as needed.  */
	    emit_move_insn (flag, const1_rtx);
	    /* ??? deprecated, use sequences instead.  */
	    reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);

	    /* Do the right side flag.  */
	    last = get_last_insn ();
	    /* Flag right cleanups as needed.  */
	    emit_move_insn (flag, const0_rtx);
	    /* ??? deprecated, use sequences instead.  */
	    reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);

	    /* Convert flag, which is an rtx, into a tree.  */
	    cond = make_node (RTL_EXPR);
	    TREE_TYPE (cond) = integer_type_node;
	    RTL_EXPR_RTL (cond) = flag;
	    RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
	    cond = save_expr (cond);

	    if (! left_cleanups)
	      left_cleanups = integer_zero_node;
	    if (! right_cleanups)
	      right_cleanups = integer_zero_node;
	    new_cleanups = build (COND_EXPR, void_type_node,
				  truthvalue_conversion (cond),
				  left_cleanups, right_cleanups);
	    new_cleanups = fold (new_cleanups);

	    /* Now add in the conditionalized cleanups.  */
	    cleanups_this_call
	      = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
	    (*interim_eh_hook) (NULL_TREE);
	  }
	return temp;
      }
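      /* A TARGET_EXPR initializes a slot whose location was not known when
	 the tree was built; expand the initializer directly into the slot
	 and queue a cleanup for it if one is needed.  */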
case TARGET_EXPR:
{
6039 int need_exception_region = 0;
6040 /* Something needs to be initialized, but we didn't know
6041 where that thing was when building the tree. For example,
6042 it could be the return value of a function, or a parameter
6043 to a function which is laid out on the stack, or a temporary
6044 variable which must be passed by reference.
6046 We guarantee that the expression will either be constructed
6047 or copied into our original target. */
6049 tree slot = TREE_OPERAND (exp, 0);
6053 if (TREE_CODE (slot) != VAR_DECL)
abort ();

if (! ignore)
6057 target = original_target;

if (target == 0)
{
6061 if (DECL_RTL (slot) != 0)
{
6063 target = DECL_RTL (slot);
6064 /* We have already expanded the slot, so don't do anything else. */
6066 if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
}
else
{
6071 target = assign_stack_temp (mode, int_size_in_bytes (type), 2);
6072 MEM_IN_STRUCT_P (target) = AGGREGATE_TYPE_P (type);
6073 /* All temp slots at this level must not conflict. */
6074 preserve_temp_slots (target);
6075 DECL_RTL (slot) = target;
6077 /* Since SLOT is not known to the called function
6078 to belong to its stack frame, we must build an explicit
6079 cleanup. This case occurs when we must build up a reference
6080 to pass the reference as an argument. In this case,
6081 it is very likely that such a reference need not be built here. */
6084 if (TREE_OPERAND (exp, 2) == 0)
6085 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6086 if (TREE_OPERAND (exp, 2))
{
6088 cleanups_this_call = tree_cons (NULL_TREE,
6089 TREE_OPERAND (exp, 2),
6090 cleanups_this_call);
6091 need_exception_region = 1;
}
}
else
{
6097 /* This case does occur, when expanding a parameter which
6098 needs to be constructed on the stack. The target
6099 is the actual stack address that we want to initialize.
6100 The function we call will perform the cleanup in this case. */
6102 /* If we have already assigned it space, use that space,
6103 not the target that we were passed in, as our target
6104 parameter is only a hint. */
6105 if (DECL_RTL (slot) != 0)
{
6107 target = DECL_RTL (slot);
6108 /* We have already expanded the slot, so don't do anything else. */
6110 if (TREE_OPERAND (exp, 1) == NULL_TREE)
return target;
}
else
6114 DECL_RTL (slot) = target;
}
6117 exp1 = TREE_OPERAND (exp, 1);
6118 /* Mark it as expanded. */
6119 TREE_OPERAND (exp, 1) = NULL_TREE;
6121 temp = expand_expr (exp1, target, tmode, modifier);
6123 if (need_exception_region)
6124 (*interim_eh_hook) (NULL_TREE);

return target;
}

case INIT_EXPR:
{
6131 tree lhs = TREE_OPERAND (exp, 0);
6132 tree rhs = TREE_OPERAND (exp, 1);
6133 tree noncopied_parts = 0;
6134 tree lhs_type = TREE_TYPE (lhs);
6136 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6137 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6138 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6139 TYPE_NONCOPIED_PARTS (lhs_type));
6140 while (noncopied_parts != 0)
{
6142 expand_assignment (TREE_VALUE (noncopied_parts),
6143 TREE_PURPOSE (noncopied_parts), 0, 0);
6144 noncopied_parts = TREE_CHAIN (noncopied_parts);
}

return temp;
}
case MODIFY_EXPR:
{
6151 /* If lhs is complex, expand calls in rhs before computing it.
6152 That's so we don't compute a pointer and save it over a call.
6153 If lhs is simple, compute it first so we can give it as a
6154 target if the rhs is just a call. This avoids an extra temp and copy
6155 and that prevents a partial-subsumption which makes bad code.
6156 Actually we could treat component_ref's of vars like vars. */
6158 tree lhs = TREE_OPERAND (exp, 0);
6159 tree rhs = TREE_OPERAND (exp, 1);
6160 tree noncopied_parts = 0;
6161 tree lhs_type = TREE_TYPE (lhs);
6165 if (TREE_CODE (lhs) != VAR_DECL
6166 && TREE_CODE (lhs) != RESULT_DECL
6167 && TREE_CODE (lhs) != PARM_DECL)
6168 preexpand_calls (exp);
6170 /* Check for |= or &= of a bitfield of size one into another bitfield
6171 of size one. In this case, (unless we need the result of the
6172 assignment) we can do this more efficiently with a
6173 test followed by an assignment, if necessary.
6175 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6176 things change so we do, this code should be enhanced to support it. */
if (ignore
6179 && TREE_CODE (lhs) == COMPONENT_REF
6180 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6181 || TREE_CODE (rhs) == BIT_AND_EXPR)
6182 && TREE_OPERAND (rhs, 0) == lhs
6183 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6184 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6185 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
{
6187 rtx label = gen_label_rtx ();
6189 do_jump (TREE_OPERAND (rhs, 1),
6190 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6191 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6192 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6193 (TREE_CODE (rhs) == BIT_IOR_EXPR
? integer_one_node
6195 : integer_zero_node)),
0, 0);
6197 do_pending_stack_adjust ();
emit_label (label);
return const0_rtx;
}
6202 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6203 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6204 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6205 TYPE_NONCOPIED_PARTS (lhs_type));
6207 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6208 while (noncopied_parts != 0)
{
6210 expand_assignment (TREE_PURPOSE (noncopied_parts),
6211 TREE_VALUE (noncopied_parts), 0, 0);
6212 noncopied_parts = TREE_CHAIN (noncopied_parts);
}
return temp;
}
6217 case PREINCREMENT_EXPR:
6218 case PREDECREMENT_EXPR:
6219 return expand_increment (exp, 0);
6221 case POSTINCREMENT_EXPR:
6222 case POSTDECREMENT_EXPR:
6223 /* Faster to treat as pre-increment if result is not used. */
6224 return expand_increment (exp, ! ignore);
case ADDR_EXPR:
6227 /* If nonzero, TEMP will be set to the address of something that might
6228 be a MEM corresponding to a stack slot. */
temp = 0;
6231 /* Are we taking the address of a nested function? */
6232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6233 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
6235 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6236 op0 = force_operand (op0, target);
6238 /* If we are taking the address of something erroneous, just
use zero. */
6240 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
return const0_rtx;
else
{
6244 /* We make sure to pass const0_rtx down if we came in with
6245 ignore set, to avoid doing the cleanups twice for something. */
6246 op0 = expand_expr (TREE_OPERAND (exp, 0),
6247 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6248 (modifier == EXPAND_INITIALIZER
6249 ? modifier : EXPAND_CONST_ADDRESS));
6251 /* If we are going to ignore the result, OP0 will have been set
6252 to const0_rtx, so just return it. Don't get confused and
6253 think we are taking the address of the constant. */
if (ignore)
return op0;
6257 /* We would like the object in memory. If it is a constant,
6258 we can have it be statically allocated into memory. For
6259 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6260 memory and store the value into it. */
6262 if (CONSTANT_P (op0))
6263 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
6265 else if (GET_CODE (op0) == MEM)
{
6267 mark_temp_addr_taken (op0);
6268 temp = XEXP (op0, 0);
}
6271 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6272 || GET_CODE (op0) == CONCAT)
{
6274 /* If this object is in a register, copy it into a stack
temporary; taking its address requires it to live in memory. */
6276 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6277 enum machine_mode inner_mode = TYPE_MODE (inner_type);
rtx memloc
6279 = assign_stack_temp (inner_mode,
6280 int_size_in_bytes (inner_type), 1);
6281 MEM_IN_STRUCT_P (memloc) = AGGREGATE_TYPE_P (inner_type);
6283 mark_temp_addr_taken (memloc);
6284 emit_move_insn (memloc, op0);
op0 = memloc;
}
6288 if (GET_CODE (op0) != MEM)
abort ();
6291 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
{
6293 temp = XEXP (op0, 0);
6294 #ifdef POINTERS_EXTEND_UNSIGNED
6295 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6296 && mode == ptr_mode)
6297 temp = convert_memory_address (ptr_mode, temp);
#endif

return temp;
}
6302 op0 = force_operand (XEXP (op0, 0), target);
6305 if (flag_force_addr && GET_CODE (op0) != REG)
6306 op0 = force_reg (Pmode, op0);
6308 if (GET_CODE (op0) == REG)
6309 mark_reg_pointer (op0);
6311 /* If we might have had a temp slot, add an equivalent address
so it can be found later. */
if (temp != 0)
6314 update_temp_slot_address (temp, op0);
6316 #ifdef POINTERS_EXTEND_UNSIGNED
6317 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
6318 && mode == ptr_mode)
6319 op0 = convert_memory_address (ptr_mode, op0);
#endif

return op0;
6324 case ENTRY_VALUE_EXPR:
abort ();
6327 /* COMPLEX type for Extended Pascal & Fortran */
case COMPLEX_EXPR:
{
6330 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx insns;
6333 /* Get the rtx code of the operands. */
6334 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6335 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
if (! target)
6338 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

start_sequence ();
6342 /* Move the real (op0) and imaginary (op1) parts to their location. */
6343 emit_move_insn (gen_realpart (mode, target), op0);
6344 emit_move_insn (gen_imagpart (mode, target), op1);
6346 insns = get_insns ();
end_sequence ();
6349 /* Complex construction should appear as a single unit. */
6350 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
6351 each with a separate pseudo as destination.
6352 It's not correct for flow to treat them as a unit. */
6353 if (GET_CODE (target) != CONCAT)
6354 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
else
emit_insns (insns);

return target;
}

case REALPART_EXPR:
6362 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6363 return gen_realpart (mode, op0);
case IMAGPART_EXPR:
6366 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
6367 return gen_imagpart (mode, op0);
case CONJ_EXPR:
{
6371 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
rtx imag_t;
rtx insns;
6375 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
if (! target)
6378 target = gen_reg_rtx (mode);

start_sequence ();
6382 /* Store the realpart and the negated imagpart to target. */
6383 emit_move_insn (gen_realpart (partmode, target),
6384 gen_realpart (partmode, op0));
6386 imag_t = gen_imagpart (partmode, target);
6387 temp = expand_unop (partmode, neg_optab,
6388 gen_imagpart (partmode, op0), imag_t, 0);
6390 emit_move_insn (imag_t, temp);
6392 insns = get_insns ();
end_sequence ();
6395 /* Conjugate should appear as a single unit.
6396 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6397 each with a separate pseudo as destination.
6398 It's not correct for flow to treat them as a unit. */
6399 if (GET_CODE (target) != CONCAT)
6400 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
else
emit_insns (insns);

return target;
}

case ERROR_MARK:
6408 op0 = CONST0_RTX (tmode);
6414 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
6417 /* Here to do an ordinary binary operator, generating an instruction
6418 from the optab already placed in `this_optab'. */
binop:
6420 preexpand_calls (exp);
6421 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
subtarget = 0;
6423 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6424 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6426 temp = expand_binop (mode, this_optab, op0, op1, target,
6427 unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0)
abort ();
return temp;
}
6434 /* Emit bytecode to evaluate the given expression EXP to the stack. */
void
6436 bc_expand_expr (exp)
tree exp;
{
6439 enum tree_code code;
6442 struct binary_operator *binoptab;
6443 struct unary_operator *unoptab;
6444 struct increment_operator *incroptab;
6445 struct bc_label *lab, *lab1;
6446 enum bytecode_opcode opcode;
6449 code = TREE_CODE (exp);
switch (code)
{
case PARM_DECL:
6455 if (DECL_RTL (exp) == 0)
{
6457 error_with_decl (exp, "prior parameter's size depends on `%s'");
return;
}
6461 bc_load_parmaddr (DECL_RTL (exp));
6462 bc_load_memory (TREE_TYPE (exp), exp);
return;

case VAR_DECL:
6468 if (DECL_RTL (exp) == 0)
6472 if (BYTECODE_LABEL (DECL_RTL (exp)))
6473 bc_load_externaddr (DECL_RTL (exp));
6475 bc_load_localaddr (DECL_RTL (exp));
6477 if (TREE_PUBLIC (exp))
6478 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
6479 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
6481 bc_load_localaddr (DECL_RTL (exp));
6483 bc_load_memory (TREE_TYPE (exp), exp);
return;

case INTEGER_CST:
6488 #ifdef DEBUG_PRINT_CODE
6489 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
6491 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
? SImode
6493 : TYPE_MODE (TREE_TYPE (exp)))],
6494 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
return;
case REAL_CST:
6500 #ifdef DEBUG_PRINT_CODE
6501 fprintf (stderr, " [%g]\n", (double) TREE_REAL_CST (exp));
#endif
6503 /* FIX THIS: find a better way to pass real_cst's. -bson */
6504 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6505 (double) TREE_REAL_CST (exp));
return;

case CALL_EXPR:
6514 /* We build a call description vector describing the type of
6515 the return value and of the arguments; this call vector,
6516 together with a pointer to a location for the return value
6517 and the base of the argument list, is passed to the low
6518 level machine dependent call subroutine, which is responsible
6519 for putting the arguments wherever real functions expect
6520 them, as well as getting the return value back. */
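/* A sketch of the resulting layout, assuming two arguments:
   { nargs, ret_type_code, ret_size,
     arg1_type_code, arg1_size, arg2_type_code, arg2_size },
   with sizes in bytes and type codes from bc_runtime_type_code.  */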
6522 tree calldesc = 0, arg;
tree type;
int nargs = 0;
rtx retval, r;
6526 /* Push the evaluated args on the evaluation stack in reverse
6527 order. Also make an entry for each arg in the calldesc
6528 vector while we're at it. */
6530 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6532 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
{
nargs++;
6535 bc_expand_expr (TREE_VALUE (arg));
6537 calldesc = tree_cons ((tree) 0,
6538 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
calldesc);
6540 calldesc = tree_cons ((tree) 0,
6541 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
calldesc);
}
6545 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6547 /* Allocate a location for the return value and push its
6548 address on the evaluation stack. Also make an entry
6549 at the front of the calldesc for the return value type. */
6551 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6552 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6553 bc_load_localaddr (retval);
6555 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6556 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6558 /* Prepend the argument count. */
6559 calldesc = tree_cons ((tree) 0,
6560 build_int_2 (nargs, 0),
calldesc);
6563 /* Push the address of the call description vector on the stack. */
6564 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6565 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6566 build_index_type (build_int_2 (nargs * 2, 0)));
6567 r = output_constant_def (calldesc);
6568 bc_load_externaddr (r);
6570 /* Push the address of the function to be called. */
6571 bc_expand_expr (TREE_OPERAND (exp, 0));
6573 /* Call the function, popping its address and the calldesc vector
6574 address off the evaluation stack in the process. */
6575 bc_emit_instruction (call);
6577 /* Pop the arguments off the stack. */
6578 bc_adjust_stack (nargs);
6580 /* Load the return value onto the stack. */
6581 bc_load_localaddr (retval);
6582 bc_load_memory (type, TREE_OPERAND (exp, 0));
return;
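/* Net effect on the evaluation stack for `f (a, b)', roughly:
   push b, push a, push &retval, push &calldesc, push &f, call,
   pop the arguments, then load the return value from RETVAL.  */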
case SAVE_EXPR:
6588 if (!SAVE_EXPR_RTL (exp))
{
6590 /* First time around: copy to local variable */
6591 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6592 TYPE_ALIGN (TREE_TYPE(exp)));
6593 bc_expand_expr (TREE_OPERAND (exp, 0));
6594 bc_emit_instruction (duplicate);
6596 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6597 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
}
else
{
6601 /* Consecutive reference: use saved copy */
6602 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6603 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
}
return;
6608 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6609 how are they handled instead? */
6612 TREE_USED (exp) = 1;
6613 bc_expand_expr (STMT_BODY (exp));
6620 bc_expand_expr (TREE_OPERAND (exp, 0));
6621 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6626 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6631 bc_expand_address (TREE_OPERAND (exp, 0));
6636 bc_expand_expr (TREE_OPERAND (exp, 0));
6637 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6642 bc_expand_expr (bc_canonicalize_array_ref (exp));
6647 bc_expand_component_address (exp);
6649 /* If we have a bitfield, generate a proper load */
6650 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6655 bc_expand_expr (TREE_OPERAND (exp, 0));
6656 bc_emit_instruction (drop);
6657 bc_expand_expr (TREE_OPERAND (exp, 1));
6662 bc_expand_expr (TREE_OPERAND (exp, 0));
6663 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6664 lab = bc_get_bytecode_label ();
6665 bc_emit_bytecode (xjumpifnot);
6666 bc_emit_bytecode_labelref (lab);
6668 #ifdef DEBUG_PRINT_CODE
6669 fputc ('\n', stderr);
#endif
6671 bc_expand_expr (TREE_OPERAND (exp, 1));
6672 lab1 = bc_get_bytecode_label ();
6673 bc_emit_bytecode (jump);
6674 bc_emit_bytecode_labelref (lab1);
6676 #ifdef DEBUG_PRINT_CODE
6677 fputc ('\n', stderr);
#endif
6680 bc_emit_bytecode_labeldef (lab);
6681 bc_expand_expr (TREE_OPERAND (exp, 2));
6682 bc_emit_bytecode_labeldef (lab1);
return;
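/* The bytecode emitted above for `a ? b : c' has the shape
	<a> xjumpifnot L1; <b> jump L2; L1: <c> L2:
   where LAB and LAB1 play the roles of L1 and L2.  */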
6685 case TRUTH_ANDIF_EXPR:
6687 opcode = xjumpifnot;
6690 case TRUTH_ORIF_EXPR:
6697 binoptab = optab_plus_expr;
6702 binoptab = optab_minus_expr;
6707 binoptab = optab_mult_expr;
6710 case TRUNC_DIV_EXPR:
6711 case FLOOR_DIV_EXPR:
6713 case ROUND_DIV_EXPR:
6714 case EXACT_DIV_EXPR:
6716 binoptab = optab_trunc_div_expr;
6719 case TRUNC_MOD_EXPR:
6720 case FLOOR_MOD_EXPR:
6722 case ROUND_MOD_EXPR:
6724 binoptab = optab_trunc_mod_expr;
6727 case FIX_ROUND_EXPR:
6728 case FIX_FLOOR_EXPR:
6730 abort (); /* Not used for C. */
6732 case FIX_TRUNC_EXPR:
6739 abort (); /* FIXME */
6743 binoptab = optab_rdiv_expr;
6748 binoptab = optab_bit_and_expr;
6753 binoptab = optab_bit_ior_expr;
6758 binoptab = optab_bit_xor_expr;
6763 binoptab = optab_lshift_expr;
6768 binoptab = optab_rshift_expr;
6771 case TRUTH_AND_EXPR:
6773 binoptab = optab_truth_and_expr;
6778 binoptab = optab_truth_or_expr;
6783 binoptab = optab_lt_expr;
6788 binoptab = optab_le_expr;
6793 binoptab = optab_ge_expr;
6798 binoptab = optab_gt_expr;
6803 binoptab = optab_eq_expr;
6808 binoptab = optab_ne_expr;
6813 unoptab = optab_negate_expr;
6818 unoptab = optab_bit_not_expr;
6821 case TRUTH_NOT_EXPR:
6823 unoptab = optab_truth_not_expr;
6826 case PREDECREMENT_EXPR:
6828 incroptab = optab_predecrement_expr;
6831 case PREINCREMENT_EXPR:
6833 incroptab = optab_preincrement_expr;
6836 case POSTDECREMENT_EXPR:
6838 incroptab = optab_postdecrement_expr;
6841 case POSTINCREMENT_EXPR:
6843 incroptab = optab_postincrement_expr;
6848 bc_expand_constructor (exp);
6858 tree vars = TREE_OPERAND (exp, 0);
6859 int vars_need_expansion = 0;
6861 /* Need to open a binding contour here because
6862 if there are any cleanups they must be contained here. */
6863 expand_start_bindings (0);
6865 /* Mark the corresponding BLOCK for output. */
6866 if (TREE_OPERAND (exp, 2) != 0)
6867 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6869 /* If VARS have not yet been expanded, expand them now. */
while (vars)
{
6872 if (DECL_RTL (vars) == 0)
{
6874 vars_need_expansion = 1;
expand_decl (vars);
}
6877 expand_decl_init (vars);
6878 vars = TREE_CHAIN (vars);
}
6881 bc_expand_expr (TREE_OPERAND (exp, 1));
6883 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
}
return;

binop:
6893 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6894 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
return;

unop:
6900 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
return;
andorif:
6906 bc_expand_expr (TREE_OPERAND (exp, 0));
6907 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6908 lab = bc_get_bytecode_label ();
6910 bc_emit_instruction (duplicate);
6911 bc_emit_bytecode (opcode);
6912 bc_emit_bytecode_labelref (lab);
6914 #ifdef DEBUG_PRINT_CODE
6915 fputc ('\n', stderr);
#endif
6918 bc_emit_instruction (drop);
6920 bc_expand_expr (TREE_OPERAND (exp, 1));
6921 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6922 bc_emit_bytecode_labeldef (lab);
return;
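/* For `a && b' the sequence above is roughly
	<a> dup xjumpifnot L; drop <b> L:
   and `a || b' has the same shape with xjumpif, so the left value
   itself serves as the result whenever it decides the outcome.  */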
increment:
6928 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6930 /* Push the quantum. */
6931 bc_expand_expr (TREE_OPERAND (exp, 1));
6933 /* Convert it to the lvalue's type. */
6934 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6936 /* Push the address of the lvalue */
6937 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6939 /* Perform actual increment */
6940 bc_expand_increment (incroptab, type);
return;
6944 /* Return the alignment in bits of EXP, a pointer valued expression.
6945 But don't return more than MAX_ALIGN no matter what.
6946 The alignment returned is, by default, the alignment of the thing that
6947 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6949 Otherwise, look at the expression to see if we can do better, i.e., if the
6950 expression is actually pointing at an object whose alignment is tighter. */
static unsigned
6953 get_pointer_alignment (exp, max_align)
tree exp;
unsigned max_align;
{
6957 unsigned align, inner;
6959 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return 0;
6962 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6963 align = MIN (align, max_align);
while (1)
{
6967 switch (TREE_CODE (exp))
{
case NOP_EXPR:
case CONVERT_EXPR:
6971 case NON_LVALUE_EXPR:
6972 exp = TREE_OPERAND (exp, 0);
6973 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
return align;
6975 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6976 align = MIN (inner, max_align);
break;

case PLUS_EXPR:
6980 /* If sum of pointer + int, restrict our maximum alignment to that
6981 imposed by the integer. If not, we can't do any better than ALIGN. */
6983 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
return align;
6986 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
& (max_align - 1)) != 0)
max_align >>= 1;
6991 exp = TREE_OPERAND (exp, 0);
break;

case ADDR_EXPR:
6995 /* See what we are pointing at and look at its alignment. */
6996 exp = TREE_OPERAND (exp, 0);
6997 if (TREE_CODE (exp) == FUNCTION_DECL)
6998 align = FUNCTION_BOUNDARY;
6999 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7000 align = DECL_ALIGN (exp);
7001 #ifdef CONSTANT_ALIGNMENT
7002 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7003 align = CONSTANT_ALIGNMENT (exp, align);
#endif
7005 return MIN (align, max_align);

default:
return align;
}
}
}
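/* For example, applied to `(char *) &some_double', the ADDR_EXPR
   case above can report the alignment of SOME_DOUBLE itself, which
   is tighter than the alignment promised by the pointer's target
   type alone.  */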
7013 /* Return the tree node and offset if a given argument corresponds to
7014 a string constant. */
static tree
7017 string_constant (arg, ptr_offset)
tree arg;
tree *ptr_offset;
{
STRIP_NOPS (arg);

7023 if (TREE_CODE (arg) == ADDR_EXPR
7024 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7026 *ptr_offset = integer_zero_node;
7027 return TREE_OPERAND (arg, 0);
7029 else if (TREE_CODE (arg) == PLUS_EXPR)
7031 tree arg0 = TREE_OPERAND (arg, 0);
7032 tree arg1 = TREE_OPERAND (arg, 1);
7037 if (TREE_CODE (arg0) == ADDR_EXPR
7038 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
{
*ptr_offset = arg1;
7041 return TREE_OPERAND (arg0, 0);
}
7043 else if (TREE_CODE (arg1) == ADDR_EXPR
7044 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
{
*ptr_offset = arg0;
7047 return TREE_OPERAND (arg1, 0);
}
}

return 0;
}
7054 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7055 way, because it could contain a zero byte in the middle.
7056 TREE_STRING_LENGTH is the size of the character array, not the string.
7058 Unfortunately, string_constant can't access the values of const char
7059 arrays with initializers, so neither can we do so here. */
static tree
c_strlen (src)
tree src;
{
tree offset_node;
int offset, max;
char *ptr;

7069 src = string_constant (src, &offset_node);
if (src == 0)
return 0;
7072 max = TREE_STRING_LENGTH (src);
7073 ptr = TREE_STRING_POINTER (src);
7074 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
{
7076 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7077 compute the offset to the following null if we don't know where to
7078 start searching for it. */
int i;
7080 for (i = 0; i < max; i++)
if (ptr[i] == 0)
return 0;
7083 /* We don't know the starting offset, but we do know that the string
7084 has no internal zero bytes. We can assume that the offset falls
7085 within the bounds of the string; otherwise, the programmer deserves
7086 what he gets. Subtract the offset from the length of the string,
and return that. */
7088 /* This would perhaps not be valid if we were dealing with named
7089 arrays in addition to literal string constants. */
7090 return size_binop (MINUS_EXPR, size_int (max), offset_node);
}
7093 /* We have a known offset into the string. Start searching there for
7094 a null character. */
7095 if (offset_node == 0)
offset = 0;
else
{
7099 /* Did we get a long long offset? If so, punt. */
7100 if (TREE_INT_CST_HIGH (offset_node) != 0)
return 0;
7102 offset = TREE_INT_CST_LOW (offset_node);
}
7104 /* If the offset is known to be out of bounds, warn, and call strlen at runtime. */
7106 if (offset < 0 || offset > max)
{
7108 warning ("offset outside bounds of constant string");
return 0;
}
7111 /* Use strlen to search for the first zero byte. Since any strings
7112 constructed with build_string will have nulls appended, we win even
7113 if we get handed something like (char[4])"abcd".
7115 Since OFFSET is our starting index into the string, no further
7116 calculation is needed. */
7117 return size_int (strlen (ptr + offset));
}
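/* For instance, c_strlen of "hello" is 5; of "foo\0bar" with a known
   offset of 4 it is 3; and of "foo\0bar" with an unknown offset it is
   0, meaning the length must be found at run time.  */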
7120 /* Expand an expression EXP that calls a built-in function,
7121 with result going to TARGET if that's convenient
7122 (and in mode MODE if that's convenient).
7123 SUBTARGET may be used as the target for computing one of EXP's operands.
7124 IGNORE is nonzero if the value is to be ignored. */
7126 #define CALLED_AS_BUILT_IN(NODE) \
7127 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
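/* So a call spelled `__builtin_memcpy (d, s, n)' is always treated
   as a builtin, while a plain `memcpy (d, s, n)' call is only
   expanded inline when optimizing.  */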
static rtx
7130 expand_builtin (exp, target, subtarget, mode, ignore)
tree exp;
rtx target;
rtx subtarget;
7134 enum machine_mode mode;
int ignore;
{
7137 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7138 tree arglist = TREE_OPERAND (exp, 1);
rtx op0;
rtx lab1, insns;
7141 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7142 optab builtin_optab;
7144 switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_ABS:
case BUILT_IN_LABS:
case BUILT_IN_FABS:
7149 /* build_function_call changes these into ABS_EXPR. */
abort ();

case BUILT_IN_SIN:
case BUILT_IN_COS:
7154 /* Treat these like sqrt, but only if the user asks for them. */
7155 if (! flag_fast_math)
break;
7157 case BUILT_IN_FSQRT:
7158 /* If not optimizing, call the library function. */
if (! optimize)
break;

if (arglist == 0
7163 /* Arg could be wrong type if user redeclared this fcn wrong. */
7164 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
break;
7167 /* Stabilize and compute the argument. */
7168 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7169 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7171 exp = copy_node (exp);
7172 arglist = copy_node (arglist);
7173 TREE_OPERAND (exp, 1) = arglist;
7174 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7176 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7178 /* Make a suitable register to place result in. */
7179 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

emit_queue ();
start_sequence ();
7184 switch (DECL_FUNCTION_CODE (fndecl))
{
case BUILT_IN_SIN:
7187 builtin_optab = sin_optab; break;
case BUILT_IN_COS:
7189 builtin_optab = cos_optab; break;
7190 case BUILT_IN_FSQRT:
7191 builtin_optab = sqrt_optab; break;
default:
abort ();
}
7196 /* Compute into TARGET.
7197 Set TARGET to wherever the result comes back. */
7198 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7199 builtin_optab, op0, target, 0);
7201 /* If we were unable to expand via the builtin, stop the
7202 sequence (without outputting the insns) and break, causing
7203 a call to the library function. */
if (target == 0)
{
end_sequence ();
break;
}
7210 /* Check the results by default. But if flag_fast_math is turned on,
7211 then assume sqrt will always be called with valid arguments. */
7213 if (! flag_fast_math)
{
7215 /* Don't define the builtin FP instructions
7216 if your machine is not IEEE. */
7217 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
abort ();
7220 lab1 = gen_label_rtx ();
7222 /* Test the result; if it is NaN, set errno=EDOM because
7223 the argument was not in the domain. */
7224 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7225 emit_jump_insn (gen_beq (lab1));
#ifdef TARGET_EDOM
{
7229 #ifdef GEN_ERRNO_RTX
7230 rtx errno_rtx = GEN_ERRNO_RTX;
#else
rtx errno_rtx
7233 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
#endif
7236 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
}
#else
7239 /* We can't set errno=EDOM directly; let the library call do it.
7240 Pop the arguments right away in case the call gets deleted. */
NO_DEFER_POP;
7242 expand_call (exp, target, 0);
OK_DEFER_POP;
#endif

emit_label (lab1);
}
7249 /* Output the entire sequence. */
7250 insns = get_insns ();
end_sequence ();
emit_insns (insns);

return target;
7256 /* __builtin_apply_args returns a block of memory allocated on
7257 the stack into which is stored the arg pointer, structure
7258 value address, static chain, and all the registers that might
7259 possibly be used in performing a function call. The code is
7260 moved to the start of the function so the incoming values are saved. */
7262 case BUILT_IN_APPLY_ARGS:
7263 /* Don't do __builtin_apply_args more than once in a function.
7264 Save the result of the first call and reuse it. */
7265 if (apply_args_value != 0)
7266 return apply_args_value;
7268 /* When this function is called, it means that registers must be
7269 saved on entry to this function. So we migrate the
7270 call to the first insn of this function. */
{
rtx seq, temp;

start_sequence ();
7275 temp = expand_builtin_apply_args ();
seq = get_insns ();
end_sequence ();
7279 apply_args_value = temp;
7281 /* Put the sequence after the NOTE that starts the function.
7282 If this is inside a SEQUENCE, make the outer-level insn
7283 chain current, so the code is placed at the start of the function. */
7285 push_topmost_sequence ();
7286 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7287 pop_topmost_sequence ();

return temp;
}
7291 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7292 FUNCTION with a copy of the parameters described by
7293 ARGUMENTS, and ARGSIZE. It returns a block of memory
7294 allocated on the stack into which is stored all the registers
7295 that might possibly be used for returning the result of a
7296 function. ARGUMENTS is the value returned by
7297 __builtin_apply_args. ARGSIZE is the number of bytes of
7298 arguments that must be copied. ??? How should this value be
7299 computed? We'll also need a safe worst case value for varargs functions. */
7301 case BUILT_IN_APPLY:
if (arglist == 0
7303 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7304 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7305 || TREE_CHAIN (arglist) == 0
7306 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7307 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7308 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
return const0_rtx;
else
{
int i;
rtx ops[3];
tree t;

7316 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7317 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

7319 return expand_builtin_apply (ops[0], ops[1], ops[2]);
}
7322 /* __builtin_return (RESULT) causes the function to return the
7323 value described by RESULT. RESULT is address of the block of
7324 memory returned by __builtin_apply. */
7325 case BUILT_IN_RETURN:
if (arglist != 0
7327 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7328 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7329 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7330 NULL_RTX, VOIDmode, 0));
break;
7333 case BUILT_IN_SAVEREGS:
7334 /* Don't do __builtin_saveregs more than once in a function.
7335 Save the result of the first call and reuse it. */
7336 if (saveregs_value != 0)
7337 return saveregs_value;
7339 /* When this function is called, it means that registers must be
7340 saved on entry to this function. So we migrate the
7341 call to the first insn of this function. */
{
rtx temp, seq;

7345 /* Now really call the function. `expand_call' does not call
7346 expand_builtin, so there is no danger of infinite recursion here. */
start_sequence ();
7349 #ifdef EXPAND_BUILTIN_SAVEREGS
7350 /* Do whatever the machine needs done in this case. */
7351 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
#else
7353 /* The register where the function returns its value
7354 is likely to have something else in it, such as an argument.
7355 So preserve that register around the call. */
7357 if (value_mode != VOIDmode)
{
7359 rtx valreg = hard_libcall_value (value_mode);
7360 rtx saved_valreg = gen_reg_rtx (value_mode);
7362 emit_move_insn (saved_valreg, valreg);
7363 temp = expand_call (exp, target, ignore);
7364 emit_move_insn (valreg, saved_valreg);
}
else
7367 /* Generate the call, putting the value in a pseudo. */
7368 temp = expand_call (exp, target, ignore);
#endif

seq = get_insns ();
end_sequence ();

7374 saveregs_value = temp;
7376 /* Put the sequence after the NOTE that starts the function.
7377 If this is inside a SEQUENCE, make the outer-level insn
7378 chain current, so the code is placed at the start of the function. */
7380 push_topmost_sequence ();
7381 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7382 pop_topmost_sequence ();

return temp;
}
7386 /* __builtin_args_info (N) returns word N of the arg space info
7387 for the current function. The number and meanings of words
7388 is controlled by the definition of CUMULATIVE_ARGS. */
7389 case BUILT_IN_ARGS_INFO:
7391 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
7393 int *word_ptr = (int *) &current_function_args_info;
7394 tree type, elts, result;
7396 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
7397 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
7398 __FILE__, __LINE__);
if (arglist != 0)
{
7402 tree arg = TREE_VALUE (arglist);
7403 if (TREE_CODE (arg) != INTEGER_CST)
7404 error ("argument of `__builtin_args_info' must be constant");
else
{
7407 int wordnum = TREE_INT_CST_LOW (arg);
7409 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
7410 error ("argument of `__builtin_args_info' out of range");
else
7412 return GEN_INT (word_ptr[wordnum]);
}
}
else
7416 error ("missing argument in `__builtin_args_info'");

return const0_rtx;
7421 for (i = 0; i < nwords; i++)
7422 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0), elts);
7424 type = build_array_type (integer_type_node,
7425 build_index_type (build_int_2 (nwords, 0)));
7426 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
7427 TREE_CONSTANT (result) = 1;
7428 TREE_STATIC (result) = 1;
7429 result = build (INDIRECT_REF, build_pointer_type (type), result);
7430 TREE_CONSTANT (result) = 1;
7431 return expand_expr (result, NULL_RTX, VOIDmode, 0);
7435 /* Return the address of the first anonymous stack arg. */
7436 case BUILT_IN_NEXT_ARG:
7438 tree fntype = TREE_TYPE (current_function_decl);
7440 if ((TYPE_ARG_TYPES (fntype) == 0
7441 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
== void_type_node))
7443 && ! current_function_varargs)
{
7445 error ("`va_start' used in function with fixed args");
return const0_rtx;
}

if (arglist)
{
7451 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
7452 tree arg = TREE_VALUE (arglist);
7454 /* Strip off all nops for the sake of the comparison. This
7455 is not quite the same as STRIP_NOPS. It does more. */
7456 while (TREE_CODE (arg) == NOP_EXPR
7457 || TREE_CODE (arg) == CONVERT_EXPR
7458 || TREE_CODE (arg) == NON_LVALUE_EXPR)
7459 arg = TREE_OPERAND (arg, 0);
7460 if (arg != last_parm)
7461 warning ("second parameter of `va_start' not last named argument");
}
7463 else if (! current_function_varargs)
7464 /* Evidently an out of date version of <stdarg.h>; can't validate
7465 va_start's second argument, but can still work as intended. */
7466 warning ("`__builtin_next_arg' called without an argument");
7469 return expand_binop (Pmode, add_optab,
7470 current_function_internal_arg_pointer,
7471 current_function_arg_offset_rtx,
7472 NULL_RTX, 0, OPTAB_LIB_WIDEN);
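/* In practice <stdarg.h> implements `va_start (ap, last)' in terms
   of `__builtin_next_arg', so the address computed above is that of
   the first anonymous argument, just past the last named one.  */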
7474 case BUILT_IN_CLASSIFY_TYPE:
if (arglist != 0)
{
7477 tree type = TREE_TYPE (TREE_VALUE (arglist));
7478 enum tree_code code = TREE_CODE (type);
7479 if (code == VOID_TYPE)
7480 return GEN_INT (void_type_class);
7481 if (code == INTEGER_TYPE)
7482 return GEN_INT (integer_type_class);
7483 if (code == CHAR_TYPE)
7484 return GEN_INT (char_type_class);
7485 if (code == ENUMERAL_TYPE)
7486 return GEN_INT (enumeral_type_class);
7487 if (code == BOOLEAN_TYPE)
7488 return GEN_INT (boolean_type_class);
7489 if (code == POINTER_TYPE)
7490 return GEN_INT (pointer_type_class);
7491 if (code == REFERENCE_TYPE)
7492 return GEN_INT (reference_type_class);
7493 if (code == OFFSET_TYPE)
7494 return GEN_INT (offset_type_class);
7495 if (code == REAL_TYPE)
7496 return GEN_INT (real_type_class);
7497 if (code == COMPLEX_TYPE)
7498 return GEN_INT (complex_type_class);
7499 if (code == FUNCTION_TYPE)
7500 return GEN_INT (function_type_class);
7501 if (code == METHOD_TYPE)
7502 return GEN_INT (method_type_class);
7503 if (code == RECORD_TYPE)
7504 return GEN_INT (record_type_class);
7505 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
7506 return GEN_INT (union_type_class);
7507 if (code == ARRAY_TYPE)
7509 if (TYPE_STRING_FLAG (type))
7510 return GEN_INT (string_type_class);
7512 return GEN_INT (array_type_class);
7514 if (code == SET_TYPE)
7515 return GEN_INT (set_type_class);
7516 if (code == FILE_TYPE)
7517 return GEN_INT (file_type_class);
7518 if (code == LANG_TYPE)
7519 return GEN_INT (lang_type_class);
}
7521 return GEN_INT (no_type_class);
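/* For example, `__builtin_classify_type (1.5)' yields
   real_type_class, and an array argument is classified as a pointer
   after the usual argument decay.  */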
7523 case BUILT_IN_CONSTANT_P:
if (arglist == 0)
return const0_rtx;
else
{
7528 tree arg = TREE_VALUE (arglist);
7531 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
7532 || (TREE_CODE (arg) == ADDR_EXPR
7533 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7534 ? const1_rtx : const0_rtx);
}
7537 case BUILT_IN_FRAME_ADDRESS:
7538 /* The argument must be a nonnegative integer constant.
7539 It counts the number of frames to scan up the stack.
7540 The value is the address of that frame. */
7541 case BUILT_IN_RETURN_ADDRESS:
7542 /* The argument must be a nonnegative integer constant.
7543 It counts the number of frames to scan up the stack.
7544 The value is the return address saved in that frame. */
if (arglist == 0)
7546 /* Warning about missing arg was already issued. */
return const0_rtx;
7548 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
{
7550 error ("invalid arg to `__builtin_return_address'");
return const0_rtx;
}
7553 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
{
7555 error ("invalid arg to `__builtin_return_address'");
return const0_rtx;
}
else
{
7560 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7561 rtx tem = frame_pointer_rtx;
7564 /* Some machines need special handling before we can access arbitrary
7565 frames. For example, on the sparc, we must first flush all
7566 register windows to the stack. */
7567 #ifdef SETUP_FRAME_ADDRESSES
7568 SETUP_FRAME_ADDRESSES ();
7571 /* On the sparc, the return address is not in the frame, it is
7572 in a register. There is no way to access it off of the current
7573 frame pointer, but it can be accessed off the previous frame
7574 pointer by reading the value from the register window save area. */
7576 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7577 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7581 /* Scan back COUNT frames to the specified frame. */
7582 for (i = 0; i < count; i++)
7584 /* Assume the dynamic chain pointer is in the word that
7585 the frame address points to, unless otherwise specified. */
7586 #ifdef DYNAMIC_CHAIN_ADDRESS
7587 tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
7589 tem = memory_address (Pmode, tem);
7590 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7593 /* For __builtin_frame_address, return what we've got. */
7594 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
return tem;
7597 /* For __builtin_return_address,
7598 Get the return address from that frame. */
7599 #ifdef RETURN_ADDR_RTX
7600 return RETURN_ADDR_RTX (count, tem);
#else
7602 tem = memory_address (Pmode,
7603 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7604 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
#endif
}
7608 case BUILT_IN_ALLOCA:
if (arglist == 0
7610 /* Arg could be non-integer if user redeclared this fcn wrong. */
7611 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
break;
7614 /* Compute the argument. */
7615 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7617 /* Allocate the desired space. */
7618 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
case BUILT_IN_FFS:
7621 /* If not optimizing, call the library function. */
7622 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7626 /* Arg could be non-integer if user redeclared this fcn wrong. */
7627 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
break;
7630 /* Compute the argument. */
7631 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7632 /* Compute ffs, into TARGET if possible.
7633 Set TARGET to wherever the result comes back. */
7634 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7635 ffs_optab, op0, target, 1);
if (target == 0)
abort ();
return target;
7640 case BUILT_IN_STRLEN:
7641 /* If not optimizing, call the library function. */
7642 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7646 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7647 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
break;
else
{
7651 tree src = TREE_VALUE (arglist);
7652 tree len = c_strlen (src);
7655 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7657 rtx result, src_rtx, char_rtx;
7658 enum machine_mode insn_mode = value_mode, char_mode;
7659 enum insn_code icode;
7661 /* If the length is known, just return it. */
if (len)
7663 return expand_expr (len, target, mode, 0);
7665 /* If SRC is not a pointer type, don't do this operation inline. */
if (align == 0)
break;
7669 /* Call a function if we can't compute strlen in the right mode. */
7671 while (insn_mode != VOIDmode)
7673 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7674 if (icode != CODE_FOR_nothing)
7677 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7679 if (insn_mode == VOIDmode)
break;
7682 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
7685 && GET_CODE (result) == REG
7686 && GET_MODE (result) == insn_mode
7687 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7688 result = gen_reg_rtx (insn_mode);
7690 /* Make sure the operands are acceptable to the predicates. */
7692 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7693 result = gen_reg_rtx (insn_mode);
7695 src_rtx = memory_address (BLKmode,
7696 expand_expr (src, NULL_RTX, ptr_mode,
EXPAND_NORMAL));
7698 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7699 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7701 char_rtx = const0_rtx;
7702 char_mode = insn_operand_mode[(int)icode][2];
7703 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7704 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7706 emit_insn (GEN_FCN (icode) (result,
7707 gen_rtx (MEM, BLKmode, src_rtx),
7708 char_rtx, GEN_INT (align)));
7710 /* Return the value in the proper mode for this function. */
7711 if (GET_MODE (result) == value_mode)
return result;
7713 else if (target != 0)
{
7715 convert_move (target, result, 0);
return target;
}
else
7719 return convert_to_mode (value_mode, result, 0);
7722 case BUILT_IN_STRCPY:
7723 /* If not optimizing, call the library function. */
7724 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7728 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7729 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7730 || TREE_CHAIN (arglist) == 0
7731 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;

{
7735 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

if (len == 0)
break;

7740 len = size_binop (PLUS_EXPR, len, integer_one_node);

7742 chainon (arglist, build_tree_list (NULL_TREE, len));
}

/* Drops in. */
7746 case BUILT_IN_MEMCPY:
7747 /* If not optimizing, call the library function. */
7748 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7752 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7753 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7754 || TREE_CHAIN (arglist) == 0
7755 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7756 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7757 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;

{
7761 tree dest = TREE_VALUE (arglist);
7762 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7763 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7767 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7769 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7770 rtx dest_rtx, dest_mem, src_mem;
7772 /* If either SRC or DEST is not a pointer type, don't do
7773 this operation in-line. */
7774 if (src_align == 0 || dest_align == 0)
7776 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7777 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7781 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
7782 dest_mem = gen_rtx (MEM, BLKmode,
7783 memory_address (BLKmode, dest_rtx));
7784 /* There could be a void* cast on top of the object. */
7785 if (TREE_CODE (dest) == NOP_EXPR)
7786 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (dest, 0)));
7788 type = TREE_TYPE (TREE_TYPE (dest));
7789 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
7790 src_mem = gen_rtx (MEM, BLKmode,
7791 memory_address (BLKmode,
7792 expand_expr (src, NULL_RTX,
7795 /* There could be a void* cast on top of the object. */
7796 if (TREE_CODE (src) == NOP_EXPR)
7797 type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (src, 0)));
7799 type = TREE_TYPE (TREE_TYPE (src));
7800 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
7802 /* Copy word part most expediently. */
7803 emit_block_move (dest_mem, src_mem,
7804 expand_expr (len, NULL_RTX, VOIDmode, 0),
7805 MIN (src_align, dest_align));
7806 return force_operand (dest_rtx, NULL_RTX);
}
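/* So `__builtin_memcpy (d, s, 16)' with both pointer alignments
   known becomes a single block move, and the value of the call is D,
   matching memcpy's return value.  */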
7809 /* These comparison functions need an instruction that returns an actual
7810 index. An ordinary compare that just sets the condition codes is not enough. */
7812 #ifdef HAVE_cmpstrsi
7813 case BUILT_IN_STRCMP:
7814 /* If not optimizing, call the library function. */
7815 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7819 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7820 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7821 || TREE_CHAIN (arglist) == 0
7822 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
break;
7824 else if (!HAVE_cmpstrsi)
break;
else
{
7827 tree arg1 = TREE_VALUE (arglist);
7828 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
tree len, len2;

7832 len = c_strlen (arg1);
if (len)
7834 len = size_binop (PLUS_EXPR, integer_one_node, len);
7835 len2 = c_strlen (arg2);
if (len2)
7837 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7839 /* If we don't have a constant length for the first, use the length
7840 of the second, if we know it. We don't require a constant for
7841 this case; some cost analysis could be done if both are available
7842 but neither is constant. For now, assume they're equally cheap.
7844 If both strings have constant lengths, use the smaller. This
7845 could arise if optimization results in strcpy being called with
7846 two fixed strings, or if the code was machine-generated. We should
7847 add some code to the `memcmp' handler below to deal with such
7848 situations, someday. */
7849 if (!len || TREE_CODE (len) != INTEGER_CST)
{
len = len2;
if (len == 0)
break;
}
7856 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
{
7858 if (tree_int_cst_lt (len2, len))
len = len2;
}
7862 chainon (arglist, build_tree_list (NULL_TREE, len));
}
break;
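/* E.g. for `strcmp ("abc", s)' the length used is 4, counting the
   terminating null, so the cmpstrsi comparison never reads past the
   constant string; with two constant strings the shorter length
   wins.  */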
7866 case BUILT_IN_MEMCMP:
7867 /* If not optimizing, call the library function. */
7868 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
break;

if (arglist == 0
7872 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7873 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7874 || TREE_CHAIN (arglist) == 0
7875 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7876 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7877 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
break;
7879 else if (!HAVE_cmpstrsi)
break;
else
{
7882 tree arg1 = TREE_VALUE (arglist);
7883 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7884 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7888 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7890 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7891 enum machine_mode insn_mode
7892 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
7894 /* If we don't have POINTER_TYPE, call the function. */
7895 if (arg1_align == 0 || arg2_align == 0)
7897 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7898 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7902 /* Make a place to write the result of the instruction. */
result = target;
if (! (result != 0
7905 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7906 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7907 result = gen_reg_rtx (insn_mode);
7909 emit_insn (gen_cmpstrsi (result,
7910 gen_rtx (MEM, BLKmode,
7911 expand_expr (arg1, NULL_RTX,
7914 gen_rtx (MEM, BLKmode,
7915 expand_expr (arg2, NULL_RTX,
7918 expand_expr (len, NULL_RTX, VOIDmode, 0),
7919 GEN_INT (MIN (arg1_align, arg2_align))));
7921 /* Return the value in the proper mode for this function. */
7922 mode = TYPE_MODE (TREE_TYPE (exp));
7923 if (GET_MODE (result) == mode)
return result;
7925 else if (target != 0)
{
7927 convert_move (target, result, 0);
return target;
}
else
7931 return convert_to_mode (mode, result, 0);
}
break;
#else
7934 case BUILT_IN_STRCMP:
7935 case BUILT_IN_MEMCMP:
break;
#endif
7939 default: /* just do library call, if unknown builtin */
7940 error ("built-in function `%s' not currently supported",
7941 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
break;
}
7944 /* The switch statement above can drop through to cause the function
7945 to be called normally. */
7947 return expand_call (exp, target, ignore);
}
7950 /* Built-in functions to perform an untyped call and return. */
7952 /* For each register that may be used for calling a function, this
7953 gives a mode used to copy the register's value. VOIDmode indicates
7954 the register is not used for calling a function. If the machine
7955 has register windows, this gives only the outbound registers.
7956 INCOMING_REGNO gives the corresponding inbound register. */
7957 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
7959 /* For each register that may be used for returning values, this gives
7960 a mode used to copy the register's value. VOIDmode indicates the
7961 register is not used for returning values. If the machine has
7962 register windows, this gives only the outbound registers.
7963 INCOMING_REGNO gives the corresponding inbound register. */
7964 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
7966 /* For each register that may be used for calling a function, this
7967 gives the offset of that register into the block returned by
7968 __builtin_apply_args. 0 indicates that the register is not
7969 used for calling a function. */
7970 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
7972 /* Return the offset of register REGNO into the block returned by
7973 __builtin_apply_args. This is not declared static, since it is
7974 needed in objc-act.c. */
int
7977 apply_args_register_offset (regno)
int regno;
{
7982 /* Arguments are always put in outgoing registers (in the argument
7983 block) when that makes sense. */
7984 #ifdef OUTGOING_REGNO
7985 regno = OUTGOING_REGNO(regno);
#endif
7987 return apply_args_reg_offset[regno];
}
7990 /* Return the size required for the block returned by __builtin_apply_args,
7991 and initialize apply_args_mode. */
static int
apply_args_size ()
{
7996 static int size = -1;
int align, regno;
7998 enum machine_mode mode;
8000 /* The values computed by this function never change. */
if (size < 0)
{
8003 /* The first value is the incoming arg-pointer. */
8004 size = GET_MODE_SIZE (Pmode);
8006 /* The second value is the structure value address unless this is
8007 passed as an "invisible" first argument. */
8008 if (struct_value_rtx)
8009 size += GET_MODE_SIZE (Pmode);
8011 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8012 if (FUNCTION_ARG_REGNO_P (regno))
{
8014 /* Search for the proper mode for copying this register's
8015 value. I'm not sure this is right, but it works so far. */
8016 enum machine_mode best_mode = VOIDmode;
8018 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
8020 mode = GET_MODE_WIDER_MODE (mode))
8021 if (HARD_REGNO_MODE_OK (regno, mode)
8022 && HARD_REGNO_NREGS (regno, mode) == 1)
best_mode = mode;
8025 if (best_mode == VOIDmode)
8026 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
mode != VOIDmode;
8028 mode = GET_MODE_WIDER_MODE (mode))
8029 if (HARD_REGNO_MODE_OK (regno, mode)
8030 && (mov_optab->handlers[(int) mode].insn_code
8031 != CODE_FOR_nothing))
best_mode = mode;

mode = best_mode;
8035 if (mode == VOIDmode)
abort ();
8038 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8039 if (size % align != 0)
8040 size = CEIL (size, align) * align;
8041 apply_args_reg_offset[regno] = size;
8042 size += GET_MODE_SIZE (mode);
8043 apply_args_mode[regno] = mode;
else
{
8047 apply_args_mode[regno] = VOIDmode;
8048 apply_args_reg_offset[regno] = 0;
}
}

return size;
}
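/* Example of the size rounding used above: if SIZE is 10 and the
   register's mode wants 4-byte alignment, CEIL (10, 4) * 4 == 12,
   so that register is saved at offset 12 and SIZE then advances by
   the mode's size.  */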
8054 /* Return the size required for the block returned by __builtin_apply,
8055 and initialize apply_result_mode. */
static int
8058 apply_result_size ()
{
8060 static int size = -1;
int align, regno;
8062 enum machine_mode mode;
8064 /* The values computed by this function never change. */
if (size < 0)
{
size = 0;
8069 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8070 if (FUNCTION_VALUE_REGNO_P (regno))
{
8072 /* Search for the proper mode for copying this register's
8073 value. I'm not sure this is right, but it works so far. */
8074 enum machine_mode best_mode = VOIDmode;
8076 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
mode != VOIDmode;
8078 mode = GET_MODE_WIDER_MODE (mode))
8079 if (HARD_REGNO_MODE_OK (regno, mode))
best_mode = mode;
8082 if (best_mode == VOIDmode)
8083 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
mode != VOIDmode;
8085 mode = GET_MODE_WIDER_MODE (mode))
8086 if (HARD_REGNO_MODE_OK (regno, mode)
8087 && (mov_optab->handlers[(int) mode].insn_code
8088 != CODE_FOR_nothing))
best_mode = mode;

mode = best_mode;
8092 if (mode == VOIDmode)
abort ();
8095 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8096 if (size % align != 0)
8097 size = CEIL (size, align) * align;
8098 size += GET_MODE_SIZE (mode);
8099 apply_result_mode[regno] = mode;
}
else
8102 apply_result_mode[regno] = VOIDmode;
8104 /* Allow targets that use untyped_call and untyped_return to override
8105 the size so that machine-specific information can be stored here. */
8106 #ifdef APPLY_RESULT_SIZE
8107 size = APPLY_RESULT_SIZE;
#endif
}

return size;
}
8113 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
8114 /* Create a vector describing the result block RESULT. If SAVEP is true,
8115 the result block is used to save the values; otherwise it is used to
8116 restore the values. */
static rtx
8119 result_vector (savep, result)
int savep;
rtx result;
{
8123 int regno, size, align, nelts;
8124 enum machine_mode mode;
rtx reg, mem;
8126 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

size = nelts = 0;
8129 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8130 if ((mode = apply_result_mode[regno]) != VOIDmode)
8132 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8133 if (size % align != 0)
8134 size = CEIL (size, align) * align;
8135 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
8136 mem = change_address (result, mode,
8137 plus_constant (XEXP (result, 0), size));
8138 savevec[nelts++] = (savep
8139 ? gen_rtx (SET, VOIDmode, mem, reg)
8140 : gen_rtx (SET, VOIDmode, reg, mem));
8141 size += GET_MODE_SIZE (mode);
8143 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
}
8145 #endif /* HAVE_untyped_call or HAVE_untyped_return */
8147 /* Save the state required to perform an untyped call with the same
8148 arguments as were passed to the current function. */
static rtx
8151 expand_builtin_apply_args ()
{
rtx registers, tem;
8154 int size, align, regno;
8155 enum machine_mode mode;
8157 /* Create a block where the arg-pointer, structure value address,
8158 and argument registers can be saved. */
8159 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
8161 /* Walk past the arg-pointer and structure value address. */
8162 size = GET_MODE_SIZE (Pmode);
8163 if (struct_value_rtx)
8164 size += GET_MODE_SIZE (Pmode);
8166 /* Save each register used in calling a function to the block. */
8167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8168 if ((mode = apply_args_mode[regno]) != VOIDmode)
8172 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8173 if (size % align != 0)
8174 size = CEIL (size, align) * align;
8176 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8179 /* For reg-stack.c's stack register household.
8180 Compare with a similar piece of code in function.c. */
8182 emit_insn (gen_rtx (USE, mode, tem));
8185 emit_move_insn (change_address (registers, mode,
8186 plus_constant (XEXP (registers, 0),
size)),
tem);
8189 size += GET_MODE_SIZE (mode);
8192 /* Save the arg pointer to the block. */
8193 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
8194 copy_to_reg (virtual_incoming_args_rtx));
8195 size = GET_MODE_SIZE (Pmode);
8197 /* Save the structure value address unless this is passed as an
8198 "invisible" first argument. */
8199 if (struct_value_incoming_rtx)
8201 emit_move_insn (change_address (registers, Pmode,
8202 plus_constant (XEXP (registers, 0),
size)),
8204 copy_to_reg (struct_value_incoming_rtx));
8205 size += GET_MODE_SIZE (Pmode);
8208 /* Return the address of the block. */
8209 return copy_addr_to_reg (XEXP (registers, 0));
}
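/* The block built above is laid out as: the incoming arg pointer,
   then the structure value address if any, then one aligned save
   slot per argument register, exactly mirroring apply_args_size.  */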
8212 /* Perform an untyped call and save the state required to perform an
8213 untyped return of whatever value was returned by the given function. */
static rtx
8216 expand_builtin_apply (function, arguments, argsize)
8217 rtx function, arguments, argsize;
{
8219 int size, align, regno;
8220 enum machine_mode mode;
8221 rtx incoming_args, result, reg, dest, call_insn;
8222 rtx old_stack_level = 0;
8223 rtx call_fusage = 0;
8225 /* Create a block where the return registers can be saved. */
8226 result = assign_stack_local (BLKmode, apply_result_size (), -1);
8228 /* ??? The argsize value should be adjusted here. */
8230 /* Fetch the arg pointer from the ARGUMENTS block. */
8231 incoming_args = gen_reg_rtx (Pmode);
8232 emit_move_insn (incoming_args,
8233 gen_rtx (MEM, Pmode, arguments));
8234 #ifndef STACK_GROWS_DOWNWARD
8235 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
8236 incoming_args, 0, OPTAB_LIB_WIDEN);
8239 /* Perform postincrements before actually calling the function. */
emit_queue ();
8242 /* Push a new argument block and copy the arguments. */
8243 do_pending_stack_adjust ();
8244 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
8246 /* Push a block of memory onto the stack to store the memory arguments.
8247 Save the address in a register, and copy the memory arguments. ??? I
8248 haven't figured out how the calling convention macros affect this,
8249 but it's likely that the source and/or destination addresses in
8250 the block copy will need updating in machine specific ways. */
8251 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
8252 emit_block_move (gen_rtx (MEM, BLKmode, dest),
8253 gen_rtx (MEM, BLKmode, incoming_args),
8255 PARM_BOUNDARY / BITS_PER_UNIT);
8257 /* Refer to the argument block. */
8259 arguments = gen_rtx (MEM, BLKmode, arguments);
8261 /* Walk past the arg-pointer and structure value address. */
8262 size = GET_MODE_SIZE (Pmode);
8263 if (struct_value_rtx)
8264 size += GET_MODE_SIZE (Pmode);
8266 /* Restore each of the registers previously saved. Make USE insns
8267 for each of these registers for use in making the call. */
8268 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8269 if ((mode = apply_args_mode[regno]) != VOIDmode)
8271 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8272 if (size % align != 0)
8273 size = CEIL (size, align) * align;
8274 reg = gen_rtx (REG, mode, regno);
8275 emit_move_insn (reg,
8276 change_address (arguments, mode,
8277 plus_constant (XEXP (arguments, 0),
8280 use_reg (&call_fusage, reg);
8281 size += GET_MODE_SIZE (mode);
8284 /* Restore the structure value address unless this is passed as an
8285 "invisible" first argument. */
8286 size = GET_MODE_SIZE (Pmode);
8287 if (struct_value_rtx)
8289 rtx value = gen_reg_rtx (Pmode);
8290 emit_move_insn (value,
8291 change_address (arguments, Pmode,
8292 plus_constant (XEXP (arguments, 0),
8294 emit_move_insn (struct_value_rtx, value);
8295 if (GET_CODE (struct_value_rtx) == REG)
8296 use_reg (&call_fusage, struct_value_rtx);
8297 size += GET_MODE_SIZE (Pmode);
8300 /* All arguments and registers used for the call are set up by now! */
8301 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
8303 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
8304 and we don't want to load it into a register as an optimization,
8305 because prepare_call_address already did it if it should be done. */
8306 if (GET_CODE (function) != SYMBOL_REF)
8307 function = memory_address (FUNCTION_MODE, function);
8309 /* Generate the actual call instruction and save the return value. */
8310 #ifdef HAVE_untyped_call
8311 if (HAVE_untyped_call)
8312 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
8313 result, result_vector (1, result)));
8316 #ifdef HAVE_call_value
8317 if (HAVE_call_value)
8321 /* Locate the unique return register. It is not possible to
8322 express a call that sets more than one return register using
8323 call_value; use untyped_call for that. In fact, untyped_call
8324 only needs to save the return registers in the given block. */
8325 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8326 if ((mode = apply_result_mode[regno]) != VOIDmode)
8329 abort (); /* HAVE_untyped_call required. */
8330 valreg = gen_rtx (REG, mode, regno);
8333 emit_call_insn (gen_call_value (valreg,
8334 gen_rtx (MEM, FUNCTION_MODE, function),
8335 const0_rtx, NULL_RTX, const0_rtx));
8337 emit_move_insn (change_address (result, GET_MODE (valreg),
8345 /* Find the CALL insn we just emitted. */
8346 for (call_insn = get_last_insn ();
8347 call_insn && GET_CODE (call_insn) != CALL_INSN;
8348 call_insn = PREV_INSN (call_insn))
8354 /* Put the register usage information on the CALL. If there is already
8355 some usage information, put ours at the end. */
8356 if (CALL_INSN_FUNCTION_USAGE (call_insn))
8360 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
8361 link = XEXP (link, 1))
8364 XEXP (link, 1) = call_fusage;
8367 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
8369 /* Restore the stack. */
8370 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
8372 /* Return the address of the result block. */
8373 return copy_addr_to_reg (XEXP (result, 0));
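
/* For reference, the source-level construct served by this function is
   GCC's __builtin_apply family.  A hedged sketch of a forwarding
   wrapper; the names target_function and forwarding_wrapper and the
   argument-block size of 64 are illustrative assumptions only:  */
#if 0
extern double target_function ();

double
forwarding_wrapper ()
{
  /* Capture this function's incoming arguments, re-issue the call
     with those same arguments, then return whatever came back.  */
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_function, args, 64);
  __builtin_return (result);
}
#endif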
8376 /* Perform an untyped return. */
8379 expand_builtin_return (result)
8382 int size, align, regno;
8383 enum machine_mode mode;
8385 rtx call_fusage = 0;
8387 apply_result_size ();
8388 result = gen_rtx (MEM, BLKmode, result);
8390 #ifdef HAVE_untyped_return
8391 if (HAVE_untyped_return)
8393 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
8399 /* Restore the return value and note that each value is used. */
8401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8402 if ((mode = apply_result_mode[regno]) != VOIDmode)
8404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
8405 if (size % align != 0)
8406 size = CEIL (size, align) * align;
8407 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
8408 emit_move_insn (reg,
8409 change_address (result, mode,
8410 plus_constant (XEXP (result, 0),
8413 push_to_sequence (call_fusage);
8414 emit_insn (gen_rtx (USE, VOIDmode, reg));
8415 call_fusage = get_insns ();
8417 size += GET_MODE_SIZE (mode);
8420 /* Put the USE insns before the return. */
8421 emit_insns (call_fusage);
8423 /* Return whatever value was restored by jumping directly to the end of the function. */
8425 expand_null_return ();
8428 /* Expand code for a post- or pre-increment or decrement
8429 and return the RTX for the result.
8430 POST is 1 for postincrement/decrement and 0 for preincrement/decrement. */
8433 expand_increment (exp, post)
8437 register rtx op0, op1;
8438 register rtx temp, value;
8439 register tree incremented = TREE_OPERAND (exp, 0);
8440 optab this_optab = add_optab;
8442 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8443 int op0_is_copy = 0;
8444 int single_insn = 0;
8445 /* 1 means we can't store into OP0 directly,
8446 because it is a subreg narrower than a word,
8447 and we don't dare clobber the rest of the word. */
8450 if (output_bytecode)
8452 bc_expand_expr (exp);
8456 /* Stabilize any component ref that might need to be
8457 evaluated more than once below. */
8459 || TREE_CODE (incremented) == BIT_FIELD_REF
8460 || (TREE_CODE (incremented) == COMPONENT_REF
8461 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8462 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8463 incremented = stabilize_reference (incremented);
8464 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8465 ones into save exprs so that they don't accidentally get evaluated
8466 more than once by the code below. */
8467 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8468 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8469 incremented = save_expr (incremented);
8471 /* Compute the operands as RTX.
8472 Note whether OP0 is the actual lvalue or a copy of it:
8473 I believe it is a copy iff it is a register or subreg
8474 and insns were generated in computing it. */
8476 temp = get_last_insn ();
8477 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
8479 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8480 in place but instead must do sign- or zero-extension during assignment,
8481 so we copy it into a new register and let the code below use it as
8484 Note that we can safely modify this SUBREG since it is known not to be
8485 shared (it was made by the expand_expr call above). */
8487 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8490 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8494 else if (GET_CODE (op0) == SUBREG
8495 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8497 /* We cannot increment this SUBREG in place. If we are
8498 post-incrementing, get a copy of the old value. Otherwise,
8499 just mark that we cannot increment in place. */
8501 op0 = copy_to_reg (op0);
8506 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8507 && temp != get_last_insn ());
8508 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8510 /* Decide whether incrementing or decrementing. */
8511 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8512 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8513 this_optab = sub_optab;
8515 /* Convert decrement by a constant into a negative increment. */
8516 if (this_optab == sub_optab
8517 && GET_CODE (op1) == CONST_INT)
8519 op1 = GEN_INT (- INTVAL (op1));
8520 this_optab = add_optab;
8523 /* For a preincrement, see if we can do this with a single instruction. */
8526 icode = (int) this_optab->handlers[(int) mode].insn_code;
8527 if (icode != (int) CODE_FOR_nothing
8528 /* Make sure that OP0 is valid for operands 0 and 1
8529 of the insn we want to queue. */
8530 && (*insn_operand_predicate[icode][0]) (op0, mode)
8531 && (*insn_operand_predicate[icode][1]) (op0, mode)
8532 && (*insn_operand_predicate[icode][2]) (op1, mode))
8536 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8537 then we cannot just increment OP0. We must therefore contrive to
8538 increment the original value. Then, for postincrement, we can return
8539 OP0 since it is a copy of the old value. For preincrement, expand here
8540 unless we can do it with a single insn.
8542 Likewise if storing directly into OP0 would clobber high bits
8543 we need to preserve (bad_subreg). */
8544 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
8546 /* This is the easiest way to increment the value wherever it is.
8547 Problems with multiple evaluation of INCREMENTED are prevented
8548 because either (1) it is a component_ref or preincrement,
8549 in which case it was stabilized above, or (2) it is an array_ref
8550 with constant index in an array in a register, which is
8551 safe to reevaluate. */
8552 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8553 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8554 ? MINUS_EXPR : PLUS_EXPR),
8557 TREE_OPERAND (exp, 1));
8559 while (TREE_CODE (incremented) == NOP_EXPR
8560 || TREE_CODE (incremented) == CONVERT_EXPR)
8562 newexp = convert (TREE_TYPE (incremented), newexp);
8563 incremented = TREE_OPERAND (incremented, 0);
8566 temp = expand_assignment (incremented, newexp, ! post, 0);
8567 return post ? op0 : temp;
8572 /* We have a true reference to the value in OP0.
8573 If there is an insn to add or subtract in this mode, queue it.
8574 Queueing the increment insn avoids the register shuffling
8575 that often results if we must increment now and first save
8576 the old value for subsequent use. */
8578 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
8579 op0 = stabilize (op0);
8582 icode = (int) this_optab->handlers[(int) mode].insn_code;
8583 if (icode != (int) CODE_FOR_nothing
8584 /* Make sure that OP0 is valid for operands 0 and 1
8585 of the insn we want to queue. */
8586 && (*insn_operand_predicate[icode][0]) (op0, mode)
8587 && (*insn_operand_predicate[icode][1]) (op0, mode))
8589 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8590 op1 = force_reg (mode, op1);
8592 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8596 /* Preincrement, or we can't increment with one simple insn. */
8598 /* Save a copy of the value before inc or dec, to return it later. */
8599 temp = value = copy_to_reg (op0);
8601 /* Arrange to return the incremented value. */
8602 /* Copy the rtx because expand_binop will protect from the queue,
8603 and the results of that would be invalid for us to return
8604 if our caller does emit_queue before using our result. */
8605 temp = copy_rtx (value = op0);
8607 /* Increment however we can. */
8608 op1 = expand_binop (mode, this_optab, value, op1, op0,
8609 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8610 /* Make sure the value is stored into OP0. */
8612 emit_move_insn (op0, op1);
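
/* A minimal source-level model of the semantics arranged above,
   illustrative only: for POST nonzero the old value is the result of
   the expression, otherwise the new value is.  */
#if 0
static int
increment_model (valp, inc, post)
     int *valp, inc, post;
{
  int old = *valp;
  *valp = old + inc;		/* decrements arrive as negative INC */
  return post ? old : *valp;
}
#endif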
8617 /* Expand all function calls contained within EXP, innermost ones first.
8618 But don't look within expressions that have sequence points.
8619 For each CALL_EXPR, record the rtx for its value
8620 in the CALL_EXPR_RTL field. */
8623 preexpand_calls (exp)
8626 register int nops, i;
8627 int type = TREE_CODE_CLASS (TREE_CODE (exp));
8629 if (! do_preexpand_calls)
8632 /* Only expressions and references can contain calls. */
8634 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8637 switch (TREE_CODE (exp))
8640 /* Do nothing if already expanded. */
8641 if (CALL_EXPR_RTL (exp) != 0)
8644 /* Do nothing to built-in functions. */
8645 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8646 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8647 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8648 /* Do nothing if the call returns a variable-sized object. */
8649 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
8650 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8655 case TRUTH_ANDIF_EXPR:
8656 case TRUTH_ORIF_EXPR:
8657 /* If we find one of these, then we can be sure
8658 the adjust will be done for it (since it makes jumps).
8659 Do it now, so that if this is inside an argument
8660 of a function, we don't get the stack adjustment
8661 after some other args have already been pushed. */
8662 do_pending_stack_adjust ();
8667 case WITH_CLEANUP_EXPR:
8668 case CLEANUP_POINT_EXPR:
8672 if (SAVE_EXPR_RTL (exp) != 0)
8676 nops = tree_code_length[(int) TREE_CODE (exp)];
8677 for (i = 0; i < nops; i++)
8678 if (TREE_OPERAND (exp, i) != 0)
8680 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8681 if (type == 'e' || type == '<' || type == '1' || type == '2'
8683 preexpand_calls (TREE_OPERAND (exp, i));
8687 /* At the start of a function, record that we have no previously-pushed
8688 arguments waiting to be popped. */
8691 init_pending_stack_adjust ()
8693 pending_stack_adjust = 0;
8696 /* When exiting from a function, if safe, clear out any pending stack adjust
8697 so the adjustment won't get done. */
8700 clear_pending_stack_adjust ()
8702 #ifdef EXIT_IGNORE_STACK
8703 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8704 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8705 && ! flag_inline_functions)
8706 pending_stack_adjust = 0;
8710 /* Pop any previously-pushed arguments that have not been popped yet. */
8713 do_pending_stack_adjust ()
8715 if (inhibit_defer_pop == 0)
8717 if (pending_stack_adjust != 0)
8718 adjust_stack (GEN_INT (pending_stack_adjust));
8719 pending_stack_adjust = 0;
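
/* An illustrative model of the deferred-pop scheme, not used by the
   compiler: call sites accumulate the bytes of already-returned
   arguments, and one combined adjustment flushes them, as above.  */
#if 0
static int pending_model;

static void
note_pop_model (nbytes)
     int nbytes;
{
  pending_model += nbytes;	/* defer: emit no stack insn yet */
}

static int
flush_pops_model ()
{
  int n = pending_model;	/* one adjustment replaces many pops */
  pending_model = 0;
  return n;
}
#endif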
8723 /* Defer the expansion of all cleanups up to OLD_CLEANUPS.
8724 Returns the cleanups to be performed. */
8727 defer_cleanups_to (old_cleanups)
8730 tree new_cleanups = NULL_TREE;
8731 tree cleanups = cleanups_this_call;
8732 tree last = NULL_TREE;
8734 while (cleanups_this_call != old_cleanups)
8736 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8737 last = cleanups_this_call;
8738 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8743 /* Remove the list from the chain of cleanups. */
8744 TREE_CHAIN (last) = NULL_TREE;
8746 /* Reverse them so that we can build them in the right order. */
8747 cleanups = nreverse (cleanups);
8752 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
8753 TREE_VALUE (cleanups), new_cleanups);
8755 new_cleanups = TREE_VALUE (cleanups);
8757 cleanups = TREE_CHAIN (cleanups);
8761 return new_cleanups;
8764 /* Expand all cleanups up to OLD_CLEANUPS.
8765 Needed here, and also for language-dependent calls. */
8768 expand_cleanups_to (old_cleanups)
8771 while (cleanups_this_call != old_cleanups)
8773 (*interim_eh_hook) (TREE_VALUE (cleanups_this_call));
8774 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
8775 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8779 /* Expand conditional expressions. */
8781 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8782 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
8786 jumpifnot (exp, label)
8790 do_jump (exp, label, NULL_RTX);
8793 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
8800 do_jump (exp, NULL_RTX, label);
8803 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8804 the result is zero, or IF_TRUE_LABEL if the result is one.
8805 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8806 meaning fall through in that case.
8808 do_jump always does any pending stack adjust except when it does not
8809 actually perform a jump. An example where there is no jump
8810 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
8812 This function is responsible for optimizing cases such as
8813 &&, || and comparison operators in EXP. */
8816 do_jump (exp, if_false_label, if_true_label)
8818 rtx if_false_label, if_true_label;
8820 register enum tree_code code = TREE_CODE (exp);
8821 /* Some cases need to create a label to jump to
8822 in order to properly fall through.
8823 These cases set DROP_THROUGH_LABEL nonzero. */
8824 rtx drop_through_label = 0;
8829 enum machine_mode mode;
8839 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8845 /* This is not true with #pragma weak */
8847 /* The address of something can never be zero. */
8849 emit_jump (if_true_label);
8854 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8855 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8856 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8859 /* If we are narrowing the operand, we have to do the compare in the
8860 narrower mode. */
8861 if ((TYPE_PRECISION (TREE_TYPE (exp))
8862 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8863 goto normal;
8864 case NON_LVALUE_EXPR:
8865 case REFERENCE_EXPR:
8870 /* These cannot change zero->non-zero or vice versa. */
8871 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8875 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
8876 a test and can be longer if the test is eliminated. */
8878 /* Reduce to minus. */
8879 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8880 TREE_OPERAND (exp, 0),
8881 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8882 TREE_OPERAND (exp, 1))));
8883 /* Process as MINUS. */
8887 /* Non-zero iff operands of minus differ. */
8888 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8889 TREE_OPERAND (exp, 0),
8890 TREE_OPERAND (exp, 1)),
8895 /* If we are AND'ing with a small constant, do this comparison in the
8896 smallest type that fits. If the machine doesn't have comparisons
8897 that small, it will be converted back to the wider comparison.
8898 This helps if we are testing the sign bit of a narrower object.
8899 combine can't do this for us because it can't know whether a
8900 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
8902 if (! SLOW_BYTE_ACCESS
8903 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8904 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8905 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8906 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8907 && (type = type_for_mode (mode, 1)) != 0
8908 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8909 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8910 != CODE_FOR_nothing))
8912 do_jump (convert (type, exp), if_false_label, if_true_label);
8917 case TRUTH_NOT_EXPR:
8918 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8921 case TRUTH_ANDIF_EXPR:
8924 tree cleanups, old_cleanups;
8926 if (if_false_label == 0)
8927 if_false_label = drop_through_label = gen_label_rtx ();
8929 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8930 seq1 = get_insns ();
8933 old_cleanups = cleanups_this_call;
8935 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8936 seq2 = get_insns ();
8939 cleanups = defer_cleanups_to (old_cleanups);
8942 rtx flag = gen_reg_rtx (word_mode);
8946 /* Flag cleanups as not needed. */
8947 emit_move_insn (flag, const0_rtx);
8950 /* Flag cleanups as needed. */
8951 emit_move_insn (flag, const1_rtx);
8954 /* Convert flag, which is an rtx, into a tree. */
8955 cond = make_node (RTL_EXPR);
8956 TREE_TYPE (cond) = integer_type_node;
8957 RTL_EXPR_RTL (cond) = flag;
8958 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
8959 cond = save_expr (cond);
8961 new_cleanups = build (COND_EXPR, void_type_node,
8962 truthvalue_conversion (cond),
8963 cleanups, integer_zero_node);
8964 new_cleanups = fold (new_cleanups);
8966 /* Now add in the conditionalized cleanups. */
8968 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
8969 (*interim_eh_hook) (NULL_TREE);
8979 case TRUTH_ORIF_EXPR:
8982 tree cleanups, old_cleanups;
8984 if (if_true_label == 0)
8985 if_true_label = drop_through_label = gen_label_rtx ();
8987 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8988 seq1 = get_insns ();
8991 old_cleanups = cleanups_this_call;
8993 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8994 seq2 = get_insns ();
8997 cleanups = defer_cleanups_to (old_cleanups);
9000 rtx flag = gen_reg_rtx (word_mode);
9004 /* Flag cleanups as not needed. */
9005 emit_move_insn (flag, const0_rtx);
9008 /* Flag cleanups as needed. */
9009 emit_move_insn (flag, const1_rtx);
9012 /* Convert flag, which is an rtx, into a tree. */
9013 cond = make_node (RTL_EXPR);
9014 TREE_TYPE (cond) = integer_type_node;
9015 RTL_EXPR_RTL (cond) = flag;
9016 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9017 cond = save_expr (cond);
9019 new_cleanups = build (COND_EXPR, void_type_node,
9020 truthvalue_conversion (cond),
9021 cleanups, integer_zero_node);
9022 new_cleanups = fold (new_cleanups);
9024 /* Now add in the conditionalized cleanups. */
9026 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9027 (*interim_eh_hook) (NULL_TREE);
9039 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9043 do_pending_stack_adjust ();
9044 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9051 int bitsize, bitpos, unsignedp;
9052 enum machine_mode mode;
9057 /* Get description of this reference. We don't actually care
9058 about the underlying object here. */
9059 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9060 &mode, &unsignedp, &volatilep);
9062 type = type_for_size (bitsize, unsignedp);
9063 if (! SLOW_BYTE_ACCESS
9064 && type != 0 && bitsize >= 0
9065 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9066 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9067 != CODE_FOR_nothing))
9069 do_jump (convert (type, exp), if_false_label, if_true_label);
9076 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9077 if (integer_onep (TREE_OPERAND (exp, 1))
9078 && integer_zerop (TREE_OPERAND (exp, 2)))
9079 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9081 else if (integer_zerop (TREE_OPERAND (exp, 1))
9082 && integer_onep (TREE_OPERAND (exp, 2)))
9083 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9087 register rtx label1 = gen_label_rtx ();
9088 drop_through_label = gen_label_rtx ();
9089 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9090 /* Now the THEN-expression. */
9091 do_jump (TREE_OPERAND (exp, 1),
9092 if_false_label ? if_false_label : drop_through_label,
9093 if_true_label ? if_true_label : drop_through_label);
9094 /* In case the do_jump just above never jumps. */
9095 do_pending_stack_adjust ();
9096 emit_label (label1);
9097 /* Now the ELSE-expression. */
9098 do_jump (TREE_OPERAND (exp, 2),
9099 if_false_label ? if_false_label : drop_through_label,
9100 if_true_label ? if_true_label : drop_through_label);
9106 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9108 if (integer_zerop (TREE_OPERAND (exp, 1)))
9109 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9110 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9111 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9114 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9115 fold (build (EQ_EXPR, TREE_TYPE (exp),
9116 fold (build1 (REALPART_EXPR,
9117 TREE_TYPE (inner_type),
9118 TREE_OPERAND (exp, 0))),
9119 fold (build1 (REALPART_EXPR,
9120 TREE_TYPE (inner_type),
9121 TREE_OPERAND (exp, 1))))),
9122 fold (build (EQ_EXPR, TREE_TYPE (exp),
9123 fold (build1 (IMAGPART_EXPR,
9124 TREE_TYPE (inner_type),
9125 TREE_OPERAND (exp, 0))),
9126 fold (build1 (IMAGPART_EXPR,
9127 TREE_TYPE (inner_type),
9128 TREE_OPERAND (exp, 1))))))),
9129 if_false_label, if_true_label);
9130 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9131 && !can_compare_p (TYPE_MODE (inner_type)))
9132 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9134 comparison = compare (exp, EQ, EQ);
9140 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9142 if (integer_zerop (TREE_OPERAND (exp, 1)))
9143 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9144 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9145 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9148 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9149 fold (build (NE_EXPR, TREE_TYPE (exp),
9150 fold (build1 (REALPART_EXPR,
9151 TREE_TYPE (inner_type),
9152 TREE_OPERAND (exp, 0))),
9153 fold (build1 (REALPART_EXPR,
9154 TREE_TYPE (inner_type),
9155 TREE_OPERAND (exp, 1))))),
9156 fold (build (NE_EXPR, TREE_TYPE (exp),
9157 fold (build1 (IMAGPART_EXPR,
9158 TREE_TYPE (inner_type),
9159 TREE_OPERAND (exp, 0))),
9160 fold (build1 (IMAGPART_EXPR,
9161 TREE_TYPE (inner_type),
9162 TREE_OPERAND (exp, 1))))))),
9163 if_false_label, if_true_label);
9164 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9165 && !can_compare_p (TYPE_MODE (inner_type)))
9166 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9168 comparison = compare (exp, NE, NE);
9173 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9175 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9176 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9178 comparison = compare (exp, LT, LTU);
9182 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9184 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9185 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9187 comparison = compare (exp, LE, LEU);
9191 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9193 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9194 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9196 comparison = compare (exp, GT, GTU);
9200 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9202 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9203 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9205 comparison = compare (exp, GE, GEU);
9210 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9212 /* This is not needed any more and causes poor code since it causes
9213 comparisons and tests from non-SI objects to have different code
9214 sequences. */
9215 /* Copy to register to avoid generating bad insns by cse
9216 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9217 if (!cse_not_expected && GET_CODE (temp) == MEM)
9218 temp = copy_to_reg (temp);
9220 do_pending_stack_adjust ();
9221 if (GET_CODE (temp) == CONST_INT)
9222 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
9223 else if (GET_CODE (temp) == LABEL_REF)
9224 comparison = const_true_rtx;
9225 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9226 && !can_compare_p (GET_MODE (temp)))
9227 /* Note swapping the labels gives us not-equal. */
9228 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9229 else if (GET_MODE (temp) != VOIDmode)
9230 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
9231 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9232 GET_MODE (temp), NULL_RTX, 0);
9237 /* Do any postincrements in the expression that was tested. */
9240 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
9241 straight into a conditional jump instruction as the jump condition.
9242 Otherwise, all the work has been done already. */
9244 if (comparison == const_true_rtx)
9247 emit_jump (if_true_label);
9249 else if (comparison == const0_rtx)
9252 emit_jump (if_false_label);
9254 else if (comparison)
9255 do_jump_for_compare (comparison, if_false_label, if_true_label);
9257 if (drop_through_label)
9259 /* If do_jump produces code that might be jumped around,
9260 do any stack adjusts from that code, before the place
9261 where control merges in. */
9262 do_pending_stack_adjust ();
9263 emit_label (drop_through_label);
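
/* A source-level reminder of the short-circuit lowering handled
   above, illustrative only: for `a && b', control jumps to the false
   label as soon as A is zero, so B is never evaluated in that case.  */
#if 0
static int
andif_model (a, b)
     int a, b;
{
  if (a == 0)
    return 0;			/* TRUTH_ANDIF: skip second operand */
  return b != 0;
}
#endif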
9267 /* Given a comparison expression EXP for values too wide to be compared
9268 with one insn, test the comparison and jump to the appropriate label.
9269 The code of EXP is ignored; we always test GT if SWAP is 0,
9270 and LT if SWAP is 1. */
9273 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9276 rtx if_false_label, if_true_label;
9278 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9279 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9280 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9281 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9282 rtx drop_through_label = 0;
9283 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9286 if (! if_true_label || ! if_false_label)
9287 drop_through_label = gen_label_rtx ();
9288 if (! if_true_label)
9289 if_true_label = drop_through_label;
9290 if (! if_false_label)
9291 if_false_label = drop_through_label;
9293 /* Compare a word at a time, high order first. */
9294 for (i = 0; i < nwords; i++)
9297 rtx op0_word, op1_word;
9299 if (WORDS_BIG_ENDIAN)
9301 op0_word = operand_subword_force (op0, i, mode);
9302 op1_word = operand_subword_force (op1, i, mode);
9306 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9307 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9310 /* All but the high-order word must be compared as unsigned. */
9311 comp = compare_from_rtx (op0_word, op1_word,
9312 (unsignedp || i > 0) ? GTU : GT,
9313 unsignedp, word_mode, NULL_RTX, 0);
9314 if (comp == const_true_rtx)
9315 emit_jump (if_true_label);
9316 else if (comp != const0_rtx)
9317 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9319 /* Consider lower words only if these are equal. */
9320 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9322 if (comp == const_true_rtx)
9323 emit_jump (if_false_label);
9324 else if (comp != const0_rtx)
9325 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9329 emit_jump (if_false_label);
9330 if (drop_through_label)
9331 emit_label (drop_through_label);
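
/* A model of the word-by-word comparison emitted above, illustrative
   two-word unsigned case only:  */
#if 0
static int
gtu_by_parts_model (hi0, lo0, hi1, lo1)
     unsigned long hi0, lo0, hi1, lo1;
{
  /* The high-order words decide unless they are equal; the lower
     words are always compared unsigned, mirroring the loop above.  */
  if (hi0 > hi1)
    return 1;
  if (hi0 != hi1)
    return 0;
  return lo0 > lo1;
}
#endif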
9334 /* Compare OP0 with OP1, word at a time, in mode MODE.
9335 UNSIGNEDP says to do unsigned comparison.
9336 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9339 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9340 enum machine_mode mode;
9343 rtx if_false_label, if_true_label;
9345 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9346 rtx drop_through_label = 0;
9349 if (! if_true_label || ! if_false_label)
9350 drop_through_label = gen_label_rtx ();
9351 if (! if_true_label)
9352 if_true_label = drop_through_label;
9353 if (! if_false_label)
9354 if_false_label = drop_through_label;
9356 /* Compare a word at a time, high order first. */
9357 for (i = 0; i < nwords; i++)
9360 rtx op0_word, op1_word;
9362 if (WORDS_BIG_ENDIAN)
9364 op0_word = operand_subword_force (op0, i, mode);
9365 op1_word = operand_subword_force (op1, i, mode);
9369 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9370 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9373 /* All but the high-order word must be compared as unsigned. */
9374 comp = compare_from_rtx (op0_word, op1_word,
9375 (unsignedp || i > 0) ? GTU : GT,
9376 unsignedp, word_mode, NULL_RTX, 0);
9377 if (comp == const_true_rtx)
9378 emit_jump (if_true_label);
9379 else if (comp != const0_rtx)
9380 do_jump_for_compare (comp, NULL_RTX, if_true_label);
9382 /* Consider lower words only if these are equal. */
9383 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
9385 if (comp == const_true_rtx)
9386 emit_jump (if_false_label);
9387 else if (comp != const0_rtx)
9388 do_jump_for_compare (comp, NULL_RTX, if_false_label);
9392 emit_jump (if_false_label);
9393 if (drop_through_label)
9394 emit_label (drop_through_label);
9397 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9398 with one insn, test the comparison and jump to the appropriate label. */
9401 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9403 rtx if_false_label, if_true_label;
9405 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9406 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9407 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9408 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9410 rtx drop_through_label = 0;
9412 if (! if_false_label)
9413 drop_through_label = if_false_label = gen_label_rtx ();
9415 for (i = 0; i < nwords; i++)
9417 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
9418 operand_subword_force (op1, i, mode),
9419 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9420 word_mode, NULL_RTX, 0);
9421 if (comp == const_true_rtx)
9422 emit_jump (if_false_label);
9423 else if (comp != const0_rtx)
9424 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9428 emit_jump (if_true_label);
9429 if (drop_through_label)
9430 emit_label (drop_through_label);
9433 /* Jump according to whether OP0 is 0.
9434 We assume that OP0 has an integer mode that is too wide
9435 for the available compare insns. */
9438 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9440 rtx if_false_label, if_true_label;
9442 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9444 rtx drop_through_label = 0;
9446 if (! if_false_label)
9447 drop_through_label = if_false_label = gen_label_rtx ();
9449 for (i = 0; i < nwords; i++)
9451 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
9453 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
9454 if (comp == const_true_rtx)
9455 emit_jump (if_false_label);
9456 else if (comp != const0_rtx)
9457 do_jump_for_compare (comp, if_false_label, NULL_RTX);
9461 emit_jump (if_true_label);
9462 if (drop_through_label)
9463 emit_label (drop_through_label);
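
/* A model of the test above, illustrative two-word case only: OP0 is
   zero exactly when every word is zero; any nonzero word sends
   control to the "false" label at once.  */
#if 0
static int
zerop_by_parts_model (hi, lo)
     unsigned long hi, lo;
{
  return hi == 0 && lo == 0;
}
#endif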
9466 /* Given a comparison expression in rtl form, output conditional branches to
9467 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
9470 do_jump_for_compare (comparison, if_false_label, if_true_label)
9471 rtx comparison, if_false_label, if_true_label;
9475 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9476 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
9481 emit_jump (if_false_label);
9483 else if (if_false_label)
9486 rtx prev = get_last_insn ();
9489 /* Output the branch with the opposite condition. Then try to invert
9490 what is generated. If more than one insn is a branch, or if the
9491 branch is not the last insn written, abort. If we can't invert
9492 the branch, make a true label, redirect this jump to that,
9493 emit a jump to the false label and define the true label. */
9495 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
9496 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
9500 /* Here we get the first insn that was just emitted. It used to be the
9501 case that, on some machines, emitting the branch would discard
9502 the previous compare insn and emit a replacement. This isn't
9503 done anymore, but abort if we see that PREV is deleted. */
9506 insn = get_insns ();
9507 else if (INSN_DELETED_P (prev))
9510 insn = NEXT_INSN (prev);
9512 for (; insn; insn = NEXT_INSN (insn))
9513 if (GET_CODE (insn) == JUMP_INSN)
9520 if (branch != get_last_insn ())
9523 JUMP_LABEL (branch) = if_false_label;
9524 if (! invert_jump (branch, if_false_label))
9526 if_true_label = gen_label_rtx ();
9527 redirect_jump (branch, if_true_label);
9528 emit_jump (if_false_label);
9529 emit_label (if_true_label);
9534 /* Generate code for a comparison expression EXP
9535 (including code to compute the values to be compared)
9536 and set (CC0) according to the result.
9537 SIGNED_CODE should be the rtx operation for this comparison for
9538 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9540 We force a stack adjustment unless there are currently
9541 things pushed on the stack that aren't yet used. */
9544 compare (exp, signed_code, unsigned_code)
9546 enum rtx_code signed_code, unsigned_code;
9549 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9551 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9552 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
9553 register enum machine_mode mode = TYPE_MODE (type);
9554 int unsignedp = TREE_UNSIGNED (type);
9555 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
9557 return compare_from_rtx (op0, op1, code, unsignedp, mode,
9559 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9560 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
9563 /* Like compare but expects the values to compare as two rtx's.
9564 The decision as to signed or unsigned comparison must be made by the caller.
9566 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
9569 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9570 size of MODE should be used. */
9573 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9574 register rtx op0, op1;
9577 enum machine_mode mode;
9583 /* If one operand is constant, make it the second one. Only do this
9584 if the other operand is not constant as well. */
9586 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9587 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
9592 code = swap_condition (code);
9597 op0 = force_not_mem (op0);
9598 op1 = force_not_mem (op1);
9601 do_pending_stack_adjust ();
9603 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9604 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9608 /* There's no need to do this now that combine.c can eliminate lots of
9609 sign extensions. This can be less efficient in certain cases on other machines. */
9612 /* If this is a signed equality comparison, we can do it as an
9613 unsigned comparison since zero-extension is cheaper than sign
9614 extension and comparisons with zero are done as unsigned. This is
9615 the case even on machines that can do fast sign extension, since
9616 zero-extension is easier to combine with other operations than
9617 sign-extension is. If we are comparing against a constant, we must
9618 convert it to what it would look like unsigned. */
9619 if ((code == EQ || code == NE) && ! unsignedp
9620 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9622 if (GET_CODE (op1) == CONST_INT
9623 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9624 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9629 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
9631 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
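
/* A worked example of the constant masking in the disabled block
   above, illustrative only: a signed EQ/NE against OP1 can be done
   unsigned once OP1 is reduced by the mode mask; e.g. comparing an
   8-bit value against -1 becomes an unsigned compare against 0xff.  */
#if 0
static unsigned long
mask_constant_model (op1, mode_mask)
     long op1;
     unsigned long mode_mask;
{
  return op1 & mode_mask;	/* (-1 & 0xff) == 0xff */
}
#endif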
9634 /* Generate code to calculate EXP using a store-flag instruction
9635 and return an rtx for the result. EXP is either a comparison
9636 or a TRUTH_NOT_EXPR whose operand is a comparison.
9638 If TARGET is nonzero, store the result there if convenient.
9640 If ONLY_CHEAP is non-zero, only do this if it is likely to be very cheap.
9643 Return zero if there is no suitable set-flag instruction
9644 available on this machine.
9646 Once expand_expr has been called on the arguments of the comparison,
9647 we are committed to doing the store flag, since it is not safe to
9648 re-evaluate the expression. We emit the store-flag insn by calling
9649 emit_store_flag, but only expand the arguments if we have a reason
9650 to believe that emit_store_flag will be successful. If we think that
9651 it will, but it isn't, we have to simulate the store-flag with a
9652 set/jump/set sequence. */
9655 do_store_flag (exp, target, mode, only_cheap)
9658 enum machine_mode mode;
9662 tree arg0, arg1, type;
9664 enum machine_mode operand_mode;
9668 enum insn_code icode;
9669 rtx subtarget = target;
9670 rtx result, label, pattern, jump_pat;
9672 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9673 result at the end. We can't simply invert the test since it would
9674 have already been inverted if it were valid. This case occurs for
9675 some floating-point comparisons. */
9677 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9678 invert = 1, exp = TREE_OPERAND (exp, 0);
9680 arg0 = TREE_OPERAND (exp, 0);
9681 arg1 = TREE_OPERAND (exp, 1);
9682 type = TREE_TYPE (arg0);
9683 operand_mode = TYPE_MODE (type);
9684 unsignedp = TREE_UNSIGNED (type);
9686 /* We won't bother with BLKmode store-flag operations because it would mean
9687 passing a lot of information to emit_store_flag. */
9688 if (operand_mode == BLKmode)
9694 /* Get the rtx comparison code to use. We know that EXP is a comparison
9695 operation of some type. Some comparisons against 1 and -1 can be
9696 converted to comparisons with zero. Do so here so that the tests
9697 below will be aware that we have a comparison with zero. These
9698 tests will not catch constants in the first operand, but constants
9699 are rarely passed as the first operand. */
9701 switch (TREE_CODE (exp))
9710 if (integer_onep (arg1))
9711 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9713 code = unsignedp ? LTU : LT;
9716 if (! unsignedp && integer_all_onesp (arg1))
9717 arg1 = integer_zero_node, code = LT;
9719 code = unsignedp ? LEU : LE;
9722 if (! unsignedp && integer_all_onesp (arg1))
9723 arg1 = integer_zero_node, code = GE;
9725 code = unsignedp ? GTU : GT;
9728 if (integer_onep (arg1))
9729 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9731 code = unsignedp ? GEU : GE;
9737 /* Put a constant second. */
9738 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9740 tem = arg0; arg0 = arg1; arg1 = tem;
9741 code = swap_condition (code);
9744 /* If this is an equality or inequality test of a single bit, we can
9745 do this by shifting the bit being tested to the low-order bit and
9746 masking the result with the constant 1. If the condition was EQ,
9747 we xor it with 1. This does not require an scc insn and is faster
9748 than an scc insn even if we have it. */
9750 if ((code == NE || code == EQ)
9751 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9752 && integer_pow2p (TREE_OPERAND (arg0, 1))
9753 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9755 tree inner = TREE_OPERAND (arg0, 0);
9756 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9757 NULL_RTX, VOIDmode, 0)));
9760 /* If INNER is a right shift of a constant and it plus BITNUM does
9761 not overflow, adjust BITNUM and INNER. */
9763 if (TREE_CODE (inner) == RSHIFT_EXPR
9764 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9765 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9766 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9767 < TYPE_PRECISION (type)))
9769 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9770 inner = TREE_OPERAND (inner, 0);
9773 /* If we are going to be able to omit the AND below, we must do our
9774 operations as unsigned. If we must use the AND, we have a choice.
9775 Normally unsigned is faster, but for some machines signed is. */
9776 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9777 #ifdef LOAD_EXTEND_OP
9778 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9784 if (subtarget == 0 || GET_CODE (subtarget) != REG
9785 || GET_MODE (subtarget) != operand_mode
9786 || ! safe_from_p (subtarget, inner))
9789 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9792 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
9793 size_int (bitnum), subtarget, ops_unsignedp);
9795 if (GET_MODE (op0) != mode)
9796 op0 = convert_to_mode (mode, op0, ops_unsignedp);
9798 if ((code == EQ && ! invert) || (code == NE && invert))
9799 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9800 ops_unsignedp, OPTAB_LIB_WIDEN);
9802 /* Put the AND last so it can combine with more things. */
9803 if (bitnum != TYPE_PRECISION (type) - 1)
9804 op0 = expand_and (op0, const1_rtx, subtarget);
9809 /* Now see if we are likely to be able to do this. Return if not. */
9810 if (! can_compare_p (operand_mode))
9812 icode = setcc_gen_code[(int) code];
9813 if (icode == CODE_FOR_nothing
9814 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9816 /* We can only do this if it is one of the special cases that
9817 can be handled without an scc insn. */
9818 if ((code == LT && integer_zerop (arg1))
9819 || (! only_cheap && code == GE && integer_zerop (arg1)))
9821 else if (BRANCH_COST >= 0
9822 && ! only_cheap && (code == NE || code == EQ)
9823 && TREE_CODE (type) != REAL_TYPE
9824 && ((abs_optab->handlers[(int) operand_mode].insn_code
9825 != CODE_FOR_nothing)
9826 || (ffs_optab->handlers[(int) operand_mode].insn_code
9827 != CODE_FOR_nothing)))
9833 preexpand_calls (exp);
9834 if (subtarget == 0 || GET_CODE (subtarget) != REG
9835 || GET_MODE (subtarget) != operand_mode
9836 || ! safe_from_p (subtarget, arg1))
9839 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9840 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
9843 target = gen_reg_rtx (mode);
9845 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9846 because, if emit_store_flag does anything, it will succeed and
9847 OP0 and OP1 will not be used subsequently. */
9849 result = emit_store_flag (target, code,
9850 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9851 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9852 operand_mode, unsignedp, 1);
9857 result = expand_binop (mode, xor_optab, result, const1_rtx,
9858 result, 0, OPTAB_LIB_WIDEN);
9862 /* If this failed, we have to do this with set/compare/jump/set code. */
9863 if (target == 0 || GET_CODE (target) != REG
9864 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9865 target = gen_reg_rtx (GET_MODE (target));
9867 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9868 result = compare_from_rtx (op0, op1, code, unsignedp,
9869 operand_mode, NULL_RTX, 0);
9870 if (GET_CODE (result) == CONST_INT)
9871 return (((result == const0_rtx && ! invert)
9872 || (result != const0_rtx && invert))
9873 ? const0_rtx : const1_rtx);
9875 label = gen_label_rtx ();
9876 if (bcc_gen_fctn[(int) code] == 0)
9879 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9880 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
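
/* A model of the single-bit shortcut taken earlier in this function,
   illustrative only: shift the tested bit to bit 0, mask with 1, and
   XOR with 1 when the original condition was EQ.  */
#if 0
static int
bit_test_model (x, bitnum, was_eq)
     unsigned long x;
     int bitnum, was_eq;
{
  int t = (int) ((x >> bitnum) & 1);
  return was_eq ? t ^ 1 : t;
}
#endif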
9886 /* Generate a tablejump instruction (used for switch statements). */
9888 #ifdef HAVE_tablejump
9890 /* INDEX is the value being switched on, with the lowest value
9891 in the table already subtracted.
9892 MODE is its expected mode (needed if INDEX is constant).
9893 RANGE is the length of the jump table.
9894 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9896 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9897 index value is out of range. */
9900 do_tablejump (index, mode, range, table_label, default_label)
9901 rtx index, range, table_label, default_label;
9902 enum machine_mode mode;
9904 register rtx temp, vector;
9906 /* Do an unsigned comparison (in the proper mode) between the index
9907 expression and the value which represents the length of the range.
9908 Since we just finished subtracting the lower bound of the range
9909 from the index expression, this comparison allows us to simultaneously
9910 check that the original index expression value is both greater than
9911 or equal to the minimum value of the range and less than or equal to
9912 the maximum value of the range. */
9914 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9915 emit_jump_insn (gen_bgtu (default_label));
9917 /* If index is in range, it must fit in Pmode.
9918 Convert to Pmode so we can index with it. */
9920 index = convert_to_mode (Pmode, index, 1);
9922 /* Don't let a MEM slip through, because then INDEX that comes
9923 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9924 and break_out_memory_refs will go to work on it and mess it up. */
9925 #ifdef PIC_CASE_VECTOR_ADDRESS
9926 if (flag_pic && GET_CODE (index) != REG)
9927 index = copy_to_mode_reg (Pmode, index);
9930 /* If flag_force_addr were to affect this address
9931 it could interfere with the tricky assumptions made
9932 about addresses that contain label-refs,
9933 which may be valid only very near the tablejump itself. */
9934 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9935 GET_MODE_SIZE, because this indicates how large insns are. The other
9936 uses should all be Pmode, because they are addresses. This code
9937 could fail if addresses and insns are not the same size. */
9938 index = gen_rtx (PLUS, Pmode,
9939 gen_rtx (MULT, Pmode, index,
9940 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9941 gen_rtx (LABEL_REF, Pmode, table_label));
9942 #ifdef PIC_CASE_VECTOR_ADDRESS
9944 index = PIC_CASE_VECTOR_ADDRESS (index);
9947 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9948 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9949 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9950 RTX_UNCHANGING_P (vector) = 1;
9951 convert_move (temp, vector, 0);
9953 emit_jump_insn (gen_tablejump (temp, table_label));
9955 #ifndef CASE_VECTOR_PC_RELATIVE
9956 /* If we are generating PIC code or if the table is PC-relative, the
9957 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9963 #endif /* HAVE_tablejump */
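
/* A model of the dispatch above, illustrative only: because the lower
   bound was already subtracted from INDEX, a single unsigned
   comparison checks both ends of the range, and the entry is fetched
   at table + index * entry size.  */
#if 0
static void *
tablejump_model (table, index, range, deflt)
     void **table;
     unsigned long index, range;
     void *deflt;
{
  if (index > range)		/* a "negative" index wraps to a huge
				   unsigned value and is caught too */
    return deflt;
  return table[index];
}
#endif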
9966 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
9967 to that value is on the top of the stack. The resulting type is TYPE, and
9968 the source declaration is DECL. */
9971 bc_load_memory (type, decl)
9974 enum bytecode_opcode opcode;
9977 /* Bit fields are special. We only know about signed and
9978 unsigned ints, and enums. The latter are treated as if they were SI. */
9981 if (DECL_BIT_FIELD (decl))
9982 if (TREE_CODE (type) == ENUMERAL_TYPE
9983 || TREE_CODE (type) == INTEGER_TYPE)
9984 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9988 /* See corresponding comment in bc_store_memory(). */
9989 if (TYPE_MODE (type) == BLKmode
9990 || TYPE_MODE (type) == VOIDmode)
9993 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
9995 if (opcode == neverneverland)
9998 bc_emit_bytecode (opcode);
10000 #ifdef DEBUG_PRINT_CODE
10001 fputc ('\n', stderr);
10006 /* Store the contents of the second stack slot to the address in the
10007 top stack slot. DECL is the declaration of the destination and is used
10008 to determine whether we're dealing with a bitfield. */
10011 bc_store_memory (type, decl)
10014 enum bytecode_opcode opcode;
10017 if (DECL_BIT_FIELD (decl))
10019 if (TREE_CODE (type) == ENUMERAL_TYPE
10020 || TREE_CODE (type) == INTEGER_TYPE)
10026 if (TYPE_MODE (type) == BLKmode)
10028 /* Copy structure. This expands to a block copy instruction, storeBLK.
10029 In addition to the arguments expected by the other store instructions,
10030 it also expects a type size (SImode) on top of the stack, which is the
10031 structure size in size units (usually bytes). The first two arguments
10032 are already on the stack, so we just put the size on level 1. For some
10033 other languages, the size may be variable; this is why we don't encode
10034 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
10036 bc_expand_expr (TYPE_SIZE (type));
10040 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
10042 if (opcode == neverneverland)
10045 bc_emit_bytecode (opcode);
10047 #ifdef DEBUG_PRINT_CODE
10048 fputc ('\n', stderr);
10053 /* Allocate local stack space sufficient to hold a value of the given
10054 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
10055 integral power of 2. A special case is locals of type VOID, which
10056 have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
10057 remapped into the corresponding attribute of SI. */
10060 bc_allocate_local (size, alignment)
10061 int size, alignment;
10064 int byte_alignment;
10069 /* Normalize size and alignment: a zero (void) size gets a full word. */
10070 if (size == 0)
10071 size = UNITS_PER_WORD;
10073 if (alignment < BITS_PER_UNIT)
10074 byte_alignment = 1 << (INT_ALIGN - 1);
10077 byte_alignment = alignment / BITS_PER_UNIT;
10079 if (local_vars_size & (byte_alignment - 1))
10080 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
10082 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10083 local_vars_size += size;
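
/* A worked example of the masked rounding above, illustrative only:
   with local_vars_size == 5 and byte_alignment == 4, 5 & 3 == 1, so
   the offset becomes 5 + (4 - 1) == 8, the next multiple of 4.  */
#if 0
static int
mask_round_model (offset, align)
     int offset, align;		/* ALIGN must be a power of two */
{
  if (offset & (align - 1))
    offset += align - (offset & (align - 1));
  return offset;
}
#endif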
10089 /* Allocate variable-sized local array. Variable-sized arrays are
10090 actually pointers to the block of memory where they are stored. */
10093 bc_allocate_variable_array (size)
10097 const int ptralign = (1 << (PTR_ALIGN - 1));
10099 /* Align pointer; the mask must be the alignment minus one. */
10100 if (local_vars_size & (ptralign - 1))
10101 local_vars_size += ptralign - (local_vars_size & (ptralign - 1));
10103 /* Note down local space needed: pointer to block; also return the rtx referring to it. */
10106 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
10107 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
10112 /* Push the machine address for the given external variable offset. */
10114 bc_load_externaddr (externaddr)
10117 bc_emit_bytecode (constP);
10118 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
10119 BYTECODE_BC_LABEL (externaddr)->offset);
10121 #ifdef DEBUG_PRINT_CODE
10122 fputc ('\n', stderr);
10131 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
10137 /* Like above, but expects an IDENTIFIER. */
10139 bc_load_externaddr_id (id, offset)
10143 if (!IDENTIFIER_POINTER (id))
10146 bc_emit_bytecode (constP);
10147 bc_emit_code_labelref (bc_xstrdup (IDENTIFIER_POINTER (id)), offset);
10149 #ifdef DEBUG_PRINT_CODE
10150 fputc ('\n', stderr);
10155 /* Push the machine address for the given local variable offset. */
10157 bc_load_localaddr (localaddr)
10160 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
10164 /* Push the machine address for the given parameter offset.
10165 NOTE: offset is in bits. */
10167 bc_load_parmaddr (parmaddr)
10170 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
10175 /* Convert a[i] into *(a + i). */
10177 bc_canonicalize_array_ref (exp)
10180 tree type = TREE_TYPE (exp);
10181 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
10182 TREE_OPERAND (exp, 0));
10183 tree index = TREE_OPERAND (exp, 1);
10186 /* Convert the integer argument to a type the same size as a pointer
10187 so the multiply won't overflow spuriously. */
10189 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
10190 index = convert (type_for_size (POINTER_SIZE, 0), index);
10192 /* The array address isn't volatile even if the array is.
10193 (Of course this isn't terribly relevant since the bytecode
10194 translator treats nearly everything as volatile anyway.) */
10195 TREE_THIS_VOLATILE (array_adr) = 0;
10197 return build1 (INDIRECT_REF, type,
10198 fold (build (PLUS_EXPR,
10199 TYPE_POINTER_TO (type),
10201 fold (build (MULT_EXPR,
10202 TYPE_POINTER_TO (type),
10204 size_in_bytes (type))))));
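
/* A source-level picture of the rewrite above, illustrative only:
   for `int a[10];', a[i] is treated as
   *(int *) ((char *) a + i * sizeof (int)).  */
#if 0
static int
array_ref_model (a, i)
     int *a;
     long i;
{
  return *(int *) ((char *) a + i * sizeof (int));
}
#endif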
10208 /* Load the address of the component referenced by the given
10209 COMPONENT_REF expression.
10211 Returns innermost lvalue. */
10214 bc_expand_component_address (exp)
10218 enum machine_mode mode;
10220 HOST_WIDE_INT SIval;
10223 tem = TREE_OPERAND (exp, 1);
10224 mode = DECL_MODE (tem);
10227 /* Compute cumulative bit offset for nested component refs
10228 and array refs, and find the ultimate containing object. */
10230 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
10232 if (TREE_CODE (tem) == COMPONENT_REF)
10233 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
10235 if (TREE_CODE (tem) == ARRAY_REF
10236 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10237 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
10239 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
10240 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
10241 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
10246 bc_expand_expr (tem);
10249 /* For bitfields also push their offset and size */
10250 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
10251 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
10253 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
10254 bc_emit_instruction (addconstPSI, SIval);
10256 return (TREE_OPERAND (exp, 1));
10260 /* Emit code to push two SI constants */
10262 bc_push_offset_and_size (offset, size)
10263 HOST_WIDE_INT offset, size;
10265 bc_emit_instruction (constSI, offset);
10266 bc_emit_instruction (constSI, size);
/* Emit byte code to push the address of the given lvalue expression to
   the stack.  If it's a bit field, we also push offset and size info.

   Returns innermost component, which allows us to determine not only
   its type, but also whether it's a bitfield.  */

tree
bc_expand_address (exp)
     tree exp;
{
  /* Safeguard */
  if (!exp || TREE_CODE (exp) == ERROR_MARK)
    return (exp);


  switch (TREE_CODE (exp))
    {
    case ARRAY_REF:

      return (bc_expand_address (bc_canonicalize_array_ref (exp)));

    case COMPONENT_REF:

      return (bc_expand_component_address (exp));

    case INDIRECT_REF:

      bc_expand_expr (TREE_OPERAND (exp, 0));

      /* For variable-sized types: retrieve pointer.  Sometimes the
         TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
         also make sure we have an operand, just in case...  */

      if (TREE_OPERAND (exp, 0)
          && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
                                 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));

      return (TREE_OPERAND (exp, 0));

    case FUNCTION_DECL:

      bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                             BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
      break;

    case PARM_DECL:

      bc_load_parmaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case RESULT_DECL:

      bc_emit_instruction (returnP);
      break;

    case VAR_DECL:

      if (BYTECODE_LABEL (DECL_RTL (exp)))
        bc_load_externaddr (DECL_RTL (exp));
      else if (DECL_EXTERNAL (exp))
        bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
                               (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
      else
        bc_load_localaddr (DECL_RTL (exp));

      /* For variable-sized types: retrieve pointer */
      if (TYPE_SIZE (TREE_TYPE (exp))
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
        bc_emit_instruction (loadP);

      /* If packed, also return offset and size */
      if (DECL_BIT_FIELD (exp))
        bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
                                 TREE_INT_CST_LOW (DECL_SIZE (exp)));
      break;

    case STRING_CST:
      {
        rtx r;

        bc_emit_bytecode (constP);
        r = output_constant_def (exp);
        bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);

#ifdef DEBUG_PRINT_CODE
        fputc ('\n', stderr);
#endif
      }
      break;

    default:

      abort ();
    }

  /* Most lvalues don't have components.  */
  return (exp);
}
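/* Illustrative example: for a local `int v', bc_expand_address emits
   `localP <offset of v>', leaving the address of V on the interpreter
   stack.  For a bit-field component, constSI <bitpos> and constSI <size>
   are pushed as well, and the caller is expected to use the bit-field
   operations (sxloadBI/zxloadBI/sstoreBI) rather than plain
   addressing.  */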
/* Emit a type code to be used by the runtime support in handling
   parameter passing.  The type code consists of the machine mode
   plus the minimal alignment shifted left 8 bits.  */

tree
bc_runtime_type_code (type)
     tree type;
{
  int val;

  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case INTEGER_TYPE:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case POINTER_TYPE:
    case RECORD_TYPE:

      val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
      break;

    case ERROR_MARK:

      val = 0;
      break;

    default:

      abort ();
    }
  return build_int_2 (val, 0);
}
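/* Illustrative example (machine mode numbering is target- and
   version-specific, so the values here are hypothetical): if
   TYPE_MODE (type) is SImode with integer value 8 and TYPE_ALIGN (type)
   is 32, the resulting type code is 8 | (32 << 8) == 0x2008 -- the mode
   in the low byte, the alignment above it.  */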
/* Generate constructor label */

static char *
bc_gen_constr_label ()
{
  static int label_counter;
  static char label[20];

  sprintf (label, "*LR%d", label_counter++);

  return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
}
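/* Successive calls return "*LR0", "*LR1", and so on.  Note that LABEL
   is a static buffer which the next call overwrites; that is why the
   string is copied onto the permanent obstack before being returned.  */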
/* Evaluate constructor CONSTR and return pointer to it on level one.  We
   expand the constructor data as static data, and push a pointer to it.
   The pointer is put in the pointer table and is retrieved by a constP
   bytecode instruction.  We then loop and store each constructor member in
   the corresponding component.  Finally, we return the original pointer on
   the stack.  */

void
bc_expand_constructor (constr)
     tree constr;
{
  char *l;
  HOST_WIDE_INT ptroffs;


  /* Literal constructors are handled as constants, whereas
     non-literals are evaluated and stored element by element
     into the data segment.  */

  /* Allocate space in proper segment and push pointer to space on stack.  */

  l = bc_gen_constr_label ();

  if (TREE_CONSTANT (constr))
    {
      bc_emit_const_labeldef (l);
      bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
    }
  else
    {
      bc_emit_data_labeldef (l);
      bc_output_data_constructor (constr);
    }


  /* Add reference to pointer table and recall pointer to stack;
     this code is common for both types of constructors: literals
     and non-literals.  */

  ptroffs = bc_define_pointer (l);
  bc_emit_instruction (constP, ptroffs);

  /* This is all that has to be done if it's a literal.  */
  if (TREE_CONSTANT (constr))
    return;


  /* At this point, we have the pointer to the structure on top of the stack.
     Generate sequences of store_memory calls for the constructor.  */

  /* Constructor type is structure.  */
  if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
    {
      register tree elt;

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */

      if (list_length (CONSTRUCTOR_ELTS (constr))
          != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
        {
          bc_emit_instruction (duplicate);
          bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
          bc_emit_instruction (clearBLK);
        }

      /* Store each element of the constructor into the corresponding
         field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize, bitpos, unsignedp;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
          mode = DECL_MODE (field);
          unsignedp = TREE_UNSIGNED (field);

          bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt),
                          TREE_TYPE (TREE_VALUE (elt)),
                          /* The alignment of TARGET is
                             at least what its type requires.  */
                          VOIDmode, unsignedp,
                          TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                          int_size_in_bytes (TREE_TYPE (constr)));
        }
    }
  else
    /* Constructor type is array.  */
    if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
      {
        register tree elt;
        register int i;
        tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
        int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
        int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
        tree elttype = TREE_TYPE (TREE_TYPE (constr));

        /* If the constructor has fewer elements than the array,
           clear the whole array first.  */

        if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
          {
            bc_emit_instruction (duplicate);
            bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
            bc_emit_instruction (clearBLK);
          }

        /* Store each element of the constructor into the corresponding
           element of TARGET, determined by counting the elements.  */

        for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
             elt;
             elt = TREE_CHAIN (elt), i++)
          {
            register enum machine_mode mode;
            int bitsize, bitpos, unsignedp;

            mode = TYPE_MODE (elttype);
            bitsize = GET_MODE_BITSIZE (mode);
            unsignedp = TREE_UNSIGNED (elttype);

            bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
                      /* * TYPE_SIZE_UNIT (elttype) */);

            bc_store_field (elt, bitsize, bitpos, mode,
                            TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
                            /* The alignment of TARGET is
                               at least what its type requires.  */
                            VOIDmode, unsignedp,
                            TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (constr)));
          }
      }
}
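/* Illustrative sketch (member offsets hypothetical): for a non-literal
   initializer such as

       struct S { int a, b; } s = { f (), 0 };

   the code above emits, roughly:

       constP <ptr>                      -- pointer to static block *LRn
       <expand f ()>  over  <store a>    -- one bc_store_field per member
       <expand 0>  over  addconstPSI 4  <store b>

   and the original pointer remains on top of the stack afterwards.  */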
/* Store the value of EXP (an expression tree) into member FIELD of
   structure at address on stack, which has type TYPE, mode MODE and
   occupies BITSIZE bits, starting BITPOS bits from the beginning of the
   structure.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is its size in bytes, or -1 if variable.  */

static void
bc_store_field (field, bitsize, bitpos, mode, exp, type,
                value_mode, unsignedp, align, total_size)
     int bitsize, bitpos;
     enum machine_mode mode;
     tree field, exp, type;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Expand expression and copy pointer */
  bc_expand_expr (exp);
  bc_emit_instruction (over);


  /* If the component is a bit field, we cannot use addressing to access
     it.  Use bit-field techniques to store in it.  */

  if (DECL_BIT_FIELD (field))
    bc_store_bit_field (bitpos, bitsize, unsignedp);
  else
    /* Not bit field */
    {
      HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;

      /* Advance pointer to the desired member */
      if (offset)
        bc_emit_instruction (addconstPSI, offset);

      /* Store */
      bc_store_memory (type, field);
    }
}
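/* Stack discipline implied by the code above: the aggregate address is
   on top of the stack on entry; bc_expand_expr pushes the value, and
   `over' copies the address over it, so each store consumes the value
   and the address copy while the original address survives for the
   next member.  */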
/* Store SI/SU in bitfield */

static void
bc_store_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Store */
  bc_emit_instruction (sstoreBI);
}
/* Load SI/SU from bitfield */

static void
bc_load_bit_field (offset, size, unsignedp)
     int offset, size, unsignedp;
{
  /* Push bitfield offset and size */
  bc_push_offset_and_size (offset, size);

  /* Load: sign-extend if signed, else zero-extend */
  bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
}
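/* Illustrative example: a 3-bit field holding the bit pattern 101 loads
   as 5 via zxloadBI when unsigned, but as -3 via sxloadBI when signed,
   since the top bit is replicated on extension.  */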
/* Adjust interpreter stack by NLEVELS.  Positive means drop NLEVELS
   (adjust stack pointer upwards), negative means add that number of
   levels (adjust the stack pointer downwards).  Only positive values
   normally make sense.  */

void
bc_adjust_stack (nlevels)
     int nlevels;
{
  switch (nlevels)
    {
    case 0:
      break;

    case 2:
      bc_emit_instruction (drop);
      /* Fall through to drop the second level.  */

    case 1:
      bc_emit_instruction (drop);
      break;

    default:

      bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
      stack_depth -= nlevels;
    }

#if defined (VALIDATE_STACK_FOR_BC)
  VALIDATE_STACK_FOR_BC ();
#endif
}
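/* Design note (inferred from the code above): adjustments of one or two
   levels are emitted as cheap `drop' instructions, while larger ones use
   a single adjstackSI with an immediate operand; stack_depth is then
   updated by hand, presumably because the depth change cannot be deduced
   from the opcode alone.  */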