1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "hard-reg-set.h"
32 #include "insn-flags.h"
33 #include "insn-codes.h"
35 #include "insn-config.h"
38 #include "typeclass.h"
41 #include "bc-opcode.h"
42 #include "bc-typecd.h"
/* Divide X by Y, rounding any remainder upward (assumes X >= 0, Y > 0).  */
47 #define CEIL(x,y) (((x) + (y) - 1) / (y))
49 /* Decide whether a function's arguments should be processed
50 from first to last or from last to first.
52 They should if the stack and args grow in opposite directions, but
53 only if we have push insns. */
57 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
58 #define PUSH_ARGS_REVERSED /* If it's last to first */
63 #ifndef STACK_PUSH_CODE
64 #ifdef STACK_GROWS_DOWNWARD
65 #define STACK_PUSH_CODE PRE_DEC
67 #define STACK_PUSH_CODE PRE_INC
71 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
72 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
74 /* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
82 /* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85 int do_preexpand_calls = 1;
87 /* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
/* NOTE(review): "units" is presumably bytes of stack space -- confirm
   against the code that adjusts this counter.  */
89 int pending_stack_adjust;
91 /* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95 int inhibit_defer_pop;
97 /* A list of all cleanups which belong to the arguments of
98 function calls being expanded by expand_call. */
99 tree cleanups_this_call;
101 /* When temporaries are created by TARGET_EXPRs, they are created at
102 this level of temp_slot_level, so that they can remain allocated
103 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime of TARGET_EXPRs. */
105 int target_temp_slot_level;
107 /* Nonzero means __builtin_saveregs has already been done in this function.
108 The value is the pseudoreg containing the value __builtin_saveregs returned. */
110 static rtx saveregs_value;
112 /* Similarly for __builtin_apply_args. */
113 static rtx apply_args_value;
115 /* This structure is used by move_by_pieces to describe the move to be performed. */
118 struct move_by_pieces
128 int explicit_inc_from;
135 /* This structure is used by clear_by_pieces to describe the clear to be performed. */
138 struct clear_by_pieces
150 /* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
154 extern int local_vars_size;
155 extern int stack_depth;
156 extern int max_stack_depth;
/* Defined in other files: the obstack for permanently-allocated objects,
   and the save area for the argument pointer.  */
157 extern struct obstack permanent_obstack;
158 extern rtx arg_pointer_save_area;
/* Forward declarations for this file's helpers.  PROTO is presumably the
   K&R/ANSI prototype-compatibility macro -- confirm in the project's
   configuration headers.  */

/* Postincrement queue handling (see the "Manage the queue" section below).  */
160 static rtx enqueue_insn PROTO((rtx, rtx));
161 static int queued_subexp_p PROTO((rtx));
162 static void init_queue PROTO((void));
/* Block moves/clears performed piecewise in register-sized chunks
   (see struct move_by_pieces / struct clear_by_pieces above).  */
163 static void move_by_pieces PROTO((rtx, rtx, int, int));
164 static int move_by_pieces_ninsns PROTO((unsigned int, int));
165 static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
166 struct move_by_pieces *));
167 static void clear_by_pieces PROTO((rtx, int, int));
168 static void clear_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
169 struct clear_by_pieces *));
/* Storing constructors and bit-fields; zero-detection predicates.  */
170 static int is_zeros_p PROTO((tree));
171 static int mostly_zeros_p PROTO((tree));
172 static void store_constructor PROTO((tree, rtx, int));
173 static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
174 enum machine_mode, int, int, int));
175 static int get_inner_unaligned_p PROTO((tree));
176 static tree save_noncopied_parts PROTO((tree, tree));
177 static tree init_noncopied_parts PROTO((tree, tree));
178 static int safe_from_p PROTO((rtx, tree));
179 static int fixed_type_p PROTO((tree));
/* Helpers for expanding __builtin_* functions.  */
180 static int get_pointer_alignment PROTO((tree, unsigned));
181 static tree string_constant PROTO((tree, tree *));
182 static tree c_strlen PROTO((tree));
183 static rtx expand_builtin PROTO((tree, rtx, rtx,
184 enum machine_mode, int));
/* __builtin_apply / __builtin_return machinery.  */
185 static int apply_args_size PROTO((void));
186 static int apply_result_size PROTO((void));
187 static rtx result_vector PROTO((int, rtx));
188 static rtx expand_builtin_apply_args PROTO((void));
189 static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
190 static void expand_builtin_return PROTO((rtx));
191 static rtx expand_increment PROTO((tree, int, int));
/* Bytecode (bc_*) back-end interface; these are non-static because they
   are used by the bytecode output machinery in other files.  */
192 void bc_expand_increment PROTO((struct increment_operator *, tree));
193 rtx bc_allocate_local PROTO((int, int));
194 void bc_store_memory PROTO((tree, tree));
195 tree bc_expand_component_address PROTO((tree));
196 tree bc_expand_address PROTO((tree));
197 void bc_expand_constructor PROTO((tree));
198 void bc_adjust_stack PROTO((int));
199 tree bc_canonicalize_array_ref PROTO((tree));
200 void bc_load_memory PROTO((tree, tree));
201 void bc_load_externaddr PROTO((rtx));
202 void bc_load_externaddr_id PROTO((tree, int));
203 void bc_load_localaddr PROTO((rtx));
204 void bc_load_parmaddr PROTO((rtx));
205 static void preexpand_calls PROTO((tree));
/* Conditional-jump and comparison expansion helpers.  */
206 static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
207 void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
208 static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
209 static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
210 static void do_jump_for_compare PROTO((rtx, rtx, rtx));
211 static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
212 static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
213 static tree defer_cleanups_to PROTO((tree));
/* Defined by the language front end.  */
214 extern tree truthvalue_conversion PROTO((tree));
216 /* Record for each mode whether we can move a register directly to or
217 from an object of that mode in memory. If we can't, we won't try
218 to use that mode directly when accessing a field of that mode. */
/* Indexed by (int) machine mode; computed once per compilation by probing
   recog () with trial SET patterns (see the initialization loop below).  */
220 static char direct_load[NUM_MACHINE_MODES];
221 static char direct_store[NUM_MACHINE_MODES];
223 /* MOVE_RATIO is the number of move instructions that is better than a block move. */
227 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
230 /* A value of around 6 would minimize code size; infinity would minimize execution speed. */
232 #define MOVE_RATIO 15
236 /* This array records the insn_code of insns to perform block moves. */
/* Presumably indexed by (int) machine mode, like direct_load above --
   confirm at the initialization site.  */
237 enum insn_code movstr_optab[NUM_MACHINE_MODES];
239 /* This array records the insn_code of insns to perform block clears. */
240 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
242 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
244 #ifndef SLOW_UNALIGNED_ACCESS
245 #define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
248 /* Register mappings for target machines without register windows. */
249 #ifndef INCOMING_REGNO
250 #define INCOMING_REGNO(OUT) (OUT)
252 #ifndef OUTGOING_REGNO
253 #define OUTGOING_REGNO(IN) (IN)
256 /* Maps used to convert modes to const, load, and store bytecodes. */
/* Indexed by (int) machine mode; filled in by bc_init_mode_to_opcode_maps
   below, where entries default to neverneverland before modemap.def runs.  */
257 enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
258 enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
259 enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
261 /* Initialize maps used to convert modes to const, load, and store bytecodes. */
265 bc_init_mode_to_opcode_maps ()
269 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
270 mode_to_const_map[mode] =
271 mode_to_load_map[mode] =
272 mode_to_store_map[mode] = neverneverland;
274 #define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
275 mode_to_const_map[(int) SYM] = CONST; \
276 mode_to_load_map[(int) SYM] = LOAD; \
277 mode_to_store_map[(int) SYM] = STORE;
279 #include "modemap.def"
283 /* This is run once per compilation to set up which modes can be used
284 directly in memory and to initialize the block move optab. */
290 enum machine_mode mode;
291 /* Try indexing by frame ptr and try by stack ptr.
292 It is known that on the Convex the stack ptr isn't a valid index.
293 With luck, one or the other is valid on any machine. */
294 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
295 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
298 insn = emit_insn (gen_rtx (SET, 0, 0));
299 pat = PATTERN (insn);
301 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
302 mode = (enum machine_mode) ((int) mode + 1))
308 direct_load[(int) mode] = direct_store[(int) mode] = 0;
309 PUT_MODE (mem, mode);
310 PUT_MODE (mem1, mode);
312 /* See if there is some register that can be used in this mode and
313 directly loaded or stored from memory. */
315 if (mode != VOIDmode && mode != BLKmode)
316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
317 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
320 if (! HARD_REGNO_MODE_OK (regno, mode))
323 reg = gen_rtx (REG, mode, regno);
326 SET_DEST (pat) = reg;
327 if (recog (pat, insn, &num_clobbers) >= 0)
328 direct_load[(int) mode] = 1;
330 SET_SRC (pat) = mem1;
331 SET_DEST (pat) = reg;
332 if (recog (pat, insn, &num_clobbers) >= 0)
333 direct_load[(int) mode] = 1;
336 SET_DEST (pat) = mem;
337 if (recog (pat, insn, &num_clobbers) >= 0)
338 direct_store[(int) mode] = 1;
341 SET_DEST (pat) = mem1;
342 if (recog (pat, insn, &num_clobbers) >= 0)
343 direct_store[(int) mode] = 1;
350 /* This is run at the start of compiling a function. */
357 pending_stack_adjust = 0;
358 inhibit_defer_pop = 0;
359 cleanups_this_call = 0;
361 apply_args_value = 0;
365 /* Save all variables describing the current status into the structure *P.
366 This is used before starting a nested function. */
372 /* Instead of saving the postincrement queue, empty it. */
375 p->pending_stack_adjust = pending_stack_adjust;
376 p->inhibit_defer_pop = inhibit_defer_pop;
377 p->cleanups_this_call = cleanups_this_call;
378 p->saveregs_value = saveregs_value;
379 p->apply_args_value = apply_args_value;
380 p->forced_labels = forced_labels;
382 pending_stack_adjust = 0;
383 inhibit_defer_pop = 0;
384 cleanups_this_call = 0;
386 apply_args_value = 0;
390 /* Restore all variables describing the current status from the structure *P.
391 This is used after a nested function. */
394 restore_expr_status (p)
397 pending_stack_adjust = p->pending_stack_adjust;
398 inhibit_defer_pop = p->inhibit_defer_pop;
399 cleanups_this_call = p->cleanups_this_call;
400 saveregs_value = p->saveregs_value;
401 apply_args_value = p->apply_args_value;
402 forced_labels = p->forced_labels;
405 /* Manage the queue of increment instructions to be output
406 for POSTINCREMENT_EXPR expressions, etc. */
/* Head of the chain of QUEUED rtxs; built by enqueue_insn and drained by
   emit_queue (see below).  */
408 static rtx pending_chain;
410 /* Queue up to increment (or change) VAR later. BODY says how:
411 BODY should be the same thing you would pass to emit_insn
412 to increment right away. It will go to emit_insn later on.
414 The value is a QUEUED expression to be used in place of VAR
415 where you want to guarantee the pre-incrementation value of VAR. */
418 enqueue_insn (var, body)
421 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
422 var, NULL_RTX, NULL_RTX, body, pending_chain);
423 return pending_chain;
426 /* Use protect_from_queue to convert a QUEUED expression
427 into something that you can put immediately into an instruction.
428 If the queued incrementation has not happened yet,
429 protect_from_queue returns the variable itself.
430 If the incrementation has happened, protect_from_queue returns a temp
431 that contains a copy of the old value of the variable.
433 Any time an rtx which might possibly be a QUEUED is to be put
434 into an instruction, it must be passed through protect_from_queue first.
435 QUEUED expressions are not meaningful in instructions.
437 Do not pass a value through protect_from_queue and then hold
438 on to it for a while before putting it in an instruction!
439 If the queue is flushed in between, incorrect code will result. */
442 protect_from_queue (x, modify)
446 register RTX_CODE code = GET_CODE (x);
448 #if 0 /* A QUEUED can hang around after the queue is forced out. */
449 /* Shortcut for most common case. */
450 if (pending_chain == 0)
456 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
457 use of autoincrement. Make a copy of the contents of the memory
458 location rather than a copy of the address, but not if the value is
459 of mode BLKmode. Don't modify X in place since it might be shared. */
461 if (code == MEM && GET_MODE (x) != BLKmode
462 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
464 register rtx y = XEXP (x, 0);
465 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
467 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
468 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
469 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
473 register rtx temp = gen_reg_rtx (GET_MODE (new));
474 emit_insn_before (gen_move_insn (temp, new),
480 /* Otherwise, recursively protect the subexpressions of all
481 the kinds of rtx's that can contain a QUEUED. */
484 rtx tem = protect_from_queue (XEXP (x, 0), 0);
485 if (tem != XEXP (x, 0))
491 else if (code == PLUS || code == MULT)
493 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
494 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
495 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
504 /* If the increment has not happened, use the variable itself. */
505 if (QUEUED_INSN (x) == 0)
506 return QUEUED_VAR (x);
507 /* If the increment has happened and a pre-increment copy exists, use that copy. */
509 if (QUEUED_COPY (x) != 0)
510 return QUEUED_COPY (x);
511 /* The increment has happened but we haven't set up a pre-increment copy.
512 Set one up now, and use it. */
513 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
514 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
516 return QUEUED_COPY (x);
519 /* Return nonzero if X contains a QUEUED expression:
520 if it contains anything that will be altered by a queued increment.
521 We handle only combinations of MEM, PLUS, MINUS and MULT operators
522 since memory addresses generally contain only those. */
528 register enum rtx_code code = GET_CODE (x);
534 return queued_subexp_p (XEXP (x, 0));
538 return queued_subexp_p (XEXP (x, 0))
539 || queued_subexp_p (XEXP (x, 1));
544 /* Perform all the pending incrementations. */
550 while (p = pending_chain)
552 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
553 pending_chain = QUEUED_NEXT (p);
564 /* Copy data from FROM to TO, where the machine modes are not the same.
565 Both modes may be integer, or both may be floating.
566 UNSIGNEDP should be nonzero if FROM is an unsigned type.
567 This causes zero-extension instead of sign-extension. */
570 convert_move (to, from, unsignedp)
571 register rtx to, from;
574 enum machine_mode to_mode = GET_MODE (to);
575 enum machine_mode from_mode = GET_MODE (from);
576 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
577 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
581 /* rtx code for making an equivalent value. */
582 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
584 to = protect_from_queue (to, 1);
585 from = protect_from_queue (from, 0);
587 if (to_real != from_real)
590 /* If FROM is a SUBREG that indicates that we have already done at least
591 the required extension, strip it. We don't handle such SUBREGs as TO here. */
594 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
595 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
596 >= GET_MODE_SIZE (to_mode))
597 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
598 from = gen_lowpart (to_mode, from), from_mode = to_mode;
600 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
603 if (to_mode == from_mode
604 || (from_mode == VOIDmode && CONSTANT_P (from)))
606 emit_move_insn (to, from);
614 #ifdef HAVE_extendqfhf2
615 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_extendqfsf2
622 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
624 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
628 #ifdef HAVE_extendqfdf2
629 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
631 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
635 #ifdef HAVE_extendqfxf2
636 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
638 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
642 #ifdef HAVE_extendqftf2
643 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
645 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
650 #ifdef HAVE_extendhftqf2
651 if (HAVE_extendhftqf2 && from_mode == HFmode && to_mode == TQFmode)
653 emit_unop_insn (CODE_FOR_extendhftqf2, to, from, UNKNOWN);
658 #ifdef HAVE_extendhfsf2
659 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
661 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
665 #ifdef HAVE_extendhfdf2
666 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
668 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
672 #ifdef HAVE_extendhfxf2
673 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
675 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
679 #ifdef HAVE_extendhftf2
680 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
682 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
687 #ifdef HAVE_extendsfdf2
688 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
690 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
694 #ifdef HAVE_extendsfxf2
695 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
697 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
701 #ifdef HAVE_extendsftf2
702 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
704 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
708 #ifdef HAVE_extenddfxf2
709 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
711 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
715 #ifdef HAVE_extenddftf2
716 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
718 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
723 #ifdef HAVE_trunchfqf2
724 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
726 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
730 #ifdef HAVE_truncsfqf2
731 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
733 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
737 #ifdef HAVE_truncdfqf2
738 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
740 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
744 #ifdef HAVE_truncxfqf2
745 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
747 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
751 #ifdef HAVE_trunctfqf2
752 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
754 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
759 #ifdef HAVE_trunctqfhf2
760 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
762 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
766 #ifdef HAVE_truncsfhf2
767 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
769 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
773 #ifdef HAVE_truncdfhf2
774 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
776 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
780 #ifdef HAVE_truncxfhf2
781 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
783 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
787 #ifdef HAVE_trunctfhf2
788 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
790 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
794 #ifdef HAVE_truncdfsf2
795 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
797 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
801 #ifdef HAVE_truncxfsf2
802 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
804 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
808 #ifdef HAVE_trunctfsf2
809 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
811 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
815 #ifdef HAVE_truncxfdf2
816 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
818 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
822 #ifdef HAVE_trunctfdf2
823 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
825 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
837 libcall = extendsfdf2_libfunc;
841 libcall = extendsfxf2_libfunc;
845 libcall = extendsftf2_libfunc;
854 libcall = truncdfsf2_libfunc;
858 libcall = extenddfxf2_libfunc;
862 libcall = extenddftf2_libfunc;
871 libcall = truncxfsf2_libfunc;
875 libcall = truncxfdf2_libfunc;
884 libcall = trunctfsf2_libfunc;
888 libcall = trunctfdf2_libfunc;
894 if (libcall == (rtx) 0)
895 /* This conversion is not implemented yet. */
898 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
900 emit_move_insn (to, value);
904 /* Now both modes are integers. */
906 /* Handle expanding beyond a word. */
907 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
908 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
915 enum machine_mode lowpart_mode;
916 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
918 /* Try converting directly if the insn is supported. */
919 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
922 /* If FROM is a SUBREG, put it into a register. Do this
923 so that we always generate the same set of insns for
924 better cse'ing; if an intermediate assignment occurred,
925 we won't be doing the operation directly on the SUBREG. */
926 if (optimize > 0 && GET_CODE (from) == SUBREG)
927 from = force_reg (from_mode, from);
928 emit_unop_insn (code, to, from, equiv_code);
931 /* Next, try converting via full word. */
932 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
933 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
934 != CODE_FOR_nothing))
936 if (GET_CODE (to) == REG)
937 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
938 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
939 emit_unop_insn (code, to,
940 gen_lowpart (word_mode, to), equiv_code);
944 /* No special multiword conversion insn; do it by hand. */
947 /* Since we will turn this into a no conflict block, we must ensure
948 that the source does not overlap the target. */
950 if (reg_overlap_mentioned_p (to, from))
951 from = force_reg (from_mode, from);
953 /* Get a copy of FROM widened to a word, if necessary. */
954 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
955 lowpart_mode = word_mode;
957 lowpart_mode = from_mode;
959 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
961 lowpart = gen_lowpart (lowpart_mode, to);
962 emit_move_insn (lowpart, lowfrom);
964 /* Compute the value to put in each remaining word. */
966 fill_value = const0_rtx;
971 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
972 && STORE_FLAG_VALUE == -1)
974 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
976 fill_value = gen_reg_rtx (word_mode);
977 emit_insn (gen_slt (fill_value));
983 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
984 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
986 fill_value = convert_to_mode (word_mode, fill_value, 1);
990 /* Fill the remaining words. */
991 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
993 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
994 rtx subword = operand_subword (to, index, 1, to_mode);
999 if (fill_value != subword)
1000 emit_move_insn (subword, fill_value);
1003 insns = get_insns ();
1006 emit_no_conflict_block (insns, to, from, NULL_RTX,
1007 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
1011 /* Truncating multi-word to a word or less. */
1012 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
1013 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
1015 if (!((GET_CODE (from) == MEM
1016 && ! MEM_VOLATILE_P (from)
1017 && direct_load[(int) to_mode]
1018 && ! mode_dependent_address_p (XEXP (from, 0)))
1019 || GET_CODE (from) == REG
1020 || GET_CODE (from) == SUBREG))
1021 from = force_reg (from_mode, from);
1022 convert_move (to, gen_lowpart (word_mode, from), 0);
1026 /* Handle pointer conversion */ /* SPEE 900220 */
1027 if (to_mode == PSImode)
1029 if (from_mode != SImode)
1030 from = convert_to_mode (SImode, from, unsignedp);
1032 #ifdef HAVE_truncsipsi2
1033 if (HAVE_truncsipsi2)
1035 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1038 #endif /* HAVE_truncsipsi2 */
1042 if (from_mode == PSImode)
1044 if (to_mode != SImode)
1046 from = convert_to_mode (SImode, from, unsignedp);
1051 #ifdef HAVE_extendpsisi2
1052 if (HAVE_extendpsisi2)
1054 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1057 #endif /* HAVE_extendpsisi2 */
1062 if (to_mode == PDImode)
1064 if (from_mode != DImode)
1065 from = convert_to_mode (DImode, from, unsignedp);
1067 #ifdef HAVE_truncdipdi2
1068 if (HAVE_truncdipdi2)
1070 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 #endif /* HAVE_truncdipdi2 */
1077 if (from_mode == PDImode)
1079 if (to_mode != DImode)
1081 from = convert_to_mode (DImode, from, unsignedp);
1086 #ifdef HAVE_extendpdidi2
1087 if (HAVE_extendpdidi2)
1089 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 #endif /* HAVE_extendpdidi2 */
1097 /* Now follow all the conversions between integers
1098 no more than a word long. */
1100 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1101 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1102 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1103 GET_MODE_BITSIZE (from_mode)))
1105 if (!((GET_CODE (from) == MEM
1106 && ! MEM_VOLATILE_P (from)
1107 && direct_load[(int) to_mode]
1108 && ! mode_dependent_address_p (XEXP (from, 0)))
1109 || GET_CODE (from) == REG
1110 || GET_CODE (from) == SUBREG))
1111 from = force_reg (from_mode, from);
1112 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1113 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1114 from = copy_to_reg (from);
1115 emit_move_insn (to, gen_lowpart (to_mode, from));
1119 /* Handle extension. */
1120 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1122 /* Convert directly if that works. */
1123 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1124 != CODE_FOR_nothing)
1126 emit_unop_insn (code, to, from, equiv_code);
1131 enum machine_mode intermediate;
1133 /* Search for a mode to convert via. */
1134 for (intermediate = from_mode; intermediate != VOIDmode;
1135 intermediate = GET_MODE_WIDER_MODE (intermediate))
1136 if (((can_extend_p (to_mode, intermediate, unsignedp)
1137 != CODE_FOR_nothing)
1138 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1139 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
1140 && (can_extend_p (intermediate, from_mode, unsignedp)
1141 != CODE_FOR_nothing))
1143 convert_move (to, convert_to_mode (intermediate, from,
1144 unsignedp), unsignedp);
1148 /* No suitable intermediate mode. Generate what we need with shifts. */
1153 /* Support special truncate insns for certain modes. */
1155 if (from_mode == DImode && to_mode == SImode)
1157 #ifdef HAVE_truncdisi2
1158 if (HAVE_truncdisi2)
1160 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1164 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 if (from_mode == DImode && to_mode == HImode)
1170 #ifdef HAVE_truncdihi2
1171 if (HAVE_truncdihi2)
1173 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1177 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 if (from_mode == DImode && to_mode == QImode)
1183 #ifdef HAVE_truncdiqi2
1184 if (HAVE_truncdiqi2)
1186 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1190 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 if (from_mode == SImode && to_mode == HImode)
1196 #ifdef HAVE_truncsihi2
1197 if (HAVE_truncsihi2)
1199 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1203 convert_move (to, force_reg (from_mode, from), unsignedp);
1207 if (from_mode == SImode && to_mode == QImode)
1209 #ifdef HAVE_truncsiqi2
1210 if (HAVE_truncsiqi2)
1212 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1216 convert_move (to, force_reg (from_mode, from), unsignedp);
1220 if (from_mode == HImode && to_mode == QImode)
1222 #ifdef HAVE_trunchiqi2
1223 if (HAVE_trunchiqi2)
1225 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1229 convert_move (to, force_reg (from_mode, from), unsignedp);
1233 if (from_mode == TImode && to_mode == DImode)
1235 #ifdef HAVE_trunctidi2
1236 if (HAVE_trunctidi2)
1238 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1242 convert_move (to, force_reg (from_mode, from), unsignedp);
1246 if (from_mode == TImode && to_mode == SImode)
1248 #ifdef HAVE_trunctisi2
1249 if (HAVE_trunctisi2)
1251 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1255 convert_move (to, force_reg (from_mode, from), unsignedp);
1259 if (from_mode == TImode && to_mode == HImode)
1261 #ifdef HAVE_trunctihi2
1262 if (HAVE_trunctihi2)
1264 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1268 convert_move (to, force_reg (from_mode, from), unsignedp);
1272 if (from_mode == TImode && to_mode == QImode)
1274 #ifdef HAVE_trunctiqi2
1275 if (HAVE_trunctiqi2)
1277 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1281 convert_move (to, force_reg (from_mode, from), unsignedp);
1285 /* Handle truncation of volatile memrefs, and so on;
1286 the things that couldn't be truncated directly,
1287 and for which there was no special instruction. */
1288 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1290 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1291 emit_move_insn (to, temp);
1295 /* Mode combination is not recognized. */
1299 /* Return an rtx for a value that would result
1300 from converting X to mode MODE.
1301 Both X and MODE may be floating, or both integer.
1302 UNSIGNEDP is nonzero if X is an unsigned value.
1303 This can be done by referring to a part of X in place
1304 or by copying to a new temporary with conversion.
1306 This function *must not* call protect_from_queue
1307 except when putting X into an insn (in which case convert_move does it). */
1310 convert_to_mode (mode, x, unsignedp)
1311 enum machine_mode mode;
1315 return convert_modes (mode, VOIDmode, x, unsignedp);
1318 /* Return an rtx for a value that would result
1319 from converting X from mode OLDMODE to mode MODE.
1320 Both modes may be floating, or both integer.
1321 UNSIGNEDP is nonzero if X is an unsigned value.
1323 This can be done by referring to a part of X in place
1324 or by copying to a new temporary with conversion.
1326 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1328 This function *must not* call protect_from_queue
1329 except when putting X into an insn (in which case convert_move does it). */
/* Return an rtx for X converted from OLDMODE to MODE; UNSIGNEDP nonzero
   means X is treated as unsigned.  Tries cheap strategies first
   (stripping an already-promoted SUBREG, taking a lowpart) and falls
   back to a fresh pseudo filled by convert_move.  */
1332 convert_modes (mode, oldmode, x, unsignedp)
1333 enum machine_mode mode, oldmode;
1339 /* If FROM is a SUBREG that indicates that we have already done at least
1340 the required extension, strip it. */
1342 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1343 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1344 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1345 x = gen_lowpart (mode, x);
/* Prefer X's own mode when it has one; OLDMODE may be VOIDmode.  */
1347 if (GET_MODE (x) != VOIDmode)
1348 oldmode = GET_MODE (x);
1350 if (mode == oldmode)
1353 /* There is one case that we must handle specially: If we are converting
1354 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1355 we are to interpret the constant as unsigned, gen_lowpart will do
1356 the wrong if the constant appears negative. What we want to do is
1357 make the high-order word of the constant zero, not all ones. */
1359 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1360 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1361 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1362 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1364 /* We can do this with a gen_lowpart if both desired and current modes
1365 are integer, and this is either a constant integer, a register, or a
1366 non-volatile MEM. Except for the constant case where MODE is no
1367 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1369 if ((GET_CODE (x) == CONST_INT
1370 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1371 || (GET_MODE_CLASS (mode) == MODE_INT
1372 && GET_MODE_CLASS (oldmode) == MODE_INT
1373 && (GET_CODE (x) == CONST_DOUBLE
1374 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1375 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1376 && direct_load[(int) mode])
1377 || (GET_CODE (x) == REG
1378 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1379 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1381 /* ?? If we don't know OLDMODE, we have to assume here that
1382 X does not need sign- or zero-extension. This may not be
1383 the case, but it's the best we can do. */
1384 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1385 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
/* Widening a CONST_INT whose source mode is known: re-extend the
   value explicitly so the high bits are correct for UNSIGNEDP.  */
1387 HOST_WIDE_INT val = INTVAL (x);
1388 int width = GET_MODE_BITSIZE (oldmode);
1390 /* We must sign or zero-extend in this case. Start by
1391 zero-extending, then sign extend if we need to. */
1392 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1394 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1395 val |= (HOST_WIDE_INT) (-1) << width;
1397 return GEN_INT (val);
1400 return gen_lowpart (mode, x);
/* General case: emit an explicit conversion into a new pseudo.  */
1403 temp = gen_reg_rtx (mode);
1404 convert_move (temp, x, unsignedp);
1408 /* Generate several move instructions to copy LEN bytes
1409 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1410 The caller must pass FROM and TO
1411 through protect_from_queue before calling.
1412 ALIGN (in bytes) is maximum alignment we can assume. */
/* Copy LEN bytes from block FROM to block TO (both BLKmode MEMs) with
   a sequence of scalar move insns, widest usable integer mode first.
   ALIGN (in bytes) is the maximum alignment we can assume.  State is
   gathered in a struct move_by_pieces and the per-mode work is done by
   move_by_pieces_1.  */
1415 move_by_pieces (to, from, len, align)
1419 struct move_by_pieces data;
1420 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1421 int max_size = MOVE_MAX + 1;
1424 data.to_addr = to_addr;
1425 data.from_addr = from_addr;
/* Record whether each address already auto-increments/decrements.  */
1429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1432 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1433 || GET_CODE (from_addr) == POST_INC
1434 || GET_CODE (from_addr) == POST_DEC);
1436 data.explicit_inc_from = 0;
1437 data.explicit_inc_to = 0;
/* A decrementing destination means we must copy back-to-front.  */
1439 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1440 if (data.reverse) data.offset = len;
1443 data.to_struct = MEM_IN_STRUCT_P (to);
1444 data.from_struct = MEM_IN_STRUCT_P (from);
1446 /* If copying requires more than two move insns,
1447 copy addresses to registers (to make displacements shorter)
1448 and use post-increment if available. */
1449 if (!(data.autinc_from && data.autinc_to)
1450 && move_by_pieces_ninsns (len, align) > 2)
1452 #ifdef HAVE_PRE_DECREMENT
1453 if (data.reverse && ! data.autinc_from)
1455 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1456 data.autinc_from = 1;
1457 data.explicit_inc_from = -1;
1460 #ifdef HAVE_POST_INCREMENT
1461 if (! data.autinc_from)
1463 data.from_addr = copy_addr_to_reg (from_addr);
1464 data.autinc_from = 1;
1465 data.explicit_inc_from = 1;
1468 if (!data.autinc_from && CONSTANT_P (from_addr))
1469 data.from_addr = copy_addr_to_reg (from_addr);
1470 #ifdef HAVE_PRE_DECREMENT
1471 if (data.reverse && ! data.autinc_to)
1473 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len))
1475 data.explicit_inc_to = -1;
1478 #ifdef HAVE_POST_INCREMENT
1479 if (! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.explicit_inc_to = 1;
1486 if (!data.autinc_to && CONSTANT_P (to_addr))
1487 data.to_addr = copy_addr_to_reg (to_addr);
/* When unaligned access is fast (or alignment is maximal), we may
   pretend maximal alignment and use the widest modes.  */
1490 if (! SLOW_UNALIGNED_ACCESS
1491 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1497 while (max_size > 1)
1499 enum machine_mode mode = VOIDmode, tmode;
1500 enum insn_code icode;
/* Find the widest integer mode narrower than max_size.  */
1502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1503 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1504 if (GET_MODE_SIZE (tmode) < max_size)
1507 if (mode == VOIDmode)
1510 icode = mov_optab->handlers[(int) mode].insn_code;
1511 if (icode != CODE_FOR_nothing
1512 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1513 GET_MODE_SIZE (mode)))
1514 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1516 max_size = GET_MODE_SIZE (mode);
1519 /* The code above should have handled everything. */
1524 /* Return number of insns required to move L bytes by pieces.
1525 ALIGN (in bytes) is maximum alignment we can assume. */
/* Return the number of move insns move_by_pieces would need to copy
   L bytes, assuming maximum alignment ALIGN (in bytes).  Mirrors the
   mode-selection loop of move_by_pieces.  */
1528 move_by_pieces_ninsns (l, align)
1532 register int n_insns = 0;
1533 int max_size = MOVE_MAX + 1;
1535 if (! SLOW_UNALIGNED_ACCESS
1536 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
1539 while (max_size > 1)
1541 enum machine_mode mode = VOIDmode, tmode;
1542 enum insn_code icode;
/* Widest integer mode narrower than max_size, as in move_by_pieces.  */
1544 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1545 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1546 if (GET_MODE_SIZE (tmode) < max_size)
1549 if (mode == VOIDmode)
1552 icode = mov_optab->handlers[(int) mode].insn_code;
1553 if (icode != CODE_FOR_nothing
1554 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1555 GET_MODE_SIZE (mode)))
/* One insn per full MODE-sized chunk; remainder handled by
   the narrower modes of later iterations.  */
1556 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1558 max_size = GET_MODE_SIZE (mode);
1564 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1565 with move instructions for mode MODE. GENFUN is the gen_... function
1566 to make a move insn for that mode. DATA has all the other info. */
/* Subroutine of move_by_pieces: emit as many MODE-sized moves as fit
   in DATA->len.  GENFUN generates a move insn for MODE; DATA carries
   the addresses, offsets and auto-increment bookkeeping.  */
1569 move_by_pieces_1 (genfun, mode, data)
1571 enum machine_mode mode;
1572 struct move_by_pieces *data;
1574 register int size = GET_MODE_SIZE (mode);
1575 register rtx to1, from1;
1577 while (data->len >= size)
/* When copying back-to-front, step the offset before the move.  */
1579 if (data->reverse) data->offset -= size;
1581 to1 = (data->autinc_to
1582 ? gen_rtx (MEM, mode, data->to_addr)
1583 : change_address (data->to, mode,
1584 plus_constant (data->to_addr, data->offset)));
1585 MEM_IN_STRUCT_P (to1) = data->to_struct;
1588 ? gen_rtx (MEM, mode, data->from_addr)
1589 : change_address (data->from, mode,
1590 plus_constant (data->from_addr, data->offset)));
1591 MEM_IN_STRUCT_P (from1) = data->from_struct;
/* Explicit pre-decrements requested by move_by_pieces.  */
1593 #ifdef HAVE_PRE_DECREMENT
1594 if (data->explicit_inc_to < 0)
1595 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1596 if (data->explicit_inc_from < 0)
1597 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1600 emit_insn ((*genfun) (to1, from1));
1601 #ifdef HAVE_POST_INCREMENT
1602 if (data->explicit_inc_to > 0)
1603 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1604 if (data->explicit_inc_from > 0)
1605 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1608 if (! data->reverse) data->offset += size;
1614 /* Emit code to move a block Y to a block X.
1615 This may be done with string-move instructions,
1616 with multiple scalar move instructions, or with a library call.
1618 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1620 SIZE is an rtx that says how long they are.
1621 ALIGN is the maximum alignment we can assume they have,
1622 measured in bytes. */
/* Emit code to copy SIZE bytes from block Y to block X (both BLKmode
   MEMs), trying in order: move_by_pieces for small constant sizes,
   the target's movstrM patterns, then a memcpy/bcopy library call.
   ALIGN is the maximum alignment (in bytes) we can assume.  */
1625 emit_block_move (x, y, size, align)
1630 if (GET_MODE (x) != BLKmode)
1633 if (GET_MODE (y) != BLKmode)
1636 x = protect_from_queue (x, 1);
1637 y = protect_from_queue (y, 0);
1638 size = protect_from_queue (size, 0);
1640 if (GET_CODE (x) != MEM)
1642 if (GET_CODE (y) != MEM)
/* Small constant-size copies: expand inline as scalar moves.  */
1647 if (GET_CODE (size) == CONST_INT
1648 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
1649 move_by_pieces (x, y, INTVAL (size), align);
1652 /* Try the most limited insn first, because there's no point
1653 including more than one in the machine description unless
1654 the more limited one has some advantage. */
1656 rtx opalign = GEN_INT (align);
1657 enum machine_mode mode;
1659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1660 mode = GET_MODE_WIDER_MODE (mode))
1662 enum insn_code code = movstr_optab[(int) mode];
1664 if (code != CODE_FOR_nothing
1665 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
1667 returned by the macro, it will definitely be less than the
1668 actual mode mask. */
1669 && ((GET_CODE (size) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1671 <= GET_MODE_MASK (mode)))
1672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
/* Check the pattern's operand predicates before using it.  */
1673 && (insn_operand_predicate[(int) code][0] == 0
1674 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1675 && (insn_operand_predicate[(int) code][1] == 0
1676 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1677 && (insn_operand_predicate[(int) code][3] == 0
1678 || (*insn_operand_predicate[(int) code][3]) (opalign,
1682 rtx last = get_last_insn ();
1685 op2 = convert_to_mode (mode, size, 1);
1686 if (insn_operand_predicate[(int) code][2] != 0
1687 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1688 op2 = copy_to_mode_reg (mode, op2);
1690 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
/* Pattern expansion failed: discard the insns it emitted.  */
1697 delete_insns_since (last);
/* Fall back to a library call: memcpy when the target has the
   ANSI mem* functions, otherwise BSD bcopy.  */
1701 #ifdef TARGET_MEM_FUNCTIONS
1702 emit_library_call (memcpy_libfunc, 0,
1703 VOIDmode, 3, XEXP (x, 0), Pmode,
1705 convert_to_mode (TYPE_MODE (sizetype), size,
1706 TREE_UNSIGNED (sizetype)),
1707 TYPE_MODE (sizetype));
1709 emit_library_call (bcopy_libfunc, 0,
1710 VOIDmode, 3, XEXP (y, 0), Pmode,
1712 convert_to_mode (TYPE_MODE (integer_type_node), size,
1713 TREE_UNSIGNED (integer_type_node)),
1714 TYPE_MODE (integer_type_node));
1719 /* Copy all or part of a value X into registers starting at REGNO.
1720 The number of registers to be filled is NREGS. */
/* Copy all or part of value X (of mode MODE) into NREGS consecutive
   hard registers starting at REGNO, via load_multiple when the target
   has it, else one word move per register.  */
1723 move_block_to_reg (regno, x, nregs, mode)
1727 enum machine_mode mode;
/* Constants the target cannot load directly go through memory.  */
1735 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1736 x = validize_mem (force_const_mem (mode, x));
1738 /* See if the machine can do this with a load multiple insn. */
1739 #ifdef HAVE_load_multiple
1740 if (HAVE_load_multiple)
1742 last = get_last_insn ();
1743 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
/* Pattern failed: discard its insns and fall through.  */
1751 delete_insns_since (last);
1755 for (i = 0; i < nregs; i++)
1756 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1757 operand_subword_force (x, i, mode));
1760 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1761 The number of registers to be filled is NREGS. SIZE indicates the number
1762 of bytes in the object X. */
/* Copy a BLKmode value of SIZE bytes out of NREGS consecutive hard
   registers starting at REGNO into X, via store_multiple when
   available, else one word move per register.  */
1766 move_block_from_reg (regno, x, nregs, size)
1775 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1776 to the left before storing to memory. */
1777 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1779 rtx tem = operand_subword (x, 0, 1, BLKmode);
/* Shift the data up to the high end of the word before storing.  */
1785 shift = expand_shift (LSHIFT_EXPR, word_mode,
1786 gen_rtx (REG, word_mode, regno),
1787 build_int_2 ((UNITS_PER_WORD - size)
1788 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1789 emit_move_insn (tem, shift);
1793 /* See if the machine can do this with a store multiple insn. */
1794 #ifdef HAVE_store_multiple
1795 if (HAVE_store_multiple)
1797 last = get_last_insn ();
1798 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
/* Pattern failed: discard its insns and fall through.  */
1806 delete_insns_since (last);
1810 for (i = 0; i < nregs; i++)
1812 rtx tem = operand_subword (x, i, 1, BLKmode);
1817 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1821 /* Emit code to move a block Y to a block X, where X is non-consecutive
1822 registers represented by a PARALLEL. */
/* Emit code to move block Y into X, where X is a PARALLEL of
   (register, byte-offset) pairs describing non-consecutive registers.  */
1825 emit_group_load (x, y)
1828 rtx target_reg, source;
1831 if (GET_CODE (x) != PARALLEL)
1834 /* Check for a NULL entry, used to indicate that the parameter goes
1835 both on the stack and in registers. */
1836 if (XEXP (XVECEXP (x, 0, 0), 0))
1841 for (; i < XVECLEN (x, 0); i++)
1843 rtx element = XVECEXP (x, 0, i);
1845 target_reg = XEXP (element, 0);
/* Element operand 1 is the byte offset of this piece within Y.  */
1847 if (GET_CODE (y) == MEM)
1848 source = change_address (y, GET_MODE (target_reg),
1849 plus_constant (XEXP (y, 0),
1850 INTVAL (XEXP (element, 1))));
1851 else if (XEXP (element, 1) == const0_rtx)
1853 if (GET_MODE (target_reg) == GET_MODE (y))
/* Same-size but different-mode source: view it as a SUBREG.  */
1855 else if (GET_MODE_SIZE (GET_MODE (target_reg))
1856 == GET_MODE_SIZE (GET_MODE (y)))
1857 source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
1864 emit_move_insn (target_reg, source);
1868 /* Emit code to move a block Y to a block X, where Y is non-consecutive
1869 registers represented by a PARALLEL. */
/* Emit code to move Y into block X, where Y is a PARALLEL of
   (register, byte-offset) pairs describing non-consecutive registers.
   Mirror image of emit_group_load.  */
1872 emit_group_store (x, y)
1875 rtx source_reg, target;
1878 if (GET_CODE (y) != PARALLEL)
1881 /* Check for a NULL entry, used to indicate that the parameter goes
1882 both on the stack and in registers. */
1883 if (XEXP (XVECEXP (y, 0, 0), 0))
1888 for (; i < XVECLEN (y, 0); i++)
1890 rtx element = XVECEXP (y, 0, i);
1892 source_reg = XEXP (element, 0);
/* Element operand 1 is the byte offset of this piece within X.  */
1894 if (GET_CODE (x) == MEM)
1895 target = change_address (x, GET_MODE (source_reg),
1896 plus_constant (XEXP (x, 0),
1897 INTVAL (XEXP (element, 1))));
1898 else if (XEXP (element, 1) == const0_rtx)
1903 emit_move_insn (target, source_reg);
1907 /* Add a USE expression for REG to the (possibly empty) list pointed
1908 to by CALL_FUSAGE. REG must denote a hard register. */
/* Prepend a (USE reg) EXPR_LIST node to *CALL_FUSAGE.  REG must be a
   hard register (pseudo register numbers are rejected).  */
1911 use_reg (call_fusage, reg)
1912 rtx *call_fusage, reg;
1914 if (GET_CODE (reg) != REG
1915 || REGNO (reg) >= FIRST_PSEUDO_REGISTER
1919 = gen_rtx (EXPR_LIST, VOIDmode,
1920 gen_rtx (USE, VOIDmode, reg), *call_fusage);
1923 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1924 starting at REGNO. All of these registers must be hard registers. */
/* Add USE expressions to *CALL_FUSAGE for NREGS consecutive hard
   registers starting at REGNO, one use_reg call per register.  */
1927 use_regs (call_fusage, regno, nregs)
1934 if (regno + nregs > FIRST_PSEUDO_REGISTER
1937 for (i = 0; i < nregs; i++)
1938 use_reg (call_fusage, gen_rtx (REG, reg_raw_mode[regno + i], regno + i));
1941 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1942 PARALLEL REGS. This is for calls that pass values in multiple
1943 non-contiguous locations. The Irix 6 ABI has examples of this. */
/* Add USE expressions to *CALL_FUSAGE for every register in the
   PARALLEL REGS (values passed in non-contiguous locations).  */
1946 use_group_regs (call_fusage, regs)
1952 /* Check for a NULL entry, used to indicate that the parameter goes
1953 both on the stack and in registers. */
1954 if (XEXP (XVECEXP (regs, 0, 0), 0))
1959 for (; i < XVECLEN (regs, 0); i++)
1960 use_reg (call_fusage, XEXP (XVECEXP (regs, 0, i), 0));
1963 /* Generate several move instructions to clear LEN bytes of block TO.
1964 (A MEM rtx with BLKmode). The caller must pass TO through
1965 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
/* Clear LEN bytes of block TO (a BLKmode MEM) with a sequence of
   scalar stores of zero, widest usable integer mode first.  Same
   structure as move_by_pieces but with only a destination.  */
1969 clear_by_pieces (to, len, align)
1973 struct clear_by_pieces data;
1974 rtx to_addr = XEXP (to, 0);
1975 int max_size = MOVE_MAX + 1;
1978 data.to_addr = to_addr;
/* Record whether the address already auto-increments/decrements.  */
1981 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1982 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1984 data.explicit_inc_to = 0;
1986 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1987 if (data.reverse) data.offset = len;
1990 data.to_struct = MEM_IN_STRUCT_P (to);
1992 /* If copying requires more than two move insns,
1993 copy addresses to registers (to make displacements shorter)
1994 and use post-increment if available. */
1996 && move_by_pieces_ninsns (len, align) > 2)
1998 #ifdef HAVE_PRE_DECREMENT
1999 if (data.reverse && ! data.autinc_to)
2001 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2003 data.explicit_inc_to = -1;
2006 #ifdef HAVE_POST_INCREMENT
2007 if (! data.reverse && ! data.autinc_to)
2009 data.to_addr = copy_addr_to_reg (to_addr);
2011 data.explicit_inc_to = 1;
2014 if (!data.autinc_to && CONSTANT_P (to_addr))
2015 data.to_addr = copy_addr_to_reg (to_addr);
2018 if (! SLOW_UNALIGNED_ACCESS
2019 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2022 /* First move what we can in the largest integer mode, then go to
2023 successively smaller modes. */
2025 while (max_size > 1)
2027 enum machine_mode mode = VOIDmode, tmode;
2028 enum insn_code icode;
/* Widest integer mode narrower than max_size.  */
2030 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2031 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2032 if (GET_MODE_SIZE (tmode) < max_size)
2035 if (mode == VOIDmode)
2038 icode = mov_optab->handlers[(int) mode].insn_code;
2039 if (icode != CODE_FOR_nothing
2040 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2041 GET_MODE_SIZE (mode)))
2042 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2044 max_size = GET_MODE_SIZE (mode);
2047 /* The code above should have handled everything. */
2052 /* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2053 with move instructions for mode MODE. GENFUN is the gen_... function
2054 to make a move insn for that mode. DATA has all the other info. */
/* Subroutine of clear_by_pieces: store const0_rtx through as many
   MODE-sized chunks as fit in DATA->len.  GENFUN generates the move
   insn for MODE; mirrors move_by_pieces_1 with a zero source.  */
2057 clear_by_pieces_1 (genfun, mode, data)
2059 enum machine_mode mode;
2060 struct clear_by_pieces *data;
2062 register int size = GET_MODE_SIZE (mode);
2065 while (data->len >= size)
/* When clearing back-to-front, step the offset before the store.  */
2067 if (data->reverse) data->offset -= size;
2069 to1 = (data->autinc_to
2070 ? gen_rtx (MEM, mode, data->to_addr)
2071 : change_address (data->to, mode,
2072 plus_constant (data->to_addr, data->offset)));
2073 MEM_IN_STRUCT_P (to1) = data->to_struct;
2075 #ifdef HAVE_PRE_DECREMENT
2076 if (data->explicit_inc_to < 0)
2077 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2080 emit_insn ((*genfun) (to1, const0_rtx));
2081 #ifdef HAVE_POST_INCREMENT
2082 if (data->explicit_inc_to > 0)
2083 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2086 if (! data->reverse) data->offset += size;
2092 /* Write zeros through the storage of OBJECT.
2093 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2094 the maximum alignment we can assume it has, measured in bytes. */
/* Write zeros through OBJECT.  For BLKmode, SIZE bytes are cleared
   via clear_by_pieces, a clrstrM pattern, or a memset/bzero library
   call; non-BLKmode objects get a single move of const0_rtx.  */
2097 clear_storage (object, size, align)
2102 if (GET_MODE (object) == BLKmode)
2104 object = protect_from_queue (object, 1);
2105 size = protect_from_queue (size, 0);
/* Small constant-size clears: expand inline as scalar stores.  */
2107 if (GET_CODE (size) == CONST_INT
2108 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2109 clear_by_pieces (object, INTVAL (size), align);
2113 /* Try the most limited insn first, because there's no point
2114 including more than one in the machine description unless
2115 the more limited one has some advantage. */
2117 rtx opalign = GEN_INT (align);
2118 enum machine_mode mode;
2120 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2121 mode = GET_MODE_WIDER_MODE (mode))
2123 enum insn_code code = clrstr_optab[(int) mode];
2125 if (code != CODE_FOR_nothing
2126 /* We don't need MODE to be narrower than
2127 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2128 the mode mask, as it is returned by the macro, it will
2129 definitely be less than the actual mode mask. */
2130 && ((GET_CODE (size) == CONST_INT
2131 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2132 <= GET_MODE_MASK (mode)))
2133 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
/* Check the pattern's operand predicates before using it.  */
2134 && (insn_operand_predicate[(int) code][0] == 0
2135 || (*insn_operand_predicate[(int) code][0]) (object,
2137 && (insn_operand_predicate[(int) code][2] == 0
2138 || (*insn_operand_predicate[(int) code][2]) (opalign,
2142 rtx last = get_last_insn ();
2145 op1 = convert_to_mode (mode, size, 1);
2146 if (insn_operand_predicate[(int) code][1] != 0
2147 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2149 op1 = copy_to_mode_reg (mode, op1);
2151 pat = GEN_FCN ((int) code) (object, op1, opalign);
/* Pattern expansion failed: discard the insns it emitted.  */
2158 delete_insns_since (last);
/* Fall back to memset (ANSI targets) or bzero.  */
2163 #ifdef TARGET_MEM_FUNCTIONS
2164 emit_library_call (memset_libfunc, 0,
2166 XEXP (object, 0), Pmode,
2167 const0_rtx, TYPE_MODE (integer_type_node),
2168 convert_to_mode (TYPE_MODE (sizetype),
2169 size, TREE_UNSIGNED (sizetype)),
2170 TYPE_MODE (sizetype));
2172 emit_library_call (bzero_libfunc, 0,
2174 XEXP (object, 0), Pmode,
2175 convert_to_mode (TYPE_MODE (integer_type_node),
2177 TREE_UNSIGNED (integer_type_node)),
2178 TYPE_MODE (integer_type_node));
/* Non-BLKmode: a single store of zero suffices.  */
2183 emit_move_insn (object, const0_rtx);
2186 /* Generate code to copy Y into X.
2187 Both Y and X must have the same mode, except that
2188 Y can be a constant with VOIDmode.
2189 This mode cannot be BLKmode; use emit_block_move for that.
2191 Return the last instruction emitted. */
/* Generate code to copy Y into X (same mode, or Y VOIDmode-constant;
   never BLKmode).  Legitimizes constants and memory addresses, then
   delegates to emit_move_insn_1.  Returns the last insn emitted.  */
2194 emit_move_insn (x, y)
2197 enum machine_mode mode = GET_MODE (x);
2199 x = protect_from_queue (x, 1);
2200 y = protect_from_queue (y, 0);
2202 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
/* Constants the target cannot use directly go through memory.  */
2205 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2206 y = force_const_mem (mode, y);
2208 /* If X or Y are memory references, verify that their addresses are valid
2210 if (GET_CODE (x) == MEM
2211 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2212 && ! push_operand (x, GET_MODE (x)))
2214 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2215 x = change_address (x, VOIDmode, XEXP (x, 0));
2217 if (GET_CODE (y) == MEM
2218 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2220 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2221 y = change_address (y, VOIDmode, XEXP (y, 0));
2223 if (mode == BLKmode)
2226 return emit_move_insn_1 (x, y);
2229 /* Low level part of emit_move_insn.
2230 Called just like emit_move_insn, but assumes X and Y
2231 are basically valid. */
/* Low-level part of emit_move_insn; X and Y are assumed basically
   valid.  Uses the target's mov pattern for MODE when one exists,
   splits complex modes into real/imaginary part moves, and handles
   multi-word modes one word at a time.  */
2234 emit_move_insn_1 (x, y)
2237 enum machine_mode mode = GET_MODE (x);
2238 enum machine_mode submode;
2239 enum mode_class class = GET_MODE_CLASS (mode);
/* Easy case: the target has a move pattern for this mode.  */
2242 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2244 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2246 /* Expand complex moves by moving real part and imag part, if possible. */
2247 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2248 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2250 (class == MODE_COMPLEX_INT
2251 ? MODE_INT : MODE_FLOAT),
2253 && (mov_optab->handlers[(int) submode].insn_code
2254 != CODE_FOR_nothing))
2256 /* Don't split destination if it is a stack push. */
2257 int stack = push_operand (x, GET_MODE (x));
2260 /* If this is a stack, push the highpart first, so it
2261 will be in the argument order.
2263 In that case, change_address is used only to convert
2264 the mode, not to change the address. */
2267 /* Note that the real part always precedes the imag part in memory
2268 regardless of machine's endianness. */
2269 #ifdef STACK_GROWS_DOWNWARD
2270 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2271 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2272 gen_imagpart (submode, y)));
2273 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2274 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2275 gen_realpart (submode, y)));
2277 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2278 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2279 gen_realpart (submode, y)));
2280 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2281 (gen_rtx (MEM, submode, (XEXP (x, 0))),
2282 gen_imagpart (submode, y)));
/* Not a stack push: move the two parts independently.  */
2287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2288 (gen_realpart (submode, x), gen_realpart (submode, y)));
2289 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2290 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2293 return get_last_insn ();
2296 /* This will handle any multi-word mode that lacks a move_insn pattern.
2297 However, you will get better code if you define such patterns,
2298 even if they must turn into multiple assembler instructions. */
2299 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2304 #ifdef PUSH_ROUNDING
2306 /* If X is a push on the stack, do the push now and replace
2307 X with a reference to the stack pointer. */
2308 if (push_operand (x, GET_MODE (x)))
2310 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2311 x = change_address (x, VOIDmode, stack_pointer_rtx);
2315 /* Show the output dies here. */
2317 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
/* Move word by word; ceiling division covers a partial last word.  */
2320 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2323 rtx xpart = operand_subword (x, i, 1, mode);
2324 rtx ypart = operand_subword (y, i, 1, mode);
2326 /* If we can't get a part of Y, put Y into memory if it is a
2327 constant. Otherwise, force it into a register. If we still
2328 can't get a part of Y, abort. */
2329 if (ypart == 0 && CONSTANT_P (y))
2331 y = force_const_mem (mode, y);
2332 ypart = operand_subword (y, i, 1, mode);
2334 else if (ypart == 0)
2335 ypart = operand_subword_force (y, i, mode);
2337 if (xpart == 0 || ypart == 0)
2340 last_insn = emit_move_insn (xpart, ypart);
2349 /* Pushing data onto the stack. */
2351 /* Push a block of length SIZE (perhaps variable)
2352 and return an rtx to address the beginning of the block.
2353 Note that it is not possible for the value returned to be a QUEUED.
2354 The value may be virtual_outgoing_args_rtx.
2356 EXTRA is the number of bytes of padding to push in addition to SIZE.
2357 BELOW nonzero means this padding comes at low addresses;
2358 otherwise, the padding comes at high addresses. */
/* Push a block of SIZE bytes (plus EXTRA bytes of padding) on the
   stack and return an rtx addressing the beginning of the block.
   BELOW nonzero means the padding goes at low addresses.  The return
   value may be virtual_outgoing_args_rtx-relative, never a QUEUED.  */
2361 push_block (size, extra, below)
2367 size = convert_modes (Pmode, ptr_mode, size, 1);
/* Adjust the stack pointer by SIZE + EXTRA, cheapest form first.  */
2368 if (CONSTANT_P (size))
2369 anti_adjust_stack (plus_constant (size, extra));
2370 else if (GET_CODE (size) == REG && extra == 0)
2371 anti_adjust_stack (size);
2374 rtx temp = copy_to_mode_reg (Pmode, size);
2376 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2377 temp, 0, OPTAB_LIB_WIDEN);
2378 anti_adjust_stack (temp);
/* Compute the address of the block's start, which depends on the
   stack growth direction.  */
2381 #ifdef STACK_GROWS_DOWNWARD
2382 temp = virtual_outgoing_args_rtx;
2383 if (extra != 0 && below)
2384 temp = plus_constant (temp, extra);
2386 if (GET_CODE (size) == CONST_INT
2387 temp = plus_constant (virtual_outgoing_args_rtx,
2388 - INTVAL (size) - (below ? 0 : extra));
2389 else if (extra != 0 && !below)
2390 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2391 negate_rtx (Pmode, plus_constant (size, extra)));
2393 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
2394 negate_rtx (Pmode, size));
2397 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
/* NOTE(review): body of the next definition — presumably
   gen_push_operand (called below at line 2529); it builds the
   STACK_PUSH_CODE address expression on the stack pointer.  Confirm
   against the full file.  */
2403 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2406 /* Generate code to push X onto the stack, assuming it has mode MODE and
2408 MODE is redundant except when X is a CONST_INT (since they don't
2410 SIZE is an rtx for the size of data to be copied (in bytes),
2411 needed only if X is BLKmode.
2413 ALIGN (in bytes) is maximum alignment we can assume.
2415 If PARTIAL and REG are both nonzero, then copy that many of the first
2416 words of X into registers starting with REG, and push the rest of X.
2417 The amount of space pushed is decreased by PARTIAL words,
2418 rounded *down* to a multiple of PARM_BOUNDARY.
2419 REG must be a hard register in this case.
2420 If REG is zero but PARTIAL is not, take all other actions for an
2421 argument partially in registers, but do not actually load any
2424 EXTRA is the amount in bytes of extra space to leave next to this arg.
2425 This is ignored if an argument block has already been allocated.
2427 On a machine that lacks real push insns, ARGS_ADDR is the address of
2428 the bottom of the argument block for this call. We use indexing off there
2429 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2430 argument block has not been preallocated.
2432 ARGS_SO_FAR is the size of args previously pushed for this call. */
2435 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2436 args_addr, args_so_far)
2438 enum machine_mode mode;
2449 enum direction stack_direction
2450 #ifdef STACK_GROWS_DOWNWARD
2456 /* Decide where to pad the argument: `downward' for below,
2457 `upward' for above, or `none' for don't pad it.
2458 Default is below for small data on big-endian machines; else above. */
2459 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2461 /* If we're placing part of X into a register and part of X onto
2462 the stack, indicate that the entire register is clobbered to
2463 keep flow from thinking the unused part of the register is live. */
2464 if (partial > 0 && reg != 0)
2465 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
2467 /* Invert direction if stack is post-update. */
2468 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2469 if (where_pad != none)
2470 where_pad = (where_pad == downward ? upward : downward);
2472 xinner = x = protect_from_queue (x, 0);
2474 if (mode == BLKmode)
2476 /* Copy a block into the stack, entirely or partially. */
2479 int used = partial * UNITS_PER_WORD;
2480 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2488 /* USED is now the # of bytes we need not copy to the stack
2489 because registers will take care of them. */
2492 xinner = change_address (xinner, BLKmode,
2493 plus_constant (XEXP (xinner, 0), used));
2495 /* If the partial register-part of the arg counts in its stack size,
2496 skip the part of stack space corresponding to the registers.
2497 Otherwise, start copying to the beginning of the stack space,
2498 by setting SKIP to 0. */
2499 #ifndef REG_PARM_STACK_SPACE
2505 #ifdef PUSH_ROUNDING
2506 /* Do it with several push insns if that doesn't take lots of insns
2507 and if there is no difficulty with push insns that skip bytes
2508 on the stack for alignment purposes. */
2510 && GET_CODE (size) == CONST_INT
2512 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2514 /* Here we avoid the case of a structure whose weak alignment
2515 forces many pushes of a small amount of data,
2516 and such small pushes do rounding that causes trouble. */
2517 && ((! SLOW_UNALIGNED_ACCESS)
2518 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2519 || PUSH_ROUNDING (align) == align)
2520 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2522 /* Push padding now if padding above and stack grows down,
2523 or if padding below and stack grows up.
2524 But if space already allocated, this has already been done. */
2525 if (extra && args_addr == 0
2526 && where_pad != none && where_pad != stack_direction)
2527 anti_adjust_stack (GEN_INT (extra));
2529 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
2530 INTVAL (size) - used, align);
2533 #endif /* PUSH_ROUNDING */
2535 /* Otherwise make space on the stack and copy the data
2536 to the address of that space. */
2538 /* Deduct words put into registers from the size we must copy. */
2541 if (GET_CODE (size) == CONST_INT)
2542 size = GEN_INT (INTVAL (size) - used);
2544 size = expand_binop (GET_MODE (size), sub_optab, size,
2545 GEN_INT (used), NULL_RTX, 0,
2549 /* Get the address of the stack space.
2550 In this case, we do not deal with EXTRA separately.
2551 A single stack adjust will do. */
2554 temp = push_block (size, extra, where_pad == downward);
2557 else if (GET_CODE (args_so_far) == CONST_INT)
2558 temp = memory_address (BLKmode,
2559 plus_constant (args_addr,
2560 skip + INTVAL (args_so_far)));
2562 temp = memory_address (BLKmode,
2563 plus_constant (gen_rtx (PLUS, Pmode,
2564 args_addr, args_so_far),
2567 /* TEMP is the address of the block. Copy the data there. */
2568 if (GET_CODE (size) == CONST_INT
2569 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2572 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
2573 INTVAL (size), align);
2576 /* Try the most limited insn first, because there's no point
2577 including more than one in the machine description unless
2578 the more limited one has some advantage. */
2579 #ifdef HAVE_movstrqi
2581 && GET_CODE (size) == CONST_INT
2582 && ((unsigned) INTVAL (size)
2583 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2585 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2586 xinner, size, GEN_INT (align));
2594 #ifdef HAVE_movstrhi
2596 && GET_CODE (size) == CONST_INT
2597 && ((unsigned) INTVAL (size)
2598 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2600 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2601 xinner, size, GEN_INT (align));
2609 #ifdef HAVE_movstrsi
2612 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2613 xinner, size, GEN_INT (align));
2621 #ifdef HAVE_movstrdi
2624 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2625 xinner, size, GEN_INT (align));
2634 #ifndef ACCUMULATE_OUTGOING_ARGS
2635 /* If the source is referenced relative to the stack pointer,
2636 copy it to another register to stabilize it. We do not need
2637 to do this if we know that we won't be changing sp. */
2639 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2640 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2641 temp = copy_to_reg (temp);
2644 /* Make inhibit_defer_pop nonzero around the library call
2645 to force it to pop the bcopy-arguments right away. */
2647 #ifdef TARGET_MEM_FUNCTIONS
2648 emit_library_call (memcpy_libfunc, 0,
2649 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
2650 convert_to_mode (TYPE_MODE (sizetype),
2651 size, TREE_UNSIGNED (sizetype)),
2652 TYPE_MODE (sizetype));
2654 emit_library_call (bcopy_libfunc, 0,
2655 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2656 convert_to_mode (TYPE_MODE (integer_type_node),
2658 TREE_UNSIGNED (integer_type_node)),
2659 TYPE_MODE (integer_type_node));
2664 else if (partial > 0)
2666 /* Scalar partly in registers. */
2668 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2671 /* # words of start of argument
2672 that we must make space for but need not store. */
2673 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2674 int args_offset = INTVAL (args_so_far);
2677 /* Push padding now if padding above and stack grows down,
2678 or if padding below and stack grows up.
2679 But if space already allocated, this has already been done. */
2680 if (extra && args_addr == 0
2681 && where_pad != none && where_pad != stack_direction)
2682 anti_adjust_stack (GEN_INT (extra));
2684 /* If we make space by pushing it, we might as well push
2685 the real data. Otherwise, we can leave OFFSET nonzero
2686 and leave the space uninitialized. */
2690 /* Now NOT_STACK gets the number of words that we don't need to
2691 allocate on the stack. */
2692 not_stack = partial - offset;
2694 /* If the partial register-part of the arg counts in its stack size,
2695 skip the part of stack space corresponding to the registers.
2696 Otherwise, start copying to the beginning of the stack space,
2697 by setting SKIP to 0. */
2698 #ifndef REG_PARM_STACK_SPACE
2704 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2705 x = validize_mem (force_const_mem (mode, x));
2707 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2708 SUBREGs of such registers are not allowed. */
2709 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2710 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2711 x = copy_to_reg (x);
2713 /* Loop over all the words allocated on the stack for this arg. */
2714 /* We can do it by words, because any scalar bigger than a word
2715 has a size a multiple of a word. */
2716 #ifndef PUSH_ARGS_REVERSED
2717 for (i = not_stack; i < size; i++)
2719 for (i = size - 1; i >= not_stack; i--)
2721 if (i >= not_stack + offset)
2722 emit_push_insn (operand_subword_force (x, i, mode),
2723 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2725 GEN_INT (args_offset + ((i - not_stack + skip)
2726 * UNITS_PER_WORD)));
2732 /* Push padding now if padding above and stack grows down,
2733 or if padding below and stack grows up.
2734 But if space already allocated, this has already been done. */
2735 if (extra && args_addr == 0
2736 && where_pad != none && where_pad != stack_direction)
2737 anti_adjust_stack (GEN_INT (extra));
2739 #ifdef PUSH_ROUNDING
2741 addr = gen_push_operand ();
2744 if (GET_CODE (args_so_far) == CONST_INT)
2746 = memory_address (mode,
2747 plus_constant (args_addr, INTVAL (args_so_far)));
2749 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2752 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2756 /* If part should go in registers, copy that part
2757 into the appropriate registers. Do this now, at the end,
2758 since mem-to-mem copies above may do function calls. */
2759 if (partial > 0 && reg != 0)
2761 /* Handle calls that pass values in multiple non-contiguous locations.
2762 The Irix 6 ABI has examples of this. */
2763 if (GET_CODE (reg) == PARALLEL)
2764 emit_group_load (reg, x);
2766 move_block_to_reg (REGNO (reg), x, partial, mode);
2769 if (extra && args_addr == 0 && where_pad == stack_direction)
2770 anti_adjust_stack (GEN_INT (extra));
2773 /* Expand an assignment that stores the value of FROM into TO.
2774 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2775 (This may contain a QUEUED rtx;
2776 if the value is constant, this rtx is a constant.)
2777 Otherwise, the returned value is NULL_RTX.
2779 SUGGEST_REG is no longer actually used.
2780 It used to mean, copy the value through a register
2781 and return that register, if that is possible.
2782 We now use WANT_VALUE to decide whether to do this. */
2785 expand_assignment (to, from, want_value, suggest_reg)
     /* NOTE(review): this excerpt is an elided sampling of the original
	file -- parameter declarations, braces and several statements
	between the numbered lines are missing.  The sampled lines are
	preserved verbatim; only comments have been added.  */
2790 register rtx to_rtx = 0;
2793 /* Don't crash if the lhs of the assignment was erroneous. */
2795 if (TREE_CODE (to) == ERROR_MARK)
2797 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2798 return want_value ? result : NULL_RTX;
     /* Bytecode back end: push FROM, duplicate it on the stack (so the
	store still leaves a copy behind as the value of the assignment),
	then store through the innermost address of TO.  */
2801 if (output_bytecode)
2803 tree dest_innermost;
2805 bc_expand_expr (from);
2806 bc_emit_instruction (duplicate);
2808 dest_innermost = bc_expand_address (to);
2810 /* Can't deduce from TYPE that we're dealing with a bitfield, so
2811 take care of it here. */
2813 bc_store_memory (TREE_TYPE (to), dest_innermost);
2817 /* Assignment of a structure component needs special treatment
2818 if the structure component's rtx is not simply a MEM.
2819 Assignment of an array element at a constant index, and assignment of
2820 an array element in an unaligned packed structure field, has the same
2823 if (TREE_CODE (to) == COMPONENT_REF
2824 || TREE_CODE (to) == BIT_FIELD_REF
2825 || (TREE_CODE (to) == ARRAY_REF
2826 && ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2827 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
2828 || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
2830 enum machine_mode mode1;
     /* Decompose TO into the innermost containing object TEM plus bit
	position, bit size, machine mode, signedness and volatility.  */
2840 tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2841 &mode1, &unsignedp, &volatilep);
2843 /* If we are going to use store_bit_field and extract_bit_field,
2844 make sure to_rtx will be safe for multiple use. */
2846 if (mode1 == VOIDmode && want_value)
2847 tem = stabilize_reference (tem);
2849 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
2850 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
     /* Variable-offset component: add the expanded offset to the
	address (presumably guarded by an elided nonzero-OFFSET test).  */
2853 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2855 if (GET_CODE (to_rtx) != MEM)
2857 to_rtx = change_address (to_rtx, VOIDmode,
2858 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
2859 force_reg (ptr_mode, offset_rtx)));
2860 /* If we have a variable offset, the known alignment
2861 is only that of the innermost structure containing the field.
2862 (Actually, we could sometimes do better by using the
2863 align of an element of the innermost array, but no need.) */
2864 if (TREE_CODE (to) == COMPONENT_REF
2865 || TREE_CODE (to) == BIT_FIELD_REF
2867 = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
     /* Mark the destination MEM volatile -- NOTE(review): apparently
	guarded by an elided test on the lhs's volatility.  */
2871 if (GET_CODE (to_rtx) == MEM)
2873 /* When the offset is zero, to_rtx is the address of the
2874 structure we are storing into, and hence may be shared.
2875 We must make a new MEM before setting the volatile bit. */
2877 to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
2878 MEM_VOLATILE_P (to_rtx) = 1;
2880 #if 0 /* This was turned off because, when a field is volatile
2881 in an object which is not volatile, the object may be in a register,
2882 and then we would abort over here. */
     /* Perform the actual (possibly bit-field) store of FROM.  */
2888 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2890 /* Spurious cast makes HPUX compiler happy. */
2891 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2894 /* Required alignment of containing datum. */
2896 int_size_in_bytes (TREE_TYPE (tem)));
2897 preserve_temp_slots (result);
2901 /* If the value is meaningful, convert RESULT to the proper mode.
2902 Otherwise, return nothing. */
2903 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2904 TYPE_MODE (TREE_TYPE (from)),
2906 TREE_UNSIGNED (TREE_TYPE (to)))
2910 /* If the rhs is a function call and its value is not an aggregate,
2911 call the function before we start to compute the lhs.
2912 This is needed for correct code for cases such as
2913 val = setjmp (buf) on machines where reference to val
2914 requires loading up part of an address in a separate insn.
2916 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2917 a promoted variable where the zero- or sign- extension needs to be done.
2918 Handling this in the normal way is safe because no computation is done
2920 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
2921 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
2922 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
2927 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2929 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2931 /* Handle calls that return values in multiple non-contiguous locations.
2932 The Irix 6 ABI has examples of this. */
2933 if (GET_CODE (to_rtx) == PARALLEL)
2934 emit_group_load (to_rtx, value);
2935 else if (GET_MODE (to_rtx) == BLKmode)
2936 emit_block_move (to_rtx, value, expr_size (from),
2937 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
2939 emit_move_insn (to_rtx, value);
2940 preserve_temp_slots (to_rtx);
2943 return want_value ? to_rtx : NULL_RTX;
2946 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2947 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2950 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2952 /* Don't move directly into a return register. */
2953 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
     /* Compute FROM into a temporary first, then move it into the
	return register.  */
2958 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2959 emit_move_insn (to_rtx, temp);
2960 preserve_temp_slots (to_rtx);
2963 return want_value ? to_rtx : NULL_RTX;
2966 /* In case we are returning the contents of an object which overlaps
2967 the place the value is being stored, use a safe function when copying
2968 a value through a pointer into a structure value return block. */
2969 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2970 && current_function_returns_struct
2971 && !current_function_returns_pcc_struct)
2976 size = expr_size (from);
2977 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2979 #ifdef TARGET_MEM_FUNCTIONS
2980 emit_library_call (memcpy_libfunc, 0,
2981 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2982 XEXP (from_rtx, 0), Pmode,
2983 convert_to_mode (TYPE_MODE (sizetype),
2984 size, TREE_UNSIGNED (sizetype)),
2985 TYPE_MODE (sizetype));
     /* Non-TARGET_MEM_FUNCTIONS configurations use bcopy, whose
	source/destination argument order is reversed vs. memcpy.  */
2987 emit_library_call (bcopy_libfunc, 0,
2988 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2989 XEXP (to_rtx, 0), Pmode,
2990 convert_to_mode (TYPE_MODE (integer_type_node),
2991 size, TREE_UNSIGNED (integer_type_node)),
2992 TYPE_MODE (integer_type_node));
2995 preserve_temp_slots (to_rtx);
2998 return want_value ? to_rtx : NULL_RTX;
3001 /* Compute FROM and store the value in the rtx we got. */
3004 result = store_expr (from, to_rtx, want_value);
3005 preserve_temp_slots (result);
3008 return want_value ? result : NULL_RTX;
3011 /* Generate code for computing expression EXP,
3012 and storing the value into TARGET.
3013 TARGET may contain a QUEUED rtx.
3015 If WANT_VALUE is nonzero, return a copy of the value
3016 not in TARGET, so that we can be sure to use the proper
3017 value in a containing expression even if TARGET has something
3018 else stored in it. If possible, we copy the value through a pseudo
3019 and return that pseudo. Or, if the value is constant, we try to
3020 return the constant. In some cases, we return a pseudo
3021 copied *from* TARGET.
3023 If the mode is BLKmode then we may return TARGET itself.
3024 It turns out that in BLKmode it doesn't cause a problem.
3025 because C has no operators that could combine two different
3026 assignments into the same BLKmode object with different values
3027 with no sequence point. Will other languages need this to
3030 If WANT_VALUE is 0, we return NULL, to make sure
3031 to catch quickly any cases where the caller uses the value
3032 and fails to set WANT_VALUE. */
3035 store_expr (exp, target, want_value)
     /* NOTE(review): this excerpt is an elided sampling of the original
	file -- parameter declarations, braces and several statements
	between the numbered lines are missing.  The sampled lines are
	preserved verbatim; only comments have been added.  */
3037 register rtx target;
3041 int dont_return_target = 0;
3043 if (TREE_CODE (exp) == COMPOUND_EXPR)
3045 /* Perform first part of compound expression, then assign from second
3047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3049 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3051 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3053 /* For conditional expression, get safe form of the target. Then
3054 test the condition, doing the appropriate assignment on either
3055 side. This avoids the creation of unnecessary temporaries.
3056 For non-BLKmode, it is more efficient not to do this. */
3058 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3061 target = protect_from_queue (target, 1);
3063 do_pending_stack_adjust ();
3065 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3066 store_expr (TREE_OPERAND (exp, 1), target, 0);
3068 emit_jump_insn (gen_jump (lab2));
3071 store_expr (TREE_OPERAND (exp, 2), target, 0);
3075 return want_value ? target : NULL_RTX;
3077 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3078 && GET_MODE (target) != BLKmode)
3079 /* If target is in memory and caller wants value in a register instead,
3080 arrange that. Pass TARGET as target for expand_expr so that,
3081 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3082 We know expand_expr will not use the target in that case.
3083 Don't do this if TARGET is volatile because we are supposed
3084 to write it and then read it. */
3086 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
3087 GET_MODE (target), 0);
3088 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3089 temp = copy_to_reg (temp);
3090 dont_return_target = 1;
3092 else if (queued_subexp_p (target))
3093 /* If target contains a postincrement, let's not risk
3094 using it as the place to generate the rhs. */
3096 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3098 /* Expand EXP into a new pseudo. */
3099 temp = gen_reg_rtx (GET_MODE (target));
3100 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3103 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3105 /* If target is volatile, ANSI requires accessing the value
3106 *from* the target, if it is accessed. So make that happen.
3107 In no case return the target itself. */
3108 if (! MEM_VOLATILE_P (target) && want_value)
3109 dont_return_target = 1;
3111 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3112 /* If this is an scalar in a register that is stored in a wider mode
3113 than the declared mode, compute the result into its declared mode
3114 and then convert to the wider mode. Our value is the computed
3117 /* If we don't want a value, we can do the conversion inside EXP,
3118 which will often result in some optimizations. Do the conversion
3119 in two steps: first change the signedness, if needed, then
3123 if (TREE_UNSIGNED (TREE_TYPE (exp))
3124 != SUBREG_PROMOTED_UNSIGNED_P (target))
3127 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3131 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3132 SUBREG_PROMOTED_UNSIGNED_P (target)),
3136 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
3138 /* If TEMP is a volatile MEM and we want a result value, make
3139 the access now so it gets done only once. Likewise if
3140 it contains TARGET. */
3141 if (GET_CODE (temp) == MEM && want_value
3142 && (MEM_VOLATILE_P (temp)
3143 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3144 temp = copy_to_reg (temp);
3146 /* If TEMP is a VOIDmode constant, use convert_modes to make
3147 sure that we properly convert it. */
3148 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3149 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3150 TYPE_MODE (TREE_TYPE (exp)), temp,
3151 SUBREG_PROMOTED_UNSIGNED_P (target));
     /* Store into the underlying wider register, extending per the
	promoted signedness; the narrower TEMP is the returned value.  */
3153 convert_move (SUBREG_REG (target), temp,
3154 SUBREG_PROMOTED_UNSIGNED_P (target));
3155 return want_value ? temp : NULL_RTX;
     /* Ordinary case: try to compute EXP directly into TARGET.  */
3159 temp = expand_expr (exp, target, GET_MODE (target), 0);
3160 /* Return TARGET if it's a specified hardware register.
3161 If TARGET is a volatile mem ref, either return TARGET
3162 or return a reg copied *from* TARGET; ANSI requires this.
3164 Otherwise, if TEMP is not TARGET, return TEMP
3165 if it is constant (for efficiency),
3166 or if we really want the correct value. */
3167 if (!(target && GET_CODE (target) == REG
3168 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3169 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
3171 && (CONSTANT_P (temp) || want_value))
3172 dont_return_target = 1;
3175 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3176 the same as that of TARGET, adjust the constant. This is needed, for
3177 example, in case it is a CONST_DOUBLE and we want only a word-sized
3179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3180 && TREE_CODE (exp) != ERROR_MARK
3181 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3182 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3183 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3185 /* If value was not generated in the target, store it there.
3186 Convert the value to TARGET's type first if nec. */
3188 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
3190 target = protect_from_queue (target, 1);
3191 if (GET_MODE (temp) != GET_MODE (target)
3192 && GET_MODE (temp) != VOIDmode)
3194 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3195 if (dont_return_target)
3197 /* In this case, we will return TEMP,
3198 so make sure it has the proper mode.
3199 But don't forget to store the value into TARGET. */
3200 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3201 emit_move_insn (target, temp);
3204 convert_move (target, temp, unsignedp);
3207 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3209 /* Handle copying a string constant into an array.
3210 The string constant may be shorter than the array.
3211 So copy just the string's actual length, and clear the rest. */
3215 /* Get the size of the data type of the string,
3216 which is actually the size of the target. */
3217 size = expr_size (exp);
3218 if (GET_CODE (size) == CONST_INT
3219 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3220 emit_block_move (target, temp, size,
3221 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3224 /* Compute the size of the data to copy from the string. */
3226 = size_binop (MIN_EXPR,
3227 make_tree (sizetype, size),
3229 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
3230 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3234 /* Copy that much. */
3235 emit_block_move (target, temp, copy_size_rtx,
3236 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3238 /* Figure out how much is left in TARGET that we have to clear.
3239 Do all calculations in ptr_mode. */
3241 addr = XEXP (target, 0);
3242 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3244 if (GET_CODE (copy_size_rtx) == CONST_INT)
3246 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3247 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
     /* Non-constant copy size: compute ADDR and remaining SIZE at
	run time, and branch around the clearing call when SIZE < 0.  */
3251 addr = force_reg (ptr_mode, addr);
3252 addr = expand_binop (ptr_mode, add_optab, addr,
3253 copy_size_rtx, NULL_RTX, 0,
3256 size = expand_binop (ptr_mode, sub_optab, size,
3257 copy_size_rtx, NULL_RTX, 0,
3260 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
3261 GET_MODE (size), 0, 0);
3262 label = gen_label_rtx ();
3263 emit_jump_insn (gen_blt (label));
3266 if (size != const0_rtx)
3268 #ifdef TARGET_MEM_FUNCTIONS
3269 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
3271 const0_rtx, TYPE_MODE (integer_type_node),
3272 convert_to_mode (TYPE_MODE (sizetype),
3274 TREE_UNSIGNED (sizetype)),
3275 TYPE_MODE (sizetype));
3277 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
3279 convert_to_mode (TYPE_MODE (integer_type_node),
3281 TREE_UNSIGNED (integer_type_node)),
3282 TYPE_MODE (integer_type_node));
3290 /* Handle calls that return values in multiple non-contiguous locations.
3291 The Irix 6 ABI has examples of this. */
3292 else if (GET_CODE (target) == PARALLEL)
3293 emit_group_load (target, temp);
3294 else if (GET_MODE (temp) == BLKmode)
3295 emit_block_move (target, temp, expr_size (exp),
3296 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3298 emit_move_insn (target, temp);
3301 /* If we don't want a value, return NULL_RTX. */
3305 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3306 ??? The latter test doesn't seem to make sense. */
3307 else if (dont_return_target && GET_CODE (temp) != MEM)
3310 /* Return TARGET itself if it is a hard register. */
3311 else if (want_value && GET_MODE (target) != BLKmode
3312 && ! (GET_CODE (target) == REG
3313 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3314 return copy_to_reg (target);
3320 /* Return 1 if EXP just contains zeros. */
3328 switch (TREE_CODE (exp))
     /* NOTE(review): the function header for is_zeros_p and several
	case labels appear to be elided from this excerpt; the sampled
	lines are preserved verbatim.  */
3332 case NON_LVALUE_EXPR:
3333 return is_zeros_p (TREE_OPERAND (exp, 0));
     /* Integer constant: zero iff both halves of the wide value are 0.  */
3336 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
     /* Complex constant: zero iff both real and imaginary parts are.  */
3340 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3343 return REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst0);
     /* CONSTRUCTOR: a SET_TYPE is all-zero iff it has no elements;
	otherwise every element value must itself be all zeros.  */
3346 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3347 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3348 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3349 if (! is_zeros_p (TREE_VALUE (elt)))
3358 /* Return 1 if EXP contains mostly (3/4) zeros. */
3361 mostly_zeros_p (exp)
     /* Return 1 if at least 3/4 of EXP's elements are (recursively)
	zero; non-CONSTRUCTOR nodes degenerate to is_zeros_p.
	NOTE(review): declarations/braces between the sampled lines
	appear to be elided from this excerpt.  */
3364 if (TREE_CODE (exp) == CONSTRUCTOR)
3366 int elts = 0, zeros = 0;
3367 tree elt = CONSTRUCTOR_ELTS (exp);
3368 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3370 /* If there are no ranges of true bits, it is all zero. */
3371 return elt == NULL_TREE;
     /* Count elements and (mostly-)zero elements.  */
3373 for (; elt; elt = TREE_CHAIN (elt))
3375 /* We do not handle the case where the index is a RANGE_EXPR,
3376 so the statistic will be somewhat inaccurate.
3377 We do make a more accurate count in store_constructor itself,
3378 so since this function is only used for nested array elements,
3379 this should be close enough. */
3380 if (mostly_zeros_p (TREE_VALUE (elt)))
     /* "Mostly zeros" means zeros >= 3/4 of the element count.  */
3385 return 4 * zeros >= 3 * elts;
3388 return is_zeros_p (exp);
3391 /* Helper function for store_constructor.
3392 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3393 TYPE is the type of the CONSTRUCTOR, not the element type.
3394 CLEARED is as for store_constructor.
3396 This provides a recursive shortcut back to store_constructor when it isn't
3397 necessary to go through store_field. This is so that we can pass through
3398 the cleared field to let store_constructor know that we may not have to
3399 clear a substructure if the outer structure has already been cleared. */
3402 store_constructor_field (target, bitsize, bitpos,
3403 mode, exp, type, cleared)
     /* NOTE(review): some parameter declarations and braces between
	the sampled lines appear to be elided from this excerpt.  */
3405 int bitsize, bitpos;
3406 enum machine_mode mode;
3410 if (TREE_CODE (exp) == CONSTRUCTOR
3411 && bitpos % BITS_PER_UNIT == 0
3412 /* If we have a non-zero bitpos for a register target, then we just
3413 let store_field do the bitfield handling. This is unlikely to
3414 generate unnecessary clear instructions anyways. */
3415 && (bitpos == 0 || GET_CODE (target) == MEM))
     /* Nested CONSTRUCTOR on a byte boundary: address TARGET at the
	field's byte offset and recurse into store_constructor so the
	CLEARED flag can be propagated.  */
3418 target = change_address (target, VOIDmode,
3419 plus_constant (XEXP (target, 0),
3420 bitpos / BITS_PER_UNIT));
3421 store_constructor (exp, target, cleared);
     /* Otherwise fall back to the general (bit-field-capable) store.  */
3424 store_field (target, bitsize, bitpos, mode, exp,
3425 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3426 int_size_in_bytes (type));
3429 /* Store the value of constructor EXP into the rtx TARGET.
3430 TARGET is either a REG or a MEM.
3431 CLEARED is true if TARGET is known to have been zero'd. */
3434 store_constructor (exp, target, cleared)
3439 tree type = TREE_TYPE (exp);
3441 /* We know our target cannot conflict, since safe_from_p has been called. */
3443 /* Don't try copying piece by piece into a hard register
3444 since that is vulnerable to being clobbered by EXP.
3445 Instead, construct in a pseudo register and then copy it all. */
3446 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3448 rtx temp = gen_reg_rtx (GET_MODE (target));
3449 store_constructor (exp, temp, 0);
3450 emit_move_insn (target, temp);
3455 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3456 || TREE_CODE (type) == QUAL_UNION_TYPE)
3460 /* Inform later passes that the whole union value is dead. */
3461 if (TREE_CODE (type) == UNION_TYPE
3462 || TREE_CODE (type) == QUAL_UNION_TYPE)
3463 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3465 /* If we are building a static constructor into a register,
3466 set the initial value as zero so we can fold the value into
3467 a constant. But if more than one register is involved,
3468 this probably loses. */
3469 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3470 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
3473 emit_move_insn (target, const0_rtx);
3478 /* If the constructor has fewer fields than the structure
3479 or if we are initializing the structure to mostly zeros,
3480 clear the whole structure first. */
3481 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3482 != list_length (TYPE_FIELDS (type)))
3483 || mostly_zeros_p (exp))
3486 clear_storage (target, expr_size (exp),
3487 TYPE_ALIGN (type) / BITS_PER_UNIT);
3492 /* Inform later passes that the old value is dead. */
3493 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3495 /* Store each element of the constructor into
3496 the corresponding field of TARGET. */
3498 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3500 register tree field = TREE_PURPOSE (elt);
3501 register enum machine_mode mode;
3505 tree pos, constant = 0, offset = 0;
3506 rtx to_rtx = target;
3508 /* Just ignore missing fields.
3509 We cleared the whole structure, above,
3510 if any fields are missing. */
3514 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3517 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3518 unsignedp = TREE_UNSIGNED (field);
3519 mode = DECL_MODE (field);
3520 if (DECL_BIT_FIELD (field))
3523 pos = DECL_FIELD_BITPOS (field);
3524 if (TREE_CODE (pos) == INTEGER_CST)
3526 else if (TREE_CODE (pos) == PLUS_EXPR
3527 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3528 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3533 bitpos = TREE_INT_CST_LOW (constant);
3539 if (contains_placeholder_p (offset))
3540 offset = build (WITH_RECORD_EXPR, sizetype,
3543 offset = size_binop (FLOOR_DIV_EXPR, offset,
3544 size_int (BITS_PER_UNIT));
3546 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3547 if (GET_CODE (to_rtx) != MEM)
3551 = change_address (to_rtx, VOIDmode,
3552 gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
3553 force_reg (ptr_mode, offset_rtx)));
3555 if (TREE_READONLY (field))
3557 if (GET_CODE (to_rtx) == MEM)
3558 to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
3560 RTX_UNCHANGING_P (to_rtx) = 1;
3563 store_constructor_field (to_rtx, bitsize, bitpos,
3564 mode, TREE_VALUE (elt), type, cleared);
3567 else if (TREE_CODE (type) == ARRAY_TYPE)
3572 tree domain = TYPE_DOMAIN (type);
3573 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3574 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
3575 tree elttype = TREE_TYPE (type);
3577 /* If the constructor has fewer elements than the array,
3578 clear the whole array first. Similarly if this this is
3579 static constructor of a non-BLKmode object. */
3580 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3584 HOST_WIDE_INT count = 0, zero_count = 0;
3586 /* This loop is a more accurate version of the loop in
3587 mostly_zeros_p (it handles RANGE_EXPR in an index).
3588 It is also needed to check for missing elements. */
3589 for (elt = CONSTRUCTOR_ELTS (exp);
3591 elt = TREE_CHAIN (elt), i++)
3593 tree index = TREE_PURPOSE (elt);
3594 HOST_WIDE_INT this_node_count;
3595 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3597 tree lo_index = TREE_OPERAND (index, 0);
3598 tree hi_index = TREE_OPERAND (index, 1);
3599 if (TREE_CODE (lo_index) != INTEGER_CST
3600 || TREE_CODE (hi_index) != INTEGER_CST)
3605 this_node_count = TREE_INT_CST_LOW (hi_index)
3606 - TREE_INT_CST_LOW (lo_index) + 1;
3609 this_node_count = 1;
3610 count += this_node_count;
3611 if (mostly_zeros_p (TREE_VALUE (elt)))
3612 zero_count += this_node_count;
3614 /* Clear the entire array first if there are any missing elements,
3615 or if the incidence of zero elements is >= 75%. */
3616 if (count < maxelt - minelt + 1
3617 || 4 * zero_count >= 3 * count)
3623 clear_storage (target, expr_size (exp),
3624 TYPE_ALIGN (type) / BITS_PER_UNIT);
3628 /* Inform later passes that the old value is dead. */
3629 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3631 /* Store each element of the constructor into
3632 the corresponding element of TARGET, determined
3633 by counting the elements. */
3634 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3636 elt = TREE_CHAIN (elt), i++)
3638 register enum machine_mode mode;
3642 tree value = TREE_VALUE (elt);
3643 tree index = TREE_PURPOSE (elt);
3644 rtx xtarget = target;
3646 if (cleared && is_zeros_p (value))
3649 mode = TYPE_MODE (elttype);
3650 bitsize = GET_MODE_BITSIZE (mode);
3651 unsignedp = TREE_UNSIGNED (elttype);
3653 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3655 tree lo_index = TREE_OPERAND (index, 0);
3656 tree hi_index = TREE_OPERAND (index, 1);
3657 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3658 struct nesting *loop;
3659 HOST_WIDE_INT lo, hi, count;
3662 /* If the range is constant and "small", unroll the loop. */
3663 if (TREE_CODE (lo_index) == INTEGER_CST
3664 && TREE_CODE (hi_index) == INTEGER_CST
3665 && (lo = TREE_INT_CST_LOW (lo_index),
3666 hi = TREE_INT_CST_LOW (hi_index),
3667 count = hi - lo + 1,
3668 (GET_CODE (target) != MEM
3670 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3671 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3674 lo -= minelt; hi -= minelt;
3675 for (; lo <= hi; lo++)
3677 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3678 store_constructor_field (target, bitsize, bitpos,
3679 mode, value, type, cleared);
3684 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3685 loop_top = gen_label_rtx ();
3686 loop_end = gen_label_rtx ();
3688 unsignedp = TREE_UNSIGNED (domain);
3690 index = build_decl (VAR_DECL, NULL_TREE, domain);
3692 DECL_RTL (index) = index_r
3693 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3696 if (TREE_CODE (value) == SAVE_EXPR
3697 && SAVE_EXPR_RTL (value) == 0)
3699 /* Make sure value gets expanded once before the
3701 expand_expr (value, const0_rtx, VOIDmode, 0);
3704 store_expr (lo_index, index_r, 0);
3705 loop = expand_start_loop (0);
3707 /* Assign value to element index. */
3708 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3709 size_int (BITS_PER_UNIT));
3710 position = size_binop (MULT_EXPR,
3711 size_binop (MINUS_EXPR, index,
3712 TYPE_MIN_VALUE (domain)),
3714 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3715 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3716 xtarget = change_address (target, mode, addr);
3717 if (TREE_CODE (value) == CONSTRUCTOR)
3718 store_constructor (value, xtarget, cleared);
3720 store_expr (value, xtarget, 0);
3722 expand_exit_loop_if_false (loop,
3723 build (LT_EXPR, integer_type_node,
3726 expand_increment (build (PREINCREMENT_EXPR,
3728 index, integer_one_node), 0, 0);
3730 emit_label (loop_end);
3732 /* Needed by stupid register allocation, to extend the
3733 lifetime of pseudo-regs used by target past the end
3735 emit_insn (gen_rtx (USE, GET_MODE (target), target));
3738 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
3739 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
3745 index = size_int (i);
3748 index = size_binop (MINUS_EXPR, index,
3749 TYPE_MIN_VALUE (domain));
3750 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3751 size_int (BITS_PER_UNIT));
3752 position = size_binop (MULT_EXPR, index, position);
3753 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
3754 addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
3755 xtarget = change_address (target, mode, addr);
3756 store_expr (value, xtarget, 0);
3761 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
3762 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3764 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3765 store_constructor_field (target, bitsize, bitpos,
3766 mode, value, type, cleared);
3770 /* set constructor assignments */
3771 else if (TREE_CODE (type) == SET_TYPE)
3773 tree elt = CONSTRUCTOR_ELTS (exp);
3774 rtx xtarget = XEXP (target, 0);
3775 int set_word_size = TYPE_ALIGN (type);
3776 int nbytes = int_size_in_bytes (type), nbits;
3777 tree domain = TYPE_DOMAIN (type);
3778 tree domain_min, domain_max, bitlength;
3780 /* The default implementation strategy is to extract the constant
3781 parts of the constructor, use that to initialize the target,
3782 and then "or" in whatever non-constant ranges we need in addition.
3784 If a large set is all zero or all ones, it is
3785 probably better to set it using memset (if available) or bzero.
3786 Also, if a large set has just a single range, it may also be
3787 better to first clear the whole set (using
3788 bzero/memset), and set the bits we want. */
3790 /* Check for all zeros. */
3791 if (elt == NULL_TREE)
3794 clear_storage (target, expr_size (exp),
3795 TYPE_ALIGN (type) / BITS_PER_UNIT);
3799 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3800 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3801 bitlength = size_binop (PLUS_EXPR,
3802 size_binop (MINUS_EXPR, domain_max, domain_min),
3805 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3807 nbits = TREE_INT_CST_LOW (bitlength);
3809 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3810 are "complicated" (more than one range), initialize (the
3811 constant parts) by copying from a constant. */
3812 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3813 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
3815 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3816 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
3817 char *bit_buffer = (char *) alloca (nbits);
3818 HOST_WIDE_INT word = 0;
3821 int offset = 0; /* In bytes from beginning of set. */
3822 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
3825 if (bit_buffer[ibit])
3827 if (BYTES_BIG_ENDIAN)
3828 word |= (1 << (set_word_size - 1 - bit_pos));
3830 word |= 1 << bit_pos;
3833 if (bit_pos >= set_word_size || ibit == nbits)
3835 if (word != 0 || ! cleared)
3837 rtx datum = GEN_INT (word);
3839 /* The assumption here is that it is safe to use
3840 XEXP if the set is multi-word, but not if
3841 it's single-word. */
3842 if (GET_CODE (target) == MEM)
3844 to_rtx = plus_constant (XEXP (target, 0), offset);
3845 to_rtx = change_address (target, mode, to_rtx);
3847 else if (offset == 0)
3851 emit_move_insn (to_rtx, datum);
3857 offset += set_word_size / BITS_PER_UNIT;
3863 /* Don't bother clearing storage if the set is all ones. */
3864 if (TREE_CHAIN (elt) != NULL_TREE
3865 || (TREE_PURPOSE (elt) == NULL_TREE
3867 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3868 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3869 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3870 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3872 clear_storage (target, expr_size (exp),
3873 TYPE_ALIGN (type) / BITS_PER_UNIT);
3876 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
3878 /* start of range of element or NULL */
3879 tree startbit = TREE_PURPOSE (elt);
3880 /* end of range of element, or element value */
3881 tree endbit = TREE_VALUE (elt);
3882 HOST_WIDE_INT startb, endb;
3883 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3885 bitlength_rtx = expand_expr (bitlength,
3886 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3888 /* handle non-range tuple element like [ expr ] */
3889 if (startbit == NULL_TREE)
3891 startbit = save_expr (endbit);
3894 startbit = convert (sizetype, startbit);
3895 endbit = convert (sizetype, endbit);
3896 if (! integer_zerop (domain_min))
3898 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
3899 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
3901 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
3902 EXPAND_CONST_ADDRESS);
3903 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
3904 EXPAND_CONST_ADDRESS);
3908 targetx = assign_stack_temp (GET_MODE (target),
3909 GET_MODE_SIZE (GET_MODE (target)),
3911 emit_move_insn (targetx, target);
3913 else if (GET_CODE (target) == MEM)
3918 #ifdef TARGET_MEM_FUNCTIONS
3919 /* Optimization: If startbit and endbit are
3920 constants divisible by BITS_PER_UNIT,
3921 call memset instead. */
3922 if (TREE_CODE (startbit) == INTEGER_CST
3923 && TREE_CODE (endbit) == INTEGER_CST
3924 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
3925 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
3927 emit_library_call (memset_libfunc, 0,
3929 plus_constant (XEXP (targetx, 0),
3930 startb / BITS_PER_UNIT),
3932 constm1_rtx, TYPE_MODE (integer_type_node),
3933 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3934 TYPE_MODE (sizetype));
3939 emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__setbits"),
3940 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
3941 bitlength_rtx, TYPE_MODE (sizetype),
3942 startbit_rtx, TYPE_MODE (sizetype),
3943 endbit_rtx, TYPE_MODE (sizetype));
3946 emit_move_insn (target, targetx);
3954 /* Store the value of EXP (an expression tree)
3955 into a subfield of TARGET which has mode MODE and occupies
3956 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3957 If MODE is VOIDmode, it means that we are storing into a bit-field.
3959 If VALUE_MODE is VOIDmode, return nothing in particular.
3960 UNSIGNEDP is not used in this case.
3962 Otherwise, return an rtx for the value stored. This rtx
3963 has mode VALUE_MODE if that is convenient to do.
3964 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3966 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3967 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
/* Store the value of tree expression EXP into the field of TARGET that
   occupies BITSIZE bits starting at bit BITPOS; see the block comment
   immediately above for the full contract (VALUE_MODE, UNSIGNEDP, ALIGN,
   TOTAL_SIZE).  NOTE(review): this listing has elided lines (numbering
   gaps), so several declarations/braces are not visible here; the
   surviving text is kept byte-identical.  */
3970 store_field (target, bitsize, bitpos, mode, exp, value_mode,
3971 unsignedp, align, total_size)
3973 int bitsize, bitpos;
3974 enum machine_mode mode;
3976 enum machine_mode value_mode;
3981 HOST_WIDE_INT width_mask = 0;
/* Mask of the low BITSIZE bits, used below when the caller wants the
   stored value back.  The guard avoids an undefined full-width shift.  */
3983 if (bitsize < HOST_BITS_PER_WIDE_INT)
3984 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
3986 /* If we are storing into an unaligned field of an aligned union that is
3987 in a register, we may have the mode of TARGET being an integer mode but
3988 MODE == BLKmode. In that case, get an aligned object whose size and
3989 alignment are the same as TARGET and store TARGET into it (we can avoid
3990 the store if the field being stored is the entire width of TARGET). Then
3991 call ourselves recursively to store the field into a BLKmode version of
3992 that object. Finally, load from the object into TARGET. This is not
3993 very efficient in general, but should only be slightly more expensive
3994 than the otherwise-required unaligned accesses. Perhaps this can be
3995 cleaned up later. */
3998 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG)
4000 rtx object = assign_stack_temp (GET_MODE (target),
4001 GET_MODE_SIZE (GET_MODE (target)), 0);
4002 rtx blk_object = copy_rtx (object);
4004 MEM_IN_STRUCT_P (object) = 1;
4005 MEM_IN_STRUCT_P (blk_object) = 1;
4006 PUT_MODE (blk_object, BLKmode);
/* Only preload the temporary when the field does not overwrite the
   whole of TARGET.  */
4008 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4009 emit_move_insn (object, target);
4011 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4014 /* Even though we aren't returning target, we need to
4015 give it the updated value. */
4016 emit_move_insn (target, object);
4021 /* If the structure is in a register or if the component
4022 is a bit field, we cannot use addressing to access it.
4023 Use bit-field techniques or SUBREG to store in it. */
4025 if (mode == VOIDmode
4026 || (mode != BLKmode && ! direct_store[(int) mode])
4027 || GET_CODE (target) == REG
4028 || GET_CODE (target) == SUBREG
4029 /* If the field isn't aligned enough to store as an ordinary memref,
4030 store it as a bit field. */
4031 || (SLOW_UNALIGNED_ACCESS
4032 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
4033 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
4035 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4037 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4039 if (mode != VOIDmode && mode != BLKmode
4040 && mode != TYPE_MODE (TREE_TYPE (exp)))
4041 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4043 /* If the modes of TARGET and TEMP are both BLKmode, both
4044 must be in memory and BITPOS must be aligned on a byte
4045 boundary. If so, we simply do a block copy. */
4046 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4048 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4049 || bitpos % BITS_PER_UNIT != 0)
4052 target = change_address (target, VOIDmode,
4053 plus_constant (XEXP (target, 0),
4054 bitpos / BITS_PER_UNIT));
/* Byte count is rounded up so a partial trailing byte is still copied.  */
4056 emit_block_move (target, temp,
4057 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4061 return value_mode == VOIDmode ? const0_rtx : target;
4064 /* Store the value in the bitfield. */
4065 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4066 if (value_mode != VOIDmode)
4068 /* The caller wants an rtx for the value. */
4069 /* If possible, avoid refetching from the bitfield itself. */
4071 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
4074 enum machine_mode tmode;
/* Unsigned case: mask TEMP down to the low BITSIZE bits.  */
4077 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4078 tmode = GET_MODE (temp);
4079 if (tmode == VOIDmode)
/* Signed case: sign-extend by shifting left, then arithmetic right,
   by (mode width - BITSIZE) bits.  */
4081 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4082 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4083 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
/* Otherwise re-read the field from TARGET itself.  */
4085 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4086 NULL_RTX, value_mode, 0, align,
/* Plain-memory path: address the field directly.  */
4093 rtx addr = XEXP (target, 0);
4096 /* If a value is wanted, it must be the lhs;
4097 so make the address stable for multiple use. */
4099 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4100 && ! CONSTANT_ADDRESS_P (addr)
4101 /* A frame-pointer reference is already stable. */
4102 && ! (GET_CODE (addr) == PLUS
4103 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4104 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4105 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4106 addr = copy_to_reg (addr);
4108 /* Now build a reference to just the desired component. */
4110 to_rtx = change_address (target, mode,
4111 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
4112 MEM_IN_STRUCT_P (to_rtx) = 1;
4114 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4118 /* Return true if any object containing the innermost array is an unaligned
4119 packed structure field. */
/* Return true if any object containing the innermost array in EXP is an
   unaligned packed structure field (see the comment above).  Walks down
   the chain of references, stripping COMPONENT_REFs, BIT_FIELD_REFs,
   ARRAY_REFs and no-op conversions.  NOTE(review): some lines of this
   function are elided in this listing; code kept byte-identical.  */
4122 get_inner_unaligned_p (exp)
/* Alignment required by the innermost array's element type.  */
4125 int needed_alignment = TYPE_ALIGN (TREE_TYPE (exp));
4129 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
/* A containing record aligned more weakly than we need means the
   field may be unaligned.  */
4131 if (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
/* Stop walking at anything other than an array ref, a NON_LVALUE_EXPR,
   or a mode-preserving NOP/CONVERT.  */
4135 else if (TREE_CODE (exp) != ARRAY_REF
4136 && TREE_CODE (exp) != NON_LVALUE_EXPR
4137 && ! ((TREE_CODE (exp) == NOP_EXPR
4138 || TREE_CODE (exp) == CONVERT_EXPR)
4139 && (TYPE_MODE (TREE_TYPE (exp))
4140 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
/* Descend to the containing object and keep looking.  */
4143 exp = TREE_OPERAND (exp, 0);
4149 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4150 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
4151 ARRAY_REFs and find the ultimate containing object, which we return.
4153 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4154 bit position, and *PUNSIGNEDP to the signedness of the field.
4155 If the position of the field is variable, we store a tree
4156 giving the variable offset (in units) in *POFFSET.
4157 This offset is in addition to the bit position.
4158 If the position is not variable, we store 0 in *POFFSET.
4160 If any of the extraction expressions is volatile,
4161 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4163 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4164 is a mode that can be used to access the field. In that case, *PBITSIZE
4167 If the field describes a variable-sized object, *PMODE is set to
4168 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
4169 this case, but the address of the object can be found. */
/* Peel COMPONENT_REFs, BIT_FIELD_REFs and ARRAY_REFs off EXP, returning
   the ultimate containing object and filling in *PBITSIZE, *PBITPOS,
   *POFFSET, *PMODE, *PUNSIGNEDP and *PVOLATILEP as described in the
   block comment above.  NOTE(review): this listing has elided lines;
   the surviving code is kept byte-identical.  */
4172 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
4173 punsignedp, pvolatilep)
4178 enum machine_mode *pmode;
4182 tree orig_exp = exp;
4184 enum machine_mode mode = VOIDmode;
/* Running variable part of the offset, in units; starts at zero.  */
4185 tree offset = integer_zero_node;
/* First determine the size (and signedness/mode) of the outermost
   reference itself.  */
4187 if (TREE_CODE (exp) == COMPONENT_REF)
4189 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4190 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4191 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4192 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4194 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4196 size_tree = TREE_OPERAND (exp, 1);
4197 *punsignedp = TREE_UNSIGNED (exp);
4201 mode = TYPE_MODE (TREE_TYPE (exp));
4202 *pbitsize = GET_MODE_BITSIZE (mode);
4203 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
/* Variable-sized object: signal with BLKmode and bitsize -1.  */
4208 if (TREE_CODE (size_tree) != INTEGER_CST)
4209 mode = BLKmode, *pbitsize = -1;
4211 *pbitsize = TREE_INT_CST_LOW (size_tree);
4214 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4215 and find the ultimate containing object. */
4221 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
4223 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4224 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4225 : TREE_OPERAND (exp, 2));
4226 tree constant = integer_zero_node, var = pos;
4228 /* If this field hasn't been filled in yet, don't go
4229 past it. This should only happen when folding expressions
4230 made during type construction. */
4234 /* Assume here that the offset is a multiple of a unit.
4235 If not, there should be an explicitly added constant. */
4236 if (TREE_CODE (pos) == PLUS_EXPR
4237 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4238 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
4239 else if (TREE_CODE (pos) == INTEGER_CST)
4240 constant = pos, var = integer_zero_node;
/* Constant part accumulates in *PBITPOS; variable part (in units)
   accumulates in OFFSET.  */
4242 *pbitpos += TREE_INT_CST_LOW (constant);
4243 offset = size_binop (PLUS_EXPR, offset,
4244 size_binop (EXACT_DIV_EXPR, var,
4245 size_int (BITS_PER_UNIT)));
4248 else if (TREE_CODE (exp) == ARRAY_REF)
4250 /* This code is based on the code in case ARRAY_REF in expand_expr
4251 below. We assume here that the size of an array element is
4252 always an integral multiple of BITS_PER_UNIT. */
4254 tree index = TREE_OPERAND (exp, 1);
4255 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4257 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4258 tree index_type = TREE_TYPE (index);
/* Rebase the index so the array's low bound becomes zero.  */
4260 if (! integer_zerop (low_bound))
4261 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
4263 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
4265 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4267 index_type = TREE_TYPE (index);
/* Scale the index by the element size in bits.  */
4270 index = fold (build (MULT_EXPR, index_type, index,
4271 TYPE_SIZE (TREE_TYPE (exp))));
/* Constant index goes into *PBITPOS, otherwise into OFFSET.  */
4273 if (TREE_CODE (index) == INTEGER_CST
4274 && TREE_INT_CST_HIGH (index) == 0)
4275 *pbitpos += TREE_INT_CST_LOW (index);
4277 offset = size_binop (PLUS_EXPR, offset,
4278 size_binop (FLOOR_DIV_EXPR, index,
4279 size_int (BITS_PER_UNIT)));
/* Terminate the walk at anything other than a NON_LVALUE_EXPR or a
   mode-preserving NOP/CONVERT (union conversions excepted).  */
4281 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4282 && ! ((TREE_CODE (exp) == NOP_EXPR
4283 || TREE_CODE (exp) == CONVERT_EXPR)
4284 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4285 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4287 && (TYPE_MODE (TREE_TYPE (exp))
4288 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4291 /* If any reference in the chain is volatile, the effect is volatile. */
4292 if (TREE_THIS_VOLATILE (exp))
4294 exp = TREE_OPERAND (exp, 0);
/* A zero variable offset is reported as 0 per the contract above.  */
4297 if (integer_zerop (offset))
4300 if (offset != 0 && contains_placeholder_p (offset))
4301 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4308 /* Given an rtx VALUE that may contain additions and multiplications,
4309 return an equivalent value that just refers to a register or memory.
4310 This is done by generating instructions to perform the arithmetic
4311 and returning a pseudo-register containing the value.
4313 The returned value may be a REG, SUBREG, MEM or constant. */
/* Given an rtx VALUE possibly containing additions and multiplications,
   emit the arithmetic and return an equivalent REG, SUBREG, MEM or
   constant (see the comment above).  TARGET is a suggestion only.
   NOTE(review): several lines are elided from this listing; code is
   kept byte-identical.  */
4316 force_operand (value, target)
4319 register optab binoptab = 0;
4320 /* Use a temporary to force order of execution of calls to
4324 /* Use subtarget as the target for operand 0 of a binary operation. */
4325 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
/* Classify the top-level operation.  */
4327 if (GET_CODE (value) == PLUS)
4328 binoptab = add_optab;
4329 else if (GET_CODE (value) == MINUS)
4330 binoptab = sub_optab;
4331 else if (GET_CODE (value) == MULT)
4333 op2 = XEXP (value, 1);
/* Force operand 1 too unless it is already a constant or a register
   distinct from SUBTARGET.  */
4334 if (!CONSTANT_P (op2)
4335 && !(GET_CODE (op2) == REG && op2 != subtarget))
4337 tmp = force_operand (XEXP (value, 0), subtarget);
4338 return expand_mult (GET_MODE (value), tmp,
4339 force_operand (op2, NULL_RTX),
/* PLUS/MINUS path.  */
4345 op2 = XEXP (value, 1);
4346 if (!CONSTANT_P (op2)
4347 && !(GET_CODE (op2) == REG && op2 != subtarget))
/* Canonicalize x - C into x + (-C).  */
4349 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4351 binoptab = add_optab;
4352 op2 = negate_rtx (GET_MODE (value), op2);
4355 /* Check for an addition with OP2 a constant integer and our first
4356 operand a PLUS of a virtual register and something else. In that
4357 case, we want to emit the sum of the virtual register and the
4358 constant first and then add the other value. This allows virtual
4359 register instantiation to simply modify the constant rather than
4360 creating another one around this addition. */
4361 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4362 && GET_CODE (XEXP (value, 0)) == PLUS
4363 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4364 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4365 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4367 rtx temp = expand_binop (GET_MODE (value), binoptab,
4368 XEXP (XEXP (value, 0), 0), op2,
4369 subtarget, 0, OPTAB_LIB_WIDEN)
4370 return expand_binop (GET_MODE (value), binoptab, temp,
4371 force_operand (XEXP (XEXP (value, 0), 1), 0),
4372 target, 0, OPTAB_LIB_WIDEN);
/* General binary case: force operand 0, then combine with OP2.  */
4375 tmp = force_operand (XEXP (value, 0), subtarget);
4376 return expand_binop (GET_MODE (value), binoptab, tmp,
4377 force_operand (op2, NULL_RTX),
4378 target, 0, OPTAB_LIB_WIDEN);
4379 /* We give UNSIGNEDP = 0 to expand_binop
4380 because the only operations we are expanding here are signed ones. */
4385 /* Subroutine of expand_expr:
4386 save the non-copied parts (LIST) of an expr (LHS), and return a list
4387 which can restore these values to their previous values,
4388 should something modify their storage. */
/* Save the non-copied parts (LIST) of an expr LHS into fresh temporaries
   and return a TREE_LIST that can later restore them (see the comment
   above).  NOTE(review): some lines are elided from this listing; code
   is kept byte-identical.  */
4391 save_noncopied_parts (lhs, list)
4398 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are handled recursively and chained on.  */
4399 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4400 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4403 tree part = TREE_VALUE (tail);
4404 tree part_type = TREE_TYPE (part);
/* Build a reference to this component of LHS and a temp to hold it.  */
4405 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
4406 rtx target = assign_temp (part_type, 0, 1, 1);
4407 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
4408 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
/* Pair the component with an RTL_EXPR wrapping the temporary.  */
4409 parts = tree_cons (to_be_saved,
4410 build (RTL_EXPR, part_type, NULL_TREE,
/* Copy the current value of the component into the temporary.  */
4413 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4418 /* Subroutine of expand_expr:
4419 record the non-copied parts (LIST) of an expr (LHS), and return a list
4420 which specifies the initial values of these parts. */
/* Record the non-copied parts (LIST) of an expr LHS and return a list of
   (initial value, component reference) pairs (see the comment above).
   NOTE(review): some lines are elided from this listing; code is kept
   byte-identical.  */
4423 init_noncopied_parts (lhs, list)
4430 for (tail = list; tail; tail = TREE_CHAIN (tail))
/* Nested TREE_LISTs are flattened recursively, as in
   save_noncopied_parts.  */
4431 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4432 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4435 tree part = TREE_VALUE (tail);
4436 tree part_type = TREE_TYPE (part);
4437 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
4438 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4443 /* Subroutine of expand_expr: return nonzero iff there is no way that
4444 EXP can reference X, which is being modified. */
/* Return nonzero iff there is no way that EXP can reference X, which is
   being modified (see the comment above).  Used by expand_expr to decide
   whether TARGET may be used while EXP is still being evaluated.
   NOTE(review): this listing has elided lines (numbering gaps); the
   surviving code is kept byte-identical.  */
4447 safe_from_p (x, exp)
4455 /* If EXP has varying size, we MUST use a target since we currently
4456 have no way of allocating temporaries of variable size
4457 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4458 So we assume here that something at a higher level has prevented a
4459 clash. This is somewhat bogus, but the best we can do. Only
4460 do this when X is BLKmode. */
4461 || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4462 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
4463 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4464 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4465 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4467 && GET_MODE (x) == BLKmode))
4470 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4471 find the underlying pseudo. */
4472 if (GET_CODE (x) == SUBREG)
4475 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4479 /* If X is a location in the outgoing argument area, it is always safe. */
4480 if (GET_CODE (x) == MEM
4481 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4482 || (GET_CODE (XEXP (x, 0)) == PLUS
4483 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
/* Dispatch first on the class of tree code.  */
4486 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4489 exp_rtl = DECL_RTL (exp);
/* A TREE_LIST is safe iff its value and its chain both are.  */
4496 if (TREE_CODE (exp) == TREE_LIST)
4497 return ((TREE_VALUE (exp) == 0
4498 || safe_from_p (x, TREE_VALUE (exp)))
4499 && (TREE_CHAIN (exp) == 0
4500 || safe_from_p (x, TREE_CHAIN (exp))));
4505 return safe_from_p (x, TREE_OPERAND (exp, 0));
4509 return (safe_from_p (x, TREE_OPERAND (exp, 0))
4510 && safe_from_p (x, TREE_OPERAND (exp, 1)));
4514 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
4515 the expression. If it is set, we conflict iff we are that rtx or
4516 both are in memory. Otherwise, we check all operands of the
4517 expression recursively. */
4519 switch (TREE_CODE (exp))
4522 return (staticp (TREE_OPERAND (exp, 0))
4523 || safe_from_p (x, TREE_OPERAND (exp, 0)));
4526 if (GET_CODE (x) == MEM)
4531 exp_rtl = CALL_EXPR_RTL (exp);
4534 /* Assume that the call will clobber all hard registers and
4536 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4537 || GET_CODE (x) == MEM)
4544 /* If a sequence exists, we would have to scan every instruction
4545 in the sequence to see if it was safe. This is probably not
4547 if (RTL_EXPR_SEQUENCE (exp))
4550 exp_rtl = RTL_EXPR_RTL (exp);
4553 case WITH_CLEANUP_EXPR:
4554 exp_rtl = RTL_EXPR_RTL (exp);
4557 case CLEANUP_POINT_EXPR:
4558 return safe_from_p (x, TREE_OPERAND (exp, 0));
4561 exp_rtl = SAVE_EXPR_RTL (exp);
4565 /* The only operand we look at is operand 1. The rest aren't
4566 part of the expression. */
4567 return safe_from_p (x, TREE_OPERAND (exp, 1));
4569 case METHOD_CALL_EXPR:
4570 /* This takes a rtx argument, but shouldn't appear here. */
4574 /* If we have an rtx, we do not need to scan our operands. */
/* Otherwise recurse over every operand of EXP.  */
4578 nops = tree_code_length[(int) TREE_CODE (exp)];
4579 for (i = 0; i < nops; i++)
4580 if (TREE_OPERAND (exp, i) != 0
4581 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
4585 /* If we have an rtl, find any enclosed object. Then see if we conflict
4589 if (GET_CODE (exp_rtl) == SUBREG)
4591 exp_rtl = SUBREG_REG (exp_rtl);
4592 if (GET_CODE (exp_rtl) == REG
4593 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
4597 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
4598 are memory and EXP is not readonly. */
4599 return ! (rtx_equal_p (x, exp_rtl)
4600 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
4601 && ! TREE_READONLY (exp)));
4604 /* If we reach here, it is safe. */
4608 /* Subroutine of expand_expr: return nonzero iff EXP is an
4609 expression whose type is statically determinable. */
/* Body of the predicate described by the comment above: nonzero iff
   EXP's type is statically determinable.  NOTE(review): the function
   header line is elided from this listing — presumably this is
   fixed_type_p; confirm against the full source.  Code kept
   byte-identical.  */
4615 if (TREE_CODE (exp) == PARM_DECL
4616 || TREE_CODE (exp) == VAR_DECL
4617 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4618 || TREE_CODE (exp) == COMPONENT_REF
4619 || TREE_CODE (exp) == ARRAY_REF)
4624 /* expand_expr: generate code for computing expression EXP.
4625 An rtx for the computed value is returned. The value is never null.
4626 In the case of a void EXP, const0_rtx is returned.
4628 The value may be stored in TARGET if TARGET is nonzero.
4629 TARGET is just a suggestion; callers must assume that
4630 the rtx returned may not be the same as TARGET.
4632 If TARGET is CONST0_RTX, it means that the value will be ignored.
4634 If TMODE is not VOIDmode, it suggests generating the
4635 result in mode TMODE. But this is done only when convenient.
4636 Otherwise, TMODE is ignored and the value generated in its natural mode.
4637 TMODE is just a suggestion; callers must assume that
4638 the rtx returned may not have mode TMODE.
4640 Note that TARGET may have neither TMODE nor MODE. In that case, it
4641 probably will not be used.
4643 If MODIFIER is EXPAND_SUM then when EXP is an addition
4644 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4645 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4646 products as above, or REG or MEM, or constant.
4647 Ordinarily in such cases we would output mul or add instructions
4648 and then return a pseudo reg containing the sum.
4650 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4651 it also marks a label as absolutely required (it can't be dead).
4652 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
4653 This is used for outputting expressions used in initializers.
4655 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4656 with a constant address even if that address is not normally legitimate.
4657 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
4660 expand_expr (exp, target, tmode, modifier)
4663 enum machine_mode tmode;
4664 enum expand_modifier modifier;
4666 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4667 This is static so it will be accessible to our recursive callees. */
4668 static tree placeholder_list = 0;
4669 register rtx op0, op1, temp;
4670 tree type = TREE_TYPE (exp);
4671 int unsignedp = TREE_UNSIGNED (type);
4672 register enum machine_mode mode = TYPE_MODE (type);
4673 register enum tree_code code = TREE_CODE (exp);
4675 /* Use subtarget as the target for operand 0 of a binary operation. */
4676 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4677 rtx original_target = target;
4678 /* Maybe defer this until sure not doing bytecode? */
4679 int ignore = (target == const0_rtx
4680 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4681 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4682 || code == COND_EXPR)
4683 && TREE_CODE (type) == VOID_TYPE));
4687 if (output_bytecode && modifier != EXPAND_INITIALIZER)
4689 bc_expand_expr (exp);
4693 /* Don't use hard regs as subtargets, because the combiner
4694 can only handle pseudo regs. */
4695 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4697 /* Avoid subtargets inside loops,
4698 since they hide some invariant expressions. */
4699 if (preserve_subexpressions_p ())
4702 /* If we are going to ignore this result, we need only do something
4703 if there is a side-effect somewhere in the expression. If there
4704 is, short-circuit the most common cases here. Note that we must
4705 not call expand_expr with anything but const0_rtx in case this
4706 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
4710 if (! TREE_SIDE_EFFECTS (exp))
4713 /* Ensure we reference a volatile object even if value is ignored. */
4714 if (TREE_THIS_VOLATILE (exp)
4715 && TREE_CODE (exp) != FUNCTION_DECL
4716 && mode != VOIDmode && mode != BLKmode)
4718 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
4719 if (GET_CODE (temp) == MEM)
4720 temp = copy_to_reg (temp);
4724 if (TREE_CODE_CLASS (code) == '1')
4725 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4726 VOIDmode, modifier);
4727 else if (TREE_CODE_CLASS (code) == '2'
4728 || TREE_CODE_CLASS (code) == '<')
4730 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4731 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
4734 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4735 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4736 /* If the second operand has no side effects, just evaluate
4738 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
4739 VOIDmode, modifier);
4744 /* If will do cse, generate all results into pseudo registers
4745 since 1) that allows cse to find more things
4746 and 2) otherwise cse could produce an insn the machine
4749 if (! cse_not_expected && mode != BLKmode && target
4750 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4757 tree function = decl_function_context (exp);
4758 /* Handle using a label in a containing function. */
4759 if (function != current_function_decl && function != 0)
4761 struct function *p = find_function_data (function);
4762 /* Allocate in the memory associated with the function
4763 that the label is in. */
4764 push_obstacks (p->function_obstack,
4765 p->function_maybepermanent_obstack);
4767 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4768 label_rtx (exp), p->forced_labels);
4771 else if (modifier == EXPAND_INITIALIZER)
4772 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
4773 label_rtx (exp), forced_labels);
4774 temp = gen_rtx (MEM, FUNCTION_MODE,
4775 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
4776 if (function != current_function_decl && function != 0)
4777 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4782 if (DECL_RTL (exp) == 0)
4784 error_with_decl (exp, "prior parameter's size depends on `%s'");
4785 return CONST0_RTX (mode);
4788 /* ... fall through ... */
4791 /* If a static var's type was incomplete when the decl was written,
4792 but the type is complete now, lay out the decl now. */
4793 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4794 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4796 push_obstacks_nochange ();
4797 end_temporary_allocation ();
4798 layout_decl (exp, 0);
4799 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4803 /* ... fall through ... */
4807 if (DECL_RTL (exp) == 0)
4810 /* Ensure variable marked as used even if it doesn't go through
4811 a parser. If it hasn't been used yet, write out an external
4813 if (! TREE_USED (exp))
4815 assemble_external (exp);
4816 TREE_USED (exp) = 1;
4819 /* Show we haven't gotten RTL for this yet. */
4822 /* Handle variables inherited from containing functions. */
4823 context = decl_function_context (exp);
4825 /* We treat inline_function_decl as an alias for the current function
4826 because that is the inline function whose vars, types, etc.
4827 are being merged into the current function.
4828 See expand_inline_function. */
4830 if (context != 0 && context != current_function_decl
4831 && context != inline_function_decl
4832 /* If var is static, we don't need a static chain to access it. */
4833 && ! (GET_CODE (DECL_RTL (exp)) == MEM
4834 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
4838 /* Mark as non-local and addressable. */
4839 DECL_NONLOCAL (exp) = 1;
4840 if (DECL_NO_STATIC_CHAIN (current_function_decl))
4842 mark_addressable (exp);
4843 if (GET_CODE (DECL_RTL (exp)) != MEM)
4845 addr = XEXP (DECL_RTL (exp), 0);
4846 if (GET_CODE (addr) == MEM)
4847 addr = gen_rtx (MEM, Pmode,
4848 fix_lexical_addr (XEXP (addr, 0), exp));
4850 addr = fix_lexical_addr (addr, exp);
4851 temp = change_address (DECL_RTL (exp), mode, addr);
4854 /* This is the case of an array whose size is to be determined
4855 from its initializer, while the initializer is still being parsed.
4858 else if (GET_CODE (DECL_RTL (exp)) == MEM
4859 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
4860 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
4861 XEXP (DECL_RTL (exp), 0));
4863 /* If DECL_RTL is memory, we are in the normal case and either
4864 the address is not valid or it is not a register and -fforce-addr
4865 is specified, get the address into a register. */
4867 else if (GET_CODE (DECL_RTL (exp)) == MEM
4868 && modifier != EXPAND_CONST_ADDRESS
4869 && modifier != EXPAND_SUM
4870 && modifier != EXPAND_INITIALIZER
4871 && (! memory_address_p (DECL_MODE (exp),
4872 XEXP (DECL_RTL (exp), 0))
4874 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
4875 temp = change_address (DECL_RTL (exp), VOIDmode,
4876 copy_rtx (XEXP (DECL_RTL (exp), 0)));
4878 /* If we got something, return it. But first, set the alignment
4879 in case the address is a register.
4882 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
4883 mark_reg_pointer (XEXP (temp, 0),
4884 DECL_ALIGN (exp) / BITS_PER_UNIT);
4889 /* If the mode of DECL_RTL does not match that of the decl, it
4890 must be a promoted value. We return a SUBREG of the wanted mode,
4891 but mark it so that we know that it was already extended. */
4893 if (GET_CODE (DECL_RTL (exp)) == REG
4894 && GET_MODE (DECL_RTL (exp)) != mode)
4896 /* Get the signedness used for this variable. Ensure we get the
4897 same mode we got when the variable was declared. */
4898 if (GET_MODE (DECL_RTL (exp))
4899 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
4902 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
4903 SUBREG_PROMOTED_VAR_P (temp) = 1;
4904 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4908 return DECL_RTL (exp);
4911 return immed_double_const (TREE_INT_CST_LOW (exp),
4912 TREE_INT_CST_HIGH (exp),
4916 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
4919 /* If optimized, generate immediate CONST_DOUBLE
4920 which will be turned into memory by reload if necessary.
4922 We used to force a register so that loop.c could see it. But
4923 this does not allow gen_* patterns to perform optimizations with
4924 the constants. It also produces two insns in cases like "x = 1.0;".
4925 On most machines, floating-point constants are not permitted in
4926 many insns, so we'd end up copying it to a register in any case.
4928 Now, we do the copying in expand_binop, if appropriate. */
4929 return immed_real_const (exp);
4933 if (! TREE_CST_RTL (exp))
4934 output_constant_def (exp);
4936 /* TREE_CST_RTL probably contains a constant address.
4937 On RISC machines where a constant address isn't valid,
4938 make some insns to get that address into a register. */
4939 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4940 && modifier != EXPAND_CONST_ADDRESS
4941 && modifier != EXPAND_INITIALIZER
4942 && modifier != EXPAND_SUM
4943 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
4945 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
4946 return change_address (TREE_CST_RTL (exp), VOIDmode,
4947 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4948 return TREE_CST_RTL (exp);
4951 context = decl_function_context (exp);
4953 /* We treat inline_function_decl as an alias for the current function
4954 because that is the inline function whose vars, types, etc.
4955 are being merged into the current function.
4956 See expand_inline_function. */
4957 if (context == current_function_decl || context == inline_function_decl)
4960 /* If this is non-local, handle it. */
4963 temp = SAVE_EXPR_RTL (exp);
4964 if (temp && GET_CODE (temp) == REG)
4966 put_var_into_stack (exp);
4967 temp = SAVE_EXPR_RTL (exp);
4969 if (temp == 0 || GET_CODE (temp) != MEM)
4971 return change_address (temp, mode,
4972 fix_lexical_addr (XEXP (temp, 0), exp));
4974 if (SAVE_EXPR_RTL (exp) == 0)
4976 if (mode == VOIDmode)
4979 temp = assign_temp (type, 0, 0, 0);
4981 SAVE_EXPR_RTL (exp) = temp;
4982 if (!optimize && GET_CODE (temp) == REG)
4983 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4986 /* If the mode of TEMP does not match that of the expression, it
4987 must be a promoted value. We pass store_expr a SUBREG of the
4988 wanted mode but mark it so that we know that it was already
4989 extended. Note that `unsignedp' was modified above in
4992 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
4994 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4995 SUBREG_PROMOTED_VAR_P (temp) = 1;
4996 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4999 if (temp == const0_rtx)
5000 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5002 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5005 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5006 must be a promoted value. We return a SUBREG of the wanted mode,
5007 but mark it so that we know that it was already extended. */
5009 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5010 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5012 /* Compute the signedness and make the proper SUBREG. */
5013 promote_mode (type, mode, &unsignedp, 0);
5014 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
5015 SUBREG_PROMOTED_VAR_P (temp) = 1;
5016 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5020 return SAVE_EXPR_RTL (exp);
5025 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5026 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5030 case PLACEHOLDER_EXPR:
5031 /* If there is an object on the head of the placeholder list,
5032 see if some object in its references is of type TYPE. For
5033 further information, see tree.def. */
5034 if (placeholder_list)
5037 tree old_list = placeholder_list;
5039 for (object = TREE_PURPOSE (placeholder_list);
5040 (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5041 != TYPE_MAIN_VARIANT (type))
5042 && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
5043 || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
5044 || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
5045 || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
5046 object = TREE_OPERAND (object, 0))
5050 && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
5051 == TYPE_MAIN_VARIANT (type)))
5053 /* Expand this object skipping the list entries before
5054 it was found in case it is also a PLACEHOLDER_EXPR.
5055 In that case, we want to translate it using subsequent
5057 placeholder_list = TREE_CHAIN (placeholder_list);
5058 temp = expand_expr (object, original_target, tmode, modifier);
5059 placeholder_list = old_list;
5064 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5067 case WITH_RECORD_EXPR:
5068 /* Put the object on the placeholder list, expand our first operand,
5069 and pop the list. */
5070 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5072 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
5074 placeholder_list = TREE_CHAIN (placeholder_list);
5078 expand_exit_loop_if_false (NULL_PTR,
5079 invert_truthvalue (TREE_OPERAND (exp, 0)));
5084 expand_start_loop (1);
5085 expand_expr_stmt (TREE_OPERAND (exp, 0));
5093 tree vars = TREE_OPERAND (exp, 0);
5094 int vars_need_expansion = 0;
5096 /* Need to open a binding contour here because
5097 if there are any cleanups they must be contained here. */
5098 expand_start_bindings (0);
5100 /* Mark the corresponding BLOCK for output in its proper place. */
5101 if (TREE_OPERAND (exp, 2) != 0
5102 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5103 insert_block (TREE_OPERAND (exp, 2));
5105 /* If VARS have not yet been expanded, expand them now. */
5108 if (DECL_RTL (vars) == 0)
5110 vars_need_expansion = 1;
5113 expand_decl_init (vars);
5114 vars = TREE_CHAIN (vars);
5117 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
5119 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5125 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5127 emit_insns (RTL_EXPR_SEQUENCE (exp));
5128 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5129 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
5130 free_temps_for_rtl_expr (exp);
5131 return RTL_EXPR_RTL (exp);
5134 /* If we don't need the result, just ensure we evaluate any
5139 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5140 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
5144 /* All elts simple constants => refer to a constant in memory. But
5145 if this is a non-BLKmode mode, let it store a field at a time
5146 since that should make a CONST_INT or CONST_DOUBLE when we
5147 fold. Likewise, if we have a target we can use, it is best to
5148 store directly into the target unless the type is large enough
5149 that memcpy will be used. If we are making an initializer and
5150 all operands are constant, put it in memory as well. */
5151 else if ((TREE_STATIC (exp)
5152 && ((mode == BLKmode
5153 && ! (target != 0 && safe_from_p (target, exp)))
5154 || TREE_ADDRESSABLE (exp)
5155 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5156 && (move_by_pieces_ninsns
5157 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5158 TYPE_ALIGN (type) / BITS_PER_UNIT)
5160 && ! mostly_zeros_p (exp))))
5161 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5163 rtx constructor = output_constant_def (exp);
5164 if (modifier != EXPAND_CONST_ADDRESS
5165 && modifier != EXPAND_INITIALIZER
5166 && modifier != EXPAND_SUM
5167 && (! memory_address_p (GET_MODE (constructor),
5168 XEXP (constructor, 0))
5170 && GET_CODE (XEXP (constructor, 0)) != REG)))
5171 constructor = change_address (constructor, VOIDmode,
5172 XEXP (constructor, 0));
5178 if (target == 0 || ! safe_from_p (target, exp))
5180 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5181 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5183 target = assign_temp (type, 0, 1, 1);
5186 if (TREE_READONLY (exp))
5188 if (GET_CODE (target) == MEM)
5189 target = change_address (target, GET_MODE (target),
5191 RTX_UNCHANGING_P (target) = 1;
5194 store_constructor (exp, target, 0);
5200 tree exp1 = TREE_OPERAND (exp, 0);
5203 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5204 op0 = memory_address (mode, op0);
5206 temp = gen_rtx (MEM, mode, op0);
5207 /* If address was computed by addition,
5208 mark this as an element of an aggregate. */
5209 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5210 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5211 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
5212 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5213 || (TREE_CODE (exp1) == ADDR_EXPR
5214 && (exp2 = TREE_OPERAND (exp1, 0))
5215 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
5216 MEM_IN_STRUCT_P (temp) = 1;
5217 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
5219 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5220 here, because, in C and C++, the fact that a location is accessed
5221 through a pointer to const does not mean that the value there can
5222 never change. Languages where it can never change should
5223 also set TREE_STATIC. */
5224 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
5229 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5233 tree array = TREE_OPERAND (exp, 0);
5234 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5235 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5236 tree index = TREE_OPERAND (exp, 1);
5237 tree index_type = TREE_TYPE (index);
5240 if (TREE_CODE (low_bound) != INTEGER_CST
5241 && contains_placeholder_p (low_bound))
5242 low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
5244 /* Optimize the special-case of a zero lower bound.
5246 We convert the low_bound to sizetype to avoid some problems
5247 with constant folding. (E.g. suppose the lower bound is 1,
5248 and its mode is QI. Without the conversion, (ARRAY
5249 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5250 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5252 But sizetype isn't quite right either (especially if
5253 the lowbound is negative). FIXME */
5255 if (! integer_zerop (low_bound))
5256 index = fold (build (MINUS_EXPR, index_type, index,
5257 convert (sizetype, low_bound)));
5259 if ((TREE_CODE (index) != INTEGER_CST
5260 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5261 && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
5263 /* Nonconstant array index or nonconstant element size, and
5264 not an array in an unaligned (packed) structure field.
5265 Generate the tree for *(&array+index) and expand that,
5266 except do it in a language-independent way
5267 and don't complain about non-lvalue arrays.
5268 `mark_addressable' should already have been called
5269 for any array for which this case will be reached. */
5271 /* Don't forget the const or volatile flag from the array
5273 tree variant_type = build_type_variant (type,
5274 TREE_READONLY (exp),
5275 TREE_THIS_VOLATILE (exp));
5276 tree array_adr = build1 (ADDR_EXPR,
5277 build_pointer_type (variant_type), array);
5279 tree size = size_in_bytes (type);
5281 /* Convert the integer argument to a type the same size as sizetype
5282 so the multiply won't overflow spuriously. */
5283 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
5284 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5287 if (TREE_CODE (size) != INTEGER_CST
5288 && contains_placeholder_p (size))
5289 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
5291 /* Don't think the address has side effects
5292 just because the array does.
5293 (In some cases the address might have side effects,
5294 and we fail to record that fact here. However, it should not
5295 matter, since expand_expr should not care.) */
5296 TREE_SIDE_EFFECTS (array_adr) = 0;
5300 (INDIRECT_REF, type,
5301 fold (build (PLUS_EXPR,
5302 TYPE_POINTER_TO (variant_type),
5307 TYPE_POINTER_TO (variant_type),
5308 fold (build (MULT_EXPR, TREE_TYPE (index),
5310 convert (TREE_TYPE (index),
5313 /* Volatility, etc., of new expression is same as old
5315 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
5316 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
5317 TREE_READONLY (elt) = TREE_READONLY (exp);
5319 return expand_expr (elt, target, tmode, modifier);
5322 /* Fold an expression like: "foo"[2].
5323 This is not done in fold so it won't happen inside &.
5324 Don't fold if this is for wide characters since it's too
5325 difficult to do correctly and this is a very rare case. */
5327 if (TREE_CODE (array) == STRING_CST
5328 && TREE_CODE (index) == INTEGER_CST
5329 && !TREE_INT_CST_HIGH (index)
5330 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
5331 && GET_MODE_CLASS (mode) == MODE_INT
5332 && GET_MODE_SIZE (mode) == 1)
5333 return GEN_INT (TREE_STRING_POINTER (array)[i]);
5335 /* If this is a constant index into a constant array,
5336 just get the value from the array. Handle both the cases when
5337 we have an explicit constructor and when our operand is a variable
5338 that was declared const. */
5340 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5342 if (TREE_CODE (index) == INTEGER_CST
5343 && TREE_INT_CST_HIGH (index) == 0)
5345 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5347 i = TREE_INT_CST_LOW (index);
5349 elem = TREE_CHAIN (elem);
5351 return expand_expr (fold (TREE_VALUE (elem)), target,
5356 else if (optimize >= 1
5357 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5358 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5359 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5361 if (TREE_CODE (index) == INTEGER_CST
5362 && TREE_INT_CST_HIGH (index) == 0)
5364 tree init = DECL_INITIAL (array);
5366 i = TREE_INT_CST_LOW (index);
5367 if (TREE_CODE (init) == CONSTRUCTOR)
5369 tree elem = CONSTRUCTOR_ELTS (init);
5372 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
5373 elem = TREE_CHAIN (elem);
5375 return expand_expr (fold (TREE_VALUE (elem)), target,
5378 else if (TREE_CODE (init) == STRING_CST
5379 && i < TREE_STRING_LENGTH (init))
5380 return GEN_INT (TREE_STRING_POINTER (init)[i]);
5385 /* Treat array-ref with constant index as a component-ref. */
5389 /* If the operand is a CONSTRUCTOR, we can just extract the
5390 appropriate field if it is present. Don't do this if we have
5391 already written the data since we want to refer to that copy
5392 and varasm.c assumes that's what we'll do. */
5393 if (code != ARRAY_REF
5394 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5395 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
5399 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5400 elt = TREE_CHAIN (elt))
5401 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
5402 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5406 enum machine_mode mode1;
5411 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5412 &mode1, &unsignedp, &volatilep);
5415 /* If we got back the original object, something is wrong. Perhaps
5416 we are evaluating an expression too early. In any event, don't
5417 infinitely recurse. */
5421 /* If TEM's type is a union of variable size, pass TARGET to the inner
5422 computation, since it will need a temporary and TARGET is known
5423 to have to do. This occurs in unchecked conversion in Ada. */
5425 op0 = expand_expr (tem,
5426 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5427 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5429 ? target : NULL_RTX),
5431 modifier == EXPAND_INITIALIZER ? modifier : 0);
5433 /* If this is a constant, put it into a register if it is a
5434 legitimate constant and memory if it isn't. */
5435 if (CONSTANT_P (op0))
5437 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
5438 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
5439 op0 = force_reg (mode, op0);
5441 op0 = validize_mem (force_const_mem (mode, op0));
5444 alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
5447 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
5449 if (GET_CODE (op0) != MEM)
5451 op0 = change_address (op0, VOIDmode,
5452 gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
5453 force_reg (ptr_mode, offset_rtx)));
5454 /* If we have a variable offset, the known alignment
5455 is only that of the innermost structure containing the field.
5456 (Actually, we could sometimes do better by using the
5457 size of an element of the innermost array, but no need.) */
5458 if (TREE_CODE (exp) == COMPONENT_REF
5459 || TREE_CODE (exp) == BIT_FIELD_REF)
5460 alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5464 /* Don't forget about volatility even if this is a bitfield. */
5465 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5467 op0 = copy_rtx (op0);
5468 MEM_VOLATILE_P (op0) = 1;
5471 /* In cases where an aligned union has an unaligned object
5472 as a field, we might be extracting a BLKmode value from
5473 an integer-mode (e.g., SImode) object. Handle this case
5474 by doing the extract into an object as wide as the field
5475 (which we know to be the width of a basic mode), then
5476 storing into memory, and changing the mode to BLKmode.
5477 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5478 EXPAND_INITIALIZER), then we must not copy to a temporary. */
5479 if (mode1 == VOIDmode
5480 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5481 || (modifier != EXPAND_CONST_ADDRESS
5482 && modifier != EXPAND_INITIALIZER
5483 && ((mode1 != BLKmode && ! direct_load[(int) mode1])
5484 /* If the field isn't aligned enough to fetch as a memref,
5485 fetch it as a bit field. */
5486 || (SLOW_UNALIGNED_ACCESS
5487 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5488 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
5490 enum machine_mode ext_mode = mode;
5492 if (ext_mode == BLKmode)
5493 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5495 if (ext_mode == BLKmode)
5497 /* In this case, BITPOS must start at a byte boundary and
5498 TARGET, if specified, must be a MEM. */
5499 if (GET_CODE (op0) != MEM
5500 || (target != 0 && GET_CODE (target) != MEM)
5501 || bitpos % BITS_PER_UNIT != 0)
5504 op0 = change_address (op0, VOIDmode,
5505 plus_constant (XEXP (op0, 0),
5506 bitpos / BITS_PER_UNIT));
5508 target = assign_temp (type, 0, 1, 1);
5510 emit_block_move (target, op0,
5511 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5518 op0 = validize_mem (op0);
5520 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5521 mark_reg_pointer (XEXP (op0, 0), alignment);
5523 op0 = extract_bit_field (op0, bitsize, bitpos,
5524 unsignedp, target, ext_mode, ext_mode,
5526 int_size_in_bytes (TREE_TYPE (tem)));
5527 if (mode == BLKmode)
5529 rtx new = assign_stack_temp (ext_mode,
5530 bitsize / BITS_PER_UNIT, 0);
5532 emit_move_insn (new, op0);
5533 op0 = copy_rtx (new);
5534 PUT_MODE (op0, BLKmode);
5535 MEM_IN_STRUCT_P (op0) = 1;
5541 /* If the result is BLKmode, use that to access the object
5543 if (mode == BLKmode)
5546 /* Get a reference to just this component. */
5547 if (modifier == EXPAND_CONST_ADDRESS
5548 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5549 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
5550 (bitpos / BITS_PER_UNIT)));
5552 op0 = change_address (op0, mode1,
5553 plus_constant (XEXP (op0, 0),
5554 (bitpos / BITS_PER_UNIT)));
5555 if (GET_CODE (XEXP (op0, 0)) == REG)
5556 mark_reg_pointer (XEXP (op0, 0), alignment);
5558 MEM_IN_STRUCT_P (op0) = 1;
5559 MEM_VOLATILE_P (op0) |= volatilep;
5560 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
5563 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5564 convert_move (target, op0, unsignedp);
5568 /* Intended for a reference to a buffer of a file-object in Pascal.
5569 But it's not certain that a special tree code will really be
5570 necessary for these. INDIRECT_REF might work for them. */
5576 /* Pascal set IN expression.
5579 rlo = set_low - (set_low%bits_per_word);
5580 the_word = set [ (index - rlo)/bits_per_word ];
5581 bit_index = index % bits_per_word;
5582 bitmask = 1 << bit_index;
5583 return !!(the_word & bitmask); */
5585 tree set = TREE_OPERAND (exp, 0);
5586 tree index = TREE_OPERAND (exp, 1);
5587 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
5588 tree set_type = TREE_TYPE (set);
5589 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5590 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
5591 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5592 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5593 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5594 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5595 rtx setaddr = XEXP (setval, 0);
5596 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
5598 rtx diff, quo, rem, addr, bit, result;
5600 preexpand_calls (exp);
5602 /* If domain is empty, answer is no. Likewise if index is constant
5603 and out of bounds. */
5604 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5605 && TREE_CODE (set_low_bound) == INTEGER_CST
5606 && tree_int_cst_lt (set_high_bound, set_low_bound)
5607 || (TREE_CODE (index) == INTEGER_CST
5608 && TREE_CODE (set_low_bound) == INTEGER_CST
5609 && tree_int_cst_lt (index, set_low_bound))
5610 || (TREE_CODE (set_high_bound) == INTEGER_CST
5611 && TREE_CODE (index) == INTEGER_CST
5612 && tree_int_cst_lt (set_high_bound, index))))
5616 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5618 /* If we get here, we have to generate the code for both cases
5619 (in range and out of range). */
5621 op0 = gen_label_rtx ();
5622 op1 = gen_label_rtx ();
5624 if (! (GET_CODE (index_val) == CONST_INT
5625 && GET_CODE (lo_r) == CONST_INT))
5627 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
5628 GET_MODE (index_val), iunsignedp, 0);
5629 emit_jump_insn (gen_blt (op1));
5632 if (! (GET_CODE (index_val) == CONST_INT
5633 && GET_CODE (hi_r) == CONST_INT))
5635 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
5636 GET_MODE (index_val), iunsignedp, 0);
5637 emit_jump_insn (gen_bgt (op1));
5640 /* Calculate the element number of bit zero in the first word
5642 if (GET_CODE (lo_r) == CONST_INT)
5643 rlow = GEN_INT (INTVAL (lo_r)
5644 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
5646 rlow = expand_binop (index_mode, and_optab, lo_r,
5647 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
5648 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5650 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5651 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
5653 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
5654 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5655 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
5656 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5658 addr = memory_address (byte_mode,
5659 expand_binop (index_mode, add_optab, diff,
5660 setaddr, NULL_RTX, iunsignedp,
5663 /* Extract the bit we want to examine */
5664 bit = expand_shift (RSHIFT_EXPR, byte_mode,
5665 gen_rtx (MEM, byte_mode, addr),
5666 make_tree (TREE_TYPE (index), rem),
5668 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5669 GET_MODE (target) == byte_mode ? target : 0,
5670 1, OPTAB_LIB_WIDEN);
5672 if (result != target)
5673 convert_move (target, result, 1);
5675 /* Output the code to handle the out-of-range case. */
5678 emit_move_insn (target, const0_rtx);
5683 case WITH_CLEANUP_EXPR:
5684 if (RTL_EXPR_RTL (exp) == 0)
5687 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5689 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
5690 /* That's it for this cleanup. */
5691 TREE_OPERAND (exp, 2) = 0;
5692 expand_eh_region_start ();
5694 return RTL_EXPR_RTL (exp);
5696 case CLEANUP_POINT_EXPR:
5698 extern int temp_slot_level;
5699 tree old_cleanups = cleanups_this_call;
5700 int old_temp_level = target_temp_slot_level;
5702 target_temp_slot_level = temp_slot_level;
5703 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5704 /* If we're going to use this value, load it up now. */
5706 op0 = force_not_mem (op0);
5707 expand_cleanups_to (old_cleanups);
5708 preserve_temp_slots (op0);
5711 target_temp_slot_level = old_temp_level;
5716 /* Check for a built-in function. */
5717 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5718 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5720 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5721 return expand_builtin (exp, target, subtarget, tmode, ignore);
5723 /* If this call was expanded already by preexpand_calls,
5724 just return the result we got. */
5725 if (CALL_EXPR_RTL (exp) != 0)
5726 return CALL_EXPR_RTL (exp);
5728 return expand_call (exp, target, ignore);
5730 case NON_LVALUE_EXPR:
5733 case REFERENCE_EXPR:
5734 if (TREE_CODE (type) == UNION_TYPE)
5736 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
5739 if (mode != BLKmode)
5740 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5742 target = assign_temp (type, 0, 1, 1);
5745 if (GET_CODE (target) == MEM)
5746 /* Store data into beginning of memory target. */
5747 store_expr (TREE_OPERAND (exp, 0),
5748 change_address (target, TYPE_MODE (valtype), 0), 0);
5750 else if (GET_CODE (target) == REG)
5751 /* Store this field into a union of the proper type. */
5752 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
5753 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
5755 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
5759 /* Return the entire union. */
5763 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5765 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
5768 /* If the signedness of the conversion differs and OP0 is
5769 a promoted SUBREG, clear that indication since we now
5770 have to do the proper extension. */
5771 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
5772 && GET_CODE (op0) == SUBREG)
5773 SUBREG_PROMOTED_VAR_P (op0) = 0;
5778 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
5779 if (GET_MODE (op0) == mode)
5782 /* If OP0 is a constant, just convert it into the proper mode. */
5783 if (CONSTANT_P (op0))
5785 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5786 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5788 if (modifier == EXPAND_INITIALIZER)
5789 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
5793 convert_to_mode (mode, op0,
5794 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5796 convert_move (target, op0,
5797 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5801 /* We come here from MINUS_EXPR when the second operand is a
5804 this_optab = add_optab;
5806 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
5807 something else, make sure we add the register to the constant and
5808 then to the other thing. This case can occur during strength
5809 reduction and doing it this way will produce better code if the
5810 frame pointer or argument pointer is eliminated.
5812 fold-const.c will ensure that the constant is always in the inner
5813 PLUS_EXPR, so the only case we need to do anything about is if
5814 sp, ap, or fp is our second argument, in which case we must swap
5815 the innermost first argument and our second argument. */
5817 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5818 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
5819 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
5820 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
5821 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
5822 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
5824 tree t = TREE_OPERAND (exp, 1);
5826 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5827 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
5830 /* If the result is to be ptr_mode and we are adding an integer to
5831 something, we might be forming a constant. So try to use
5832 plus_constant. If it produces a sum and we can't accept it,
5833 use force_operand. This allows P = &ARR[const] to generate
5834 efficient code on machines where a SYMBOL_REF is not a valid
5837 If this is an EXPAND_SUM call, always return the sum. */
5838 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
5839 || mode == ptr_mode)
5841 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
5842 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5843 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
5845 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
5847 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
5848 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5849 op1 = force_operand (op1, target);
5853 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
5854 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
5855 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
5857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
5859 if (! CONSTANT_P (op0))
5861 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5862 VOIDmode, modifier);
5863 /* Don't go to both_summands if modifier
5864 says it's not right to return a PLUS. */
5865 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5869 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
5870 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5871 op0 = force_operand (op0, target);
5876 /* No sense saving up arithmetic to be done
5877 if it's all in the wrong mode to form part of an address.
5878 And force_operand won't know whether to sign-extend or
5880 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
5881 || mode != ptr_mode)
5884 preexpand_calls (exp);
5885 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5888 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
5889 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
5892 /* Make sure any term that's a sum with a constant comes last. */
5893 if (GET_CODE (op0) == PLUS
5894 && CONSTANT_P (XEXP (op0, 1)))
5900 /* If adding to a sum including a constant,
5901 associate it to put the constant outside. */
5902 if (GET_CODE (op1) == PLUS
5903 && CONSTANT_P (XEXP (op1, 1)))
5905 rtx constant_term = const0_rtx;
5907 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
5910 /* Ensure that MULT comes first if there is one. */
5911 else if (GET_CODE (op0) == MULT)
5912 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
5914 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
5916 /* Let's also eliminate constants from op0 if possible. */
5917 op0 = eliminate_constant_term (op0, &constant_term);
5919 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
5920 their sum should be a constant. Form it into OP1, since the
5921 result we want will then be OP0 + OP1. */
5923 temp = simplify_binary_operation (PLUS, mode, constant_term,
5928 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
5931 /* Put a constant term last and put a multiplication first. */
5932 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
5933 temp = op1, op1 = op0, op0 = temp;
5935 temp = simplify_binary_operation (PLUS, mode, op0, op1);
5936 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
5939 /* For initializers, we are allowed to return a MINUS of two
5940 symbolic constants. Here we handle all cases when both operands
5942 /* Handle difference of two symbolic constants,
5943 for the sake of an initializer. */
5944 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5945 && really_constant_p (TREE_OPERAND (exp, 0))
5946 && really_constant_p (TREE_OPERAND (exp, 1)))
5948 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
5949 VOIDmode, modifier);
5950 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
5951 VOIDmode, modifier);
5953 /* If the last operand is a CONST_INT, use plus_constant of
5954 the negated constant. Else make the MINUS. */
5955 if (GET_CODE (op1) == CONST_INT)
5956 return plus_constant (op0, - INTVAL (op1));
5958 return gen_rtx (MINUS, mode, op0, op1);
5960 /* Convert A - const to A + (-const). */
5961 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
5963 tree negated = fold (build1 (NEGATE_EXPR, type,
5964 TREE_OPERAND (exp, 1)));
5966 /* Deal with the case where we can't negate the constant
5968 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
5970 tree newtype = signed_type (type);
5971 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
5972 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
5973 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
5975 if (! TREE_OVERFLOW (newneg))
5976 return expand_expr (convert (type,
5977 build (PLUS_EXPR, newtype,
5979 target, tmode, modifier);
5983 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
5987 this_optab = sub_optab;
5991 preexpand_calls (exp);
5992 /* If first operand is constant, swap them.
5993 Thus the following special case checks need only
5994 check the second operand. */
5995 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5997 register tree t1 = TREE_OPERAND (exp, 0);
5998 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
5999 TREE_OPERAND (exp, 1) = t1;
6002 /* Attempt to return something suitable for generating an
6003 indexed address, for machines that support that. */
6005 if (modifier == EXPAND_SUM && mode == ptr_mode
6006 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6007 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6009 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
6011 /* Apply distributive law if OP0 is x+c. */
6012 if (GET_CODE (op0) == PLUS
6013 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6014 return gen_rtx (PLUS, mode,
6015 gen_rtx (MULT, mode, XEXP (op0, 0),
6016 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6017 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6018 * INTVAL (XEXP (op0, 1))));
6020 if (GET_CODE (op0) != REG)
6021 op0 = force_operand (op0, NULL_RTX);
6022 if (GET_CODE (op0) != REG)
6023 op0 = copy_to_mode_reg (mode, op0);
6025 return gen_rtx (MULT, mode, op0,
6026 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6029 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6032 /* Check for multiplying things that have been extended
6033 from a narrower type. If this machine supports multiplying
6034 in that narrower type with a result in the desired type,
6035 do it that way, and avoid the explicit type-conversion. */
6036 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6037 && TREE_CODE (type) == INTEGER_TYPE
6038 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6039 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6040 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6041 && int_fits_type_p (TREE_OPERAND (exp, 1),
6042 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6043 /* Don't use a widening multiply if a shift will do. */
6044 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
6045 > HOST_BITS_PER_WIDE_INT)
6046 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6048 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6049 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6051 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6052 /* If both operands are extended, they must either both
6053 be zero-extended or both be sign-extended. */
6054 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6056 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6058 enum machine_mode innermode
6059 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
6060 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6061 ? smul_widen_optab : umul_widen_optab);
6062 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6063 ? umul_widen_optab : smul_widen_optab);
6064 if (mode == GET_MODE_WIDER_MODE (innermode))
6066 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6068 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6069 NULL_RTX, VOIDmode, 0);
6070 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6071 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6074 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6075 NULL_RTX, VOIDmode, 0);
6078 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6079 && innermode == word_mode)
6082 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6083 NULL_RTX, VOIDmode, 0);
6084 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6085 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6088 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6089 NULL_RTX, VOIDmode, 0);
6090 temp = expand_binop (mode, other_optab, op0, op1, target,
6091 unsignedp, OPTAB_LIB_WIDEN);
6092 htem = expand_mult_highpart_adjust (innermode,
6093 gen_highpart (innermode, temp),
6095 gen_highpart (innermode, temp),
6097 emit_move_insn (gen_highpart (innermode, temp), htem);
6102 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6103 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6104 return expand_mult (mode, op0, op1, target, unsignedp);
6106 case TRUNC_DIV_EXPR:
6107 case FLOOR_DIV_EXPR:
6109 case ROUND_DIV_EXPR:
6110 case EXACT_DIV_EXPR:
6111 preexpand_calls (exp);
6112 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6114 /* Possible optimization: compute the dividend with EXPAND_SUM
6115 then if the divisor is constant can optimize the case
6116 where some terms of the dividend have coeffs divisible by it. */
6117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6118 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6119 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6122 this_optab = flodiv_optab;
6125 case TRUNC_MOD_EXPR:
6126 case FLOOR_MOD_EXPR:
6128 case ROUND_MOD_EXPR:
6129 preexpand_calls (exp);
6130 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6132 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6133 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6134 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6136 case FIX_ROUND_EXPR:
6137 case FIX_FLOOR_EXPR:
6139 abort (); /* Not used for C. */
6141 case FIX_TRUNC_EXPR:
6142 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6144 target = gen_reg_rtx (mode);
6145 expand_fix (target, op0, unsignedp);
6149 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
6151 target = gen_reg_rtx (mode);
6152 /* expand_float can't figure out what to do if FROM has VOIDmode.
6153 So give it the correct mode. With -O, cse will optimize this. */
6154 if (GET_MODE (op0) == VOIDmode)
6155 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6157 expand_float (target, op0,
6158 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6162 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6163 temp = expand_unop (mode, neg_optab, op0, target, 0);
6169 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6171 /* Handle complex values specially. */
6172 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6173 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6174 return expand_complex_abs (mode, op0, target, unsignedp);
6176 /* Unsigned abs is simply the operand. Testing here means we don't
6177 risk generating incorrect code below. */
6178 if (TREE_UNSIGNED (type))
6181 return expand_abs (mode, op0, target, unsignedp,
6182 safe_from_p (target, TREE_OPERAND (exp, 0)));
6186 target = original_target;
6187 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
6188 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
6189 || GET_MODE (target) != mode
6190 || (GET_CODE (target) == REG
6191 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6192 target = gen_reg_rtx (mode);
6193 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6194 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6196 /* First try to do it with a special MIN or MAX instruction.
6197 If that does not win, use a conditional jump to select the proper
6199 this_optab = (TREE_UNSIGNED (type)
6200 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6201 : (code == MIN_EXPR ? smin_optab : smax_optab));
6203 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6208 /* At this point, a MEM target is no longer useful; we will get better
6211 if (GET_CODE (target) == MEM)
6212 target = gen_reg_rtx (mode);
6215 emit_move_insn (target, op0);
6217 op0 = gen_label_rtx ();
6219 /* If this mode is an integer too wide to compare properly,
6220 compare word by word. Rely on cse to optimize constant cases. */
6221 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
6223 if (code == MAX_EXPR)
6224 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6225 target, op1, NULL_RTX, op0);
6227 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6228 op1, target, NULL_RTX, op0);
6229 emit_move_insn (target, op1);
6233 if (code == MAX_EXPR)
6234 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6235 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6236 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
6238 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
6239 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6240 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
6241 if (temp == const0_rtx)
6242 emit_move_insn (target, op1);
6243 else if (temp != const_true_rtx)
6245 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6246 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6249 emit_move_insn (target, op1);
6256 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6257 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6264 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6269 /* ??? Can optimize bitwise operations with one arg constant.
6270 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6271 and (a bitwise1 b) bitwise2 b (etc)
6272 but that is probably not worth while. */
6274 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6275 boolean values when we want in all cases to compute both of them. In
6276 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6277 as actual zero-or-1 values and then bitwise anding. In cases where
6278 there cannot be any side effects, better code would be made by
6279 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6280 how to recognize those cases. */
6282 case TRUTH_AND_EXPR:
6284 this_optab = and_optab;
6289 this_optab = ior_optab;
6292 case TRUTH_XOR_EXPR:
6294 this_optab = xor_optab;
6301 preexpand_calls (exp);
6302 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6304 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6305 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6308 /* Could determine the answer when only additive constants differ. Also,
6309 the addition of one can be handled by changing the condition. */
6316 preexpand_calls (exp);
6317 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6321 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
6322 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6324 && GET_CODE (original_target) == REG
6325 && (GET_MODE (original_target)
6326 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6328 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6331 if (temp != original_target)
6332 temp = copy_to_reg (temp);
6334 op1 = gen_label_rtx ();
6335 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
6336 GET_MODE (temp), unsignedp, 0);
6337 emit_jump_insn (gen_beq (op1));
6338 emit_move_insn (temp, const1_rtx);
6343 /* If no set-flag instruction, must generate a conditional
6344 store into a temporary variable. Drop through
6345 and handle this like && and ||. */
6347 case TRUTH_ANDIF_EXPR:
6348 case TRUTH_ORIF_EXPR:
6350 && (target == 0 || ! safe_from_p (target, exp)
6351 /* Make sure we don't have a hard reg (such as function's return
6352 value) live across basic blocks, if not optimizing. */
6353 || (!optimize && GET_CODE (target) == REG
6354 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
6355 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6358 emit_clr_insn (target);
6360 op1 = gen_label_rtx ();
6361 jumpifnot (exp, op1);
6364 emit_0_to_1_insn (target);
6367 return ignore ? const0_rtx : target;
6369 case TRUTH_NOT_EXPR:
6370 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6371 /* The parser is careful to generate TRUTH_NOT_EXPR
6372 only with operands that are always zero or one. */
6373 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
6374 target, 1, OPTAB_LIB_WIDEN);
6380 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6382 return expand_expr (TREE_OPERAND (exp, 1),
6383 (ignore ? const0_rtx : target),
6388 rtx flag = NULL_RTX;
6389 tree left_cleanups = NULL_TREE;
6390 tree right_cleanups = NULL_TREE;
6392 /* Used to save a pointer to the place to put the setting of
6393 the flag that indicates if this side of the conditional was
6394 taken. We backpatch the code, if we find out later that we
6395 have any conditional cleanups that need to be performed. */
6396 rtx dest_right_flag = NULL_RTX;
6397 rtx dest_left_flag = NULL_RTX;
6399 /* Note that COND_EXPRs whose type is a structure or union
6400 are required to be constructed to contain assignments of
6401 a temporary variable, so that we can evaluate them here
6402 for side effect only. If type is void, we must do likewise. */
6404 /* If an arm of the branch requires a cleanup,
6405 only that cleanup is performed. */
6408 tree binary_op = 0, unary_op = 0;
6409 tree old_cleanups = cleanups_this_call;
6411 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6412 convert it to our mode, if necessary. */
6413 if (integer_onep (TREE_OPERAND (exp, 1))
6414 && integer_zerop (TREE_OPERAND (exp, 2))
6415 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6419 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6424 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
6425 if (GET_MODE (op0) == mode)
6429 target = gen_reg_rtx (mode);
6430 convert_move (target, op0, unsignedp);
6434 /* If we are not to produce a result, we have no target. Otherwise,
6435 if a target was specified use it; it will not be used as an
6436 intermediate target unless it is safe. If no target, use a
6441 else if (original_target
6442 && safe_from_p (original_target, TREE_OPERAND (exp, 0))
6443 && GET_MODE (original_target) == mode
6444 && ! (GET_CODE (original_target) == MEM
6445 && MEM_VOLATILE_P (original_target)))
6446 temp = original_target;
6448 temp = assign_temp (type, 0, 0, 1);
6450 /* Check for X ? A + B : A. If we have this, we can copy
6451 A to the output and conditionally add B. Similarly for unary
6452 operations. Don't do this if X has side-effects because
6453 those side effects might affect A or B and the "?" operation is
6454 a sequence point in ANSI. (We test for side effects later.) */
6456 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6457 && operand_equal_p (TREE_OPERAND (exp, 2),
6458 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6459 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6460 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6461 && operand_equal_p (TREE_OPERAND (exp, 1),
6462 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6463 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6464 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6465 && operand_equal_p (TREE_OPERAND (exp, 2),
6466 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6467 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6468 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6469 && operand_equal_p (TREE_OPERAND (exp, 1),
6470 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6471 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6473 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
6474 operation, do this as A + (X != 0). Similarly for other simple
6475 binary operators. */
6476 if (temp && singleton && binary_op
6477 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6478 && (TREE_CODE (binary_op) == PLUS_EXPR
6479 || TREE_CODE (binary_op) == MINUS_EXPR
6480 || TREE_CODE (binary_op) == BIT_IOR_EXPR
6481 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
6482 && integer_onep (TREE_OPERAND (binary_op, 1))
6483 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6486 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6487 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6488 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
6491 /* If we had X ? A : A + 1, do this as A + (X == 0).
6493 We have to invert the truth value here and then put it
6494 back later if do_store_flag fails. We cannot simply copy
6495 TREE_OPERAND (exp, 0) to another variable and modify that
6496 because invert_truthvalue can modify the tree pointed to
6498 if (singleton == TREE_OPERAND (exp, 1))
6499 TREE_OPERAND (exp, 0)
6500 = invert_truthvalue (TREE_OPERAND (exp, 0));
6502 result = do_store_flag (TREE_OPERAND (exp, 0),
6503 (safe_from_p (temp, singleton)
6505 mode, BRANCH_COST <= 1);
6509 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
6510 return expand_binop (mode, boptab, op1, result, temp,
6511 unsignedp, OPTAB_LIB_WIDEN);
6513 else if (singleton == TREE_OPERAND (exp, 1))
6514 TREE_OPERAND (exp, 0)
6515 = invert_truthvalue (TREE_OPERAND (exp, 0));
6518 do_pending_stack_adjust ();
6520 op0 = gen_label_rtx ();
6522 flag = gen_reg_rtx (word_mode);
6523 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6527 /* If the target conflicts with the other operand of the
6528 binary op, we can't use it. Also, we can't use the target
6529 if it is a hard register, because evaluating the condition
6530 might clobber it. */
6532 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6533 || (GET_CODE (temp) == REG
6534 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6535 temp = gen_reg_rtx (mode);
6536 store_expr (singleton, temp, 0);
6539 expand_expr (singleton,
6540 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6541 dest_left_flag = get_last_insn ();
6542 if (singleton == TREE_OPERAND (exp, 1))
6543 jumpif (TREE_OPERAND (exp, 0), op0);
6545 jumpifnot (TREE_OPERAND (exp, 0), op0);
6547 /* Allows cleanups up to here. */
6548 old_cleanups = cleanups_this_call;
6549 if (binary_op && temp == 0)
6550 /* Just touch the other operand. */
6551 expand_expr (TREE_OPERAND (binary_op, 1),
6552 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6554 store_expr (build (TREE_CODE (binary_op), type,
6555 make_tree (type, temp),
6556 TREE_OPERAND (binary_op, 1)),
6559 store_expr (build1 (TREE_CODE (unary_op), type,
6560 make_tree (type, temp)),
6563 dest_right_flag = get_last_insn ();
6566 /* This is now done in jump.c and is better done there because it
6567 produces shorter register lifetimes. */
6569 /* Check for both possibilities either constants or variables
6570 in registers (but not the same as the target!). If so, can
6571 save branches by assigning one, branching, and assigning the
6573 else if (temp && GET_MODE (temp) != BLKmode
6574 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
6575 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
6576 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
6577 && DECL_RTL (TREE_OPERAND (exp, 1))
6578 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
6579 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
6580 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
6581 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
6582 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
6583 && DECL_RTL (TREE_OPERAND (exp, 2))
6584 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
6585 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
6587 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6588 temp = gen_reg_rtx (mode);
6589 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6590 dest_left_flag = get_last_insn ();
6591 jumpifnot (TREE_OPERAND (exp, 0), op0);
6593 /* Allows cleanups up to here. */
6594 old_cleanups = cleanups_this_call;
6595 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6597 dest_right_flag = get_last_insn ();
6600 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6601 comparison operator. If we have one of these cases, set the
6602 output to A, branch on A (cse will merge these two references),
6603 then set the output to FOO. */
6605 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6606 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6607 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6608 TREE_OPERAND (exp, 1), 0)
6609 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6610 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6612 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6613 temp = gen_reg_rtx (mode);
6614 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6615 dest_left_flag = get_last_insn ();
6616 jumpif (TREE_OPERAND (exp, 0), op0);
6618 /* Allows cleanups up to here. */
6619 old_cleanups = cleanups_this_call;
6620 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6622 dest_right_flag = get_last_insn ();
6625 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6626 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6627 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6628 TREE_OPERAND (exp, 2), 0)
6629 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6630 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6632 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6633 temp = gen_reg_rtx (mode);
6634 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6635 dest_left_flag = get_last_insn ();
6636 jumpifnot (TREE_OPERAND (exp, 0), op0);
6638 /* Allows cleanups up to here. */
6639 old_cleanups = cleanups_this_call;
6640 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6642 dest_right_flag = get_last_insn ();
6646 op1 = gen_label_rtx ();
6647 jumpifnot (TREE_OPERAND (exp, 0), op0);
6649 /* Allows cleanups up to here. */
6650 old_cleanups = cleanups_this_call;
6652 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6654 expand_expr (TREE_OPERAND (exp, 1),
6655 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6656 dest_left_flag = get_last_insn ();
6658 /* Handle conditional cleanups, if any. */
6659 left_cleanups = defer_cleanups_to (old_cleanups);
6662 emit_jump_insn (gen_jump (op1));
6666 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6668 expand_expr (TREE_OPERAND (exp, 2),
6669 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
6670 dest_right_flag = get_last_insn ();
6673 /* Handle conditional cleanups, if any. */
6674 right_cleanups = defer_cleanups_to (old_cleanups);
6680 /* Add back in, any conditional cleanups. */
6681 if (left_cleanups || right_cleanups)
6687 /* Now that we know that a flag is needed, go back and add in the
6688 setting of the flag. */
6690 /* Do the left side flag. */
6691 last = get_last_insn ();
6692 /* Flag left cleanups as needed. */
6693 emit_move_insn (flag, const1_rtx);
6694 /* ??? deprecated, use sequences instead. */
6695 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
6697 /* Do the right side flag. */
6698 last = get_last_insn ();
6699 /* Flag left cleanups as needed. */
6700 emit_move_insn (flag, const0_rtx);
6701 /* ??? deprecated, use sequences instead. */
6702 reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
6704 /* All cleanups must be on the function_obstack. */
6705 push_obstacks_nochange ();
6706 resume_temporary_allocation ();
6708 /* convert flag, which is an rtx, into a tree. */
6709 cond = make_node (RTL_EXPR);
6710 TREE_TYPE (cond) = integer_type_node;
6711 RTL_EXPR_RTL (cond) = flag;
6712 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
6713 cond = save_expr (cond);
6715 if (! left_cleanups)
6716 left_cleanups = integer_zero_node;
6717 if (! right_cleanups)
6718 right_cleanups = integer_zero_node;
6719 new_cleanups = build (COND_EXPR, void_type_node,
6720 truthvalue_conversion (cond),
6721 left_cleanups, right_cleanups);
6722 new_cleanups = fold (new_cleanups);
6726 /* Now add in the conditionalized cleanups. */
6728 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
6729 expand_eh_region_start ();
6736 /* Something needs to be initialized, but we didn't know
6737 where that thing was when building the tree. For example,
6738 it could be the return value of a function, or a parameter
6739 to a function which lays down in the stack, or a temporary
6740 variable which must be passed by reference.
6742 We guarantee that the expression will either be constructed
6743 or copied into our original target. */
6745 tree slot = TREE_OPERAND (exp, 0);
6746 tree cleanups = NULL_TREE;
6750 if (TREE_CODE (slot) != VAR_DECL)
6754 target = original_target;
6758 if (DECL_RTL (slot) != 0)
6760 target = DECL_RTL (slot);
6761 /* If we have already expanded the slot, so don't do
6763 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6768 target = assign_temp (type, 2, 1, 1);
6769 /* All temp slots at this level must not conflict. */
6770 preserve_temp_slots (target);
6771 DECL_RTL (slot) = target;
6773 /* Since SLOT is not known to the called function
6774 to belong to its stack frame, we must build an explicit
6775 cleanup. This case occurs when we must build up a reference
6776 to pass the reference as an argument. In this case,
6777 it is very likely that such a reference need not be
6780 if (TREE_OPERAND (exp, 2) == 0)
6781 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
6782 cleanups = TREE_OPERAND (exp, 2);
6787 /* This case does occur, when expanding a parameter which
6788 needs to be constructed on the stack. The target
6789 is the actual stack address that we want to initialize.
6790 The function we call will perform the cleanup in this case. */
6792 /* If we have already assigned it space, use that space,
6793 not target that we were passed in, as our target
6794 parameter is only a hint. */
6795 if (DECL_RTL (slot) != 0)
6797 target = DECL_RTL (slot);
6798 /* If we have already expanded the slot, so don't do
6800 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6804 DECL_RTL (slot) = target;
6807 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
6808 /* Mark it as expanded. */
6809 TREE_OPERAND (exp, 1) = NULL_TREE;
6811 store_expr (exp1, target, 0);
6815 cleanups_this_call = tree_cons (NULL_TREE,
6817 cleanups_this_call);
6818 expand_eh_region_start ();
/* NOTE(review): elided excerpt from the middle of expand_expr's big switch.
   Embedded original line numbers are non-contiguous (6829 -> 6831, 6839 -> 6846,
   etc.), so case labels, braces, and whole statements are missing from this
   view.  Do not treat this span as brace-balanced C.  */
/* INIT_EXPR-style assignment: expand the store, then re-run the assignments
   for any TYPE_NONCOPIED_PARTS of the lhs type.  */
6826 tree lhs = TREE_OPERAND (exp, 0);
6827 tree rhs = TREE_OPERAND (exp, 1);
6828 tree noncopied_parts = 0;
6829 tree lhs_type = TREE_TYPE (lhs);
6831 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6832 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
6833 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
6834 TYPE_NONCOPIED_PARTS (lhs_type));
/* Walk the TREE_LIST of noncopied parts; for init, value/purpose order is
   the opposite of the MODIFY_EXPR loop below (lines 6905-6906).  */
6835 while (noncopied_parts != 0)
6837 expand_assignment (TREE_VALUE (noncopied_parts),
6838 TREE_PURPOSE (noncopied_parts), 0, 0);
6839 noncopied_parts = TREE_CHAIN (noncopied_parts);
6846 /* If lhs is complex, expand calls in rhs before computing it.
6847 That's so we don't compute a pointer and save it over a call.
6848 If lhs is simple, compute it first so we can give it as a
6849 target if the rhs is just a call. This avoids an extra temp and copy
6850 and that prevents a partial-subsumption which makes bad code.
6851 Actually we could treat component_ref's of vars like vars. */
/* MODIFY_EXPR case (presumably -- the case label itself is elided).  */
6853 tree lhs = TREE_OPERAND (exp, 0);
6854 tree rhs = TREE_OPERAND (exp, 1);
6855 tree noncopied_parts = 0;
6856 tree lhs_type = TREE_TYPE (lhs);
6860 if (TREE_CODE (lhs) != VAR_DECL
6861 && TREE_CODE (lhs) != RESULT_DECL
6862 && TREE_CODE (lhs) != PARM_DECL)
6863 preexpand_calls (exp);
6865 /* Check for |= or &= of a bitfield of size one into another bitfield
6866 of size 1. In this case, (unless we need the result of the
6867 assignment) we can do this more efficiently with a
6868 test followed by an assignment, if necessary.
6870 ??? At this point, we can't get a BIT_FIELD_REF here. But if
6871 things change so we do, this code should be enhanced to
   (NOTE(review): tail of this comment and the start of the `if` condition
   are elided -- lines 6872-6873 are missing.)  */
6874 && TREE_CODE (lhs) == COMPONENT_REF
6875 && (TREE_CODE (rhs) == BIT_IOR_EXPR
6876 || TREE_CODE (rhs) == BIT_AND_EXPR)
6877 && TREE_OPERAND (rhs, 0) == lhs
6878 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
6879 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
6880 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
/* One-bit |=/&= fast path: jump around the store when the tested bit
   already forces the result.  */
6882 rtx label = gen_label_rtx ();
6884 do_jump (TREE_OPERAND (rhs, 1),
6885 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
6886 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
6887 expand_assignment (lhs, convert (TREE_TYPE (rhs),
6888 (TREE_CODE (rhs) == BIT_IOR_EXPR
6890 : integer_zero_node)),
6892 do_pending_stack_adjust ();
/* Save noncopied parts before the store, restore them afterwards.  */
6897 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
6898 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
6899 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
6900 TYPE_NONCOPIED_PARTS (lhs_type));
6902 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
6903 while (noncopied_parts != 0)
6905 expand_assignment (TREE_PURPOSE (noncopied_parts),
6906 TREE_VALUE (noncopied_parts), 0, 0);
6907 noncopied_parts = TREE_CHAIN (noncopied_parts);
/* Pre/post increment and decrement expand via expand_increment.  */
6912 case PREINCREMENT_EXPR:
6913 case PREDECREMENT_EXPR:
6914 return expand_increment (exp, 0, ignore);
6916 case POSTINCREMENT_EXPR:
6917 case POSTDECREMENT_EXPR:
6918 /* Faster to treat as pre-increment if result is not used. */
6919 return expand_increment (exp, ! ignore, ignore);
/* ADDR_EXPR case (label elided).  */
6922 /* If nonzero, TEMP will be set to the address of something that might
6923 be a MEM corresponding to a stack slot. */
6926 /* Are we taking the address of a nested function? */
6927 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
6928 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
6929 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
6931 op0 = trampoline_address (TREE_OPERAND (exp, 0));
6932 op0 = force_operand (op0, target);
6934 /* If we are taking the address of something erroneous, just
6936 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
6940 /* We make sure to pass const0_rtx down if we came in with
6941 ignore set, to avoid doing the cleanups twice for something. */
6942 op0 = expand_expr (TREE_OPERAND (exp, 0),
6943 ignore ? const0_rtx : NULL_RTX, VOIDmode,
6944 (modifier == EXPAND_INITIALIZER
6945 ? modifier : EXPAND_CONST_ADDRESS));
6947 /* If we are going to ignore the result, OP0 will have been set
6948 to const0_rtx, so just return it. Don't get confused and
6949 think we are taking the address of the constant. */
6953 op0 = protect_from_queue (op0, 0);
6955 /* We would like the object in memory. If it is a constant,
6956 we can have it be statically allocated into memory. For
6957 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
6958 memory and store the value into it. */
6960 if (CONSTANT_P (op0))
6961 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6963 else if (GET_CODE (op0) == MEM)
6965 mark_temp_addr_taken (op0);
6966 temp = XEXP (op0, 0);
6969 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6970 || GET_CODE (op0) == CONCAT)
6972 /* If this object is in a register, it must be not
6974 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
6975 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6977 mark_temp_addr_taken (memloc);
6978 emit_move_insn (memloc, op0);
6982 if (GET_CODE (op0) != MEM)
6985 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6987 temp = XEXP (op0, 0);
6988 #ifdef POINTERS_EXTEND_UNSIGNED
6989 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
6990 && mode == ptr_mode)
6991 temp = convert_memory_address (ptr_mode, temp);
6996 op0 = force_operand (XEXP (op0, 0), target);
6999 if (flag_force_addr && GET_CODE (op0) != REG)
7000 op0 = force_reg (Pmode, op0);
7002 if (GET_CODE (op0) == REG
7003 && ! REG_USERVAR_P (op0))
7004 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7006 /* If we might have had a temp slot, add an equivalent address
7009 update_temp_slot_address (temp, op0);
7011 #ifdef POINTERS_EXTEND_UNSIGNED
7012 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7013 && mode == ptr_mode)
7014 op0 = convert_memory_address (ptr_mode, op0);
7019 case ENTRY_VALUE_EXPR:
7022 /* COMPLEX type for Extended Pascal & Fortran */
/* COMPLEX_EXPR: build the real/imaginary halves separately.  */
7025 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7028 /* Get the rtx code of the operands. */
7029 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7030 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7033 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7037 /* Move the real (op0) and imaginary (op1) parts to their location. */
7038 emit_move_insn (gen_realpart (mode, target), op0);
7039 emit_move_insn (gen_imagpart (mode, target), op1);
7041 insns = get_insns ();
7044 /* Complex construction should appear as a single unit. */
7045 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7046 each with a separate pseudo as destination.
7047 It's not correct for flow to treat them as a unit. */
7048 if (GET_CODE (target) != CONCAT)
7049 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
/* REALPART_EXPR / IMAGPART_EXPR (labels elided).  */
7057 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7058 return gen_realpart (mode, op0);
7061 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7062 return gen_imagpart (mode, op0);
/* CONJ_EXPR: copy the real part, negate the imaginary part.  */
7066 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7070 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7073 target = gen_reg_rtx (mode);
7077 /* Store the realpart and the negated imagpart to target. */
7078 emit_move_insn (gen_realpart (partmode, target),
7079 gen_realpart (partmode, op0));
7081 imag_t = gen_imagpart (partmode, target);
7082 temp = expand_unop (partmode, neg_optab,
7083 gen_imagpart (partmode, op0), imag_t, 0);
7085 emit_move_insn (imag_t, temp);
7087 insns = get_insns ();
7090 /* Conjugate should appear as a single unit
7091 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
7092 each with a separate pseudo as destination.
7093 It's not correct for flow to treat them as a unit. */
7094 if (GET_CODE (target) != CONCAT)
7095 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7103 op0 = CONST0_RTX (tmode);
/* Unknown tree codes are delegated to the language front end.  */
7109 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7112 /* Here to do an ordinary binary operator, generating an instruction
7113 from the optab already placed in `this_optab'. */
7115 preexpand_calls (exp);
7116 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7118 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7119 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7121 temp = expand_binop (mode, this_optab, op0, op1, target,
7122 unsignedp, OPTAB_LIB_WIDEN);
7129 /* Emit bytecode to evaluate the given expression EXP to the stack. */
/* NOTE(review): old K&R-style definition; the return type line, the `tree
   exp;` parameter declaration, and many `case` labels are elided from this
   listing (embedded numbering jumps 7145 -> 7151, 7158 -> 7164, etc.).  */
7132 bc_expand_expr (exp)
7135 enum tree_code code;
7138 struct binary_operator *binoptab;
7139 struct unary_operator *unoptab;
7140 struct increment_operator *incroptab;
7141 struct bc_label *lab, *lab1;
7142 enum bytecode_opcode opcode;
7145 code = TREE_CODE (exp);
/* PARM_DECL case (presumably -- label elided): load the parameter's
   address, then its value.  */
7151 if (DECL_RTL (exp) == 0)
7153 error_with_decl (exp, "prior parameter's size depends on `%s'");
7157 bc_load_parmaddr (DECL_RTL (exp));
7158 bc_load_memory (TREE_TYPE (exp), exp);
/* VAR_DECL case (label elided): external vs. local address, then load.  */
7164 if (DECL_RTL (exp) == 0)
7168 if (BYTECODE_LABEL (DECL_RTL (exp)))
7169 bc_load_externaddr (DECL_RTL (exp));
7171 bc_load_localaddr (DECL_RTL (exp));
7173 if (TREE_PUBLIC (exp))
7174 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
7175 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
7177 bc_load_localaddr (DECL_RTL (exp));
7179 bc_load_memory (TREE_TYPE (exp), exp);
/* INTEGER_CST case (label elided).  */
7184 #ifdef DEBUG_PRINT_CODE
7185 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
7187 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
7189 : TYPE_MODE (TREE_TYPE (exp)))],
7190 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
/* REAL_CST case (label elided).  */
7196 #ifdef DEBUG_PRINT_CODE
7197 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
7199 /* FIX THIS: find a better way to pass real_cst's. -bson */
7200 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
7201 (double) TREE_REAL_CST (exp));
/* CALL_EXPR case (label elided).  */
7210 /* We build a call description vector describing the type of
7211 the return value and of the arguments; this call vector,
7212 together with a pointer to a location for the return value
7213 and the base of the argument list, is passed to the low
7214 level machine dependent call subroutine, which is responsible
7215 for putting the arguments wherever real functions expect
7216 them, as well as getting the return value back. */
7218 tree calldesc = 0, arg;
7222 /* Push the evaluated args on the evaluation stack in reverse
7223 order. Also make an entry for each arg in the calldesc
7224 vector while we're at it. */
7226 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7228 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
7231 bc_expand_expr (TREE_VALUE (arg));
7233 calldesc = tree_cons ((tree) 0,
7234 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
7236 calldesc = tree_cons ((tree) 0,
7237 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
/* Restore the argument list to its original order.  */
7241 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
7243 /* Allocate a location for the return value and push its
7244 address on the evaluation stack. Also make an entry
7245 at the front of the calldesc for the return value type. */
7247 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7248 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
7249 bc_load_localaddr (retval);
7251 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
7252 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
7254 /* Prepend the argument count. */
7255 calldesc = tree_cons ((tree) 0,
7256 build_int_2 (nargs, 0),
7259 /* Push the address of the call description vector on the stack. */
7260 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
7261 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
7262 build_index_type (build_int_2 (nargs * 2, 0)));
7263 r = output_constant_def (calldesc);
7264 bc_load_externaddr (r);
7266 /* Push the address of the function to be called. */
7267 bc_expand_expr (TREE_OPERAND (exp, 0));
7269 /* Call the function, popping its address and the calldesc vector
7270 address off the evaluation stack in the process. */
7271 bc_emit_instruction (call);
7273 /* Pop the arguments off the stack. */
7274 bc_adjust_stack (nargs);
7276 /* Load the return value onto the stack. */
7277 bc_load_localaddr (retval);
7278 bc_load_memory (type, TREE_OPERAND (exp, 0));
/* SAVE_EXPR case (label elided): evaluate once, cache in a local.  */
7284 if (!SAVE_EXPR_RTL (exp))
7286 /* First time around: copy to local variable */
7287 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
7288 TYPE_ALIGN (TREE_TYPE(exp)));
7289 bc_expand_expr (TREE_OPERAND (exp, 0));
7290 bc_emit_instruction (duplicate);
7292 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7293 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7297 /* Consecutive reference: use saved copy */
7298 bc_load_localaddr (SAVE_EXPR_RTL (exp));
7299 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
7304 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
7305 how are they handled instead? */
7308 TREE_USED (exp) = 1;
7309 bc_expand_expr (STMT_BODY (exp));
/* NOP_EXPR / CONVERT_EXPR case (labels elided).  */
7316 bc_expand_expr (TREE_OPERAND (exp, 0));
7317 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
/* MODIFY_EXPR case (label elided).  */
7322 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
/* ADDR_EXPR case (label elided).  */
7327 bc_expand_address (TREE_OPERAND (exp, 0));
/* INDIRECT_REF case (label elided).  */
7332 bc_expand_expr (TREE_OPERAND (exp, 0));
7333 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* ARRAY_REF case (label elided).  */
7338 bc_expand_expr (bc_canonicalize_array_ref (exp));
/* COMPONENT_REF case (label elided).  */
7343 bc_expand_component_address (exp);
7345 /* If we have a bitfield, generate a proper load */
7346 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
/* COMPOUND_EXPR case (label elided): evaluate, drop, evaluate.  */
7351 bc_expand_expr (TREE_OPERAND (exp, 0));
7352 bc_emit_instruction (drop);
7353 bc_expand_expr (TREE_OPERAND (exp, 1));
/* COND_EXPR case (label elided): branch around the two arms.  */
7358 bc_expand_expr (TREE_OPERAND (exp, 0));
7359 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7360 lab = bc_get_bytecode_label ();
7361 bc_emit_bytecode (xjumpifnot);
7362 bc_emit_bytecode_labelref (lab);
7364 #ifdef DEBUG_PRINT_CODE
7365 fputc ('\n', stderr);
7367 bc_expand_expr (TREE_OPERAND (exp, 1));
7368 lab1 = bc_get_bytecode_label ();
7369 bc_emit_bytecode (jump);
7370 bc_emit_bytecode_labelref (lab1);
7372 #ifdef DEBUG_PRINT_CODE
7373 fputc ('\n', stderr);
7376 bc_emit_bytecode_labeldef (lab);
7377 bc_expand_expr (TREE_OPERAND (exp, 2));
7378 bc_emit_bytecode_labeldef (lab1);
/* Short-circuit logicals and the big operator->optab dispatch table.
   (Many of the intervening goto/break lines are elided.)  */
7381 case TRUTH_ANDIF_EXPR:
7383 opcode = xjumpifnot;
7386 case TRUTH_ORIF_EXPR:
7393 binoptab = optab_plus_expr;
7398 binoptab = optab_minus_expr;
7403 binoptab = optab_mult_expr;
7406 case TRUNC_DIV_EXPR:
7407 case FLOOR_DIV_EXPR:
7409 case ROUND_DIV_EXPR:
7410 case EXACT_DIV_EXPR:
7412 binoptab = optab_trunc_div_expr;
7415 case TRUNC_MOD_EXPR:
7416 case FLOOR_MOD_EXPR:
7418 case ROUND_MOD_EXPR:
7420 binoptab = optab_trunc_mod_expr;
7423 case FIX_ROUND_EXPR:
7424 case FIX_FLOOR_EXPR:
7426 abort (); /* Not used for C. */
7428 case FIX_TRUNC_EXPR:
7435 abort (); /* FIXME */
7439 binoptab = optab_rdiv_expr;
7444 binoptab = optab_bit_and_expr;
7449 binoptab = optab_bit_ior_expr;
7454 binoptab = optab_bit_xor_expr;
7459 binoptab = optab_lshift_expr;
7464 binoptab = optab_rshift_expr;
7467 case TRUTH_AND_EXPR:
7469 binoptab = optab_truth_and_expr;
7474 binoptab = optab_truth_or_expr;
7479 binoptab = optab_lt_expr;
7484 binoptab = optab_le_expr;
7489 binoptab = optab_ge_expr;
7494 binoptab = optab_gt_expr;
7499 binoptab = optab_eq_expr;
7504 binoptab = optab_ne_expr;
7509 unoptab = optab_negate_expr;
7514 unoptab = optab_bit_not_expr;
7517 case TRUTH_NOT_EXPR:
7519 unoptab = optab_truth_not_expr;
7522 case PREDECREMENT_EXPR:
7524 incroptab = optab_predecrement_expr;
7527 case PREINCREMENT_EXPR:
7529 incroptab = optab_preincrement_expr;
7532 case POSTDECREMENT_EXPR:
7534 incroptab = optab_postdecrement_expr;
7537 case POSTINCREMENT_EXPR:
7539 incroptab = optab_postincrement_expr;
/* CONSTRUCTOR case (label elided).  */
7544 bc_expand_constructor (exp);
/* BIND_EXPR-style case (label elided): open a binding contour,
   expand the vars, then the body.  */
7554 tree vars = TREE_OPERAND (exp, 0);
7555 int vars_need_expansion = 0;
7557 /* Need to open a binding contour here because
7558 if there are any cleanups they must be contained here. */
7559 expand_start_bindings (0);
7561 /* Mark the corresponding BLOCK for output. */
7562 if (TREE_OPERAND (exp, 2) != 0)
7563 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
7565 /* If VARS have not yet been expanded, expand them now. */
7568 if (DECL_RTL (vars) == 0)
7570 vars_need_expansion = 1;
7573 expand_decl_init (vars);
7574 vars = TREE_CHAIN (vars);
7577 bc_expand_expr (TREE_OPERAND (exp, 1));
7579 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
/* Shared tails for the dispatch table above (jump targets; labels
   elided).  */
7589 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
7590 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
7596 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
/* Short-circuit &&/|| tail: duplicate, conditional jump, drop,
   evaluate second operand.  */
7602 bc_expand_expr (TREE_OPERAND (exp, 0));
7603 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
7604 lab = bc_get_bytecode_label ();
7606 bc_emit_instruction (duplicate);
7607 bc_emit_bytecode (opcode);
7608 bc_emit_bytecode_labelref (lab);
7610 #ifdef DEBUG_PRINT_CODE
7611 fputc ('\n', stderr);
7614 bc_emit_instruction (drop);
7616 bc_expand_expr (TREE_OPERAND (exp, 1));
7617 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
7618 bc_emit_bytecode_labeldef (lab);
/* Increment/decrement tail.  */
7624 type = TREE_TYPE (TREE_OPERAND (exp, 0));
7626 /* Push the quantum. */
7627 bc_expand_expr (TREE_OPERAND (exp, 1));
7629 /* Convert it to the lvalue's type. */
7630 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
7632 /* Push the address of the lvalue */
7633 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
7635 /* Perform actual increment */
7636 bc_expand_increment (incroptab, type);
7640 /* Return the alignment in bits of EXP, a pointer valued expression.
7641 But don't return more than MAX_ALIGN no matter what.
7642 The alignment returned is, by default, the alignment of the thing that
7643 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7645 Otherwise, look at the expression to see if we can do better, i.e., if the
7646 expression is actually pointing at an object whose alignment is tighter. */
/* NOTE(review): K&R definition; the return type, parameter declarations,
   the loop construct around the switch, and the closing brace are elided
   from this listing.  */
7649 get_pointer_alignment (exp, max_align)
7653 unsigned align, inner;
7655 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
/* Start from the alignment of the pointed-to type, clamped to max_align.  */
7658 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7659 align = MIN (align, max_align);
7663 switch (TREE_CODE (exp))
/* Conversions: peel them off but never increase the alignment claim.  */
7667 case NON_LVALUE_EXPR:
7668 exp = TREE_OPERAND (exp, 0);
7669 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7671 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7672 align = MIN (inner, max_align);
/* PLUS_EXPR case (label elided).  */
7676 /* If sum of pointer + int, restrict our maximum alignment to that
7677 imposed by the integer. If not, we can't do any better than
7679 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
/* Halve max_align until it divides the byte offset (loop body elided).  */
7682 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7687 exp = TREE_OPERAND (exp, 0);
/* ADDR_EXPR case (label elided).  */
7691 /* See what we are pointing at and look at its alignment. */
7692 exp = TREE_OPERAND (exp, 0);
7693 if (TREE_CODE (exp) == FUNCTION_DECL)
7694 align = FUNCTION_BOUNDARY;
7695 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7696 align = DECL_ALIGN (exp);
7697 #ifdef CONSTANT_ALIGNMENT
7698 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7699 align = CONSTANT_ALIGNMENT (exp, align);
7701 return MIN (align, max_align);
7709 /* Return the tree node and offset if a given argument corresponds to
7710 a string constant. */
/* NOTE(review): K&R definition; return type, parameter declarations,
   some offset-computation lines (7735-7736, 7741-7742) and the final
   "not a string constant" return are elided from this listing.  */
7713 string_constant (arg, ptr_offset)
/* STRIP_NOPS or similar presumably precedes this test (lines elided).  */
7719 if (TREE_CODE (arg) == ADDR_EXPR
7720 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7722 *ptr_offset = integer_zero_node;
7723 return TREE_OPERAND (arg, 0);
7725 else if (TREE_CODE (arg) == PLUS_EXPR)
/* &"string" + offset, in either operand order.  */
7727 tree arg0 = TREE_OPERAND (arg, 0);
7728 tree arg1 = TREE_OPERAND (arg, 1);
7733 if (TREE_CODE (arg0) == ADDR_EXPR
7734 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
7737 return TREE_OPERAND (arg0, 0);
7739 else if (TREE_CODE (arg1) == ADDR_EXPR
7740 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
7743 return TREE_OPERAND (arg1, 0);
7750 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7751 way, because it could contain a zero byte in the middle.
7752 TREE_STRING_LENGTH is the size of the character array, not the string.
7754 Unfortunately, string_constant can't access the values of const char
7755 arrays with initializers, so neither can we do so here. */
/* NOTE(review): this is presumably the body of c_strlen; its signature,
   local declarations, and several early-exit returns (lines 7756-7764,
   7766-7767, etc.) are elided from this listing.  */
7765 src = string_constant (src, &offset_node);
7768 max = TREE_STRING_LENGTH (src);
7769 ptr = TREE_STRING_POINTER (src);
7770 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7772 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7773 compute the offset to the following null if we don't know where to
7774 start searching for it. */
/* Scan for an embedded NUL; the loop body with the early return for an
   embedded zero byte is elided.  */
7776 for (i = 0; i < max; i++)
7779 /* We don't know the starting offset, but we do know that the string
7780 has no internal zero bytes. We can assume that the offset falls
7781 within the bounds of the string; otherwise, the programmer deserves
7782 what he gets. Subtract the offset from the length of the string,
7784 /* This would perhaps not be valid if we were dealing with named
7785 arrays in addition to literal string constants. */
7786 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7789 /* We have a known offset into the string. Start searching there for
7790 a null character. */
7791 if (offset_node == 0)
7795 /* Did we get a long long offset? If so, punt. */
7796 if (TREE_INT_CST_HIGH (offset_node) != 0)
7798 offset = TREE_INT_CST_LOW (offset_node);
7800 /* If the offset is known to be out of bounds, warn, and call strlen at
7802 if (offset < 0 || offset > max)
7804 warning ("offset outside bounds of constant string");
7807 /* Use strlen to search for the first zero byte. Since any strings
7808 constructed with build_string will have nulls appended, we win even
7809 if we get handed something like (char[4])"abcd".
7811 Since OFFSET is our starting index into the string, no further
7812 calculation is needed. */
7813 return size_int (strlen (ptr + offset));
/* NOTE(review): K&R definition; the `rtx` return type line, the `count`
   and `tem` parameter declarations, loop braces, and the trailing
   `return tem;` are elided from this listing.  */
7817 expand_builtin_return_addr (fndecl_code, count, tem)
7818 enum built_in_function fndecl_code;
7824 /* Some machines need special handling before we can access
7825 arbitrary frames. For example, on the sparc, we must first flush
7826 all register windows to the stack. */
7827 #ifdef SETUP_FRAME_ADDRESSES
7828 SETUP_FRAME_ADDRESSES ();
7831 /* On the sparc, the return address is not in the frame, it is in a
7832 register. There is no way to access it off of the current frame
7833 pointer, but it can be accessed off the previous frame pointer by
7834 reading the value from the register window save area. */
7835 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7836 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
/* count++ presumably (line elided), so we look one frame further back.  */
7840 /* Scan back COUNT frames to the specified frame. */
7841 for (i = 0; i < count; i++)
7843 /* Assume the dynamic chain pointer is in the word that the
7844 frame address points to, unless otherwise specified. */
7845 #ifdef DYNAMIC_CHAIN_ADDRESS
7846 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7848 tem = memory_address (Pmode, tem);
7849 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7852 /* For __builtin_frame_address, return what we've got. */
7853 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
7856 /* For __builtin_return_address, Get the return address from that
7858 #ifdef RETURN_ADDR_RTX
7859 tem = RETURN_ADDR_RTX (count, tem);
/* Default: return address is one word past the frame pointer.  */
7861 tem = memory_address (Pmode,
7862 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7863 tem = gen_rtx (MEM, Pmode, tem);
7868 /* Expand an expression EXP that calls a built-in function,
7869 with result going to TARGET if that's convenient
7870 (and in mode MODE if that's convenient).
7871 SUBTARGET may be used as the target for computing one of EXP's operands.
7872 IGNORE is nonzero if the value is to be ignored. */
7874 #define CALLED_AS_BUILT_IN(NODE) \
7875 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
/* NOTE(review): K&R definition; the return type, several parameter
   declarations, many case labels, break statements, and the function's
   tail (it continues past the end of this listing) are elided.  */
7878 expand_builtin (exp, target, subtarget, mode, ignore)
7882 enum machine_mode mode;
7885 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7886 tree arglist = TREE_OPERAND (exp, 1);
7889 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7890 optab builtin_optab;
7892 switch (DECL_FUNCTION_CODE (fndecl))
7897 /* build_function_call changes these into ABS_EXPR. */
/* BUILT_IN_SIN / BUILT_IN_COS presumably (case labels elided).  */
7902 /* Treat these like sqrt, but only if the user asks for them. */
7903 if (! flag_fast_math)
7905 case BUILT_IN_FSQRT:
7906 /* If not optimizing, call the library function. */
7911 /* Arg could be wrong type if user redeclared this fcn wrong. */
7912 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
7915 /* Stabilize and compute the argument. */
7916 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7917 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7919 exp = copy_node (exp);
7920 arglist = copy_node (arglist);
7921 TREE_OPERAND (exp, 1) = arglist;
7922 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7924 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7926 /* Make a suitable register to place result in. */
7927 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7932 switch (DECL_FUNCTION_CODE (fndecl))
7935 builtin_optab = sin_optab; break;
7937 builtin_optab = cos_optab; break;
7938 case BUILT_IN_FSQRT:
7939 builtin_optab = sqrt_optab; break;
7944 /* Compute into TARGET.
7945 Set TARGET to wherever the result comes back. */
7946 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7947 builtin_optab, op0, target, 0);
7949 /* If we were unable to expand via the builtin, stop the
7950 sequence (without outputting the insns) and break, causing
7951 a call to the library function. */
7958 /* Check the results by default. But if flag_fast_math is turned on,
7959 then assume sqrt will always be called with valid arguments. */
7961 if (! flag_fast_math)
7963 /* Don't define the builtin FP instructions
7964 if your machine is not IEEE. */
7965 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7968 lab1 = gen_label_rtx ();
7970 /* Test the result; if it is NaN, set errno=EDOM because
7971 the argument was not in the domain. */
/* NaN != NaN, so an equality compare against itself detects NaN.  */
7972 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7973 emit_jump_insn (gen_beq (lab1));
7977 #ifdef GEN_ERRNO_RTX
7978 rtx errno_rtx = GEN_ERRNO_RTX;
7981 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "errno"));
7984 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7987 /* We can't set errno=EDOM directly; let the library call do it.
7988 Pop the arguments right away in case the call gets deleted. */
7990 expand_call (exp, target, 0);
7997 /* Output the entire sequence. */
7998 insns = get_insns ();
8004 /* __builtin_apply_args returns block of memory allocated on
8005 the stack into which is stored the arg pointer, structure
8006 value address, static chain, and all the registers that might
8007 possibly be used in performing a function call. The code is
8008 moved to the start of the function so the incoming values are
8010 case BUILT_IN_APPLY_ARGS:
8011 /* Don't do __builtin_apply_args more than once in a function.
8012 Save the result of the first call and reuse it. */
8013 if (apply_args_value != 0)
8014 return apply_args_value;
8016 /* When this function is called, it means that registers must be
8017 saved on entry to this function. So we migrate the
8018 call to the first insn of this function. */
8023 temp = expand_builtin_apply_args ();
8027 apply_args_value = temp;
8029 /* Put the sequence after the NOTE that starts the function.
8030 If this is inside a SEQUENCE, make the outer-level insn
8031 chain current, so the code is placed at the start of the
8033 push_topmost_sequence ();
8034 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8035 pop_topmost_sequence ();
8039 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8040 FUNCTION with a copy of the parameters described by
8041 ARGUMENTS, and ARGSIZE. It returns a block of memory
8042 allocated on the stack into which is stored all the registers
8043 that might possibly be used for returning the result of a
8044 function. ARGUMENTS is the value returned by
8045 __builtin_apply_args. ARGSIZE is the number of bytes of
8046 arguments that must be copied. ??? How should this value be
8047 computed? We'll also need a safe worst case value for varargs
8049 case BUILT_IN_APPLY:
8051 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8052 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8053 || TREE_CHAIN (arglist) == 0
8054 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8055 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8056 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
/* Expand the three arguments into ops[0..2] (decls elided).  */
8064 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8065 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
8067 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8070 /* __builtin_return (RESULT) causes the function to return the
8071 value described by RESULT. RESULT is address of the block of
8072 memory returned by __builtin_apply. */
8073 case BUILT_IN_RETURN:
8075 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8076 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8077 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8078 NULL_RTX, VOIDmode, 0));
8081 case BUILT_IN_SAVEREGS:
8082 /* Don't do __builtin_saveregs more than once in a function.
8083 Save the result of the first call and reuse it. */
8084 if (saveregs_value != 0)
8085 return saveregs_value;
8087 /* When this function is called, it means that registers must be
8088 saved on entry to this function. So we migrate the
8089 call to the first insn of this function. */
8093 /* Now really call the function. `expand_call' does not call
8094 expand_builtin, so there is no danger of infinite recursion here. */
8097 #ifdef EXPAND_BUILTIN_SAVEREGS
8098 /* Do whatever the machine needs done in this case. */
8099 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8101 /* The register where the function returns its value
8102 is likely to have something else in it, such as an argument.
8103 So preserve that register around the call. */
8105 if (value_mode != VOIDmode)
8107 rtx valreg = hard_libcall_value (value_mode);
8108 rtx saved_valreg = gen_reg_rtx (value_mode);
8110 emit_move_insn (saved_valreg, valreg);
8111 temp = expand_call (exp, target, ignore);
8112 emit_move_insn (valreg, saved_valreg);
8115 /* Generate the call, putting the value in a pseudo. */
8116 temp = expand_call (exp, target, ignore);
8122 saveregs_value = temp;
8124 /* Put the sequence after the NOTE that starts the function.
8125 If this is inside a SEQUENCE, make the outer-level insn
8126 chain current, so the code is placed at the start of the
8128 push_topmost_sequence ();
8129 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8130 pop_topmost_sequence ();
8134 /* __builtin_args_info (N) returns word N of the arg space info
8135 for the current function. The number and meanings of words
8136 is controlled by the definition of CUMULATIVE_ARGS. */
8137 case BUILT_IN_ARGS_INFO:
8139 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
/* NOTE(review): "¤t_function_args_info" below is mojibake -- the
   original source reads "&current_function_args_info" ("&curren" was
   turned into the U+00A4 currency sign by a bad HTML-entity decode).
   Fix the encoding, not the logic.  */
8141 int *word_ptr = (int *) ¤t_function_args_info;
8142 tree type, elts, result;
8144 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8145 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8146 __FILE__, __LINE__);
8150 tree arg = TREE_VALUE (arglist);
8151 if (TREE_CODE (arg) != INTEGER_CST)
8152 error ("argument of `__builtin_args_info' must be constant");
8155 int wordnum = TREE_INT_CST_LOW (arg);
8157 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8158 error ("argument of `__builtin_args_info' out of range");
8160 return GEN_INT (word_ptr[wordnum]);
8164 error ("missing argument in `__builtin_args_info'");
/* Dead/legacy path: build a constant array of all the info words.
   NOTE(review): tree_cons is called with only two args here; the
   original had a third (chain) argument -- line truncated.  */
8169 for (i = 0; i < nwords; i++)
8170 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
8172 type = build_array_type (integer_type_node,
8173 build_index_type (build_int_2 (nwords, 0)));
8174 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8175 TREE_CONSTANT (result) = 1;
8176 TREE_STATIC (result) = 1;
8177 result = build (INDIRECT_REF, build_pointer_type (type), result);
8178 TREE_CONSTANT (result) = 1;
8179 return expand_expr (result, NULL_RTX, VOIDmode, 0);
8183 /* Return the address of the first anonymous stack arg. */
8184 case BUILT_IN_NEXT_ARG:
8186 tree fntype = TREE_TYPE (current_function_decl);
8188 if ((TYPE_ARG_TYPES (fntype) == 0
8189 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8191 && ! current_function_varargs)
8193 error ("`va_start' used in function with fixed args");
8199 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8200 tree arg = TREE_VALUE (arglist);
8202 /* Strip off all nops for the sake of the comparison. This
8203 is not quite the same as STRIP_NOPS. It does more.
8204 We must also strip off INDIRECT_EXPR for C++ reference
8206 while (TREE_CODE (arg) == NOP_EXPR
8207 || TREE_CODE (arg) == CONVERT_EXPR
8208 || TREE_CODE (arg) == NON_LVALUE_EXPR
8209 || TREE_CODE (arg) == INDIRECT_REF)
8210 arg = TREE_OPERAND (arg, 0);
8211 if (arg != last_parm)
8212 warning ("second parameter of `va_start' not last named argument");
8214 else if (! current_function_varargs)
8215 /* Evidently an out of date version of <stdarg.h>; can't validate
8216 va_start's second argument, but can still work as intended. */
8217 warning ("`__builtin_next_arg' called without an argument");
8220 return expand_binop (Pmode, add_optab,
8221 current_function_internal_arg_pointer,
8222 current_function_arg_offset_rtx,
8223 NULL_RTX, 0, OPTAB_LIB_WIDEN);
8225 case BUILT_IN_CLASSIFY_TYPE:
8228 tree type = TREE_TYPE (TREE_VALUE (arglist));
8229 enum tree_code code = TREE_CODE (type);
8230 if (code == VOID_TYPE)
8231 return GEN_INT (void_type_class);
8232 if (code == INTEGER_TYPE)
8233 return GEN_INT (integer_type_class);
8234 if (code == CHAR_TYPE)
8235 return GEN_INT (char_type_class);
8236 if (code == ENUMERAL_TYPE)
8237 return GEN_INT (enumeral_type_class);
8238 if (code == BOOLEAN_TYPE)
8239 return GEN_INT (boolean_type_class);
8240 if (code == POINTER_TYPE)
8241 return GEN_INT (pointer_type_class);
8242 if (code == REFERENCE_TYPE)
8243 return GEN_INT (reference_type_class);
8244 if (code == OFFSET_TYPE)
8245 return GEN_INT (offset_type_class);
8246 if (code == REAL_TYPE)
8247 return GEN_INT (real_type_class);
8248 if (code == COMPLEX_TYPE)
8249 return GEN_INT (complex_type_class);
8250 if (code == FUNCTION_TYPE)
8251 return GEN_INT (function_type_class);
8252 if (code == METHOD_TYPE)
8253 return GEN_INT (method_type_class);
8254 if (code == RECORD_TYPE)
8255 return GEN_INT (record_type_class);
8256 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8257 return GEN_INT (union_type_class);
8258 if (code == ARRAY_TYPE)
8260 if (TYPE_STRING_FLAG (type))
8261 return GEN_INT (string_type_class);
8263 return GEN_INT (array_type_class);
8265 if (code == SET_TYPE)
8266 return GEN_INT (set_type_class);
8267 if (code == FILE_TYPE)
8268 return GEN_INT (file_type_class);
8269 if (code == LANG_TYPE)
8270 return GEN_INT (lang_type_class);
8272 return GEN_INT (no_type_class);
8274 case BUILT_IN_CONSTANT_P:
8279 tree arg = TREE_VALUE (arglist);
8282 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8283 || (TREE_CODE (arg) == ADDR_EXPR
8284 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8285 ? const1_rtx : const0_rtx);
8288 case BUILT_IN_FRAME_ADDRESS:
8289 /* The argument must be a nonnegative integer constant.
8290 It counts the number of frames to scan up the stack.
8291 The value is the address of that frame. */
8292 case BUILT_IN_RETURN_ADDRESS:
8293 /* The argument must be a nonnegative integer constant.
8294 It counts the number of frames to scan up the stack.
8295 The value is the return address saved in that frame. */
8297 /* Warning about missing arg was already issued. */
8299 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
8301 error ("invalid arg to `__builtin_return_address'");
8304 else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8306 error ("invalid arg to `__builtin_return_address'");
8311 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8312 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8313 hard_frame_pointer_rtx);
8315 /* For __builtin_frame_address, return what we've got. */
8316 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8319 if (GET_CODE (tem) != REG)
8320 tem = copy_to_reg (tem);
8324 case BUILT_IN_ALLOCA:
8326 /* Arg could be non-integer if user redeclared this fcn wrong. */
8327 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8330 /* Compute the argument. */
8331 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
8333 /* Allocate the desired space. */
8334 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
8337 /* If not optimizing, call the library function. */
8338 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8342 /* Arg could be non-integer if user redeclared this fcn wrong. */
8343 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8346 /* Compute the argument. */
8347 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8348 /* Compute ffs, into TARGET if possible.
8349 Set TARGET to wherever the result comes back. */
8350 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8351 ffs_optab, op0, target, 1);
8356 case BUILT_IN_STRLEN:
8357 /* If not optimizing, call the library function. */
8358 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8362 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8363 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8367 tree src = TREE_VALUE (arglist);
8368 tree len = c_strlen (src);
8371 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8373 rtx result, src_rtx, char_rtx;
8374 enum machine_mode insn_mode = value_mode, char_mode;
8375 enum insn_code icode;
8377 /* If the length is known, just return it. */
8379 return expand_expr (len, target, mode, 0);
8381 /* If SRC is not a pointer type, don't do this operation inline. */
8385 /* Call a function if we can't compute strlen in the right mode. */
8387 while (insn_mode != VOIDmode)
8389 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8390 if (icode != CODE_FOR_nothing)
8393 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8395 if (insn_mode == VOIDmode)
8398 /* Make a place to write the result of the instruction. */
8401 && GET_CODE (result) == REG
8402 && GET_MODE (result) == insn_mode
8403 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8404 result = gen_reg_rtx (insn_mode);
8406 /* Make sure the operands are acceptable to the predicates. */
8408 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8409 result = gen_reg_rtx (insn_mode);
8411 src_rtx = memory_address (BLKmode,
8412 expand_expr (src, NULL_RTX, ptr_mode,
8414 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8415 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
8417 char_rtx = const0_rtx;
8418 char_mode = insn_operand_mode[(int)icode][2];
8419 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8420 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
8422 emit_insn (GEN_FCN (icode) (result,
8423 gen_rtx (MEM, BLKmode, src_rtx),
8424 char_rtx, GEN_INT (align)));
8426 /* Return the value in the proper mode for this function. */
8427 if (GET_MODE (result) == value_mode)
8429 else if (target != 0)
8431 convert_move (target, result, 0);
8435 return convert_to_mode (value_mode, result, 0);
8438 case BUILT_IN_STRCPY:
8439 /* If not optimizing, call the library function. */
8440 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8444 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8445 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8446 || TREE_CHAIN (arglist) == 0
8447 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8451 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
8456 len = size_binop (PLUS_EXPR, len, integer_one_node);
8458 chainon (arglist, build_tree_list (NULL_TREE, len));
8462 case BUILT_IN_MEMCPY:
8463 /* If not optimizing, call the library function. */
8464 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8468 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8469 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8470 || TREE_CHAIN (arglist) == 0
8471 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8472 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8473 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8477 tree dest = TREE_VALUE (arglist);
8478 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8479 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8483 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8485 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8486 rtx dest_rtx, dest_mem, src_mem;
8488 /* If either SRC or DEST is not a pointer type, don't do
8489 this operation in-line. */
8490 if (src_align == 0 || dest_align == 0)
8492 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8493 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8497 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8498 dest_mem = gen_rtx (MEM, BLKmode,
8499 memory_address (BLKmode, dest_rtx));
8500 /* There could be a void* cast on top of the object. */
8501 while (TREE_CODE (dest) == NOP_EXPR)
8502 dest = TREE_OPERAND (dest, 0);
8503 type = TREE_TYPE (TREE_TYPE (dest));
8504 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8505 src_mem = gen_rtx (MEM, BLKmode,
8506 memory_address (BLKmode,
8507 expand_expr (src, NULL_RTX,
8510 /* There could be a void* cast on top of the object. */
8511 while (TREE_CODE (src) == NOP_EXPR)
8512 src = TREE_OPERAND (src, 0);
8513 type = TREE_TYPE (TREE_TYPE (src));
8514 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
8516 /* Copy word part most expediently. */
8517 emit_block_move (dest_mem, src_mem,
8518 expand_expr (len, NULL_RTX, VOIDmode, 0),
8519 MIN (src_align, dest_align));
8520 return force_operand (dest_rtx, NULL_RTX);
8523 case BUILT_IN_MEMSET:
8524 /* If not optimizing, call the library function. */
8525 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8529 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8530 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8531 || TREE_CHAIN (arglist) == 0
8532 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8534 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8536 != (TREE_CODE (TREE_TYPE
8538 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8542 tree dest = TREE_VALUE (arglist);
8543 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8544 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8548 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8549 rtx dest_rtx, dest_mem;
8551 /* If DEST is not a pointer type, don't do this
8552 operation in-line. */
8553 if (dest_align == 0)
8556 /* If VAL is not 0, don't do this operation in-line. */
8557 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8560 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8561 dest_mem = gen_rtx (MEM, BLKmode,
8562 memory_address (BLKmode, dest_rtx));
8563 /* There could be a void* cast on top of the object. */
8564 while (TREE_CODE (dest) == NOP_EXPR)
8565 dest = TREE_OPERAND (dest, 0);
8566 type = TREE_TYPE (TREE_TYPE (dest));
8567 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8569 clear_storage (dest_mem, expand_expr (len, NULL_RTX, VOIDmode, 0),
8572 return force_operand (dest_rtx, NULL_RTX);
8575 /* These comparison functions need an instruction that returns an actual
8576 index. An ordinary compare that just sets the condition codes
8578 #ifdef HAVE_cmpstrsi
8579 case BUILT_IN_STRCMP:
8580 /* If not optimizing, call the library function. */
8581 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8585 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8586 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8587 || TREE_CHAIN (arglist) == 0
8588 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8590 else if (!HAVE_cmpstrsi)
8593 tree arg1 = TREE_VALUE (arglist);
8594 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8598 len = c_strlen (arg1);
8600 len = size_binop (PLUS_EXPR, integer_one_node, len);
8601 len2 = c_strlen (arg2);
8603 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
8605 /* If we don't have a constant length for the first, use the length
8606 of the second, if we know it. We don't require a constant for
8607 this case; some cost analysis could be done if both are available
8608 but neither is constant. For now, assume they're equally cheap.
8610 If both strings have constant lengths, use the smaller. This
8611 could arise if optimization results in strcpy being called with
8612 two fixed strings, or if the code was machine-generated. We should
8613 add some code to the `memcmp' handler below to deal with such
8614 situations, someday. */
8615 if (!len || TREE_CODE (len) != INTEGER_CST)
8622 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8624 if (tree_int_cst_lt (len2, len))
8628 chainon (arglist, build_tree_list (NULL_TREE, len));
8632 case BUILT_IN_MEMCMP:
8633 /* If not optimizing, call the library function. */
8634 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8638 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8639 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8640 || TREE_CHAIN (arglist) == 0
8641 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8642 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8643 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8645 else if (!HAVE_cmpstrsi)
8648 tree arg1 = TREE_VALUE (arglist);
8649 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8650 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8654 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8656 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8657 enum machine_mode insn_mode
8658 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
8660 /* If we don't have POINTER_TYPE, call the function. */
8661 if (arg1_align == 0 || arg2_align == 0)
8663 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8664 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8668 /* Make a place to write the result of the instruction. */
8671 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8672 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8673 result = gen_reg_rtx (insn_mode);
8675 emit_insn (gen_cmpstrsi (result,
8676 gen_rtx (MEM, BLKmode,
8677 expand_expr (arg1, NULL_RTX,
8680 gen_rtx (MEM, BLKmode,
8681 expand_expr (arg2, NULL_RTX,
8684 expand_expr (len, NULL_RTX, VOIDmode, 0),
8685 GEN_INT (MIN (arg1_align, arg2_align))));
8687 /* Return the value in the proper mode for this function. */
8688 mode = TYPE_MODE (TREE_TYPE (exp));
8689 if (GET_MODE (result) == mode)
8691 else if (target != 0)
8693 convert_move (target, result, 0);
8697 return convert_to_mode (mode, result, 0);
8700 case BUILT_IN_STRCMP:
8701 case BUILT_IN_MEMCMP:
8705 /* __builtin_setjmp is passed a pointer to an array of five words
8706 (not all will be used on all machines). It operates similarly to
8707 the C library function of the same name, but is more efficient.
8708 Much of the code below (and for longjmp) is copied from the handling
8711 NOTE: This is intended for use by GNAT and will only work in
8712 the method used by it. This code will likely NOT survive to
8713 the GCC 2.8.0 release. */
8714 case BUILT_IN_SETJMP:
8716 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8720 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8722 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
8723 enum machine_mode sa_mode = Pmode;
8725 int old_inhibit_defer_pop = inhibit_defer_pop;
8726 int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
8727 get_identifier ("__dummy"), 0);
8729 CUMULATIVE_ARGS args_so_far;
8732 #ifdef POINTERS_EXTEND_UNSIGNED
8733 buf_addr = convert_memory_address (Pmode, buf_addr);
8736 buf_addr = force_reg (Pmode, buf_addr);
8738 if (target == 0 || GET_CODE (target) != REG
8739 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8740 target = gen_reg_rtx (value_mode);
8744 CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
8745 current_function_calls_setjmp = 1;
8747 /* We store the frame pointer and the address of lab1 in the buffer
8748 and use the rest of it for the stack save area, which is
8749 machine-dependent. */
8750 emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
8751 virtual_stack_vars_rtx);
8753 (validize_mem (gen_rtx (MEM, Pmode,
8754 plus_constant (buf_addr,
8755 GET_MODE_SIZE (Pmode)))),
8756 gen_rtx (LABEL_REF, Pmode, lab1));
8758 #ifdef HAVE_save_stack_nonlocal
8759 if (HAVE_save_stack_nonlocal)
8760 sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
8763 stack_save = gen_rtx (MEM, sa_mode,
8764 plus_constant (buf_addr,
8765 2 * GET_MODE_SIZE (Pmode)));
8766 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
8770 emit_insn (gen_setjmp ());
8773 /* Set TARGET to zero and branch around the other case. */
8774 emit_move_insn (target, const0_rtx);
8775 emit_jump_insn (gen_jump (lab2));
8779 /* Note that setjmp clobbers FP when we get here, so we have to
8780 make sure it's marked as used by this function. */
8781 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8783 /* Mark the static chain as clobbered here so life information
8784 doesn't get messed up for it. */
8785 emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
8787 /* Now put in the code to restore the frame pointer, and argument
8788 pointer, if needed. The code below is from expand_end_bindings
8789 in stmt.c; see detailed documentation there. */
8790 #ifdef HAVE_nonlocal_goto
8791 if (! HAVE_nonlocal_goto)
8793 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
8795 current_function_has_nonlocal_goto = 1;
8797 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8798 if (fixed_regs[ARG_POINTER_REGNUM])
8800 #ifdef ELIMINABLE_REGS
8801 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
8803 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8804 if (elim_regs[i].from == ARG_POINTER_REGNUM
8805 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8808 if (i == sizeof elim_regs / sizeof elim_regs [0])
8811 /* Now restore our arg pointer from the address at which it
8812 was saved in our stack frame.
8813 If there hasn't been space allocated for it yet, make
8815 if (arg_pointer_save_area == 0)
8816 arg_pointer_save_area
8817 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8818 emit_move_insn (virtual_incoming_args_rtx,
8819 copy_to_reg (arg_pointer_save_area));
8824 /* The static chain pointer contains the address of dummy function.
8825 We need to call it here to handle some PIC cases of restoring
8826 a global pointer. Then return 1. */
8827 op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
8829 /* We can't actually call emit_library_call here, so do everything
8830 it does, which isn't much for a libfunc with no args. */
8831 op0 = memory_address (FUNCTION_MODE, op0);
8833 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
8834 gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
8835 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
8837 #ifndef ACCUMULATE_OUTGOING_ARGS
8838 #ifdef HAVE_call_pop
8840 emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
8841 const0_rtx, next_arg_reg,
8842 GEN_INT (return_pops)));
8849 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
8850 const0_rtx, next_arg_reg, const0_rtx));
8855 emit_move_insn (target, const1_rtx);
8860 /* __builtin_longjmp is passed a pointer to an array of five words
8861 and a value, which is a dummy. It's similar to the C library longjmp
8862 function but works with __builtin_setjmp above. */
8863 case BUILT_IN_LONGJMP:
8864 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8865 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8869 tree dummy_id = get_identifier ("__dummy");
8870 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8871 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8872 #ifdef POINTERS_EXTEND_UNSIGNED
8875 convert_memory_address
8877 expand_expr (TREE_VALUE (arglist),
8878 NULL_RTX, VOIDmode, 0)));
8881 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8885 rtx fp = gen_rtx (MEM, Pmode, buf_addr);
8886 rtx lab = gen_rtx (MEM, Pmode,
8887 plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
8888 enum machine_mode sa_mode
8889 #ifdef HAVE_save_stack_nonlocal
8890 = (HAVE_save_stack_nonlocal
8891 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8896 rtx stack = gen_rtx (MEM, sa_mode,
8897 plus_constant (buf_addr,
8898 2 * GET_MODE_SIZE (Pmode)));
8900 DECL_EXTERNAL (dummy_decl) = 1;
8901 TREE_PUBLIC (dummy_decl) = 1;
8902 make_decl_rtl (dummy_decl, NULL_PTR, 1);
8904 /* Expand the second expression just for side-effects. */
8905 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8906 const0_rtx, VOIDmode, 0);
8908 assemble_external (dummy_decl);
8910 /* Pick up FP, label, and SP from the block and jump. This code is
8911 from expand_goto in stmt.c; see there for detailed comments. */
8912 #if HAVE_nonlocal_goto
8913 if (HAVE_nonlocal_goto)
8914 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8915 XEXP (DECL_RTL (dummy_decl), 0)));
8919 lab = copy_to_reg (lab);
8920 emit_move_insn (hard_frame_pointer_rtx, fp);
8921 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8923 /* Put in the static chain register the address of the dummy
8925 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8926 emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
8927 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
8928 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
8929 emit_indirect_jump (lab);
8935 default: /* just do library call, if unknown builtin */
8936 error ("built-in function `%s' not currently supported",
8937 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
8940 /* The switch statement above can drop through to cause the function
8941 to be called normally. */
8943 return expand_call (exp, target, ignore);
8946 /* Built-in functions to perform an untyped call and return. */
8948 /* For each register that may be used for calling a function, this
8949 gives a mode used to copy the register's value. VOIDmode indicates
8950 the register is not used for calling a function. If the machine
8951 has register windows, this gives only the outbound registers.
8952 INCOMING_REGNO gives the corresponding inbound register. */
8953 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
8955 /* For each register that may be used for returning values, this gives
8956 a mode used to copy the register's value. VOIDmode indicates the
8957 register is not used for returning values. If the machine has
8958 register windows, this gives only the outbound registers.
8959 INCOMING_REGNO gives the corresponding inbound register. */
8960 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
8962 /* For each register that may be used for calling a function, this
8963 gives the offset of that register into the block returned by
8964 __builtin_apply_args. 0 indicates that the register is not
8965 used for calling a function. */
8966 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8968 /* Return the offset of register REGNO into the block returned by
8969 __builtin_apply_args. This is not declared static, since it is
8970 needed in objc-act.c. */
8973 apply_args_register_offset (regno)
8978 /* Arguments are always put in outgoing registers (in the argument
8979 block) if such make sense. */
8980 #ifdef OUTGOING_REGNO
8981 regno = OUTGOING_REGNO(regno);
8983 return apply_args_reg_offset[regno];
8986 /* Return the size required for the block returned by __builtin_apply_args,
8987 and initialize apply_args_mode. */
8992 static int size = -1;
8994 enum machine_mode mode;
8996 /* The values computed by this function never change. */
8999 /* The first value is the incoming arg-pointer. */
9000 size = GET_MODE_SIZE (Pmode);
9002 /* The second value is the structure value address unless this is
9003 passed as an "invisible" first argument. */
9004 if (struct_value_rtx)
9005 size += GET_MODE_SIZE (Pmode);
9007 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9008 if (FUNCTION_ARG_REGNO_P (regno))
9010 /* Search for the proper mode for copying this register's
9011 value. I'm not sure this is right, but it works so far. */
9012 enum machine_mode best_mode = VOIDmode;
9014 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9016 mode = GET_MODE_WIDER_MODE (mode))
9017 if (HARD_REGNO_MODE_OK (regno, mode)
9018 && HARD_REGNO_NREGS (regno, mode) == 1)
9021 if (best_mode == VOIDmode)
9022 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9024 mode = GET_MODE_WIDER_MODE (mode))
9025 if (HARD_REGNO_MODE_OK (regno, mode)
9026 && (mov_optab->handlers[(int) mode].insn_code
9027 != CODE_FOR_nothing))
9031 if (mode == VOIDmode)
9034 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9035 if (size % align != 0)
9036 size = CEIL (size, align) * align;
9037 apply_args_reg_offset[regno] = size;
9038 size += GET_MODE_SIZE (mode);
9039 apply_args_mode[regno] = mode;
9043 apply_args_mode[regno] = VOIDmode;
9044 apply_args_reg_offset[regno] = 0;
9050 /* Return the size required for the block returned by __builtin_apply,
9051 and initialize apply_result_mode. */
9054 apply_result_size ()
9056 static int size = -1;
9058 enum machine_mode mode;
9060 /* The values computed by this function never change. */
9065 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9066 if (FUNCTION_VALUE_REGNO_P (regno))
9068 /* Search for the proper mode for copying this register's
9069 value. I'm not sure this is right, but it works so far. */
9070 enum machine_mode best_mode = VOIDmode;
9072 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9074 mode = GET_MODE_WIDER_MODE (mode))
9075 if (HARD_REGNO_MODE_OK (regno, mode))
9078 if (best_mode == VOIDmode)
9079 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9081 mode = GET_MODE_WIDER_MODE (mode))
9082 if (HARD_REGNO_MODE_OK (regno, mode)
9083 && (mov_optab->handlers[(int) mode].insn_code
9084 != CODE_FOR_nothing))
9088 if (mode == VOIDmode)
9091 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9092 if (size % align != 0)
9093 size = CEIL (size, align) * align;
9094 size += GET_MODE_SIZE (mode);
9095 apply_result_mode[regno] = mode;
9098 apply_result_mode[regno] = VOIDmode;
9100 /* Allow targets that use untyped_call and untyped_return to override
9101 the size so that machine-specific information can be stored here. */
9102 #ifdef APPLY_RESULT_SIZE
9103 size = APPLY_RESULT_SIZE;
9109 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9110 /* Create a vector describing the result block RESULT. If SAVEP is true,
9111 the result block is used to save the values; otherwise it is used to
9112 restore the values. */
9115 result_vector (savep, result)
9119 int regno, size, align, nelts;
9120 enum machine_mode mode;
9122 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9125 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9126 if ((mode = apply_result_mode[regno]) != VOIDmode)
9128 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9129 if (size % align != 0)
9130 size = CEIL (size, align) * align;
9131 reg = gen_rtx (REG, mode, savep ? regno : INCOMING_REGNO (regno));
9132 mem = change_address (result, mode,
9133 plus_constant (XEXP (result, 0), size));
9134 savevec[nelts++] = (savep
9135 ? gen_rtx (SET, VOIDmode, mem, reg)
9136 : gen_rtx (SET, VOIDmode, reg, mem));
9137 size += GET_MODE_SIZE (mode);
9139 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
9141 #endif /* HAVE_untyped_call or HAVE_untyped_return */
9143 /* Save the state required to perform an untyped call with the same
9144 arguments as were passed to the current function. */
9147 expand_builtin_apply_args ()
9150 int size, align, regno;
9151 enum machine_mode mode;
9153 /* Create a block where the arg-pointer, structure value address,
9154 and argument registers can be saved. */
9155 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
9157 /* Walk past the arg-pointer and structure value address. */
9158 size = GET_MODE_SIZE (Pmode);
9159 if (struct_value_rtx)
9160 size += GET_MODE_SIZE (Pmode);
9162 /* Save each register used in calling a function to the block. */
9163 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9164 if ((mode = apply_args_mode[regno]) != VOIDmode)
9168 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9169 if (size % align != 0)
9170 size = CEIL (size, align) * align;
9172 tem = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9175 /* For reg-stack.c's stack register household.
9176 Compare with a similar piece of code in function.c. */
9178 emit_insn (gen_rtx (USE, mode, tem));
9181 emit_move_insn (change_address (registers, mode,
9182 plus_constant (XEXP (registers, 0),
9185 size += GET_MODE_SIZE (mode);
9188 /* Save the arg pointer to the block. */
9189 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9190 copy_to_reg (virtual_incoming_args_rtx));
9191 size = GET_MODE_SIZE (Pmode);
9193 /* Save the structure value address unless this is passed as an
9194 "invisible" first argument. */
9195 if (struct_value_incoming_rtx)
9197 emit_move_insn (change_address (registers, Pmode,
9198 plus_constant (XEXP (registers, 0),
9200 copy_to_reg (struct_value_incoming_rtx));
9201 size += GET_MODE_SIZE (Pmode);
9204 /* Return the address of the block. */
9205 return copy_addr_to_reg (XEXP (registers, 0));
9208 /* Perform an untyped call and save the state required to perform an
9209 untyped return of whatever value was returned by the given function. */
9212 expand_builtin_apply (function, arguments, argsize)
9213 rtx function, arguments, argsize;
9215 int size, align, regno;
9216 enum machine_mode mode;
9217 rtx incoming_args, result, reg, dest, call_insn;
9218 rtx old_stack_level = 0;
9219 rtx call_fusage = 0;
9221 /* Create a block where the return registers can be saved. */
9222 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9224 /* ??? The argsize value should be adjusted here. */
9226 /* Fetch the arg pointer from the ARGUMENTS block. */
9227 incoming_args = gen_reg_rtx (Pmode);
9228 emit_move_insn (incoming_args,
9229 gen_rtx (MEM, Pmode, arguments));
9230 #ifndef STACK_GROWS_DOWNWARD
9231 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9232 incoming_args, 0, OPTAB_LIB_WIDEN);
9235 /* Perform postincrements before actually calling the function. */
9238 /* Push a new argument block and copy the arguments. */
9239 do_pending_stack_adjust ();
9240 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
9242 /* Push a block of memory onto the stack to store the memory arguments.
9243 Save the address in a register, and copy the memory arguments. ??? I
9244 haven't figured out how the calling convention macros effect this,
9245 but it's likely that the source and/or destination addresses in
9246 the block copy will need updating in machine specific ways. */
9247 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
9248 emit_block_move (gen_rtx (MEM, BLKmode, dest),
9249 gen_rtx (MEM, BLKmode, incoming_args),
9251 PARM_BOUNDARY / BITS_PER_UNIT);
9253 /* Refer to the argument block. */
9255 arguments = gen_rtx (MEM, BLKmode, arguments);
9257 /* Walk past the arg-pointer and structure value address. */
9258 size = GET_MODE_SIZE (Pmode);
9259 if (struct_value_rtx)
9260 size += GET_MODE_SIZE (Pmode);
9262 /* Restore each of the registers previously saved. Make USE insns
9263 for each of these registers for use in making the call. */
9264 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9265 if ((mode = apply_args_mode[regno]) != VOIDmode)
9267 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9268 if (size % align != 0)
9269 size = CEIL (size, align) * align;
9270 reg = gen_rtx (REG, mode, regno);
9271 emit_move_insn (reg,
9272 change_address (arguments, mode,
9273 plus_constant (XEXP (arguments, 0),
9276 use_reg (&call_fusage, reg);
9277 size += GET_MODE_SIZE (mode);
9280 /* Restore the structure value address unless this is passed as an
9281 "invisible" first argument. */
9282 size = GET_MODE_SIZE (Pmode);
9283 if (struct_value_rtx)
9285 rtx value = gen_reg_rtx (Pmode);
9286 emit_move_insn (value,
9287 change_address (arguments, Pmode,
9288 plus_constant (XEXP (arguments, 0),
9290 emit_move_insn (struct_value_rtx, value);
9291 if (GET_CODE (struct_value_rtx) == REG)
9292 use_reg (&call_fusage, struct_value_rtx);
9293 size += GET_MODE_SIZE (Pmode);
9296 /* All arguments and registers used for the call are set up by now! */
9297 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
9299 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9300 and we don't want to load it into a register as an optimization,
9301 because prepare_call_address already did it if it should be done. */
9302 if (GET_CODE (function) != SYMBOL_REF)
9303 function = memory_address (FUNCTION_MODE, function);
9305 /* Generate the actual call instruction and save the return value. */
9306 #ifdef HAVE_untyped_call
9307 if (HAVE_untyped_call)
9308 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
9309 result, result_vector (1, result)));
9312 #ifdef HAVE_call_value
9313 if (HAVE_call_value)
9317 /* Locate the unique return register. It is not possible to
9318 express a call that sets more than one return register using
9319 call_value; use untyped_call for that. In fact, untyped_call
9320 only needs to save the return registers in the given block. */
9321 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9322 if ((mode = apply_result_mode[regno]) != VOIDmode)
9325 abort (); /* HAVE_untyped_call required. */
9326 valreg = gen_rtx (REG, mode, regno);
9329 emit_call_insn (gen_call_value (valreg,
9330 gen_rtx (MEM, FUNCTION_MODE, function),
9331 const0_rtx, NULL_RTX, const0_rtx));
9333 emit_move_insn (change_address (result, GET_MODE (valreg),
9341 /* Find the CALL insn we just emitted. */
9342 for (call_insn = get_last_insn ();
9343 call_insn && GET_CODE (call_insn) != CALL_INSN;
9344 call_insn = PREV_INSN (call_insn))
9350 /* Put the register usage information on the CALL. If there is already
9351 some usage information, put ours at the end. */
9352 if (CALL_INSN_FUNCTION_USAGE (call_insn))
9356 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9357 link = XEXP (link, 1))
9360 XEXP (link, 1) = call_fusage;
9363 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
9365 /* Restore the stack. */
9366 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9368 /* Return the address of the result block. */
9369 return copy_addr_to_reg (XEXP (result, 0));
9372 /* Perform an untyped return. */
/* NOTE(review): the embedded line numbers in this chunk are non-contiguous,
   so some lines (braces, the declaration of `reg' used below, `#else' arms)
   are missing from view.  Comments describe only the visible code.  */
9375 expand_builtin_return (result)
9378 int size, align, regno;
9379 enum machine_mode mode;
9381 rtx call_fusage = 0;
9383 apply_result_size ();
9384 result = gen_rtx (MEM, BLKmode, result);
9386 #ifdef HAVE_untyped_return
9387 if (HAVE_untyped_return)
9389 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9395 /* Restore the return value and note that each value is used. */
/* SIZE is the running byte offset into the RESULT block; each register's
   slot is rounded up to its mode's alignment before being read.  */
9397 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9398 if ((mode = apply_result_mode[regno]) != VOIDmode)
9400 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9401 if (size % align != 0)
9402 size = CEIL (size, align) * align;
/* INCOMING_REGNO maps the outgoing return-value register to the
   corresponding register as seen on function entry.  */
9403 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
9404 emit_move_insn (reg,
9405 change_address (result, mode,
9406 plus_constant (XEXP (result, 0),
/* Accumulate a USE of each restored register on a side sequence, so all
   the USEs can be emitted together just before the return below.  */
9409 push_to_sequence (call_fusage);
9410 emit_insn (gen_rtx (USE, VOIDmode, reg));
9411 call_fusage = get_insns ();
9413 size += GET_MODE_SIZE (mode);
9416 /* Put the USE insns before the return. */
9417 emit_insns (call_fusage);
9419 /* Return whatever values was restored by jumping directly to the end
9421 expand_null_return ();
9424 /* Expand code for a post- or pre- increment or decrement
9425 and return the RTX for the result.
9426 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
/* NOTE(review): interior lines are missing from this chunk (non-contiguous
   embedded line numbers); in particular the declarations of `icode' and
   `bad_subreg' and several braces are not visible here.  */
9429 expand_increment (exp, post, ignore)
9433 register rtx op0, op1;
9434 register rtx temp, value;
9435 register tree incremented = TREE_OPERAND (exp, 0);
9436 optab this_optab = add_optab;
9438 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9439 int op0_is_copy = 0;
9440 int single_insn = 0;
9441 /* 1 means we can't store into OP0 directly,
9442 because it is a subreg narrower than a word,
9443 and we don't dare clobber the rest of the word. */
/* Bytecode back end handles the whole expression itself.  */
9446 if (output_bytecode)
9448 bc_expand_expr (exp);
9452 /* Stabilize any component ref that might need to be
9453 evaluated more than once below. */
9455 || TREE_CODE (incremented) == BIT_FIELD_REF
9456 || (TREE_CODE (incremented) == COMPONENT_REF
9457 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9458 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9459 incremented = stabilize_reference (incremented);
9460 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9461 ones into save exprs so that they don't accidentally get evaluated
9462 more than once by the code below. */
9463 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9464 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9465 incremented = save_expr (incremented);
9467 /* Compute the operands as RTX.
9468 Note whether OP0 is the actual lvalue or a copy of it:
9469 I believe it is a copy iff it is a register or subreg
9470 and insns were generated in computing it. */
/* Remember the last insn before expansion so we can detect below
   whether expand_expr emitted anything (i.e. OP0 is a fresh copy).  */
9472 temp = get_last_insn ();
9473 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9475 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9476 in place but instead must do sign- or zero-extension during assignment,
9477 so we copy it into a new register and let the code below use it as
9480 Note that we can safely modify this SUBREG since it is know not to be
9481 shared (it was made by the expand_expr call above). */
9483 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9486 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9490 else if (GET_CODE (op0) == SUBREG
9491 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9493 /* We cannot increment this SUBREG in place. If we are
9494 post-incrementing, get a copy of the old value. Otherwise,
9495 just mark that we cannot increment in place. */
9497 op0 = copy_to_reg (op0);
9502 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9503 && temp != get_last_insn ());
9504 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9506 /* Decide whether incrementing or decrementing. */
9507 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9508 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9509 this_optab = sub_optab;
9511 /* Convert decrement by a constant into a negative increment. */
9512 if (this_optab == sub_optab
9513 && GET_CODE (op1) == CONST_INT)
9515 op1 = GEN_INT (- INTVAL (op1));
9516 this_optab = add_optab;
9519 /* For a preincrement, see if we can do this with a single instruction. */
9522 icode = (int) this_optab->handlers[(int) mode].insn_code;
9523 if (icode != (int) CODE_FOR_nothing
9524 /* Make sure that OP0 is valid for operands 0 and 1
9525 of the insn we want to queue. */
9526 && (*insn_operand_predicate[icode][0]) (op0, mode)
9527 && (*insn_operand_predicate[icode][1]) (op0, mode)
9528 && (*insn_operand_predicate[icode][2]) (op1, mode))
9532 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9533 then we cannot just increment OP0. We must therefore contrive to
9534 increment the original value. Then, for postincrement, we can return
9535 OP0 since it is a copy of the old value. For preincrement, expand here
9536 unless we can do it with a single insn.
9538 Likewise if storing directly into OP0 would clobber high bits
9539 we need to preserve (bad_subreg). */
9540 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9542 /* This is the easiest way to increment the value wherever it is.
9543 Problems with multiple evaluation of INCREMENTED are prevented
9544 because either (1) it is a component_ref or preincrement,
9545 in which case it was stabilized above, or (2) it is an array_ref
9546 with constant index in an array in a register, which is
9547 safe to reevaluate. */
9548 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9549 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9550 ? MINUS_EXPR : PLUS_EXPR),
9553 TREE_OPERAND (exp, 1));
/* Strip conversions so the assignment is done in the inner type,
   re-wrapping NEWEXP in the matching conversion at each step.  */
9555 while (TREE_CODE (incremented) == NOP_EXPR
9556 || TREE_CODE (incremented) == CONVERT_EXPR)
9558 newexp = convert (TREE_TYPE (incremented), newexp);
9559 incremented = TREE_OPERAND (incremented, 0);
9562 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9563 return post ? op0 : temp;
9568 /* We have a true reference to the value in OP0.
9569 If there is an insn to add or subtract in this mode, queue it.
9570 Queueing the increment insn avoids the register shuffling
9571 that often results if we must increment now and first save
9572 the old value for subsequent use. */
9574 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9575 op0 = stabilize (op0);
9578 icode = (int) this_optab->handlers[(int) mode].insn_code;
9579 if (icode != (int) CODE_FOR_nothing
9580 /* Make sure that OP0 is valid for operands 0 and 1
9581 of the insn we want to queue. */
9582 && (*insn_operand_predicate[icode][0]) (op0, mode)
9583 && (*insn_operand_predicate[icode][1]) (op0, mode)
9585 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
9586 op1 = force_reg (mode, op1);
9588 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9592 /* Preincrement, or we can't increment with one simple insn. */
9594 /* Save a copy of the value before inc or dec, to return it later. */
9595 temp = value = copy_to_reg (op0);
9597 /* Arrange to return the incremented value. */
9598 /* Copy the rtx because expand_binop will protect from the queue,
9599 and the results of that would be invalid for us to return
9600 if our caller does emit_queue before using our result. */
9601 temp = copy_rtx (value = op0);
9603 /* Increment however we can. */
9604 op1 = expand_binop (mode, this_optab, value, op1, op0,
9605 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9606 /* Make sure the value is stored into OP0. */
9608 emit_move_insn (op0, op1);
9613 /* Expand all function calls contained within EXP, innermost ones first.
9614 But don't look within expressions that have sequence points.
9615 For each CALL_EXPR, record the rtx for its value
9616 in the CALL_EXPR_RTL field. */
/* NOTE(review): several lines (return type, braces, the CALL_EXPR case
   label, early `return' statements) are missing from this chunk.  */
9619 preexpand_calls (exp)
9622 register int nops, i;
9623 int type = TREE_CODE_CLASS (TREE_CODE (exp));
9625 if (! do_preexpand_calls)
9628 /* Only expressions and references can contain calls. */
9630 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9633 switch (TREE_CODE (exp))
9636 /* Do nothing if already expanded. */
9637 if (CALL_EXPR_RTL (exp) != 0
9638 /* Do nothing if the call returns a variable-sized object. */
9639 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9640 /* Do nothing to built-in functions. */
9641 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9642 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9644 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9647 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9652 case TRUTH_ANDIF_EXPR:
9653 case TRUTH_ORIF_EXPR:
9654 /* If we find one of these, then we can be sure
9655 the adjust will be done for it (since it makes jumps).
9656 Do it now, so that if this is inside an argument
9657 of a function, we don't get the stack adjustment
9658 after some other args have already been pushed. */
9659 do_pending_stack_adjust ();
9664 case WITH_CLEANUP_EXPR:
9665 case CLEANUP_POINT_EXPR:
9669 if (SAVE_EXPR_RTL (exp) != 0)
9673 nops = tree_code_length[(int) TREE_CODE (exp)];
/* Recurse into each operand that can itself contain calls.  */
9674 for (i = 0; i < nops; i++)
9675 if (TREE_OPERAND (exp, i) != 0)
9677 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9678 if (type == 'e' || type == '<' || type == '1' || type == '2'
9680 preexpand_calls (TREE_OPERAND (exp, i));
9684 /* At the start of a function, record that we have no previously-pushed
9685 arguments waiting to be popped. */
/* Simply resets the file-scope counter of pending stack pops.  */
9688 init_pending_stack_adjust ()
9690 pending_stack_adjust = 0;
9693 /* When exiting from function, if safe, clear out any pending stack adjust
9694 so the adjustment won't get done. */
/* NOTE(review): the first line of the condition (presumably testing
   `optimize' and EXIT_IGNORE_STACK preconditions) is missing from view.  */
9697 clear_pending_stack_adjust ()
9699 #ifdef EXIT_IGNORE_STACK
9701 && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
9702 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9703 && ! flag_inline_functions)
9704 pending_stack_adjust = 0;
9708 /* Pop any previously-pushed arguments that have not been popped yet. */
/* No-op while inhibit_defer_pop is nonzero (inside a conditional
   context where adjusting the stack now would be wrong).  */
9711 do_pending_stack_adjust ()
9713 if (inhibit_defer_pop == 0)
9715 if (pending_stack_adjust != 0)
9716 adjust_stack (GEN_INT (pending_stack_adjust));
9717 pending_stack_adjust = 0;
9721 /* Defer the expansion all cleanups up to OLD_CLEANUPS.
9722 Returns the cleanups to be performed. */
/* Pops entries off the global cleanups_this_call list down to
   OLD_CLEANUPS (ending each EH region as it goes) and combines the
   popped cleanup expressions into a single tree, which is returned.
   NOTE(review): braces and the loop that builds NEW_CLEANUPS from the
   reversed list are partially missing from this chunk.  */
9725 defer_cleanups_to (old_cleanups)
9728 tree new_cleanups = NULL_TREE;
9729 tree cleanups = cleanups_this_call;
9730 tree last = NULL_TREE;
9732 while (cleanups_this_call != old_cleanups)
9734 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9735 last = cleanups_this_call;
9736 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9741 /* Remove the list from the chain of cleanups. */
9742 TREE_CHAIN (last) = NULL_TREE;
9744 /* reverse them so that we can build them in the right order. */
9745 cleanups = nreverse (cleanups);
9747 /* All cleanups must be on the function_obstack. */
9748 push_obstacks_nochange ();
9749 resume_temporary_allocation ();
/* Chain each cleanup onto NEW_CLEANUPS as a COMPOUND_EXPR; a single
   cleanup is returned directly.  */
9754 new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
9755 TREE_VALUE (cleanups), new_cleanups);
9757 new_cleanups = TREE_VALUE (cleanups);
9759 cleanups = TREE_CHAIN (cleanups);
9765 return new_cleanups;
9768 /* Expand all cleanups up to OLD_CLEANUPS.
9769 Needed here, and also for language-dependent calls. */
/* Unlike defer_cleanups_to, this emits the cleanup code immediately
   (expanding each cleanup expression for its side effects).  */
9772 expand_cleanups_to (old_cleanups)
9775 while (cleanups_this_call != old_cleanups)
9777 expand_eh_region_end (TREE_VALUE (cleanups_this_call));
9778 expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
9779 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
9783 /* Expand conditional expressions. */
9785 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9786 LABEL is an rtx of code CODE_LABEL, in this function and all the
/* Thin wrapper: LABEL is the false-label, no true-label.  */
9790 jumpifnot (exp, label)
9794 do_jump (exp, label, NULL_RTX);
9797 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
/* NOTE(review): the `jumpif (exp, label)' definition lines are missing
   from this chunk; only the body statement is visible.  LABEL is passed
   as the true-label, no false-label.  */
9804 do_jump (exp, NULL_RTX, label);
9807 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9808 the result is zero, or IF_TRUE_LABEL if the result is one.
9809 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9810 meaning fall through in that case.
9812 do_jump always does any pending stack adjust except when it does not
9813 actually perform a jump. An example where there is no jump
9814 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9816 This function is responsible for optimizing cases such as
9817 &&, || and comparison operators in EXP. */
/* NOTE(review): this chunk has many missing lines (case labels, braces,
   declarations of `temp', `comparison', `i', `type', etc.), so the
   switch structure is only partially visible here.  */
9820 do_jump (exp, if_false_label, if_true_label)
9822 rtx if_false_label, if_true_label;
9824 register enum tree_code code = TREE_CODE (exp);
9825 /* Some cases need to create a label to jump to
9826 in order to properly fall through.
9827 These cases set DROP_THROUGH_LABEL nonzero. */
9828 rtx drop_through_label = 0;
9833 enum machine_mode mode;
/* Constant operand: jump unconditionally to whichever label matches.  */
9843 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9849 /* This is not true with #pragma weak */
9851 /* The address of something can never be zero. */
9853 emit_jump (if_true_label);
9858 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9859 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9860 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9863 /* If we are narrowing the operand, we have to do the compare in the
9865 if ((TYPE_PRECISION (TREE_TYPE (exp))
9866 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9868 case NON_LVALUE_EXPR:
9869 case REFERENCE_EXPR:
9874 /* These cannot change zero->non-zero or vice versa. */
9875 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9879 /* This is never less insns than evaluating the PLUS_EXPR followed by
9880 a test and can be longer if the test is eliminated. */
9882 /* Reduce to minus. */
9883 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9884 TREE_OPERAND (exp, 0),
9885 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9886 TREE_OPERAND (exp, 1))));
9887 /* Process as MINUS. */
9891 /* Non-zero iff operands of minus differ. */
9892 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
9893 TREE_OPERAND (exp, 0),
9894 TREE_OPERAND (exp, 1)),
9899 /* If we are AND'ing with a small constant, do this comparison in the
9900 smallest type that fits. If the machine doesn't have comparisons
9901 that small, it will be converted back to the wider comparison.
9902 This helps if we are testing the sign bit of a narrower object.
9903 combine can't do this for us because it can't know whether a
9904 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9906 if (! SLOW_BYTE_ACCESS
9907 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9908 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9909 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9910 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9911 && (type = type_for_mode (mode, 1)) != 0
9912 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9913 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9914 != CODE_FOR_nothing))
9916 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical NOT: just swap the two labels.  */
9921 case TRUTH_NOT_EXPR:
9922 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* &&: first operand false short-circuits to the false label.  The second
   operand is expanded on a separate sequence so that any cleanups it
   registers can be made conditional on a runtime flag.  */
9925 case TRUTH_ANDIF_EXPR:
9928 tree cleanups, old_cleanups;
9930 if (if_false_label == 0)
9931 if_false_label = drop_through_label = gen_label_rtx ();
9933 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9934 seq1 = get_insns ();
9937 old_cleanups = cleanups_this_call;
9939 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9940 seq2 = get_insns ();
9941 cleanups = defer_cleanups_to (old_cleanups);
/* FLAG records at runtime whether operand 1 was actually evaluated,
   so its deferred cleanups run only when needed.  */
9946 rtx flag = gen_reg_rtx (word_mode);
9950 /* Flag cleanups as not needed. */
9951 emit_move_insn (flag, const0_rtx);
9954 /* Flag cleanups as needed. */
9955 emit_move_insn (flag, const1_rtx);
9958 /* All cleanups must be on the function_obstack. */
9959 push_obstacks_nochange ();
9960 resume_temporary_allocation ();
9962 /* convert flag, which is an rtx, into a tree. */
9963 cond = make_node (RTL_EXPR);
9964 TREE_TYPE (cond) = integer_type_node;
9965 RTL_EXPR_RTL (cond) = flag;
9966 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
9967 cond = save_expr (cond);
9969 new_cleanups = build (COND_EXPR, void_type_node,
9970 truthvalue_conversion (cond),
9971 cleanups, integer_zero_node);
9972 new_cleanups = fold (new_cleanups);
9976 /* Now add in the conditionalized cleanups. */
9978 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
9979 expand_eh_region_start ();
/* ||: mirror image of the && case above — first operand true
   short-circuits to the true label.  */
9989 case TRUTH_ORIF_EXPR:
9992 tree cleanups, old_cleanups;
9994 if (if_true_label == 0)
9995 if_true_label = drop_through_label = gen_label_rtx ();
9997 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9998 seq1 = get_insns ();
10001 old_cleanups = cleanups_this_call;
10003 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10004 seq2 = get_insns ();
10005 cleanups = defer_cleanups_to (old_cleanups);
10010 rtx flag = gen_reg_rtx (word_mode);
10014 /* Flag cleanups as not needed. */
10015 emit_move_insn (flag, const0_rtx);
10018 /* Flag cleanups as needed. */
10019 emit_move_insn (flag, const1_rtx);
10022 /* All cleanups must be on the function_obstack. */
10023 push_obstacks_nochange ();
10024 resume_temporary_allocation ();
10026 /* convert flag, which is an rtx, into a tree. */
10027 cond = make_node (RTL_EXPR);
10028 TREE_TYPE (cond) = integer_type_node;
10029 RTL_EXPR_RTL (cond) = flag;
10030 RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
10031 cond = save_expr (cond);
10033 new_cleanups = build (COND_EXPR, void_type_node,
10034 truthvalue_conversion (cond),
10035 cleanups, integer_zero_node);
10036 new_cleanups = fold (new_cleanups);
10040 /* Now add in the conditionalized cleanups. */
10042 = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
10043 expand_eh_region_start ();
/* Comma expression: evaluate operand 0 for effect, test operand 1.  */
10053 case COMPOUND_EXPR:
10054 push_temp_slots ();
10055 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10056 preserve_temp_slots (NULL_RTX);
10057 free_temp_slots ();
10060 do_pending_stack_adjust ();
10061 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10064 case COMPONENT_REF:
10065 case BIT_FIELD_REF:
10068 int bitsize, bitpos, unsignedp;
10069 enum machine_mode mode;
10074 /* Get description of this reference. We don't actually care
10075 about the underlying object here. */
10076 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10077 &mode, &unsignedp, &volatilep);
/* If the field fits a narrower type the machine can compare, test in
   that narrower type (same idea as the BIT_AND_EXPR case above).  */
10079 type = type_for_size (bitsize, unsignedp);
10080 if (! SLOW_BYTE_ACCESS
10081 && type != 0 && bitsize >= 0
10082 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10083 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10084 != CODE_FOR_nothing))
10086 do_jump (convert (type, exp), if_false_label, if_true_label);
10093 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10094 if (integer_onep (TREE_OPERAND (exp, 1))
10095 && integer_zerop (TREE_OPERAND (exp, 2)))
10096 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10098 else if (integer_zerop (TREE_OPERAND (exp, 1))
10099 && integer_onep (TREE_OPERAND (exp, 2)))
10100 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General ?: — branch on the condition, then jump each arm to the
   appropriate label (or fall through via DROP_THROUGH_LABEL).  */
10104 register rtx label1 = gen_label_rtx ();
10105 drop_through_label = gen_label_rtx ();
10106 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
10107 /* Now the THEN-expression. */
10108 do_jump (TREE_OPERAND (exp, 1),
10109 if_false_label ? if_false_label : drop_through_label,
10110 if_true_label ? if_true_label : drop_through_label);
10111 /* In case the do_jump just above never jumps. */
10112 do_pending_stack_adjust ();
10113 emit_label (label1);
10114 /* Now the ELSE-expression. */
10115 do_jump (TREE_OPERAND (exp, 2),
10116 if_false_label ? if_false_label : drop_through_label,
10117 if_true_label ? if_true_label : drop_through_label);
/* EQ_EXPR: complex operands decompose into real/imag comparisons
   joined with &&; wide integers go word-by-word.  */
10123 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10125 if (integer_zerop (TREE_OPERAND (exp, 1)))
10126 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10127 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10128 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10131 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10132 fold (build (EQ_EXPR, TREE_TYPE (exp),
10133 fold (build1 (REALPART_EXPR,
10134 TREE_TYPE (inner_type),
10135 TREE_OPERAND (exp, 0))),
10136 fold (build1 (REALPART_EXPR,
10137 TREE_TYPE (inner_type),
10138 TREE_OPERAND (exp, 1))))),
10139 fold (build (EQ_EXPR, TREE_TYPE (exp),
10140 fold (build1 (IMAGPART_EXPR,
10141 TREE_TYPE (inner_type),
10142 TREE_OPERAND (exp, 0))),
10143 fold (build1 (IMAGPART_EXPR,
10144 TREE_TYPE (inner_type),
10145 TREE_OPERAND (exp, 1))))))),
10146 if_false_label, if_true_label);
10147 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10148 && !can_compare_p (TYPE_MODE (inner_type)))
10149 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10151 comparison = compare (exp, EQ, EQ);
/* NE_EXPR: same scheme with || joining the real/imag inequality tests,
   and the label order swapped for the word-by-word case.  */
10157 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
10159 if (integer_zerop (TREE_OPERAND (exp, 1)))
10160 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10161 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10162 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10165 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10166 fold (build (NE_EXPR, TREE_TYPE (exp),
10167 fold (build1 (REALPART_EXPR,
10168 TREE_TYPE (inner_type),
10169 TREE_OPERAND (exp, 0))),
10170 fold (build1 (REALPART_EXPR,
10171 TREE_TYPE (inner_type),
10172 TREE_OPERAND (exp, 1))))),
10173 fold (build (NE_EXPR, TREE_TYPE (exp),
10174 fold (build1 (IMAGPART_EXPR,
10175 TREE_TYPE (inner_type),
10176 TREE_OPERAND (exp, 0))),
10177 fold (build1 (IMAGPART_EXPR,
10178 TREE_TYPE (inner_type),
10179 TREE_OPERAND (exp, 1))))))),
10180 if_false_label, if_true_label);
10181 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10182 && !can_compare_p (TYPE_MODE (inner_type)))
10183 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10185 comparison = compare (exp, NE, NE);
/* Ordering comparisons: fall back to word-by-word when the mode has no
   single compare insn; otherwise record the signed/unsigned rtx code.  */
10190 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10192 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10193 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10195 comparison = compare (exp, LT, LTU);
10199 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10201 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10202 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10204 comparison = compare (exp, LE, LEU);
10208 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10210 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10211 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10213 comparison = compare (exp, GT, GTU);
10217 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10219 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10220 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10222 comparison = compare (exp, GE, GEU);
/* Default: evaluate EXP and compare the result against zero.  */
10227 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10229 /* This is not needed any more and causes poor code since it causes
10230 comparisons and tests from non-SI objects to have different code
10232 /* Copy to register to avoid generating bad insns by cse
10233 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10234 if (!cse_not_expected && GET_CODE (temp) == MEM)
10235 temp = copy_to_reg (temp);
10237 do_pending_stack_adjust ();
10238 if (GET_CODE (temp) == CONST_INT)
10239 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10240 else if (GET_CODE (temp) == LABEL_REF)
10241 comparison = const_true_rtx;
10242 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10243 && !can_compare_p (GET_MODE (temp)))
10244 /* Note swapping the labels gives us not-equal. */
10245 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10246 else if (GET_MODE (temp) != VOIDmode)
10247 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10248 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10249 GET_MODE (temp), NULL_RTX, 0);
10254 /* Do any postincrements in the expression that was tested. */
10257 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10258 straight into a conditional jump instruction as the jump condition.
10259 Otherwise, all the work has been done already. */
10261 if (comparison == const_true_rtx)
10264 emit_jump (if_true_label);
10266 else if (comparison == const0_rtx)
10268 if (if_false_label)
10269 emit_jump (if_false_label);
10271 else if (comparison)
10272 do_jump_for_compare (comparison, if_false_label, if_true_label);
10274 if (drop_through_label)
10276 /* If do_jump produces code that might be jumped around,
10277 do any stack adjusts from that code, before the place
10278 where control merges in. */
10279 do_pending_stack_adjust ();
10280 emit_label (drop_through_label);
10284 /* Given a comparison expression EXP for values too wide to be compared
10285 with one insn, test the comparison and jump to the appropriate label.
10286 The code of EXP is ignored; we always test GT if SWAP is 0,
10287 and LT if SWAP is 1. */
/* NOTE(review): declarations of `i' and `comp', braces, and the static
   return type line are missing from this chunk.  */
10290 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10293 rtx if_false_label, if_true_label;
10295 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10296 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10297 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10298 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10299 rtx drop_through_label = 0;
10300 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
/* A missing label means "fall through"; route it via a fresh label.  */
10303 if (! if_true_label || ! if_false_label)
10304 drop_through_label = gen_label_rtx ();
10305 if (! if_true_label)
10306 if_true_label = drop_through_label;
10307 if (! if_false_label)
10308 if_false_label = drop_through_label;
10310 /* Compare a word at a time, high order first. */
10311 for (i = 0; i < nwords; i++)
10314 rtx op0_word, op1_word;
10316 if (WORDS_BIG_ENDIAN)
10318 op0_word = operand_subword_force (op0, i, mode);
10319 op1_word = operand_subword_force (op1, i, mode);
10323 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10324 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10327 /* All but high-order word must be compared as unsigned. */
10328 comp = compare_from_rtx (op0_word, op1_word,
10329 (unsignedp || i > 0) ? GTU : GT,
10330 unsignedp, word_mode, NULL_RTX, 0);
10331 if (comp == const_true_rtx)
10332 emit_jump (if_true_label);
10333 else if (comp != const0_rtx)
10334 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10336 /* Consider lower words only if these are equal. */
10337 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10339 if (comp == const_true_rtx)
10340 emit_jump (if_false_label);
10341 else if (comp != const0_rtx)
10342 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the comparison is false.  */
10345 if (if_false_label)
10346 emit_jump (if_false_label);
10347 if (drop_through_label)
10348 emit_label (drop_through_label);
10351 /* Compare OP0 with OP1, word at a time, in mode MODE.
10352 UNSIGNEDP says to do unsigned comparison.
10353 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* rtx-level twin of do_jump_by_parts_greater above: operands arrive
   already expanded.  NOTE(review): some declarations/braces missing.  */
10356 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10357 enum machine_mode mode;
10360 rtx if_false_label, if_true_label;
10362 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10363 rtx drop_through_label = 0;
/* A missing label means "fall through"; route it via a fresh label.  */
10366 if (! if_true_label || ! if_false_label)
10367 drop_through_label = gen_label_rtx ();
10368 if (! if_true_label)
10369 if_true_label = drop_through_label;
10370 if (! if_false_label)
10371 if_false_label = drop_through_label;
10373 /* Compare a word at a time, high order first. */
10374 for (i = 0; i < nwords; i++)
10377 rtx op0_word, op1_word;
10379 if (WORDS_BIG_ENDIAN)
10381 op0_word = operand_subword_force (op0, i, mode);
10382 op1_word = operand_subword_force (op1, i, mode);
10386 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10387 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10390 /* All but high-order word must be compared as unsigned. */
10391 comp = compare_from_rtx (op0_word, op1_word,
10392 (unsignedp || i > 0) ? GTU : GT,
10393 unsignedp, word_mode, NULL_RTX, 0);
10394 if (comp == const_true_rtx)
10395 emit_jump (if_true_label);
10396 else if (comp != const0_rtx)
10397 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10399 /* Consider lower words only if these are equal. */
10400 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10402 if (comp == const_true_rtx)
10403 emit_jump (if_false_label);
10404 else if (comp != const0_rtx)
10405 do_jump_for_compare (comp, NULL_RTX, if_false_label);
/* All words equal: the comparison is false.  */
10408 if (if_false_label)
10409 emit_jump (if_false_label);
10410 if (drop_through_label)
10411 emit_label (drop_through_label);
10414 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10415 with one insn, test the comparison and jump to the appropriate label. */
/* Any word that differs proves inequality, so jump to IF_FALSE_LABEL as
   soon as one EQ word-test fails; equality falls out the bottom.  */
10418 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10420 rtx if_false_label, if_true_label;
10422 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10423 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10425 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10427 rtx drop_through_label = 0;
10429 if (! if_false_label)
10430 drop_through_label = if_false_label = gen_label_rtx ();
10432 for (i = 0; i < nwords; i++)
10434 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10435 operand_subword_force (op1, i, mode),
10436 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10437 word_mode, NULL_RTX, 0);
10438 if (comp == const_true_rtx)
10439 emit_jump (if_false_label);
10440 else if (comp != const0_rtx)
10441 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words compared equal.  */
10445 emit_jump (if_true_label);
10446 if (drop_through_label)
10447 emit_label (drop_through_label);
10450 /* Jump according to whether OP0 is 0.
10451 We assume that OP0 has an integer mode that is too wide
10452 for the available compare insns. */
/* Word-by-word test of OP0 against zero; any nonzero word sends control
   to IF_FALSE_LABEL.  Comparisons are unsigned (flag 1 below).  */
10455 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10457 rtx if_false_label, if_true_label;
10459 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10461 rtx drop_through_label = 0;
10463 if (! if_false_label)
10464 drop_through_label = if_false_label = gen_label_rtx ();
10466 for (i = 0; i < nwords; i++)
10468 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10470 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10471 if (comp == const_true_rtx)
10472 emit_jump (if_false_label);
10473 else if (comp != const0_rtx)
10474 do_jump_for_compare (comp, if_false_label, NULL_RTX);
/* All words were zero.  */
10478 emit_jump (if_true_label);
10479 if (drop_through_label)
10480 emit_label (drop_through_label);
10483 /* Given a comparison expression in rtl form, output conditional branches to
10484    IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
/* When IF_TRUE_LABEL is given, the target's branch generator table
   (bcc_gen_fctn, indexed by the rtx comparison code) emits a branch
   taken when the comparison holds.  When only IF_FALSE_LABEL is given,
   the same-condition branch is emitted and then inverted in place; if
   inversion is impossible, a fresh true label is created, the branch is
   redirected to it, and an unconditional jump to the false label is
   placed before it.  */
10487 do_jump_for_compare (comparison, if_false_label, if_true_label)
10488      rtx comparison, if_false_label, if_true_label;
10492   if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10493     emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10497       if (if_false_label)
10498 	emit_jump (if_false_label);
10500   else if (if_false_label)
10503       rtx prev = get_last_insn ();
10506       /* Output the branch with the opposite condition.  Then try to invert
10507 	 what is generated.  If more than one insn is a branch, or if the
10508 	 branch is not the last insn written, abort. If we can't invert
10509 	 the branch, emit make a true label, redirect this jump to that,
10510 	 emit a jump to the false label and define the true label.  */
10512       if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10513 	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10517       /* Here we get the first insn that was just emitted.  It used to be  the
10518 	 case that, on some machines, emitting the branch would discard
10519 	 the previous compare insn and emit a replacement.  This isn't
10520 	 done anymore, but abort if we see that PREV is deleted.  */
/* Scan forward from PREV (or from the start of the insn stream when
   there was no previous insn) to find the jump just emitted.  */
10523 	insn = get_insns ();
10524       else if (INSN_DELETED_P (prev))
10527 	insn = NEXT_INSN (prev);
10529       for (; insn; insn = NEXT_INSN (insn))
10530 	if (GET_CODE (insn) == JUMP_INSN)
/* The emitted branch must be the last insn written; otherwise the
   inversion below would be applied to the wrong insn.  */
10537       if (branch != get_last_insn ())
10540       JUMP_LABEL (branch) = if_false_label;
10541       if (! invert_jump (branch, if_false_label))
/* Inversion failed: fall back to branch-around-jump sequence.  */
10543 	  if_true_label = gen_label_rtx ();
10544 	  redirect_jump (branch, if_true_label);
10545 	  emit_jump (if_false_label);
10546 	  emit_label (if_true_label);
10551 /* Generate code for a comparison expression EXP
10552    (including code to compute the values to be compared)
10553    and set (CC0) according to the result.
10554    SIGNED_CODE should be the rtx operation for this comparison for
10555    signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10557    We force a stack adjustment unless there are currently
10558    things pushed on the stack that aren't yet used. */
/* Returns the comparison rtx produced by compare_from_rtx.  The actual
   rtx code used is chosen from SIGNED_CODE/UNSIGNED_CODE according to
   the signedness of EXP's first operand's type.  */
10561 compare (exp, signed_code, unsigned_code)
10563      enum rtx_code signed_code, unsigned_code;
10566     = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10568     = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10569   register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10570   register enum machine_mode mode = TYPE_MODE (type);
10571   int unsignedp = TREE_UNSIGNED (type);
10572   enum rtx_code code = unsignedp ? unsigned_code : signed_code;
10574 #ifdef HAVE_canonicalize_funcptr_for_compare
10575   /* If function pointers need to be "canonicalized" before they can
10576      be reliably compared, then canonicalize them.  */
10577   if (HAVE_canonicalize_funcptr_for_compare
10578       && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10579       && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10582       rtx new_op0 = gen_reg_rtx (mode);
10584       emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Likewise canonicalize the second operand when it is a pointer to
   a function.  */
10588   if (HAVE_canonicalize_funcptr_for_compare
10589       && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10590       && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10593       rtx new_op1 = gen_reg_rtx (mode);
10595       emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
/* For BLKmode comparands, pass the size of the object; otherwise no
   size rtx is needed.  Alignment comes from EXP's type.  */
10600   return compare_from_rtx (op0, op1, code, unsignedp, mode,
10602 			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10603 			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
10606 /* Like compare but expects the values to compare as two rtx's.
10607    The decision as to signed or unsigned comparison must be made by the caller.
10609    If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10612    If ALIGN is non-zero, it is the alignment of this type; if zero, the
10613    size of MODE should be used.  */
/* Returns either a folded constant (const_true_rtx / const0_rtx) when
   both operands are compile-time constants, or a comparison rtx of the
   form (CODE (cc0) (const_int 0)) after emitting the compare insn.  */
10616 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10617      register rtx op0, op1;
10618      enum rtx_code code;
10620      enum machine_mode mode;
10626   /* If one operand is constant, make it the second one.  Only do this
10627      if the other operand is not constant as well. */
10629   if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
10630       || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
/* Swapping the operands requires swapping the comparison code too
   (e.g. LT becomes GT).  */
10635       code = swap_condition (code);
10638   if (flag_force_mem)
10640       op0 = force_not_mem (op0);
10641       op1 = force_not_mem (op1);
10644   do_pending_stack_adjust ();
/* Fold the comparison completely when both operands are constants.  */
10646   if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10647       && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10651   /* There's no need to do this now that combine.c can eliminate lots of
10652      sign extensions.  This can be less efficient in certain cases on other
10655   /* If this is a signed equality comparison, we can do it as an
10656      unsigned comparison since zero-extension is cheaper than sign
10657      extension and comparisons with zero are done as unsigned.  This is
10658      the case even on machines that can do fast sign extension, since
10659      zero-extension is easier to combine with other operations than
10660      sign-extension is.  If we are comparing against a constant, we must
10661      convert it to what it would look like unsigned.  */
10662   if ((code == EQ || code == NE) && ! unsignedp
10663       && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10665       if (GET_CODE (op1) == CONST_INT
10666 	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10667 	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10672   emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10674   return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
10677 /* Generate code to calculate EXP using a store-flag instruction
10678    and return an rtx for the result.  EXP is either a comparison
10679    or a TRUTH_NOT_EXPR whose operand is a comparison.
10681    If TARGET is nonzero, store the result there if convenient.
10683    If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10686    Return zero if there is no suitable set-flag instruction
10687    available on this machine.
10689    Once expand_expr has been called on the arguments of the comparison,
10690    we are committed to doing the store flag, since it is not safe to
10691    re-evaluate the expression.  We emit the store-flag insn by calling
10692    emit_store_flag, but only expand the arguments if we have a reason
10693    to believe that emit_store_flag will be successful.  If we think that
10694    it will, but it isn't, we have to simulate the store-flag with a
10695    set/jump/set sequence.  */
10698 do_store_flag (exp, target, mode, only_cheap)
10701      enum machine_mode mode;
10704   enum rtx_code code;
10705   tree arg0, arg1, type;
10707   enum machine_mode operand_mode;
10711   enum insn_code icode;
10712   rtx subtarget = target;
10713   rtx result, label, pattern, jump_pat;
10715   /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10716      result at the end.  We can't simply invert the test since it would
10717      have already been inverted if it were valid.  This case occurs for
10718      some floating-point comparisons.  */
10720   if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10721     invert = 1, exp = TREE_OPERAND (exp, 0);
10723   arg0 = TREE_OPERAND (exp, 0);
10724   arg1 = TREE_OPERAND (exp, 1);
10725   type = TREE_TYPE (arg0);
10726   operand_mode = TYPE_MODE (type);
10727   unsignedp = TREE_UNSIGNED (type);
10729   /* We won't bother with BLKmode store-flag operations because it would mean
10730      passing a lot of information to emit_store_flag.  */
10731   if (operand_mode == BLKmode)
10734   /* We won't bother with store-flag operations involving function pointers
10735      when function pointers must be canonicalized before comparisons.  */
10736 #ifdef HAVE_canonicalize_funcptr_for_compare
10737   if (HAVE_canonicalize_funcptr_for_compare
10738       && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10739 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10741 	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10742 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10743 		  == FUNCTION_TYPE))))
10750   /* Get the rtx comparison code to use.  We know that EXP is a comparison
10751      operation of some type.  Some comparisons against 1 and -1 can be
10752      converted to comparisons with zero.  Do so here so that the tests
10753      below will be aware that we have a comparison with zero.   These
10754      tests will not catch constants in the first operand, but constants
10755      are rarely passed as the first operand.  */
10757   switch (TREE_CODE (exp))
/* LT_EXPR case: x < 1 is the same as x <= 0.  */
10766       if (integer_onep (arg1))
10767 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10769 	code = unsignedp ? LTU : LT;
/* LE_EXPR case: signed x <= -1 is the same as x < 0.  */
10772       if (! unsignedp && integer_all_onesp (arg1))
10773 	arg1 = integer_zero_node, code = LT;
10775 	code = unsignedp ? LEU : LE;
/* GT_EXPR case: signed x > -1 is the same as x >= 0.  */
10778       if (! unsignedp && integer_all_onesp (arg1))
10779 	arg1 = integer_zero_node, code = GE;
10781 	code = unsignedp ? GTU : GT;
/* GE_EXPR case: x >= 1 is the same as x > 0.  */
10784       if (integer_onep (arg1))
10785 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10787 	code = unsignedp ? GEU : GE;
10793   /* Put a constant second.  */
10794   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10796       tem = arg0; arg0 = arg1; arg1 = tem;
10797       code = swap_condition (code);
10800   /* If this is an equality or inequality test of a single bit, we can
10801      do this by shifting the bit being tested to the low-order bit and
10802      masking the result with the constant 1.  If the condition was EQ,
10803      we xor it with 1.  This does not require an scc insn and is faster
10804      than an scc insn even if we have it.  */
10806   if ((code == NE || code == EQ)
10807       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10808       && integer_pow2p (TREE_OPERAND (arg0, 1))
10809       && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
10811       tree inner = TREE_OPERAND (arg0, 0);
10816       tem = INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
10817 				 NULL_RTX, VOIDmode, 0));
10818       /* In this case, immed_double_const will sign extend the value to make
10819 	 it look the same on the host and target.  We must remove the
10820 	 sign-extension before calling exact_log2, since exact_log2 will
10821 	 fail for negative values.  */
10822       if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
10823 	  && BITS_PER_WORD == GET_MODE_BITSIZE (TYPE_MODE (type)))
10824 	/* We don't use the obvious constant shift to generate the mask,
10825 	   because that generates compiler warnings when BITS_PER_WORD is
10826 	   greater than or equal to HOST_BITS_PER_WIDE_INT, even though this
10827 	   code is unreachable in that case.  */
10828 	tem = tem & GET_MODE_MASK (word_mode);
10829       bitnum = exact_log2 (tem);
10831       /* If INNER is a right shift of a constant and it plus BITNUM does
10832 	 not overflow, adjust BITNUM and INNER.  */
10834       if (TREE_CODE (inner) == RSHIFT_EXPR
10835 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10836 	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10837 	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10838 	      < TYPE_PRECISION (type)))
10840 	  bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10841 	  inner = TREE_OPERAND (inner, 0);
10844       /* If we are going to be able to omit the AND below, we must do our
10845 	 operations as unsigned.  If we must use the AND, we have a choice.
10846 	 Normally unsigned is faster, but for some machines signed is.  */
10847       ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10848 #ifdef LOAD_EXTEND_OP
10849 		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
/* Need a pseudo of the operand's mode to expand INNER into; reuse
   SUBTARGET only if it is a register of the right mode not referenced
   by INNER.  */
10855       if (subtarget == 0 || GET_CODE (subtarget) != REG
10856 	  || GET_MODE (subtarget) != operand_mode
10857 	  || ! safe_from_p (subtarget, inner))
10860       op0 = expand_expr (inner, subtarget, VOIDmode, 0);
/* Shift the tested bit down to bit 0.  */
10863 	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10864 			    size_int (bitnum), subtarget, ops_unsignedp);
10866       if (GET_MODE (op0) != mode)
10867 	op0 = convert_to_mode (mode, op0, ops_unsignedp);
/* For EQ (or inverted NE), flip the low bit with XOR.  */
10869       if ((code == EQ && ! invert) || (code == NE && invert))
10870 	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10871 			    ops_unsignedp, OPTAB_LIB_WIDEN);
10873       /* Put the AND last so it can combine with more things.  */
10874       if (bitnum != TYPE_PRECISION (type) - 1)
10875 	op0 = expand_and (op0, const1_rtx, subtarget);
10880   /* Now see if we are likely to be able to do this.  Return if not.  */
10881   if (! can_compare_p (operand_mode))
10883   icode = setcc_gen_code[(int) code];
10884   if (icode == CODE_FOR_nothing
10885       || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
10887       /* We can only do this if it is one of the special cases that
10888 	 can be handled without an scc insn.  */
10889       if ((code == LT && integer_zerop (arg1))
10890 	  || (! only_cheap && code == GE && integer_zerop (arg1)))
10892       else if (BRANCH_COST >= 0
10893 	       && ! only_cheap && (code == NE || code == EQ)
10894 	       && TREE_CODE (type) != REAL_TYPE
10895 	       && ((abs_optab->handlers[(int) operand_mode].insn_code
10896 		    != CODE_FOR_nothing)
10897 		   || (ffs_optab->handlers[(int) operand_mode].insn_code
10898 		       != CODE_FOR_nothing)))
10904   preexpand_calls (exp);
10905   if (subtarget == 0 || GET_CODE (subtarget) != REG
10906       || GET_MODE (subtarget) != operand_mode
10907       || ! safe_from_p (subtarget, arg1))
10910   op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10911   op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10914     target = gen_reg_rtx (mode);
10916   /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
10917      because, if the emit_store_flag does anything it will succeed and
10918      OP0 and OP1 will not be used subsequently.  */
10920   result = emit_store_flag (target, code,
10921 			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10922 			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10923 			    operand_mode, unsignedp, 1);
/* When inverting, flip the 0/1 result with an XOR.  */
10928 	result = expand_binop (mode, xor_optab, result, const1_rtx,
10929 			       result, 0, OPTAB_LIB_WIDEN);
10933   /* If this failed, we have to do this with set/compare/jump/set code.  */
10934   if (target == 0 || GET_CODE (target) != REG
10935       || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10936     target = gen_reg_rtx (GET_MODE (target));
10938   emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10939   result = compare_from_rtx (op0, op1, code, unsignedp,
10940 			     operand_mode, NULL_RTX, 0);
/* The comparison folded to a constant; pick the answer directly.  */
10941   if (GET_CODE (result) == CONST_INT)
10942     return (((result == const0_rtx && ! invert)
10943 	     || (result != const0_rtx && invert))
10944 	    ? const0_rtx : const1_rtx);
10946   label = gen_label_rtx ();
10947   if (bcc_gen_fctn[(int) code] == 0)
/* Branch over the store of the "false" value when the comparison
   holds, so TARGET keeps the value stored above.  */
10950   emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10951   emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10952   emit_label (label);
10957 /* Generate a tablejump instruction (used for switch statements).  */
10959 #ifdef HAVE_tablejump
10961 /* INDEX is the value being switched on, with the lowest value
10962    in the table already subtracted.
10963    MODE is its expected mode (needed if INDEX is constant).
10964    RANGE is the length of the jump table.
10965    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10967    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10968    index value is out of range.  */
10971 do_tablejump (index, mode, range, table_label, default_label)
10972      rtx index, range, table_label, default_label;
10973      enum machine_mode mode;
10975   register rtx temp, vector;
10977   /* Do an unsigned comparison (in the proper mode) between the index
10978      expression and the value which represents the length of the range.
10979      Since we just finished subtracting the lower bound of the range
10980      from the index expression, this comparison allows us to simultaneously
10981      check that the original index expression value is both greater than
10982      or equal to the minimum value of the range and less than or equal to
10983      the maximum value of the range.  */
10985   emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
10986   emit_jump_insn (gen_bgtu (default_label));
10988   /* If index is in range, it must fit in Pmode.
10989      Convert to Pmode so we can index with it.  */
10991     index = convert_to_mode (Pmode, index, 1);
10993   /* Don't let a MEM slip thru, because then INDEX that comes
10994      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10995      and break_out_memory_refs will go to work on it and mess it up.  */
10996 #ifdef PIC_CASE_VECTOR_ADDRESS
10997   if (flag_pic && GET_CODE (index) != REG)
10998     index = copy_to_mode_reg (Pmode, index);
11001   /* If flag_force_addr were to affect this address
11002      it could interfere with the tricky assumptions made
11003      about addresses that contain label-refs,
11004      which may be valid only very near the tablejump itself.  */
11005   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11006      GET_MODE_SIZE, because this indicates how large insns are.  The other
11007      uses should all be Pmode, because they are addresses.  This code
11008      could fail if addresses and insns are not the same size.  */
/* Compute table_label + index * entry_size: the address of the
   table entry for this index.  */
11009   index = gen_rtx (PLUS, Pmode,
11010 		   gen_rtx (MULT, Pmode, index,
11011 			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11012 		   gen_rtx (LABEL_REF, Pmode, table_label));
11013 #ifdef PIC_CASE_VECTOR_ADDRESS
11015     index = PIC_CASE_VECTOR_ADDRESS (index);
11018     index = memory_address_noforce (CASE_VECTOR_MODE, index);
11019   temp = gen_reg_rtx (CASE_VECTOR_MODE);
/* The dispatch table is read-only; mark the MEM unchanging.  */
11020   vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
11021   RTX_UNCHANGING_P (vector) = 1;
11022   convert_move (temp, vector, 0);
11024   emit_jump_insn (gen_tablejump (temp, table_label));
11026 #ifndef CASE_VECTOR_PC_RELATIVE
11027   /* If we are generating PIC code or if the table is PC-relative, the
11028      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11034 #endif /* HAVE_tablejump */
11037 /* Emit a suitable bytecode to load a value from memory, assuming a pointer
11038    to that value is on the top of the stack. The resulting type is TYPE, and
11039    the source declaration is DECL.  */
11042 bc_load_memory (type, decl)
11045   enum bytecode_opcode opcode;
11048   /* Bit fields are special.  We only know about signed and
11049      unsigned ints, and enums.  The latter are treated as
11050      signed integers.  */
11052   if (DECL_BIT_FIELD (decl))
11053     if (TREE_CODE (type) == ENUMERAL_TYPE
11054 	|| TREE_CODE (type) == INTEGER_TYPE)
/* Choose sign- or zero-extending bit-field load by signedness.  */
11055       opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
11059     /* See corresponding comment in bc_store_memory().  */
11060     if (TYPE_MODE (type) == BLKmode
11061 	|| TYPE_MODE (type) == VOIDmode)
/* Ordinary scalar: map machine mode to a load opcode.  */
11064       opcode = mode_to_load_map [(int) TYPE_MODE (type)];
11066   if (opcode == neverneverland)
11069   bc_emit_bytecode (opcode);
11071 #ifdef DEBUG_PRINT_CODE
11072   fputc ('\n', stderr);
11077 /* Store the contents of the second stack slot to the address in the
11078    top stack slot.  DECL is the declaration of the destination and is used
11079    to determine whether we're dealing with a bitfield.  */
11082 bc_store_memory (type, decl)
11085   enum bytecode_opcode opcode;
11088   if (DECL_BIT_FIELD (decl))
11090       if (TREE_CODE (type) == ENUMERAL_TYPE
11091 	  || TREE_CODE (type) == INTEGER_TYPE)
11097     if (TYPE_MODE (type) == BLKmode)
11099 	/* Copy structure.  This expands to a block copy instruction, storeBLK.
11100 	   In addition to the arguments expected by the other store instructions,
11101 	   it also expects a type size (SImode) on top of the stack, which is the
11102 	   structure size in size units (usually bytes).  The two first arguments
11103 	   are already on the stack; so we just put the size on level 1.  For some
11104 	   other languages, the size may be variable, this is why we don't encode
11105 	   it as a storeBLK literal, but rather treat it as a full-fledged expression.  */
11107 	bc_expand_expr (TYPE_SIZE (type));
/* Ordinary scalar: map machine mode to a store opcode.  */
11111       opcode = mode_to_store_map [(int) TYPE_MODE (type)];
11113   if (opcode == neverneverland)
11116   bc_emit_bytecode (opcode);
11118 #ifdef DEBUG_PRINT_CODE
11119   fputc ('\n', stderr);
11124 /* Allocate local stack space sufficient to hold a value of the given
11125    SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
11126    integral power of 2.  A special case is locals of type VOID, which
11127    have size 0 and alignment 1 - any "voidish" SIZE or ALIGNMENT is
11128    remapped into the corresponding attribute of SI.  */
11131 bc_allocate_local (size, alignment)
11132      int size, alignment;
11135   int byte_alignment;
11140   /* Normalize size and alignment  */
11142     size = UNITS_PER_WORD;
11144   if (alignment < BITS_PER_UNIT)
11145     byte_alignment = 1 << (INT_ALIGN - 1);
11148     byte_alignment = alignment / BITS_PER_UNIT;
/* Round local_vars_size up to the required byte alignment.  */
11150   if (local_vars_size & (byte_alignment - 1))
11151     local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
/* The local's "rtx" records its offset in the local-variable area;
   then reserve SIZE bytes past it.  */
11153   retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11154   local_vars_size += size;
11160 /* Allocate variable-sized local array. Variable-sized arrays are
11161    actually pointers to the address in memory where they are stored.  */
11164 bc_allocate_variable_array (size)
11168   const int ptralign = (1 << (PTR_ALIGN - 1));
11170   /* Align pointer */
11171   if (local_vars_size & ptralign)
11172     local_vars_size += ptralign - (local_vars_size & ptralign);
11174   /* Note down local space needed: pointer to block; also return
/* Reserve space for the pointer itself; the array storage is
   allocated elsewhere at run time.  */
11177   retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
11178   local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
11183 /* Push the machine address for the given external variable offset.  */
/* Emits a constP bytecode followed by a label reference resolved at
   assembly time.  */
11186 bc_load_externaddr (externaddr)
11189   bc_emit_bytecode (constP);
11190   bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
11191 			 BYTECODE_BC_LABEL (externaddr)->offset);
11193 #ifdef DEBUG_PRINT_CODE
11194   fputc ('\n', stderr);
11199 /* Like above, but expects an IDENTIFIER.  */
11202 bc_load_externaddr_id (id, offset)
11206   if (!IDENTIFIER_POINTER (id))
11209   bc_emit_bytecode (constP);
/* The identifier's name is copied because the labelref may outlive ID.  */
11210   bc_emit_code_labelref (xstrdup (IDENTIFIER_POINTER (id)), offset);
11212 #ifdef DEBUG_PRINT_CODE
11213   fputc ('\n', stderr);
11218 /* Push the machine address for the given local variable offset.  */
/* The local's frame offset was recorded by bc_allocate_local.  */
11221 bc_load_localaddr (localaddr)
11224   bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
11228 /* Push the machine address for the given parameter offset.
11229    NOTE: offset is in bits.  */
11232 bc_load_parmaddr (parmaddr)
11235   bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
11240 /* Convert a[i] into *(a + i).  */
/* Returns an INDIRECT_REF tree equivalent to the ARRAY_REF EXP, built
   from the address of the array plus the scaled index.  */
11243 bc_canonicalize_array_ref (exp)
11246   tree type = TREE_TYPE (exp);
11247   tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
11248 			   TREE_OPERAND (exp, 0));
11249   tree index = TREE_OPERAND (exp, 1);
11252   /* Convert the integer argument to a type the same size as a pointer
11253      so the multiply won't overflow spuriously.  */
11255   if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
11256     index = convert (type_for_size (POINTER_SIZE, 0), index);
11258   /* The array address isn't volatile even if the array is.
11259      (Of course this isn't terribly relevant since the bytecode
11260      translator treats nearly everything as volatile anyway.)  */
11261   TREE_THIS_VOLATILE (array_adr) = 0;
/* Build *(array_adr + index * sizeof (element)).  */
11263   return build1 (INDIRECT_REF, type,
11264 		 fold (build (PLUS_EXPR,
11265 			      TYPE_POINTER_TO (type),
11267 			      fold (build (MULT_EXPR,
11268 					   TYPE_POINTER_TO (type),
11270 					   size_in_bytes (type))))));
11274 /* Load the address of the component referenced by the given
11275    COMPONENT_REF expression.
11277    Returns innermost lvalue.  */
11280 bc_expand_component_address (exp)
11284   enum machine_mode mode;
11286   HOST_WIDE_INT SIval;
11289   tem = TREE_OPERAND (exp, 1);
11290   mode = DECL_MODE (tem);
11293   /* Compute cumulative bit offset for nested component refs
11294      and array refs, and find the ultimate containing object.  */
11296   for (tem = exp;; tem = TREE_OPERAND (tem, 0))
11298       if (TREE_CODE (tem) == COMPONENT_REF)
11299 	bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
/* Constant-index array refs with constant element size also fold
   into the cumulative bit offset.  */
11301 	if (TREE_CODE (tem) == ARRAY_REF
11302 	    && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11303 	    && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
11305 	  bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
11306 		     * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
11307 		     /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
/* Push the address of the containing object.  */
11312   bc_expand_expr (tem);
11315   /* For bitfields also push their offset and size */
11316   if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
11317     bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
/* Otherwise add the byte part of the offset to the pointer on the
   stack (assignment inside the condition is intentional).  */
11319     if (SIval = bitpos / BITS_PER_UNIT)
11320       bc_emit_instruction (addconstPSI, SIval);
11322   return (TREE_OPERAND (exp, 1));
11326 /* Emit code to push two SI constants */
/* Used for bit-field accesses: pushes OFFSET then SIZE (both in bits)
   for the bit-field load/store bytecodes.  */
11329 bc_push_offset_and_size (offset, size)
11330      HOST_WIDE_INT offset, size;
11332   bc_emit_instruction (constSI, offset);
11333   bc_emit_instruction (constSI, size);
11337 /* Emit byte code to push the address of the given lvalue expression to
11338    the stack.  If it's a bit field, we also push offset and size info.
11340    Returns innermost component, which allows us to determine not only
11341    its type, but also whether it's a bitfield.  */
11344 bc_expand_address (exp)
11348   if (!exp || TREE_CODE (exp) == ERROR_MARK)
11352   switch (TREE_CODE (exp))
/* ARRAY_REF: rewrite a[i] as *(a + i) and recurse.  */
11356       return (bc_expand_address (bc_canonicalize_array_ref (exp)));
11358     case COMPONENT_REF:
11360       return (bc_expand_component_address (exp));
/* INDIRECT_REF (presumably): the operand already computes the address.  */
11364       bc_expand_expr (TREE_OPERAND (exp, 0));
11366       /* For variable-sized types: retrieve pointer.  Sometimes the
11367 	 TYPE_SIZE tree is NULL.  Is this a bug or a feature?  Let's
11368 	 also make sure we have an operand, just in case...  */
11370       if (TREE_OPERAND (exp, 0)
11371 	  && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
11372 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
11373 	bc_emit_instruction (loadP);
11375       /* If packed, also return offset and size */
11376       if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
11378 	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
11379 				 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
11381       return (TREE_OPERAND (exp, 0));
11383     case FUNCTION_DECL:
11385       bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11386 			     BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
/* PARM_DECL (presumably): parameters live in the argument area.  */
11391       bc_load_parmaddr (DECL_RTL (exp));
11393       /* For variable-sized types: retrieve pointer */
11394       if (TYPE_SIZE (TREE_TYPE (exp))
11395 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11396 	bc_emit_instruction (loadP);
11398       /* If packed, also return offset and size */
11399       if (DECL_BIT_FIELD (exp))
11400 	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11401 				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* RESULT_DECL (presumably): the function return slot.  */
11407       bc_emit_instruction (returnP);
/* VAR_DECL (presumably): global/static vars have a bytecode label;
   externals are referenced by assembler name; the rest are locals.  */
11413       if (BYTECODE_LABEL (DECL_RTL (exp)))
11414 	bc_load_externaddr (DECL_RTL (exp));
11417 	  if (DECL_EXTERNAL (exp))
11418 	    bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
11419 				   (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
11421 	    bc_load_localaddr (DECL_RTL (exp));
11423       /* For variable-sized types: retrieve pointer */
11424       if (TYPE_SIZE (TREE_TYPE (exp))
11425 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
11426 	bc_emit_instruction (loadP);
11428       /* If packed, also return offset and size */
11429       if (DECL_BIT_FIELD (exp))
11430 	bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
11431 				 TREE_INT_CST_LOW (DECL_SIZE (exp)));
/* Constant (e.g. STRING_CST, presumably): emit it to the constant
   segment and push its address.  */
11439       bc_emit_bytecode (constP);
11440       r = output_constant_def (exp);
11441       bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
11443 #ifdef DEBUG_PRINT_CODE
11444       fputc ('\n', stderr);
11455   /* Most lvalues don't have components. */
11460 /* Emit a type code to be used by the runtime support in handling
11461    parameter passing.   The type code consists of the machine mode
11462    plus the minimal alignment shifted left 8 bits.  */
11465 bc_runtime_type_code (type)
11470   switch (TREE_CODE (type))
11476     case ENUMERAL_TYPE:
/* Scalar-like types: low byte is the machine mode, high bits the
   alignment.  */
11480       val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
/* Result is returned as an INTEGER_CST tree node.  */
11492   return build_int_2 (val, 0);
11496 /* Generate constructor label */
/* Returns a fresh "*LR<n>" label name, copied onto the permanent
   obstack; the static buffer is reused on every call.  */
11499 bc_gen_constr_label ()
11501   static int label_counter;
11502   static char label[20];
11504   sprintf (label, "*LR%d", label_counter++);
11506   return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
11510 /* Evaluate constructor CONSTR and return pointer to it on level one.  We
11511    expand the constructor data as static data, and push a pointer to it.
11512    The pointer is put in the pointer table and is retrieved by a constP
11513    bytecode instruction.  We then loop and store each constructor member in
11514    the corresponding component.  Finally, we return the original pointer on
11518 bc_expand_constructor (constr)
11522   HOST_WIDE_INT ptroffs;
11526   /* Literal constructors are handled as constants, whereas
11527      non-literals are evaluated and stored element by element
11528      into the data segment.  */
11530   /* Allocate space in proper segment and push pointer to space on stack.
11533   l = bc_gen_constr_label ();
11535   if (TREE_CONSTANT (constr))
/* Literal: emit fully-initialized data in the constant segment.  */
11539       bc_emit_const_labeldef (l);
11540       bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
/* Non-literal: reserve (zeroed) space in the data segment; the
   element stores below fill it in at run time.  */
11546       bc_emit_data_labeldef (l);
11547       bc_output_data_constructor (constr);
11551   /* Add reference to pointer table and recall pointer to stack;
11552      this code is common for both types of constructors: literals
11553      and non-literals.  */
11555   ptroffs = bc_define_pointer (l);
11556   bc_emit_instruction (constP, ptroffs);
11558   /* This is all that has to be done if it's a literal.  */
11559   if (TREE_CONSTANT (constr))
11563   /* At this point, we have the pointer to the structure on top of the stack.
11564      Generate sequences of store_memory calls for the constructor.  */
11566   /* constructor type is structure */
11567   if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
11571       /* If the constructor has fewer fields than the structure,
11572 	 clear the whole structure first.  */
11574       if (list_length (CONSTRUCTOR_ELTS (constr))
11575 	  != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
11577 	  bc_emit_instruction (duplicate);
11578 	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11579 	  bc_emit_instruction (clearBLK);
11582       /* Store each element of the constructor into the corresponding
11583 	 field of TARGET.  */
11585       for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
11587 	  register tree field = TREE_PURPOSE (elt);
11588 	  register enum machine_mode mode;
/* Field position/size come from the FIELD_DECL in TREE_PURPOSE;
   the value to store is in TREE_VALUE.  */
11593 	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
11594 	  mode = DECL_MODE (field);
11595 	  unsignedp = TREE_UNSIGNED (field);
11597 	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
11599 	  bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11600 			  /* The alignment of TARGET is
11601 			     at least what its type requires.  */
11603 			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11604 			  int_size_in_bytes (TREE_TYPE (constr)));
11609   /* Constructor type is array */
11610   if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
11614       tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
11615       int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
11616       int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
11617       tree elttype = TREE_TYPE (TREE_TYPE (constr));
11619       /* If the constructor has fewer fields than the structure,
11620 	 clear the whole structure first.  */
11622       if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
11624 	  bc_emit_instruction (duplicate);
11625 	  bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
11626 	  bc_emit_instruction (clearBLK);
11630       /* Store each element of the constructor into the corresponding
11631 	 element of TARGET, determined by counting the elements.  */
11633       for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
11635 	   elt = TREE_CHAIN (elt), i++)
11637 	  register enum machine_mode mode;
/* All array elements share the element type's mode and size; the
   bit position is the element index scaled by the element size.  */
11642 	  mode = TYPE_MODE (elttype);
11643 	  bitsize = GET_MODE_BITSIZE (mode);
11644 	  unsignedp = TREE_UNSIGNED (elttype);
11646 	  bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
11647 		    /* * TYPE_SIZE_UNIT (elttype) */ );
11649 	  bc_store_field (elt, bitsize, bitpos, mode,
11650 			  TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
11651 			  /* The alignment of TARGET is
11652 			     at least what its type requires.  */
11654 			  TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
11655 			  int_size_in_bytes (TREE_TYPE (constr)));
11662 /* Store the value of EXP (an expression tree) into member FIELD of
11663 structure at address on stack, which has type TYPE, mode MODE and
11664 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
11667 ALIGN is the alignment that TARGET is known to have, measured in bytes.
11668 TOTAL_SIZE is its size in bytes, or -1 if variable. */
11671 bc_store_field (field, bitsize, bitpos, mode, exp, type,
11672 value_mode, unsignedp, align, total_size)
11673 int bitsize, bitpos;
11674 enum machine_mode mode;
11675 tree field, exp, type;
11676 enum machine_mode value_mode;
11682 /* Expand expression and copy pointer */
11683 bc_expand_expr (exp);
11684 bc_emit_instruction (over);
11687 /* If the component is a bit field, we cannot use addressing to access
11688 it. Use bit-field techniques to store in it. */
11690 if (DECL_BIT_FIELD (field))
11692 bc_store_bit_field (bitpos, bitsize, unsignedp);
11696 /* Not bit field */
11698 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
11700 /* Advance pointer to the desired member */
11702 bc_emit_instruction (addconstPSI, offset);
11705 bc_store_memory (type, field);
11710 /* Store SI/SU in bitfield */
11713 bc_store_bit_field (offset, size, unsignedp)
11714 int offset, size, unsignedp;
11716 /* Push bitfield offset and size */
11717 bc_push_offset_and_size (offset, size);
11720 bc_emit_instruction (sstoreBI);
11724 /* Load SI/SU from bitfield */
11727 bc_load_bit_field (offset, size, unsignedp)
11728 int offset, size, unsignedp;
11730 /* Push bitfield offset and size */
11731 bc_push_offset_and_size (offset, size);
11733 /* Load: sign-extend if signed, else zero-extend */
11734 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
11738 /* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
11739 (adjust stack pointer upwards), negative means add that number of
11740 levels (adjust the stack pointer downwards). Only positive values
11741 normally make sense. */
11744 bc_adjust_stack (nlevels)
11753 bc_emit_instruction (drop);
11756 bc_emit_instruction (drop);
11761 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
11762 stack_depth -= nlevels;
11765 #if defined (VALIDATE_STACK_FOR_BC)
11766 VALIDATE_STACK_FOR_BC ();